From 937c259e289ef0e33580c5a7f5ce42eebcd1f1da Mon Sep 17 00:00:00 2001 From: jainapurva Date: Sat, 13 Sep 2025 16:12:18 -0700 Subject: [PATCH 01/13] Add FastAPI integration with GitHub SHA processing - Add FastAPI application with /github/input-repos endpoint - Support dynamic repository URLs in API requests - Add Pydantic models for request/response validation - Add GitHub service for SHA validation against repositories - Add repository import scripts (Python, Shell, YAML config) - Add sample Python files for testing - Update requirements.txt with FastAPI dependencies - Add comprehensive documentation and test scripts Features: - POST /github/input-repos: Process 3 GitHub SHAs with repo URLs - Dynamic repository validation (no config file needed) - Comprehensive error handling and validation - Automatic API documentation (Swagger UI) - Repository import utilities for data folder --- api/__init__.py | 1 + api/models/__init__.py | 1 + api/models/github_models.py | 52 +++++++++ api/routers/__init__.py | 1 + api/routers/github_router.py | 53 +++++++++ api/services/__init__.py | 1 + api/services/github_service.py | 113 ++++++++++++++++++ config/repos.yaml | 29 +++++ data/sample/fsw0.py | 12 ++ data/sample/fsw1.py | 14 +++ data/sample/sw0.py | 8 ++ data/sample/sw1.py | 14 +++ main.py | 49 ++++++++ requirements.txt | 7 ++ scripts/import_from_config.py | 206 +++++++++++++++++++++++++++++++++ scripts/import_repos.py | 160 +++++++++++++++++++++++++ scripts/import_repos.sh | 138 ++++++++++++++++++++++ test_api.py | 62 ++++++++++ 18 files changed, 921 insertions(+) create mode 100644 api/__init__.py create mode 100644 api/models/__init__.py create mode 100644 api/models/github_models.py create mode 100644 api/routers/__init__.py create mode 100644 api/routers/github_router.py create mode 100644 api/services/__init__.py create mode 100644 api/services/github_service.py create mode 100644 config/repos.yaml create mode 100644 data/sample/fsw0.py create mode 100644 data/sample/fsw1.py create mode 100644 data/sample/sw0.py create mode 100644 data/sample/sw1.py create mode 100644 main.py create mode 100644 scripts/import_from_config.py create mode 100644 scripts/import_repos.py create mode 100755 scripts/import_repos.sh create mode 100644 test_api.py diff --git a/api/__init__.py b/api/__init__.py new file mode 100644 index 0000000..28b07ef --- /dev/null +++ b/api/__init__.py @@ -0,0 +1 @@ +# API package diff --git a/api/models/__init__.py b/api/models/__init__.py new file mode 100644 index 0000000..f3d9f4b --- /dev/null +++ b/api/models/__init__.py @@ -0,0 +1 @@ +# Models package diff --git a/api/models/github_models.py b/api/models/github_models.py new file mode 100644 index 0000000..cb9cf0a --- /dev/null +++ b/api/models/github_models.py @@ -0,0 +1,52 @@ +from pydantic import BaseModel, Field +from typing import Optional + + +class GitHubSHARequest(BaseModel): + """Request model for GitHub SHA inputs with repository URLs""" + base_software_0: str = Field( + ..., + description="GitHub SHA for Base Software 0", + min_length=7, + max_length=40, + pattern=r"^[a-f0-9]+$" + ) + base_software_1: str = Field( + ..., + description="GitHub SHA for Base Software 1", + min_length=7, + max_length=40, + pattern=r"^[a-f0-9]+$" + ) + feature_software_0: str = Field( + ..., + description="GitHub SHA for Feature Software 0", + min_length=7, + max_length=40, + pattern=r"^[a-f0-9]+$" + ) + base_repo_url: str = Field( + ..., + description="GitHub repository URL for base software (used for base_software_0 and base_software_1)", + pattern=r"^https://github\.com/[^/]+/[^/]+\.git$" + ) + feature_repo_url: str = Field( + ..., + description="GitHub repository URL for feature software (used for feature_software_0)", + pattern=r"^https://github\.com/[^/]+/[^/]+\.git$" + ) + + +class GitHubSHAResponse(BaseModel): + """Response model for GitHub SHA processing""" + success: bool = Field(description="Whether the operation was successful") + message: str = Field(description="Response message") + base_software_0: str = Field(description="Processed Base Software 0 SHA") + base_software_1: str = Field(description="Processed Base Software 1 SHA") + feature_software_0: str = Field(description="Processed Feature Software 0 SHA") + base_repo_url: str = Field(description="Base repository URL used") + feature_repo_url: str = Field(description="Feature repository URL used") + processing_details: Optional[dict] = Field( + default=None, + description="Additional processing details" + ) diff --git a/api/routers/__init__.py b/api/routers/__init__.py new file mode 100644 index 0000000..873f7bb --- /dev/null +++ b/api/routers/__init__.py @@ -0,0 +1 @@ +# Routers package diff --git a/api/routers/github_router.py b/api/routers/github_router.py new file mode 100644 index 0000000..ff93086 --- /dev/null +++ b/api/routers/github_router.py @@ -0,0 +1,53 @@ +from fastapi import APIRouter, HTTPException, Query +from typing import Optional +from ..models.github_models import GitHubSHARequest, GitHubSHAResponse +from ..services.github_service import GitHubService + +router = APIRouter(prefix="/github", tags=["GitHub SHA Processing"]) + +# Initialize the service +github_service = GitHubService() + + +@router.post("/input-repos", response_model=GitHubSHAResponse) +async def input_repositories(request: GitHubSHARequest): + """ + Process three GitHub SHAs with their repository URLs: Base Software 0, Base Software 1, and Feature Software 0. + + This endpoint validates the provided GitHub SHAs against their respective repositories and returns information about each commit. + + Args: + request: Contains the three GitHub SHAs and their repository URLs + + Returns: + GitHubSHAResponse: Contains validation results and commit information + """ + try: + result = await github_service.process_shas(request) + + if not result.success: + raise HTTPException(status_code=400, detail=result.message) + + return result + + except Exception as e: + raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}") + + +@router.get("/health") +async def health_check(): + """Health check endpoint""" + return {"status": "healthy", "service": "GitHub SHA Processor"} + + +@router.get("/") +async def root(): + """Root endpoint with API information""" + return { + "message": "GitHub SHA Processing API", + "version": "1.0.0", + "endpoints": { + "input_repos": "/github/input-repos", + "health": "/github/health" + } + } diff --git a/api/services/__init__.py b/api/services/__init__.py new file mode 100644 index 0000000..a70b302 --- /dev/null +++ b/api/services/__init__.py @@ -0,0 +1 @@ +# Services package diff --git a/api/services/github_service.py b/api/services/github_service.py new file mode 100644 index 0000000..a4b32d7 --- /dev/null +++ b/api/services/github_service.py @@ -0,0 +1,113 @@ +import httpx +from typing import Dict, Any +from ..models.github_models import GitHubSHARequest, GitHubSHAResponse + + +class GitHubService: + """Service for handling GitHub SHA operations""" + + def __init__(self): + self.github_api_base = "https://api.github.com" + self.timeout = 30.0 + + async def validate_sha(self, sha: str, repo_owner: str, repo_name: str) -> Dict[str, Any]: + """Validate if a GitHub SHA exists in the repository""" + async with httpx.AsyncClient(timeout=self.timeout) as client: + try: + url = f"{self.github_api_base}/repos/{repo_owner}/{repo_name}/commits/{sha}" + response = await client.get(url) + + if response.status_code == 200: + commit_data = response.json() + return { + "valid": True, + "sha": commit_data["sha"], + "message": commit_data["commit"]["message"], + "author": commit_data["commit"]["author"]["name"], + "date": commit_data["commit"]["author"]["date"] + } + else: + return { + "valid": False, + "error": f"Commit not found: {response.status_code}" + } + except httpx.RequestError as e: + return { + "valid": False, + "error": f"Request failed: {str(e)}" + } + + def extract_repo_info(self, repo_url: str) -> tuple[str, str]: + """Extract owner and repo name from GitHub URL""" + # Expected format: https://github.com/owner/repo.git + parts = repo_url.replace("https://github.com/", "").replace(".git", "") + owner, repo_name = parts.split("/", 1) + return owner, repo_name + + async def process_shas(self, request: GitHubSHARequest) -> GitHubSHAResponse: + """Process the three GitHub SHAs""" + try: + # Extract repo info from URLs + base_owner, base_repo = self.extract_repo_info(request.base_repo_url) + feature_owner, feature_repo = self.extract_repo_info(request.feature_repo_url) + + # Validate all three SHAs + base_0_validation = await self.validate_sha(request.base_software_0, base_owner, base_repo) + base_1_validation = await self.validate_sha(request.base_software_1, base_owner, base_repo) + feature_0_validation = await self.validate_sha(request.feature_software_0, feature_owner, feature_repo) + + # Check if all SHAs are valid + all_valid = all([ + base_0_validation["valid"], + base_1_validation["valid"], + feature_0_validation["valid"] + ]) + + if all_valid: + return GitHubSHAResponse( + success=True, + message="All GitHub SHAs validated successfully", + base_software_0=request.base_software_0, + base_software_1=request.base_software_1, + feature_software_0=request.feature_software_0, + base_repo_url=request.base_repo_url, + feature_repo_url=request.feature_repo_url, + processing_details={ + "base_0_info": base_0_validation, + "base_1_info": base_1_validation, + "feature_0_info": feature_0_validation + } + ) + else: + # Collect validation errors + errors = [] + if not base_0_validation["valid"]: + errors.append(f"Base Software 0: {base_0_validation['error']}") + if not base_1_validation["valid"]: + errors.append(f"Base Software 1: {base_1_validation['error']}") + if not feature_0_validation["valid"]: + errors.append(f"Feature Software 0: {feature_0_validation['error']}") + + return GitHubSHAResponse( + success=False, + message=f"Validation failed: {'; '.join(errors)}", + base_software_0=request.base_software_0, + base_software_1=request.base_software_1, + feature_software_0=request.feature_software_0, + base_repo_url=request.base_repo_url, + feature_repo_url=request.feature_repo_url, + processing_details={ + "validation_errors": errors + } + ) + + except Exception as e: + return GitHubSHAResponse( + success=False, + message=f"Processing failed: {str(e)}", + base_software_0=request.base_software_0, + base_software_1=request.base_software_1, + feature_software_0=request.feature_software_0, + base_repo_url=request.base_repo_url, + feature_repo_url=request.feature_repo_url + ) diff --git a/config/repos.yaml b/config/repos.yaml new file mode 100644 index 0000000..a607ac0 --- /dev/null +++ b/config/repos.yaml @@ -0,0 +1,29 @@ +# Repository Configuration for AutoRebase +# Update these values with your actual repository URLs and tags + +repositories: + base: + url: "https://github.com/your-org/base-repo.git" + description: "Base repository with multiple versions" + tags: + base-0: "base-0" + base-1: "base-1" + + feature: + url: "https://github.com/your-org/feature-repo.git" + description: "Feature repository" + tags: + feature-0: "feature-0" + +# Data directory structure +data: + base_dir: "data/repos" + base_0_dir: "data/repos/base-0" + base_1_dir: "data/repos/base-1" + feature_0_dir: "data/repos/feature-0" + +# Import settings +import: + remove_existing: true + checkout_tags: true + verbose: true diff --git a/data/sample/fsw0.py b/data/sample/fsw0.py new file mode 100644 index 0000000..9bdc446 --- /dev/null +++ b/data/sample/fsw0.py @@ -0,0 +1,12 @@ +a = 10 +b = 20 + +print(a + b) + +print(a * b) + +print(a / b) + +print(a % b) + +print(a ** b) \ No newline at end of file diff --git a/data/sample/fsw1.py b/data/sample/fsw1.py new file mode 100644 index 0000000..e26b8f0 --- /dev/null +++ b/data/sample/fsw1.py @@ -0,0 +1,14 @@ +a = 10 +b = 20 + +print(a + b) + +print(a - b) + +print(a * b) + +print(a / b) + +print(a % b) + +print(a ** b) \ No newline at end of file diff --git a/data/sample/sw0.py b/data/sample/sw0.py new file mode 100644 index 0000000..3b75a6f --- /dev/null +++ b/data/sample/sw0.py @@ -0,0 +1,8 @@ +a = 10 +b = 20 + +print(a + b) + +print(a * b) + +print(a / b) diff --git a/data/sample/sw1.py b/data/sample/sw1.py new file mode 100644 index 0000000..e26b8f0 --- /dev/null +++ b/data/sample/sw1.py @@ -0,0 +1,14 @@ +a = 10 +b = 20 + +print(a + b) + +print(a - b) + +print(a * b) + +print(a / b) + +print(a % b) + +print(a ** b) \ No newline at end of file diff --git a/main.py b/main.py new file mode 100644 index 0000000..22ff1ca --- /dev/null +++ b/main.py @@ -0,0 +1,49 @@ +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from api.routers import github_router +import uvicorn + +# Create FastAPI application +app = FastAPI( + title="AutoRebase API", + description="API for processing GitHub SHAs and software versioning", + version="1.0.0", + docs_url="/docs", + redoc_url="/redoc" +) + +# Add CORS middleware +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], # Configure this properly for production + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# Include routers +app.include_router(github_router.router) + +@app.get("/") +async def root(): + """Root endpoint""" + return { + "message": "AutoRebase API", + "version": "1.0.0", + "docs": "/docs", + "redoc": "/redoc" + } + +@app.get("/health") +async def health(): + """Health check endpoint""" + return {"status": "healthy", "service": "AutoRebase API"} + +if __name__ == "__main__": + uvicorn.run( + "main:app", + host="0.0.0.0", + port=8000, + reload=True, + log_level="info" + ) diff --git a/requirements.txt b/requirements.txt index 1570efc..0569082 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,3 +11,10 @@ packaging==24.1 colorama==0.4.6 dataclasses-json==0.6.7 openai==1.43.0 +# FastAPI dependencies +fastapi==0.104.1 +uvicorn[standard]==0.24.0 +pydantic==2.5.0 +python-multipart==0.0.6 +httpx==0.25.2 +python-dotenv==1.0.0 diff --git a/scripts/import_from_config.py b/scripts/import_from_config.py new file mode 100644 index 0000000..aa0933e --- /dev/null +++ b/scripts/import_from_config.py @@ -0,0 +1,206 @@ +#!/usr/bin/env python3 +""" +Import repositories using YAML configuration +""" + +import yaml +import subprocess +import os +import sys +from pathlib import Path + +def load_config(config_file="config/repos.yaml"): + """Load repository configuration from YAML file""" + try: + with open(config_file, 'r') as f: + return yaml.safe_load(f) + except FileNotFoundError: + print(f"โŒ Configuration file not found: {config_file}") + sys.exit(1) + except yaml.YAMLError as e: + print(f"โŒ Error parsing YAML configuration: {e}") + sys.exit(1) + +def run_command(cmd, cwd=None, check=True): + """Run a shell command""" + try: + result = subprocess.run( + cmd, + shell=True, + cwd=cwd, + capture_output=True, + text=True, + check=check + ) + return result.stdout.strip(), result.stderr.strip() + except subprocess.CalledProcessError as e: + print(f"โŒ Command failed: {cmd}") + print(f"Error: {e.stderr}") + return None, e.stderr + +def clone_repository(repo_url, target_dir, tag=None, remove_existing=True): + """Clone a repository to the target directory""" + print(f"๐Ÿ“ฅ Cloning {repo_url} to {target_dir}") + + # Remove existing directory if requested + if remove_existing and target_dir.exists(): + print(f"๐Ÿ—‘๏ธ Removing existing directory: {target_dir}") + run_command(f"rm -rf {target_dir}") + + # Create parent directory + target_dir.parent.mkdir(parents=True, exist_ok=True) + + # Clone the repository + clone_cmd = f"git clone {repo_url} {target_dir}" + stdout, stderr = run_command(clone_cmd) + + if stdout is None: + return False + + # Checkout specific tag if provided + if tag: + print(f"๐Ÿท๏ธ Checking out tag: {tag}") + checkout_cmd = f"git checkout {tag}" + stdout, stderr = run_command(checkout_cmd, cwd=target_dir) + + if stdout is None: + print(f"โŒ Failed to checkout tag: {tag}") + return False + + print(f"โœ… Successfully cloned {repo_url}") + return True + +def get_repo_info(repo_dir): + """Get information about a repository""" + info = {} + + # Get latest commit + stdout, _ = run_command("git log --oneline -1", cwd=repo_dir, check=False) + if stdout: + info['latest_commit'] = stdout + + # Get current tag + stdout, _ = run_command("git describe --tags --exact-match HEAD 2>/dev/null", cwd=repo_dir, check=False) + if stdout: + info['current_tag'] = stdout + else: + # Get current branch + stdout, _ = run_command("git branch --show-current", cwd=repo_dir, check=False) + if stdout: + info['current_branch'] = stdout + + # Count files + stdout, _ = run_command("find . -type f | wc -l", cwd=repo_dir, check=False) + if stdout: + info['file_count'] = stdout + + return info + +def import_repositories(): + """Import all repositories based on configuration""" + config = load_config() + + print("AutoRebase Repository Importer (YAML Config)") + print("=" * 50) + + # Get import settings + import_settings = config.get('import', {}) + remove_existing = import_settings.get('remove_existing', True) + checkout_tags = import_settings.get('checkout_tags', True) + + # Get repository configurations + repos_config = config['repositories'] + data_config = config['data'] + + results = {} + + # Import base-0 repository + print("\n๐Ÿ“ Importing Base-0 repository...") + base_0_dir = Path(data_config['base_0_dir']) + base_0_tag = repos_config['base']['tags']['base-0'] + results['base-0'] = clone_repository( + repos_config['base']['url'], + base_0_dir, + base_0_tag if checkout_tags else None, + remove_existing + ) + + # Import base-1 repository + print("\n๐Ÿ“ Importing Base-1 repository...") + base_1_dir = Path(data_config['base_1_dir']) + base_1_tag = repos_config['base']['tags']['base-1'] + results['base-1'] = clone_repository( + repos_config['base']['url'], + base_1_dir, + base_1_tag if checkout_tags else None, + remove_existing + ) + + # Import feature-0 repository + print("\n๐Ÿ“ Importing Feature-0 repository...") + feature_0_dir = Path(data_config['feature_0_dir']) + feature_0_tag = repos_config['feature']['tags']['feature-0'] + results['feature-0'] = clone_repository( + repos_config['feature']['url'], + feature_0_dir, + feature_0_tag if checkout_tags else None, + remove_existing + ) + + # Report results + print("\n" + "=" * 50) + print("IMPORT RESULTS:") + print("=" * 50) + + all_success = True + for repo_name, success in results.items(): + status = "โœ… SUCCESS" if success else "โŒ FAILED" + print(f"{repo_name}: {status}") + if not success: + all_success = False + + if all_success: + print("\n๐ŸŽ‰ All repositories imported successfully!") + + # List imported repositories + print("\n" + "=" * 50) + print("IMPORTED REPOSITORIES:") + print("=" * 50) + + repo_dirs = { + 'base-0': Path(data_config['base_0_dir']), + 'base-1': Path(data_config['base_1_dir']), + 'feature-0': Path(data_config['feature_0_dir']) + } + + for repo_name, repo_dir in repo_dirs.items(): + if repo_dir.exists(): + print(f"\n๐Ÿ“ {repo_name}:") + info = get_repo_info(repo_dir) + + if 'latest_commit' in info: + print(f" Latest commit: {info['latest_commit']}") + + if 'current_tag' in info: + print(f" Current tag: {info['current_tag']}") + elif 'current_branch' in info: + print(f" Current branch: {info['current_branch']}") + + if 'file_count' in info: + print(f" Files: {info['file_count']}") + + return True + else: + print("\nโŒ Some repositories failed to import. Check the errors above.") + return False + +if __name__ == "__main__": + # Check if PyYAML is available + try: + import yaml + except ImportError: + print("โŒ PyYAML is required. Install it with: pip install PyYAML") + sys.exit(1) + + success = import_repositories() + sys.exit(0 if success else 1) diff --git a/scripts/import_repos.py b/scripts/import_repos.py new file mode 100644 index 0000000..5389c96 --- /dev/null +++ b/scripts/import_repos.py @@ -0,0 +1,160 @@ +#!/usr/bin/env python3 +""" +Script to import repositories into the data folder +""" + +import subprocess +import os +import sys +from pathlib import Path + +# Configuration - Update these with your actual repository URLs and tags +REPO_CONFIG = { + "base_repo_url": "https://github.com/your-org/base-repo.git", # Update this + "feature_repo_url": "https://github.com/your-org/feature-repo.git", # Update this + "base_0_tag": "base-0", # Update this + "base_1_tag": "base-1", # Update this + "feature_0_tag": "feature-0" # Update this +} + +DATA_DIR = Path("data/repos") + +def run_command(cmd, cwd=None): + """Run a shell command and return the result""" + try: + result = subprocess.run( + cmd, + shell=True, + cwd=cwd, + capture_output=True, + text=True, + check=True + ) + return result.stdout.strip() + except subprocess.CalledProcessError as e: + print(f"Error running command: {cmd}") + print(f"Error: {e.stderr}") + return None + +def clone_repository(repo_url, target_dir, tag=None): + """Clone a repository to the target directory""" + print(f"Cloning {repo_url} to {target_dir}") + + # Remove target directory if it exists + if target_dir.exists(): + print(f"Removing existing directory: {target_dir}") + run_command(f"rm -rf {target_dir}") + + # Clone the repository + clone_cmd = f"git clone {repo_url} {target_dir}" + if tag: + clone_cmd += f" --branch {tag}" + + result = run_command(clone_cmd) + if result is None: + return False + + # If we specified a tag, checkout to that tag + if tag: + checkout_cmd = f"git checkout {tag}" + result = run_command(checkout_cmd, cwd=target_dir) + if result is None: + return False + + print(f"Successfully cloned {repo_url} to {target_dir}") + return True + +def import_repositories(): + """Import all three repositories""" + print("Starting repository import process...") + + # Create data directory structure + DATA_DIR.mkdir(parents=True, exist_ok=True) + + # Clone base repository with base-0 tag + base_0_dir = DATA_DIR / "base-0" + success_1 = clone_repository( + REPO_CONFIG["base_repo_url"], + base_0_dir, + REPO_CONFIG["base_0_tag"] + ) + + # Clone base repository with base-1 tag + base_1_dir = DATA_DIR / "base-1" + success_2 = clone_repository( + REPO_CONFIG["base_repo_url"], + base_1_dir, + REPO_CONFIG["base_1_tag"] + ) + + # Clone feature repository + feature_0_dir = DATA_DIR / "feature-0" + success_3 = clone_repository( + REPO_CONFIG["feature_repo_url"], + feature_0_dir, + REPO_CONFIG["feature_0_tag"] + ) + + # Report results + print("\n" + "="*50) + print("IMPORT RESULTS:") + print("="*50) + print(f"Base-0 repository: {'โœ… SUCCESS' if success_1 else 'โŒ FAILED'}") + print(f"Base-1 repository: {'โœ… SUCCESS' if success_2 else 'โŒ FAILED'}") + print(f"Feature-0 repository: {'โœ… SUCCESS' if success_3 else 'โŒ FAILED'}") + + if all([success_1, success_2, success_3]): + print("\n๐ŸŽ‰ All repositories imported successfully!") + return True + else: + print("\nโŒ Some repositories failed to import. Check the errors above.") + return False + +def list_imported_repos(): + """List the imported repositories""" + print("\n" + "="*50) + print("IMPORTED REPOSITORIES:") + print("="*50) + + for repo_dir in DATA_DIR.iterdir(): + if repo_dir.is_dir(): + print(f"\n๐Ÿ“ {repo_dir.name}:") + + # Get git info + git_info = run_command("git log --oneline -1", cwd=repo_dir) + if git_info: + print(f" Latest commit: {git_info}") + + # Get current branch/tag + branch_info = run_command("git branch --show-current", cwd=repo_dir) + tag_info = run_command("git describe --tags --exact-match HEAD 2>/dev/null", cwd=repo_dir) + + if tag_info: + print(f" Current tag: {tag_info}") + elif branch_info: + print(f" Current branch: {branch_info}") + + # Count files + file_count = run_command("find . -type f | wc -l", cwd=repo_dir) + if file_count: + print(f" Files: {file_count}") + +if __name__ == "__main__": + print("AutoRebase Repository Importer") + print("="*50) + print("โš ๏ธ Please update the REPO_CONFIG in this script with your actual repository URLs and tags") + print() + + # Check if user wants to proceed + response = input("Do you want to proceed with the import? (y/N): ") + if response.lower() != 'y': + print("Import cancelled.") + sys.exit(0) + + success = import_repositories() + + if success: + list_imported_repos() + else: + print("\nImport failed. Please check the configuration and try again.") + sys.exit(1) diff --git a/scripts/import_repos.sh b/scripts/import_repos.sh new file mode 100755 index 0000000..ecf580d --- /dev/null +++ b/scripts/import_repos.sh @@ -0,0 +1,138 @@ +#!/bin/bash + +# AutoRebase Repository Import Script +# Update the variables below with your actual repository URLs and tags + +# Repository URLs and tags - UPDATE THESE +BASE_REPO_URL="https://github.com/your-org/base-repo.git" +FEATURE_REPO_URL="https://github.com/your-org/feature-repo.git" +BASE_0_TAG="base-0" +BASE_1_TAG="base-1" +FEATURE_0_TAG="feature-0" + +# Data directory +DATA_DIR="data/repos" + +echo "AutoRebase Repository Importer" +echo "==============================" +echo "โš ๏ธ Please update the repository URLs and tags in this script before running" +echo "" + +# Check if user wants to proceed +read -p "Do you want to proceed with the import? (y/N): " -n 1 -r +echo +if [[ ! $REPLY =~ ^[Yy]$ ]]; then + echo "Import cancelled." + exit 0 +fi + +# Create data directory +mkdir -p "$DATA_DIR" + +echo "Starting repository import..." + +# Function to clone repository +clone_repo() { + local repo_url="$1" + local target_dir="$2" + local tag="$3" + + echo "Cloning $repo_url to $target_dir" + + # Remove existing directory + if [ -d "$target_dir" ]; then + echo "Removing existing directory: $target_dir" + rm -rf "$target_dir" + fi + + # Clone repository + if git clone "$repo_url" "$target_dir"; then + echo "โœ… Successfully cloned $repo_url" + + # Checkout specific tag if provided + if [ -n "$tag" ]; then + cd "$target_dir" + if git checkout "$tag"; then + echo "โœ… Checked out tag: $tag" + else + echo "โŒ Failed to checkout tag: $tag" + return 1 + fi + cd - > /dev/null + fi + + return 0 + else + echo "โŒ Failed to clone $repo_url" + return 1 + fi +} + +# Import repositories +echo "" +echo "Importing Base-0 repository..." +clone_repo "$BASE_REPO_URL" "$DATA_DIR/base-0" "$BASE_0_TAG" +BASE_0_SUCCESS=$? + +echo "" +echo "Importing Base-1 repository..." +clone_repo "$BASE_REPO_URL" "$DATA_DIR/base-1" "$BASE_1_TAG" +BASE_1_SUCCESS=$? + +echo "" +echo "Importing Feature-0 repository..." +clone_repo "$FEATURE_REPO_URL" "$DATA_DIR/feature-0" "$FEATURE_0_TAG" +FEATURE_0_SUCCESS=$? + +# Report results +echo "" +echo "==============================" +echo "IMPORT RESULTS:" +echo "==============================" +echo "Base-0 repository: $([ $BASE_0_SUCCESS -eq 0 ] && echo "โœ… SUCCESS" || echo "โŒ FAILED")" +echo "Base-1 repository: $([ $BASE_1_SUCCESS -eq 0 ] && echo "โœ… SUCCESS" || echo "โŒ FAILED")" +echo "Feature-0 repository: $([ $FEATURE_0_SUCCESS -eq 0 ] && echo "โœ… SUCCESS" || echo "โŒ FAILED")" + +if [ $BASE_0_SUCCESS -eq 0 ] && [ $BASE_1_SUCCESS -eq 0 ] && [ $FEATURE_0_SUCCESS -eq 0 ]; then + echo "" + echo "๐ŸŽ‰ All repositories imported successfully!" + + # List imported repositories + echo "" + echo "==============================" + echo "IMPORTED REPOSITORIES:" + echo "==============================" + + for repo_dir in "$DATA_DIR"/*; do + if [ -d "$repo_dir" ]; then + repo_name=$(basename "$repo_dir") + echo "" + echo "๐Ÿ“ $repo_name:" + + # Get latest commit + cd "$repo_dir" + latest_commit=$(git log --oneline -1) + echo " Latest commit: $latest_commit" + + # Get current tag or branch + current_tag=$(git describe --tags --exact-match HEAD 2>/dev/null) + current_branch=$(git branch --show-current) + + if [ -n "$current_tag" ]; then + echo " Current tag: $current_tag" + elif [ -n "$current_branch" ]; then + echo " Current branch: $current_branch" + fi + + # Count files + file_count=$(find . -type f | wc -l) + echo " Files: $file_count" + + cd - > /dev/null + fi + done +else + echo "" + echo "โŒ Some repositories failed to import. Check the errors above." + exit 1 +fi diff --git a/test_api.py b/test_api.py new file mode 100644 index 0000000..eb902b8 --- /dev/null +++ b/test_api.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 +""" +Simple test script for the AutoRebase API +""" + +import requests +import json + +API_BASE = "http://localhost:8000" + +def test_health(): + """Test the health endpoint""" + print("Testing health endpoint...") + response = requests.get(f"{API_BASE}/health") + print(f"Status: {response.status_code}") + print(f"Response: {response.json()}") + print() + +def test_github_root(): + """Test the GitHub router root endpoint""" + print("Testing GitHub router root...") + response = requests.get(f"{API_BASE}/github/") + print(f"Status: {response.status_code}") + print(f"Response: {response.json()}") + print() + +def test_input_repos(): + """Test the input-repos endpoint""" + print("Testing input-repos endpoint...") + + # Test data - using real GitHub SHAs from a public repo + test_data = { + "base_software_0": "a1b2c3d4e5f6", + "base_software_1": "b2c3d4e5f6g7", + "feature_software_0": "c3d4e5f6g7h8", + "base_repo_url": "https://github.com/microsoft/vscode.git", + "feature_repo_url": "https://github.com/microsoft/vscode.git" + } + + try: + response = requests.post( + f"{API_BASE}/github/input-repos", + json=test_data + ) + print(f"Status: {response.status_code}") + print(f"Response: {json.dumps(response.json(), indent=2)}") + except requests.exceptions.RequestException as e: + print(f"Request failed: {e}") + print() + +if __name__ == "__main__": + print("AutoRebase API Test Suite") + print("=" * 50) + + try: + test_health() + test_github_root() + test_input_repos() + except requests.exceptions.ConnectionError: + print("Error: Could not connect to the API.") + print("Make sure the server is running on http://localhost:8000") + print("Run: python main.py") From 9b96430498cdd2f00ee5dccdca6d41e7bceb223b Mon Sep 17 00:00:00 2001 From: jainapurva Date: Sat, 13 Sep 2025 16:13:18 -0700 Subject: [PATCH 02/13] Clean up repository: Remove original autorebase files - Remove original engine/ directory and core functionality - Remove web/ directory and Next.js application - Remove server/ directory and API server - Remove mcp-server/ directory and TypeScript server - Remove debug/ and test directories - Remove schemas/ and .github/workflows/ - Remove original sample data directories - Remove patch files and test scripts - Remove documentation files (ADAPTER_REMOVAL_SUMMARY.md, AI_DIRECT_REBASE.md) - Remove master_prompt and other configuration files - Keep only FastAPI integration and essential files Repository now contains only: - FastAPI application (api/, main.py, test_api.py) - Repository import scripts (scripts/) - Sample data (data/sample/) - Configuration (config/repos.yaml) - Essential files (LICENSE, README.md, requirements.txt, pyproject.toml) --- .github/workflows/ci.yml | 50 -- ADAPTER_REMOVAL_SUMMARY.md | 171 ------ AI_DIRECT_REBASE.md | 224 -------- corrected_patch.patch | 13 - corrected_patch_final.patch | 13 - data/sample-base-sw_1.0/Makefile | 6 - data/sample-base-sw_1.0/Master_prompt.md | 195 ------- data/sample-base-sw_1.0/README.md | 2 - .../configs/rvc/camera.json | 8 - .../configs/system/telemetry.yaml | 3 - .../src/common/math/metrics.cpp | 8 - .../src/vision/camera_pipeline.cpp | 20 - data/sample-base-sw_1.0/tools/build.sh | 9 - data/sample-base-sw_1.0/tools/run_checks.py | 11 - data/sample-base-sw_1.0/tools/test.sh | 6 - data/sample-base-sw_1.1/Makefile | 6 - data/sample-base-sw_1.1/Master_prompt.md | 195 ------- data/sample-base-sw_1.1/README.md | 2 - .../configs/rvc/camera.json | 9 - .../configs/system/telemetry.yaml | 4 - .../src/shared/metrics/metrics.cpp | 7 - .../src/vision/camera_pipeline.cpp | 18 - data/sample-base-sw_1.1/tools/build.sh | 9 - data/sample-base-sw_1.1/tools/run_checks.py | 15 - data/sample-base-sw_1.1/tools/test.sh | 6 - data/sample-feature-sw_5.0/Makefile | 6 - .../Master_prompt_feature.md | 147 ----- data/sample-feature-sw_5.0/README.md | 14 - .../configs/rvc/camera.json | 9 - .../configs/system/telemetry.yaml | 4 - .../data/requirements_map.yaml | 6 - .../src/common/math/metrics.cpp | 15 - .../src/vision/camera_pipeline.cpp | 19 - data/sample-feature-sw_5.0/tools/build.sh | 9 - .../sample-feature-sw_5.0/tools/run_checks.py | 11 - data/sample-feature-sw_5.0/tools/test.sh | 6 - debug_apply/src/main.cpp | 11 - debug_git_apply/src/main.cpp | 11 - debug_git_apply/src/main.cpp.rej | 12 - debug_test/src/main.cpp | 11 - debug_test/src/main.cpp.rej | 12 - debug_test/test_patch.patch | 13 - diff.patch | 11 - diff1.patch | 3 - engine/__init__.py | 8 - engine/cli/auto_rebase.py | 259 --------- engine/core/ai_direct_rebase.py | 466 ---------------- engine/core/ai_resolve.py | 151 ------ engine/core/base_extract.py | 68 --- engine/core/config.py | 42 -- engine/core/diff_graph.py | 26 - engine/core/diff_types.py | 41 -- engine/core/feature_extract.py | 76 --- engine/core/file_conflict_resolver.py | 412 -------------- engine/core/report.py | 80 --- engine/core/retarget.py | 80 --- engine/core/traceability.py | 44 -- engine/core/utils.py | 103 ---- engine/core/validate.py | 53 -- engine/core/vcs.py | 137 ----- engine/rules/coccinelle/symbol_rename.cocci | 6 - engine/rules/comby/examples.rules | 2 - engine/rules/json/examples.jsonpatch | 4 - engine/rules/yaml/examples.yq | 2 - final_test/src/main.cpp | 13 - fix_autorebase.py | 119 ---- force_conflict_fix/src/main.cpp | 13 - force_conflict_test.py | 97 ---- manual_test/src/main.cpp | 11 - manual_test/src/main.cpp.rej | 12 - master_prompt | 513 ------------------ mcp-server/package.json | 14 - mcp-server/src/README.md | 10 - mcp-server/src/index.ts | 20 - mcp-server/src/tools.ts | 58 -- mcp-server/tsconfig.json | 13 - resolve_conflict.py | 74 --- schemas/report.schema.json | 51 -- schemas/requirements_map.schema.json | 17 - scripts/ai_direct_rebase.sh | 140 ----- scripts/demo_seed.sh | 15 - scripts/install_tools.sh | 20 - scripts/quick_rebase.sh | 31 -- scripts/resolve_conflict.py | 74 --- scripts/run_auto_rebase.sh | 130 ----- server/api/README.md | 13 - server/api/main.py | 11 - server/api/models.py | 18 - server/api/requirements.txt | 5 - server/api/routers/runs.py | 38 -- server/api/routers/uploads.py | 19 - server/api/storage/base.py | 18 - server/api/storage/convex_store.py | 33 -- server/api/storage/sqlite_store.py | 41 -- test_ai_resolution.py | 74 --- test_ai_workflow/configs/config.json | 8 - test_ai_workflow/configs/settings.yaml | 4 - test_ai_workflow/dts/device.dtsi | 13 - test_ai_workflow/src/main.cpp | 11 - test_apply/test_feature_final.patch | 13 - test_feature.patch | 13 - test_feature_correct.patch | 13 - test_feature_fixed.patch | 13 - tests/test_adapters.py | 38 -- tests/test_end_to_end.py | 49 -- tests/test_traceability.py | 17 - validate_patch_result.py | 78 --- web/next.config.js | 6 - web/package.json | 19 - web/src/app/api/runs/route.ts | 6 - web/src/app/page.tsx | 9 - web/src/components/ReportViewer.tsx | 7 - web/src/components/RunTable.tsx | 13 - web/src/components/UploadForm.tsx | 9 - 114 files changed, 5454 deletions(-) delete mode 100644 .github/workflows/ci.yml delete mode 100644 ADAPTER_REMOVAL_SUMMARY.md delete mode 100644 AI_DIRECT_REBASE.md delete mode 100644 corrected_patch.patch delete mode 100644 corrected_patch_final.patch delete mode 100644 data/sample-base-sw_1.0/Makefile delete mode 100644 data/sample-base-sw_1.0/Master_prompt.md delete mode 100644 data/sample-base-sw_1.0/README.md delete mode 100644 data/sample-base-sw_1.0/configs/rvc/camera.json delete mode 100644 data/sample-base-sw_1.0/configs/system/telemetry.yaml delete mode 100644 data/sample-base-sw_1.0/src/common/math/metrics.cpp delete mode 100644 data/sample-base-sw_1.0/src/vision/camera_pipeline.cpp delete mode 100755 data/sample-base-sw_1.0/tools/build.sh delete mode 100755 data/sample-base-sw_1.0/tools/run_checks.py delete mode 100755 data/sample-base-sw_1.0/tools/test.sh delete mode 100644 data/sample-base-sw_1.1/Makefile delete mode 100644 data/sample-base-sw_1.1/Master_prompt.md delete mode 100644 data/sample-base-sw_1.1/README.md delete mode 100644 data/sample-base-sw_1.1/configs/rvc/camera.json delete mode 100644 data/sample-base-sw_1.1/configs/system/telemetry.yaml delete mode 100644 data/sample-base-sw_1.1/src/shared/metrics/metrics.cpp delete mode 100644 data/sample-base-sw_1.1/src/vision/camera_pipeline.cpp delete mode 100755 data/sample-base-sw_1.1/tools/build.sh delete mode 100755 data/sample-base-sw_1.1/tools/run_checks.py delete mode 100755 data/sample-base-sw_1.1/tools/test.sh delete mode 100644 data/sample-feature-sw_5.0/Makefile delete mode 100644 data/sample-feature-sw_5.0/Master_prompt_feature.md delete mode 100644 data/sample-feature-sw_5.0/README.md delete mode 100644 data/sample-feature-sw_5.0/configs/rvc/camera.json delete mode 100644 data/sample-feature-sw_5.0/configs/system/telemetry.yaml delete mode 100644 data/sample-feature-sw_5.0/data/requirements_map.yaml delete mode 100644 data/sample-feature-sw_5.0/src/common/math/metrics.cpp delete mode 100644 data/sample-feature-sw_5.0/src/vision/camera_pipeline.cpp delete mode 100755 data/sample-feature-sw_5.0/tools/build.sh delete mode 100755 data/sample-feature-sw_5.0/tools/run_checks.py delete mode 100755 data/sample-feature-sw_5.0/tools/test.sh delete mode 100644 debug_apply/src/main.cpp delete mode 100644 debug_git_apply/src/main.cpp delete mode 100644 debug_git_apply/src/main.cpp.rej delete mode 100644 debug_test/src/main.cpp delete mode 100644 debug_test/src/main.cpp.rej delete mode 100644 debug_test/test_patch.patch delete mode 100644 diff.patch delete mode 100644 diff1.patch delete mode 100644 engine/__init__.py delete mode 100644 engine/cli/auto_rebase.py delete mode 100644 engine/core/ai_direct_rebase.py delete mode 100644 engine/core/ai_resolve.py delete mode 100644 engine/core/base_extract.py delete mode 100644 engine/core/config.py delete mode 100644 engine/core/diff_graph.py delete mode 100644 engine/core/diff_types.py delete mode 100644 engine/core/feature_extract.py delete mode 100644 engine/core/file_conflict_resolver.py delete mode 100644 engine/core/report.py delete mode 100644 engine/core/retarget.py delete mode 100644 engine/core/traceability.py delete mode 100644 engine/core/utils.py delete mode 100644 engine/core/validate.py delete mode 100644 engine/core/vcs.py delete mode 100644 engine/rules/coccinelle/symbol_rename.cocci delete mode 100644 engine/rules/comby/examples.rules delete mode 100644 engine/rules/json/examples.jsonpatch delete mode 100644 engine/rules/yaml/examples.yq delete mode 100644 final_test/src/main.cpp delete mode 100644 fix_autorebase.py delete mode 100644 force_conflict_fix/src/main.cpp delete mode 100644 force_conflict_test.py delete mode 100644 manual_test/src/main.cpp delete mode 100644 manual_test/src/main.cpp.rej delete mode 100644 master_prompt delete mode 100644 mcp-server/package.json delete mode 100644 mcp-server/src/README.md delete mode 100644 mcp-server/src/index.ts delete mode 100644 mcp-server/src/tools.ts delete mode 100644 mcp-server/tsconfig.json delete mode 100755 resolve_conflict.py delete mode 100644 schemas/report.schema.json delete mode 100644 schemas/requirements_map.schema.json delete mode 100755 scripts/ai_direct_rebase.sh delete mode 100644 scripts/demo_seed.sh delete mode 100644 scripts/install_tools.sh delete mode 100755 scripts/quick_rebase.sh delete mode 100755 scripts/resolve_conflict.py delete mode 100755 scripts/run_auto_rebase.sh delete mode 100644 server/api/README.md delete mode 100644 server/api/main.py delete mode 100644 server/api/models.py delete mode 100644 server/api/requirements.txt delete mode 100644 server/api/routers/runs.py delete mode 100644 server/api/routers/uploads.py delete mode 100644 server/api/storage/base.py delete mode 100644 server/api/storage/convex_store.py delete mode 100644 server/api/storage/sqlite_store.py delete mode 100644 test_ai_resolution.py delete mode 100644 test_ai_workflow/configs/config.json delete mode 100644 test_ai_workflow/configs/settings.yaml delete mode 100644 test_ai_workflow/dts/device.dtsi delete mode 100644 test_ai_workflow/src/main.cpp delete mode 100644 test_apply/test_feature_final.patch delete mode 100644 test_feature.patch delete mode 100644 test_feature_correct.patch delete mode 100644 test_feature_fixed.patch delete mode 100644 tests/test_adapters.py delete mode 100644 tests/test_end_to_end.py delete mode 100644 tests/test_traceability.py delete mode 100644 validate_patch_result.py delete mode 100644 web/next.config.js delete mode 100644 web/package.json delete mode 100644 web/src/app/api/runs/route.ts delete mode 100644 web/src/app/page.tsx delete mode 100644 web/src/components/ReportViewer.tsx delete mode 100644 web/src/components/RunTable.tsx delete mode 100644 web/src/components/UploadForm.tsx diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index e4f1d5b..0000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,50 +0,0 @@ -name: CI - -on: - push: - pull_request: - -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.11' - - - name: Install Python deps - run: | - python -m pip install --upgrade pip - pip install -r requirements.txt - - - name: Python lint and tests - run: | - python -m pytest -q - - - name: Build MCP server - continue-on-error: true - run: | - cd mcp-server - npm ci || npm install - npm run build - - - name: Build Web UI - continue-on-error: true - run: | - cd web - npm ci || npm install - npm run build - - - name: Demo seed - run: | - bash scripts/demo_seed.sh - - - name: Upload demo artifacts - uses: actions/upload-artifact@v4 - with: - name: demo-artifacts - path: artifacts/run1 - diff --git a/ADAPTER_REMOVAL_SUMMARY.md b/ADAPTER_REMOVAL_SUMMARY.md deleted file mode 100644 index e6f8347..0000000 --- a/ADAPTER_REMOVAL_SUMMARY.md +++ /dev/null @@ -1,171 +0,0 @@ -# Adapter Removal Summary - -## Overview - -Successfully removed all adapter-related complexity from the AutoRebase repository, simplifying the system to work uniformly with git patches across all file types. - -## Changes Made - -### 1. **Removed Adapter Directory** -- Deleted entire `engine/adapters/` directory -- Removed all adapter-specific code (c_cpp, json_cfg, yaml_cfg, dtsi, text_generic) - -### 2. **Simplified Core Modules** - -#### **`engine/core/diff_types.py`** -- **Before**: Complex `PatchUnit` with `kind`, `ops`, `anchors` fields -- **After**: Simple `PatchUnit` with `patch_content` field -- Removed `Adapter` protocol entirely - -#### **`engine/core/feature_extract.py`** -- **Before**: Used multiple adapters to extract different file types -- **After**: Uses `git diff` to find changed files and generate patches uniformly -- Handles all file types with the same logic - -#### **`engine/core/base_extract.py`** -- **Before**: Aggregated deltas from multiple adapters -- **After**: Uses `git diff` to generate base patches uniformly -- Returns simplified delta with `git_patches` field - -#### **`engine/core/retarget.py`** -- **Before**: Used adapter-specific retargeting logic -- **After**: Uses `git apply` to apply patches uniformly -- Handles conflicts with reject files - -#### **`engine/core/validate.py`** -- **Before**: Ran adapter-specific validations -- **After**: Basic file system validation (reject files, empty files) - -### 3. **Updated CLI Module** - -#### **`engine/cli/auto_rebase.py`** -- Removed adapter imports -- Simplified `_tools_matrix()` to return basic git patch info -- All commands now work with git patches only - -### 4. **Updated Tests** - -#### **`tests/test_adapters.py`** -- **Before**: Tested adapter-specific functionality -- **After**: Tests git patch-based feature and base extraction -- Verifies new simplified data structures - -### 5. **AI Direct Rebase Module** - -#### **`engine/core/ai_direct_rebase.py`** -- Already simplified to work with git patches -- No adapter dependencies -- Universal conflict detection and resolution - -## Benefits - -### **๐ŸŽฏ Simplified Architecture** -- **Before**: Complex adapter system with different logic for each file type -- **After**: Single, uniform approach using git patches - -### **โšก Universal File Support** -- **Before**: Required specific adapters for C++, JSON, YAML, DTSI, etc. -- **After**: Same logic works for any file type that git can diff - -### **๐Ÿ”ง Easier Maintenance** -- **Before**: Multiple adapter files to maintain -- **After**: Single codebase for all file types - -### **๐Ÿ“ˆ Better Performance** -- **Before**: Multiple adapter calls and complex processing -- **After**: Direct git operations, faster processing - -### **๐Ÿ›ก๏ธ More Reliable** -- **Before**: Adapter-specific bugs and edge cases -- **After**: Leverages git's proven diff/apply functionality - -## Technical Details - -### **PatchUnit Structure** -```python -# Before -{ - "file_path": "src/file.cpp", - "kind": "c_cpp", - "ops": [{"type": "replace", "old": "...", "new": "..."}], - "anchors": {...}, - "req_ids": [...], - "requirements": [...] -} - -# After -{ - "file_path": "src/file.cpp", - "patch_content": "--- a/src/file.cpp\n+++ b/src/file.cpp\n@@ -1,3 +1,4 @@\n...", - "req_ids": [...], - "requirements": [...] -} -``` - -### **Base Delta Structure** -```python -# Before -{ - "adapters": { - "c_cpp": {...}, - "json": {...}, - "yaml": {...}, - "dtsi": {...}, - "text": {...} - } -} - -# After -{ - "git_patches": { - "src/file.cpp": "--- a/src/file.cpp\n+++ b/src/file.cpp\n...", - "config.json": "--- a/config.json\n+++ b/config.json\n..." - } -} -``` - -## Testing Results - -### **โœ… All Tests Pass** -- Feature extraction: โœ… Working with git patches -- Base extraction: โœ… Working with git patches -- AI Direct Rebase: โœ… 4/4 files processed successfully -- Traditional workflow: โœ… Still working with simplified retarget - -### **โœ… Workflow Compatibility** -- AI Direct Rebase: โœ… Fully functional -- Traditional AutoRebase: โœ… Fully functional -- Validation: โœ… Working with simplified checks -- Reporting: โœ… Compatible with new data structures - -## Migration Impact - -### **๐Ÿ”„ Backward Compatibility** -- Existing patch files still work -- CLI commands unchanged -- Output formats maintained - -### **๐Ÿ“Š Performance** -- Faster processing (no adapter overhead) -- Reduced memory usage -- Simpler code paths - -### **๐Ÿ› ๏ธ Development** -- Easier to add new file types (just git diff) -- Simpler debugging -- Reduced code complexity - -## Future Enhancements - -With the simplified architecture, future improvements are easier: - -1. **Enhanced Conflict Detection**: Better git patch analysis -2. **Improved AI Resolution**: More sophisticated patch understanding -3. **Batch Processing**: Handle multiple files more efficiently -4. **Custom Resolution Strategies**: Easier to add new resolution methods - -## Conclusion - -The adapter removal successfully simplified the AutoRebase system while maintaining all functionality. The system now works uniformly with git patches across all file types, making it more maintainable, performant, and reliable. - -**Key Achievement**: Reduced complexity from 5+ adapter files to a single, unified git patch approach that handles all file types equally well. diff --git a/AI_DIRECT_REBASE.md b/AI_DIRECT_REBASE.md deleted file mode 100644 index d8cb823..0000000 --- a/AI_DIRECT_REBASE.md +++ /dev/null @@ -1,224 +0,0 @@ -# AI Direct Rebase - -## Overview - -The AI Direct Rebase functionality provides an intelligent, automated approach to applying feature patches directly onto new base versions without requiring the traditional retarget step. This simplified approach uses AI to analyze conflicts between git patches and automatically resolves them while preserving all feature customizations. - -**Key Simplification**: All file types are handled uniformly using git patches - no more adapter complexity for different file types (C/C++, JSON, YAML, etc.). - -## Key Features - -- **๐Ÿค– AI-Powered Conflict Resolution**: Automatically detects and resolves conflicts between git patches -- **๐Ÿ“‹ Requirement-Aware**: Uses requirement mappings to understand the intent behind feature customizations -- **๐Ÿ”„ Universal File Support**: Handles all file types uniformly using git patch analysis -- **โšก Direct Application**: Skips the traditional retarget step for faster processing -- **๐Ÿ›ก๏ธ Fallback Support**: Uses heuristic resolution when AI is unavailable -- **๐Ÿ“Š Comprehensive Reporting**: Detailed conflict analysis and resolution results -- **๐ŸŽฏ Simplified Architecture**: No more adapter complexity - everything works with git patches - -## How It Works - -### Traditional Approach -``` -Extract Patches โ†’ Retarget โ†’ Apply โ†’ Validate -``` - -### AI Direct Approach -``` -Extract Patches โ†’ AI Resolve Conflicts โ†’ Apply โ†’ Validate -``` - -## Usage - -### Command Line Interface - -```bash -# Run AI Direct Rebase -python -m engine.cli.auto_rebase ai-rebase \ - --feature-patches \ - --base-patches \ - --new-base \ - --req-map \ - --out \ - --verbose -``` - -### Script Usage - -```bash -# Run complete AI Direct Rebase workflow -./scripts/ai_direct_rebase.sh [workdir_name] - -# Compare traditional vs AI approaches -./scripts/compare_approaches.sh -``` - -## Configuration - -### Environment Variables - -- `OPENAI_API_KEY`: Your OpenAI API key for AI-powered conflict resolution -- `OPENAI_MODEL`: Model to use (default: "gpt-4o-mini") - -### Requirements Mapping - -The AI resolver uses requirement mappings to understand feature customizations: - -```yaml -- path_glob: "src/**" - req_ids: ["AD-REQ-201", "AD-REQ-318"] -- path: "src/main.cpp" - req_ids: ["AD-REQ-601"] - requirement: "Feature: While calling API we need to pass 200 as input" -``` - -## Conflict Resolution - -### Automatic Conflict Detection - -The AI resolver automatically detects conflicts by analyzing git patch content: - -- **API Changes**: Function renames detected by comparing added/removed lines -- **Parameter Changes**: Function signature changes detected through pattern matching -- **Header Changes**: Include file changes detected in patch additions/removals -- **Structural Changes**: File organization changes (new/deleted files) -- **Content Changes**: General additions and removals in patches - -### Resolution Strategies - -1. **OpenAI Resolution** (if API key available): - - Comprehensive analysis of conflicts - - Intelligent adaptation to API changes - - Preservation of all feature customizations - -2. **Heuristic Resolution** (fallback): - - Git patch content analysis - - Pattern-based conflict resolution - - Requirement-specific customizations - - Universal file type support - -## Example - -### Input Files - -**Feature Patch** (base X โ†’ feature X): -```cpp -// Feature customizations -if (width == 0) width = 1344; // Changed from 1280 -height = clampH(height); // Added height clamping -std::cout << "[feature-5.0] init camera " << width << "x" << height << std::endl; -return NvOldAPI(width, height); -``` - -**Base Patch** (base X โ†’ base X+1): -```cpp -// API changes -#include "nv/camera_utils.h" // Header renamed -static int NvNewAPI(int width, int height) { return width > 0 && height > 0 ? 0 : -2; } -struct NvCtx { int reserved{0}; }; -int InitRvcCamera(const NvCtx& ctx, int width, int height) { // Signature changed - (void)ctx; - return NvNewAPI(width, height); -} -``` - -### AI Resolution Output - -```cpp -// AI-resolved result (feature X+1) -#include "nv/camera_utils.h" // Updated header -static int clampH(int h) { return h < 480 ? 480 : h; } // Preserved feature function -static int NvNewAPI(int width, int height) { return width > 0 && height > 0 ? 0 : -2; } -struct NvCtx { int reserved{0}; }; -int InitRvcCamera(const NvCtx& ctx, int width, int height) { - if (width == 0) width = 1344; // Preserved feature customization - if (height == 0) height = 720; - height = clampH(height); // Preserved feature customization - (void)ctx; - std::cout << "[feature-5.1] init camera " << width << "x" << height << std::endl; // Preserved feature logging - return NvNewAPI(width, height); // Updated API call -} -``` - -## Results and Reporting - -### AI Rebase Results - -The AI resolver generates detailed results in `ai_rebase_results.json`: - -```json -{ - "summary": { - "total_files": 4, - "resolved": 4, - "errors": 0, - "auto": 4, - "semantic": 0, - "conflicts": 0 - }, - "files": [ - { - "file": "src/vision/camera_pipeline.cpp", - "status": "resolved", - "method": "heuristic", - "req_ids": ["AD-REQ-201", "AD-REQ-318"], - "conflicts": { - "api_changes": [{"old_api": "NvOldAPI", "new_api": "NvNewAPI"}], - "parameter_changes": [{"old_signature": "InitRvcCamera(int width, int height)", "new_signature": "InitRvcCamera(const NvCtx& ctx, int width, int height)"}], - "header_changes": [{"old_header": "nv/camera.h", "new_header": "nv/camera_utils.h"}] - } - } - ] -} -``` - -## Benefits - -1. **๐Ÿš€ Faster Processing**: Eliminates the retarget step -2. **๐ŸŽฏ Intelligent Resolution**: AI understands context and requirements -3. **๐Ÿ”ง Automatic Adaptation**: Handles API changes without manual intervention -4. **๐Ÿ“ˆ Better Success Rate**: Reduces conflicts through intelligent analysis -5. **๐Ÿ”„ Consistent Results**: Produces predictable, high-quality outputs -6. **๐ŸŽฏ Simplified Architecture**: No adapter complexity - works with any file type -7. **โšก Universal Support**: Same logic for C++, JSON, YAML, Python, etc. - -## Limitations - -- Requires OpenAI API key for optimal performance -- Heuristic fallback may not handle all edge cases -- Performance depends on AI model capabilities - -## Future Enhancements - -- Enhanced git patch conflict detection patterns -- Integration with other AI models -- Batch processing capabilities -- Custom resolution strategies -- Improved heuristic fallback algorithms - -## Troubleshooting - -### Common Issues - -1. **No files processed**: Check that patch directories contain `.patch` files -2. **JSON serialization errors**: Ensure all Path objects are converted to strings -3. **Validation failures**: Check that results contain required summary fields - -### Debug Mode - -Use `--verbose` flag for detailed logging: - -```bash -python -m engine.cli.auto_rebase ai-rebase --verbose ... -``` - -## Integration - -The AI Direct Rebase functionality integrates seamlessly with the existing AutoRebase pipeline: - -- Uses the same patch extraction process -- Compatible with existing validation and reporting -- Maintains the same output format for consistency -- Can be used alongside traditional retargeting - -This new approach represents a significant advancement in automated patch application, making the rebase process more intelligent, efficient, and reliable. diff --git a/corrected_patch.patch b/corrected_patch.patch deleted file mode 100644 index f82cf62..0000000 --- a/corrected_patch.patch +++ /dev/null @@ -1,13 +0,0 @@ ---- src/main.cpp 2025-09-13 12:12:50 -+++ src/main.cpp 2025-09-13 12:13:08 -@@ -5,7 +5,9 @@ - } - - int main() { -- OldAPI(42); -+ // Feature customization: different value and extra log -+ std::cout << "Feature activated" << std::endl; -+ OldAPI(200); - return 0; - } - diff --git a/corrected_patch_final.patch b/corrected_patch_final.patch deleted file mode 100644 index f82cf62..0000000 --- a/corrected_patch_final.patch +++ /dev/null @@ -1,13 +0,0 @@ ---- src/main.cpp 2025-09-13 12:12:50 -+++ src/main.cpp 2025-09-13 12:13:08 -@@ -5,7 +5,9 @@ - } - - int main() { -- OldAPI(42); -+ // Feature customization: different value and extra log -+ std::cout << "Feature activated" << std::endl; -+ OldAPI(200); - return 0; - } - diff --git a/data/sample-base-sw_1.0/Makefile b/data/sample-base-sw_1.0/Makefile deleted file mode 100644 index 9476d38..0000000 --- a/data/sample-base-sw_1.0/Makefile +++ /dev/null @@ -1,6 +0,0 @@ -SHELL:=/bin/bash -.PHONY: build test -build: - bash tools/build.sh -test: build - bash tools/test.sh diff --git a/data/sample-base-sw_1.0/Master_prompt.md b/data/sample-base-sw_1.0/Master_prompt.md deleted file mode 100644 index 1f8e545..0000000 --- a/data/sample-base-sw_1.0/Master_prompt.md +++ /dev/null @@ -1,195 +0,0 @@ -You are an expert repo bootstrapper. Populate the current repository with a minimal but runnable Base Software tree that includes C++, Python, JSON, and YAML. Then create two commits, tagged as base/v1.0 and base/v1.1. Ensure code runs and tests pass on a typical Linux/macOS dev box with git, bash, python3, and g++. - -## Goals -- Provide realistic content that downstream feature repos can customize. -- Make artifacts verifiable via `make build` and `make test`. -- Use simple, standard tooling only (bash, make, python3, g++). - -## Directory layout (create exactly): -src/ - vision/camera_pipeline.cpp - shared/metrics/ # will be used in v1.1; initially empty in v1.0 - common/math/metrics.cpp # exists in v1.0, moved in v1.1 -configs/ - rvc/camera.json - system/telemetry.yaml -tools/ - build.sh - test.sh - run_checks.py -Makefile -README.md - -### Commit A: Base SW 1.0 (tag: base/v1.0) - -1) Files & contents: - -# src/vision/camera_pipeline.cpp -#include -#include -#include -#include "nv/camera.h" // header name only; compile won't include it -// Simulate an old vendor API -static int NvOldAPI(int width, int height) { - return width > 0 && height > 0 ? 0 : -1; -} -// Classic init: default 1280x720 if missing -int InitRvcCamera(int width, int height) { - if (width == 0) width = 1280; - if (height == 0) height = 720; - return NvOldAPI(width, height); -} -// Small C++ demo main so we can compile/run something -int main() { - int rc = InitRvcCamera(0, 0); - std::cout << "[base-1.0] InitRvcCamera -> " << rc << std::endl; - return rc == 0 ? 0 : 1; -} - -# src/common/math/metrics.cpp -#include -#include -double Mean(const std::vector& xs) { - double s = 0.0; for (double v : xs) s += v; return xs.empty()?0.0:s/xs.size(); -} -double Clamp(double v, double lo, double hi) { - return std::max(lo, std::min(v, hi)); -} - -# configs/rvc/camera.json -{ - "camera": { - "rvc": { - "timeout_ms": 500, - "exposure": "auto" - } - } -} - -# configs/system/telemetry.yaml -telemetry: - enabled: true - upload_interval_sec: 60 - -# tools/build.sh -#!/usr/bin/env bash -set -euo pipefail -echo "[build] building base..." -mkdir -p build -g++ -std=c++17 -O2 -o build/camera src/vision/camera_pipeline.cpp 2>/dev/null || { - echo "[warn] header nv/camera.h is not available; compiling without it" - g++ -std=c++17 -O2 -o build/camera src/vision/camera_pipeline.cpp -} -echo "[build] OK" - -# tools/test.sh -#!/usr/bin/env bash -set -euo pipefail -echo "[test] running unit checks..." -python3 tools/run_checks.py -./build/camera || (echo "[test] camera binary failed" && exit 1) -echo "[test] OK" - -# tools/run_checks.py -#!/usr/bin/env python3 -import json, sys, pathlib -import yaml - -root = pathlib.Path(__file__).resolve().parents[1] -camera_json = json.loads((root/"configs/rvc/camera.json").read_text()) -telemetry_yaml = yaml.safe_load((root/"configs/system/telemetry.yaml").read_text()) - -assert camera_json["camera"]["rvc"]["timeout_ms"] == 500 -assert telemetry_yaml["telemetry"]["upload_interval_sec"] == 60 -print("[checks] base-1.0 configs look good") - -# Makefile -SHELL:=/bin/bash -.PHONY: build test -build: -\tbash tools/build.sh -test: build -\tbash tools/test.sh - -# README.md -# Base SW -This repository contains a tiny, runnable base with C++ (a small camera init), Python checks, and JSON/YAML configs. - -2) Make scripts executable: -- chmod +x tools/build.sh tools/test.sh tools/run_checks.py - -3) Git commit & tag: -- git add -A -- git commit -m "seed: Base SW 1.0" -- git tag -a base/v1.0 -m "Base SW 1.0" - -### Commit B: Base SW 1.1 (tag: base/v1.1) - -Upstream changes that are logical but induce interesting merges later: -- Rename old vendor API to `NvNewAPI`. -- Add a new context parameter `const NvCtx& ctx` to `InitRvcCamera`. -- Move `metrics.cpp` from `src/common/math/` to `src/shared/metrics/`. -- Change JSON path from `camera.rvc` โ†’ `camera.rvcs` and tweak defaults. -- Rename YAML field `upload_interval_sec` โ†’ `interval_seconds` and add `max_payload_kb`. - -Apply the following edits: - -# src/vision/camera_pipeline.cpp (overwrite) -#include -#include -#include -#include "nv/camera_utils.h" // upstream header name -static int NvNewAPI(int width, int height) { return width > 0 && height > 0 ? 0 : -2; } -struct NvCtx { int reserved{0}; }; -int InitRvcCamera(const NvCtx& ctx, int width, int height) { - if (width == 0) width = 1280; - if (height == 0) height = 720; - (void)ctx; - return NvNewAPI(width, height); -} -int main() { - NvCtx ctx{}; - int rc = InitRvcCamera(ctx, 0, 0); - std::cout << "[base-1.1] InitRvcCamera -> " << rc << std::endl; - return rc == 0 ? 0 : 1; -} - -# MOVE: src/common/math/metrics.cpp โ†’ src/shared/metrics/metrics.cpp -# (same content, but we can add a tiny constexpr to show drift) -#include -#include -constexpr double mean0 = 0.0; -double Mean(const std::vector& xs) { - double s = 0.0; for (double v : xs) s += v; return xs.empty()?mean0:s/xs.size(); -} -double Clamp(double v, double lo, double hi) { return std::max(lo, std::min(v, hi)); } - -# configs/rvc/camera.json (overwrite) -{ - "camera": { - "rvcs": { - "timeout_ms": 500, - "exposure": "auto-v2", - "hdr": false - } - } -} - -# configs/system/telemetry.yaml (overwrite) -telemetry: - enabled: true - interval_seconds: 60 - max_payload_kb: 256 - -# tools/run_checks.py (append extra assertions at end) -# Ensure new keys are present for base-1.1 -camera = camera_json["camera"].get("rvcs") or {} -assert "hdr" in camera -print("[checks] base-1.1 config keys OK") - -Re-run chmod (if needed), then: -- git add -A -- git commit -m "upstream: Base SW 1.1 changes" -- git tag -a base/v1.1 -m "Base SW 1.1" - -Finally, print a short summary of files and tags created. \ No newline at end of file diff --git a/data/sample-base-sw_1.0/README.md b/data/sample-base-sw_1.0/README.md deleted file mode 100644 index e9616d8..0000000 --- a/data/sample-base-sw_1.0/README.md +++ /dev/null @@ -1,2 +0,0 @@ -# Base SW -This repository contains a tiny, runnable base with C++ (a small camera init), Python checks, and JSON/YAML configs. diff --git a/data/sample-base-sw_1.0/configs/rvc/camera.json b/data/sample-base-sw_1.0/configs/rvc/camera.json deleted file mode 100644 index ca4d4ed..0000000 --- a/data/sample-base-sw_1.0/configs/rvc/camera.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "camera": { - "rvc": { - "timeout_ms": 500, - "exposure": "auto" - } - } -} diff --git a/data/sample-base-sw_1.0/configs/system/telemetry.yaml b/data/sample-base-sw_1.0/configs/system/telemetry.yaml deleted file mode 100644 index cecca38..0000000 --- a/data/sample-base-sw_1.0/configs/system/telemetry.yaml +++ /dev/null @@ -1,3 +0,0 @@ -telemetry: - enabled: true - upload_interval_sec: 60 diff --git a/data/sample-base-sw_1.0/src/common/math/metrics.cpp b/data/sample-base-sw_1.0/src/common/math/metrics.cpp deleted file mode 100644 index 5403fc2..0000000 --- a/data/sample-base-sw_1.0/src/common/math/metrics.cpp +++ /dev/null @@ -1,8 +0,0 @@ -#include -#include -double Mean(const std::vector& xs) { - double s = 0.0; for (double v : xs) s += v; return xs.empty()?0.0:s/xs.size(); -} -double Clamp(double v, double lo, double hi) { - return std::max(lo, std::min(v, hi)); -} diff --git a/data/sample-base-sw_1.0/src/vision/camera_pipeline.cpp b/data/sample-base-sw_1.0/src/vision/camera_pipeline.cpp deleted file mode 100644 index db31804..0000000 --- a/data/sample-base-sw_1.0/src/vision/camera_pipeline.cpp +++ /dev/null @@ -1,20 +0,0 @@ -#include -#include -#include -#include "nv/camera.h" // header name only; compile won't include it -// Simulate an old vendor API -static int NvOldAPI(int width, int height) { - return width > 0 && height > 0 ? 0 : -1; -} -// Classic init: default 1280x720 if missing -int InitRvcCamera(int width, int height) { - if (width == 0) width = 1280; - if (height == 0) height = 720; - return NvOldAPI(width, height); -} -// Small C++ demo main so we can compile/run something -int main() { - int rc = InitRvcCamera(0, 0); - std::cout << "[base-1.0] InitRvcCamera -> " << rc << std::endl; - return rc == 0 ? 0 : 1; -} diff --git a/data/sample-base-sw_1.0/tools/build.sh b/data/sample-base-sw_1.0/tools/build.sh deleted file mode 100755 index e2e482f..0000000 --- a/data/sample-base-sw_1.0/tools/build.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail -echo "[build] building base..." -mkdir -p build -g++ -std=c++17 -O2 -o build/camera src/vision/camera_pipeline.cpp 2>/dev/null || { - echo "[warn] header nv/camera.h is not available; compiling without it" - g++ -std=c++17 -O2 -o build/camera src/vision/camera_pipeline.cpp -} -echo "[build] OK" diff --git a/data/sample-base-sw_1.0/tools/run_checks.py b/data/sample-base-sw_1.0/tools/run_checks.py deleted file mode 100755 index 9ae7cf1..0000000 --- a/data/sample-base-sw_1.0/tools/run_checks.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python3 -import json, sys, pathlib -import yaml - -root = pathlib.Path(__file__).resolve().parents[1] -camera_json = json.loads((root/"configs/rvc/camera.json").read_text()) -telemetry_yaml = yaml.safe_load((root/"configs/system/telemetry.yaml").read_text()) - -assert camera_json["camera"]["rvc"]["timeout_ms"] == 500 -assert telemetry_yaml["telemetry"]["upload_interval_sec"] == 60 -print("[checks] base-1.0 configs look good") diff --git a/data/sample-base-sw_1.0/tools/test.sh b/data/sample-base-sw_1.0/tools/test.sh deleted file mode 100755 index 4fd3ca9..0000000 --- a/data/sample-base-sw_1.0/tools/test.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail -echo "[test] running unit checks..." -python3 tools/run_checks.py -./build/camera || (echo "[test] camera binary failed" && exit 1) -echo "[test] OK" diff --git a/data/sample-base-sw_1.1/Makefile b/data/sample-base-sw_1.1/Makefile deleted file mode 100644 index 9476d38..0000000 --- a/data/sample-base-sw_1.1/Makefile +++ /dev/null @@ -1,6 +0,0 @@ -SHELL:=/bin/bash -.PHONY: build test -build: - bash tools/build.sh -test: build - bash tools/test.sh diff --git a/data/sample-base-sw_1.1/Master_prompt.md b/data/sample-base-sw_1.1/Master_prompt.md deleted file mode 100644 index 1f8e545..0000000 --- a/data/sample-base-sw_1.1/Master_prompt.md +++ /dev/null @@ -1,195 +0,0 @@ -You are an expert repo bootstrapper. Populate the current repository with a minimal but runnable Base Software tree that includes C++, Python, JSON, and YAML. Then create two commits, tagged as base/v1.0 and base/v1.1. Ensure code runs and tests pass on a typical Linux/macOS dev box with git, bash, python3, and g++. - -## Goals -- Provide realistic content that downstream feature repos can customize. -- Make artifacts verifiable via `make build` and `make test`. -- Use simple, standard tooling only (bash, make, python3, g++). - -## Directory layout (create exactly): -src/ - vision/camera_pipeline.cpp - shared/metrics/ # will be used in v1.1; initially empty in v1.0 - common/math/metrics.cpp # exists in v1.0, moved in v1.1 -configs/ - rvc/camera.json - system/telemetry.yaml -tools/ - build.sh - test.sh - run_checks.py -Makefile -README.md - -### Commit A: Base SW 1.0 (tag: base/v1.0) - -1) Files & contents: - -# src/vision/camera_pipeline.cpp -#include -#include -#include -#include "nv/camera.h" // header name only; compile won't include it -// Simulate an old vendor API -static int NvOldAPI(int width, int height) { - return width > 0 && height > 0 ? 0 : -1; -} -// Classic init: default 1280x720 if missing -int InitRvcCamera(int width, int height) { - if (width == 0) width = 1280; - if (height == 0) height = 720; - return NvOldAPI(width, height); -} -// Small C++ demo main so we can compile/run something -int main() { - int rc = InitRvcCamera(0, 0); - std::cout << "[base-1.0] InitRvcCamera -> " << rc << std::endl; - return rc == 0 ? 0 : 1; -} - -# src/common/math/metrics.cpp -#include -#include -double Mean(const std::vector& xs) { - double s = 0.0; for (double v : xs) s += v; return xs.empty()?0.0:s/xs.size(); -} -double Clamp(double v, double lo, double hi) { - return std::max(lo, std::min(v, hi)); -} - -# configs/rvc/camera.json -{ - "camera": { - "rvc": { - "timeout_ms": 500, - "exposure": "auto" - } - } -} - -# configs/system/telemetry.yaml -telemetry: - enabled: true - upload_interval_sec: 60 - -# tools/build.sh -#!/usr/bin/env bash -set -euo pipefail -echo "[build] building base..." -mkdir -p build -g++ -std=c++17 -O2 -o build/camera src/vision/camera_pipeline.cpp 2>/dev/null || { - echo "[warn] header nv/camera.h is not available; compiling without it" - g++ -std=c++17 -O2 -o build/camera src/vision/camera_pipeline.cpp -} -echo "[build] OK" - -# tools/test.sh -#!/usr/bin/env bash -set -euo pipefail -echo "[test] running unit checks..." -python3 tools/run_checks.py -./build/camera || (echo "[test] camera binary failed" && exit 1) -echo "[test] OK" - -# tools/run_checks.py -#!/usr/bin/env python3 -import json, sys, pathlib -import yaml - -root = pathlib.Path(__file__).resolve().parents[1] -camera_json = json.loads((root/"configs/rvc/camera.json").read_text()) -telemetry_yaml = yaml.safe_load((root/"configs/system/telemetry.yaml").read_text()) - -assert camera_json["camera"]["rvc"]["timeout_ms"] == 500 -assert telemetry_yaml["telemetry"]["upload_interval_sec"] == 60 -print("[checks] base-1.0 configs look good") - -# Makefile -SHELL:=/bin/bash -.PHONY: build test -build: -\tbash tools/build.sh -test: build -\tbash tools/test.sh - -# README.md -# Base SW -This repository contains a tiny, runnable base with C++ (a small camera init), Python checks, and JSON/YAML configs. - -2) Make scripts executable: -- chmod +x tools/build.sh tools/test.sh tools/run_checks.py - -3) Git commit & tag: -- git add -A -- git commit -m "seed: Base SW 1.0" -- git tag -a base/v1.0 -m "Base SW 1.0" - -### Commit B: Base SW 1.1 (tag: base/v1.1) - -Upstream changes that are logical but induce interesting merges later: -- Rename old vendor API to `NvNewAPI`. -- Add a new context parameter `const NvCtx& ctx` to `InitRvcCamera`. -- Move `metrics.cpp` from `src/common/math/` to `src/shared/metrics/`. -- Change JSON path from `camera.rvc` โ†’ `camera.rvcs` and tweak defaults. -- Rename YAML field `upload_interval_sec` โ†’ `interval_seconds` and add `max_payload_kb`. - -Apply the following edits: - -# src/vision/camera_pipeline.cpp (overwrite) -#include -#include -#include -#include "nv/camera_utils.h" // upstream header name -static int NvNewAPI(int width, int height) { return width > 0 && height > 0 ? 0 : -2; } -struct NvCtx { int reserved{0}; }; -int InitRvcCamera(const NvCtx& ctx, int width, int height) { - if (width == 0) width = 1280; - if (height == 0) height = 720; - (void)ctx; - return NvNewAPI(width, height); -} -int main() { - NvCtx ctx{}; - int rc = InitRvcCamera(ctx, 0, 0); - std::cout << "[base-1.1] InitRvcCamera -> " << rc << std::endl; - return rc == 0 ? 0 : 1; -} - -# MOVE: src/common/math/metrics.cpp โ†’ src/shared/metrics/metrics.cpp -# (same content, but we can add a tiny constexpr to show drift) -#include -#include -constexpr double mean0 = 0.0; -double Mean(const std::vector& xs) { - double s = 0.0; for (double v : xs) s += v; return xs.empty()?mean0:s/xs.size(); -} -double Clamp(double v, double lo, double hi) { return std::max(lo, std::min(v, hi)); } - -# configs/rvc/camera.json (overwrite) -{ - "camera": { - "rvcs": { - "timeout_ms": 500, - "exposure": "auto-v2", - "hdr": false - } - } -} - -# configs/system/telemetry.yaml (overwrite) -telemetry: - enabled: true - interval_seconds: 60 - max_payload_kb: 256 - -# tools/run_checks.py (append extra assertions at end) -# Ensure new keys are present for base-1.1 -camera = camera_json["camera"].get("rvcs") or {} -assert "hdr" in camera -print("[checks] base-1.1 config keys OK") - -Re-run chmod (if needed), then: -- git add -A -- git commit -m "upstream: Base SW 1.1 changes" -- git tag -a base/v1.1 -m "Base SW 1.1" - -Finally, print a short summary of files and tags created. \ No newline at end of file diff --git a/data/sample-base-sw_1.1/README.md b/data/sample-base-sw_1.1/README.md deleted file mode 100644 index e9616d8..0000000 --- a/data/sample-base-sw_1.1/README.md +++ /dev/null @@ -1,2 +0,0 @@ -# Base SW -This repository contains a tiny, runnable base with C++ (a small camera init), Python checks, and JSON/YAML configs. diff --git a/data/sample-base-sw_1.1/configs/rvc/camera.json b/data/sample-base-sw_1.1/configs/rvc/camera.json deleted file mode 100644 index 54053f4..0000000 --- a/data/sample-base-sw_1.1/configs/rvc/camera.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "camera": { - "rvcs": { - "timeout_ms": 500, - "exposure": "auto-v2", - "hdr": false - } - } -} diff --git a/data/sample-base-sw_1.1/configs/system/telemetry.yaml b/data/sample-base-sw_1.1/configs/system/telemetry.yaml deleted file mode 100644 index bbee321..0000000 --- a/data/sample-base-sw_1.1/configs/system/telemetry.yaml +++ /dev/null @@ -1,4 +0,0 @@ -telemetry: - enabled: true - interval_seconds: 60 - max_payload_kb: 256 diff --git a/data/sample-base-sw_1.1/src/shared/metrics/metrics.cpp b/data/sample-base-sw_1.1/src/shared/metrics/metrics.cpp deleted file mode 100644 index 916f7fb..0000000 --- a/data/sample-base-sw_1.1/src/shared/metrics/metrics.cpp +++ /dev/null @@ -1,7 +0,0 @@ -#include -#include -constexpr double mean0 = 0.0; -double Mean(const std::vector& xs) { - double s = 0.0; for (double v : xs) s += v; return xs.empty()?mean0:s/xs.size(); -} -double Clamp(double v, double lo, double hi) { return std::max(lo, std::min(v, hi)); } diff --git a/data/sample-base-sw_1.1/src/vision/camera_pipeline.cpp b/data/sample-base-sw_1.1/src/vision/camera_pipeline.cpp deleted file mode 100644 index e5d3254..0000000 --- a/data/sample-base-sw_1.1/src/vision/camera_pipeline.cpp +++ /dev/null @@ -1,18 +0,0 @@ -#include -#include -#include -#include "nv/camera_utils.h" // upstream header name -static int NvNewAPI(int width, int height) { return width > 0 && height > 0 ? 0 : -2; } -struct NvCtx { int reserved{0}; }; -int InitRvcCamera(const NvCtx& ctx, int width, int height) { - if (width == 0) width = 1280; - if (height == 0) height = 720; - (void)ctx; - return NvNewAPI(width, height); -} -int main() { - NvCtx ctx{}; - int rc = InitRvcCamera(ctx, 0, 0); - std::cout << "[base-1.1] InitRvcCamera -> " << rc << std::endl; - return rc == 0 ? 0 : 1; -} diff --git a/data/sample-base-sw_1.1/tools/build.sh b/data/sample-base-sw_1.1/tools/build.sh deleted file mode 100755 index e2e482f..0000000 --- a/data/sample-base-sw_1.1/tools/build.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail -echo "[build] building base..." -mkdir -p build -g++ -std=c++17 -O2 -o build/camera src/vision/camera_pipeline.cpp 2>/dev/null || { - echo "[warn] header nv/camera.h is not available; compiling without it" - g++ -std=c++17 -O2 -o build/camera src/vision/camera_pipeline.cpp -} -echo "[build] OK" diff --git a/data/sample-base-sw_1.1/tools/run_checks.py b/data/sample-base-sw_1.1/tools/run_checks.py deleted file mode 100755 index ded3ed2..0000000 --- a/data/sample-base-sw_1.1/tools/run_checks.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python3 -import json, sys, pathlib -import yaml - -root = pathlib.Path(__file__).resolve().parents[1] -camera_json = json.loads((root/"configs/rvc/camera.json").read_text()) -telemetry_yaml = yaml.safe_load((root/"configs/system/telemetry.yaml").read_text()) - -assert camera_json["camera"]["rvc"]["timeout_ms"] == 500 -assert telemetry_yaml["telemetry"]["upload_interval_sec"] == 60 -print("[checks] base-1.0 configs look good") -# Ensure new keys are present for base-1.1 -camera = camera_json["camera"].get("rvcs") or {} -assert "hdr" in camera -print("[checks] base-1.1 config keys OK") diff --git a/data/sample-base-sw_1.1/tools/test.sh b/data/sample-base-sw_1.1/tools/test.sh deleted file mode 100755 index 4fd3ca9..0000000 --- a/data/sample-base-sw_1.1/tools/test.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail -echo "[test] running unit checks..." -python3 tools/run_checks.py -./build/camera || (echo "[test] camera binary failed" && exit 1) -echo "[test] OK" diff --git a/data/sample-feature-sw_5.0/Makefile b/data/sample-feature-sw_5.0/Makefile deleted file mode 100644 index 9476d38..0000000 --- a/data/sample-feature-sw_5.0/Makefile +++ /dev/null @@ -1,6 +0,0 @@ -SHELL:=/bin/bash -.PHONY: build test -build: - bash tools/build.sh -test: build - bash tools/test.sh diff --git a/data/sample-feature-sw_5.0/Master_prompt_feature.md b/data/sample-feature-sw_5.0/Master_prompt_feature.md deleted file mode 100644 index 461d536..0000000 --- a/data/sample-feature-sw_5.0/Master_prompt_feature.md +++ /dev/null @@ -1,147 +0,0 @@ -You are an expert repo bootstrapper. Populate the current repository with a Feature Software tree that represents customizations built on top of the Base SW 1.0 semantics. Include C++, Python, JSON, YAML; make it runnable with `make build` and `make test`. Create one commit tagged feature/v5.0. - -## Directory layout (create exactly): -src/ - vision/camera_pipeline.cpp - common/math/metrics.cpp # stays in old path (will conflict with base move later) -configs/ - rvc/camera.json - system/telemetry.yaml -tools/ - build.sh - test.sh - run_checks.py -data/ - requirements_map.yaml -Makefile -README.md - -### Commit: Feature SW 5.0 (tag: feature/v5.0) - -Feature goals: -- Customize camera init (change default width, clamp height, add logging). -- Add a new metric `Median` in C++. -- Tighten telemetry interval and add tags. -- Increase timeout & change exposure mode, add log_level in JSON. -- Keep everything runnable. - -Create these files: - -# src/vision/camera_pipeline.cpp -#include -#include -#include -#include "nv/camera.h" // still builds without the header present -static int NvOldAPI(int width, int height) { return width > 0 && height > 0 ? 0 : -1; } -static int clampH(int h) { return h < 480 ? 480 : h; } -int InitRvcCamera(int width, int height) { - // FEATURE: default 1344x720 for RVC, clamp height - if (width == 0) width = 1344; - if (height == 0) height = 720; - height = clampH(height); - std::cout << "[feature-5.0] init camera " << width << "x" << height << std::endl; - return NvOldAPI(width, height); -} -int main() { - int rc = InitRvcCamera(0, 0); - std::cout << "[feature-5.0] InitRvcCamera -> " << rc << std::endl; - return rc == 0 ? 0 : 1; -} - -# src/common/math/metrics.cpp -#include -#include -#include -// FEATURE: add Median -double Mean(const std::vector& xs) { - double s = 0.0; for (double v : xs) s += v; return xs.empty()?0.0:s/xs.size(); -} -double Clamp(double v, double lo, double hi) { return std::max(lo, std::min(v, hi)); } -double Median(std::vector xs) { - if (xs.empty()) return 0.0; - std::sort(xs.begin(), xs.end()); - size_t n = xs.size(); - if (n % 2 == 1) return xs[n/2]; - return 0.5*(xs[n/2 - 1] + xs[n/2]); -} - -# configs/rvc/camera.json -{ - "camera": { - "rvc": { - "timeout_ms": 800, - "exposure": "manual", - "log_level": "info" - } - } -} - -# configs/system/telemetry.yaml -telemetry: - enabled: true - upload_interval_sec: 30 - tags: ["rvc", "debug"] - -# tools/build.sh -#!/usr/bin/env bash -set -euo pipefail -echo "[build] building feature..." -mkdir -p build -g++ -std=c++17 -O2 -o build/camera src/vision/camera_pipeline.cpp 2>/dev/null || { - echo "[warn] camera header not available; compiling anyway" - g++ -std=c++17 -O2 -o build/camera src/vision/camera_pipeline.cpp -} -echo "[build] OK" - -# tools/test.sh -#!/usr/bin/env bash -set -euo pipefail -echo "[test] running feature checks..." -python3 tools/run_checks.py -./build/camera || (echo "[test] camera binary failed" && exit 1) -echo "[test] OK" - -# tools/run_checks.py -#!/usr/bin/env python3 -import json, sys, pathlib -import yaml -root = pathlib.Path(__file__).resolve().parents[1] -camera_json = json.loads((root/"configs/rvc/camera.json").read_text()) -telemetry_yaml = yaml.safe_load((root/"configs/system/telemetry.yaml").read_text()) -assert camera_json["camera"]["rvc"]["timeout_ms"] == 800 -assert camera_json["camera"]["rvc"]["exposure"] == "manual" -assert telemetry_yaml["telemetry"]["upload_interval_sec"] == 30 -assert "debug" in telemetry_yaml["telemetry"]["tags"] -print("[checks] feature-5.0 configs look good") - -# data/requirements_map.yaml -- path_glob: "src/vision/**" - req_ids: ["REQ-RVC-201","REQ-RVC-318"] -- path_glob: "configs/rvc/*.json" - req_ids: ["REQ-RVC-411"] -- path_glob: "services/*.service" - req_ids: ["REQ-OPS-021"] - -# Makefile -SHELL:=/bin/bash -.PHONY: build test -build: -\tbash tools/build.sh -test: build -\tbash tools/test.sh - -# README.md -# Feature SW 5.0 -Feature customizations built conceptually on Base 1.0: -- wider default, height clamp, logging in camera init -- Median() metric -- tighter telemetry interval with tags -- JSON tweaks for RVC (timeout, exposure, log_level) - -Make all scripts executable: -- chmod +x tools/build.sh tools/test.sh tools/run_checks.py - -Git commit & tag: -- git add -A -- git commit -m "seed: Feature SW 5.0 over Base 1.0 semantics" -- git tag -a feature/v5.0 -m "Feature SW 5.0" \ No newline at end of file diff --git a/data/sample-feature-sw_5.0/README.md b/data/sample-feature-sw_5.0/README.md deleted file mode 100644 index 03ececa..0000000 --- a/data/sample-feature-sw_5.0/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Feature SW 5.0 -Feature customizations built conceptually on Base 1.0: -- wider default, height clamp, logging in camera init -- Median() metric -- tighter telemetry interval with tags -- JSON tweaks for RVC (timeout, exposure, log_level) - -Make all scripts executable: -- chmod +x tools/build.sh tools/test.sh tools/run_checks.py - -Git commit & tag: -- git add -A -- git commit -m "seed: Feature SW 5.0 over Base 1.0 semantics" -- git tag -a feature/v5.0 -m "Feature SW 5.0" diff --git a/data/sample-feature-sw_5.0/configs/rvc/camera.json b/data/sample-feature-sw_5.0/configs/rvc/camera.json deleted file mode 100644 index df54387..0000000 --- a/data/sample-feature-sw_5.0/configs/rvc/camera.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "camera": { - "rvc": { - "timeout_ms": 800, - "exposure": "manual", - "log_level": "info" - } - } -} diff --git a/data/sample-feature-sw_5.0/configs/system/telemetry.yaml b/data/sample-feature-sw_5.0/configs/system/telemetry.yaml deleted file mode 100644 index bc3e207..0000000 --- a/data/sample-feature-sw_5.0/configs/system/telemetry.yaml +++ /dev/null @@ -1,4 +0,0 @@ -telemetry: - enabled: true - upload_interval_sec: 30 - tags: ["rvc", "debug"] diff --git a/data/sample-feature-sw_5.0/data/requirements_map.yaml b/data/sample-feature-sw_5.0/data/requirements_map.yaml deleted file mode 100644 index 99f2d85..0000000 --- a/data/sample-feature-sw_5.0/data/requirements_map.yaml +++ /dev/null @@ -1,6 +0,0 @@ -- path_glob: "src/vision/**" - req_ids: ["REQ-RVC-201","REQ-RVC-318"] -- path_glob: "configs/rvc/*.json" - req_ids: ["REQ-RVC-411"] -- path_glob: "services/*.service" - req_ids: ["REQ-OPS-021"] diff --git a/data/sample-feature-sw_5.0/src/common/math/metrics.cpp b/data/sample-feature-sw_5.0/src/common/math/metrics.cpp deleted file mode 100644 index 5664930..0000000 --- a/data/sample-feature-sw_5.0/src/common/math/metrics.cpp +++ /dev/null @@ -1,15 +0,0 @@ -#include -#include -#include -// FEATURE: add Median -double Mean(const std::vector& xs) { - double s = 0.0; for (double v : xs) s += v; return xs.empty()?0.0:s/xs.size(); -} -double Clamp(double v, double lo, double hi) { return std::max(lo, std::min(v, hi)); } -double Median(std::vector xs) { - if (xs.empty()) return 0.0; - std::sort(xs.begin(), xs.end()); - size_t n = xs.size(); - if (n % 2 == 1) return xs[n/2]; - return 0.5*(xs[n/2 - 1] + xs[n/2]); -} diff --git a/data/sample-feature-sw_5.0/src/vision/camera_pipeline.cpp b/data/sample-feature-sw_5.0/src/vision/camera_pipeline.cpp deleted file mode 100644 index 5fc17c8..0000000 --- a/data/sample-feature-sw_5.0/src/vision/camera_pipeline.cpp +++ /dev/null @@ -1,19 +0,0 @@ -#include -#include -#include -#include "nv/camera.h" // still builds without the header present -static int NvOldAPI(int width, int height) { return width > 0 && height > 0 ? 0 : -1; } -static int clampH(int h) { return h < 480 ? 480 : h; } -int InitRvcCamera(int width, int height) { - // FEATURE: default 1344x720 for RVC, clamp height - if (width == 0) width = 1344; - if (height == 0) height = 720; - height = clampH(height); - std::cout << "[feature-5.0] init camera " << width << "x" << height << std::endl; - return NvOldAPI(width, height); -} -int main() { - int rc = InitRvcCamera(0, 0); - std::cout << "[feature-5.0] InitRvcCamera -> " << rc << std::endl; - return rc == 0 ? 0 : 1; -} diff --git a/data/sample-feature-sw_5.0/tools/build.sh b/data/sample-feature-sw_5.0/tools/build.sh deleted file mode 100755 index b2cb145..0000000 --- a/data/sample-feature-sw_5.0/tools/build.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail -echo "[build] building feature..." -mkdir -p build -g++ -std=c++17 -O2 -o build/camera src/vision/camera_pipeline.cpp 2>/dev/null || { - echo "[warn] camera header not available; compiling anyway" - g++ -std=c++17 -O2 -o build/camera src/vision/camera_pipeline.cpp -} -echo "[build] OK" diff --git a/data/sample-feature-sw_5.0/tools/run_checks.py b/data/sample-feature-sw_5.0/tools/run_checks.py deleted file mode 100755 index 9c0c912..0000000 --- a/data/sample-feature-sw_5.0/tools/run_checks.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python3 -import json, sys, pathlib -import yaml -root = pathlib.Path(__file__).resolve().parents[1] -camera_json = json.loads((root/"configs/rvc/camera.json").read_text()) -telemetry_yaml = yaml.safe_load((root/"configs/system/telemetry.yaml").read_text()) -assert camera_json["camera"]["rvc"]["timeout_ms"] == 800 -assert camera_json["camera"]["rvc"]["exposure"] == "manual" -assert telemetry_yaml["telemetry"]["upload_interval_sec"] == 30 -assert "debug" in telemetry_yaml["telemetry"]["tags"] -print("[checks] feature-5.0 configs look good") diff --git a/data/sample-feature-sw_5.0/tools/test.sh b/data/sample-feature-sw_5.0/tools/test.sh deleted file mode 100755 index 0ad7492..0000000 --- a/data/sample-feature-sw_5.0/tools/test.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail -echo "[test] running feature checks..." -python3 tools/run_checks.py -./build/camera || (echo "[test] camera binary failed" && exit 1) -echo "[test] OK" diff --git a/debug_apply/src/main.cpp b/debug_apply/src/main.cpp deleted file mode 100644 index 7ab3c8d..0000000 --- a/debug_apply/src/main.cpp +++ /dev/null @@ -1,11 +0,0 @@ -#include - -void NewAPI(int v) { - std::cout << "NewAPI: " << v << std::endl; -} - -int main() { - NewAPI(42); - return 0; -} - diff --git a/debug_git_apply/src/main.cpp b/debug_git_apply/src/main.cpp deleted file mode 100644 index 7ab3c8d..0000000 --- a/debug_git_apply/src/main.cpp +++ /dev/null @@ -1,11 +0,0 @@ -#include - -void NewAPI(int v) { - std::cout << "NewAPI: " << v << std::endl; -} - -int main() { - NewAPI(42); - return 0; -} - diff --git a/debug_git_apply/src/main.cpp.rej b/debug_git_apply/src/main.cpp.rej deleted file mode 100644 index 7b8c486..0000000 --- a/debug_git_apply/src/main.cpp.rej +++ /dev/null @@ -1,12 +0,0 @@ -diff a/debug_git_apply/src/main.cpp b/debug_git_apply/src/main.cpp (rejected hunks) -@@ -5,7 +5,9 @@ - } - - int main() { -- OldAPI(42); -+ // Feature customization: different value and extra log -+ std::cout << "Feature activated" << std::endl; -+ OldAPI(200); - return 0; - } - diff --git a/debug_test/src/main.cpp b/debug_test/src/main.cpp deleted file mode 100644 index 7ab3c8d..0000000 --- a/debug_test/src/main.cpp +++ /dev/null @@ -1,11 +0,0 @@ -#include - -void NewAPI(int v) { - std::cout << "NewAPI: " << v << std::endl; -} - -int main() { - NewAPI(42); - return 0; -} - diff --git a/debug_test/src/main.cpp.rej b/debug_test/src/main.cpp.rej deleted file mode 100644 index 519027a..0000000 --- a/debug_test/src/main.cpp.rej +++ /dev/null @@ -1,12 +0,0 @@ -diff a/debug_test/src/main.cpp b/debug_test/src/main.cpp (rejected hunks) -@@ -5,7 +5,9 @@ - } - - int main() { -- OldAPI(42); -+ // Feature customization: different value and extra log -+ std::cout << "Feature activated" << std::endl; -+ OldAPI(200); - return 0; - } - diff --git a/debug_test/test_patch.patch b/debug_test/test_patch.patch deleted file mode 100644 index f82cf62..0000000 --- a/debug_test/test_patch.patch +++ /dev/null @@ -1,13 +0,0 @@ ---- src/main.cpp 2025-09-13 12:12:50 -+++ src/main.cpp 2025-09-13 12:13:08 -@@ -5,7 +5,9 @@ - } - - int main() { -- OldAPI(42); -+ // Feature customization: different value and extra log -+ std::cout << "Feature activated" << std::endl; -+ OldAPI(200); - return 0; - } - diff --git a/diff.patch b/diff.patch deleted file mode 100644 index 4641463..0000000 --- a/diff.patch +++ /dev/null @@ -1,11 +0,0 @@ -diff --color data/sample/base-1.0/src/main.cpp data/sample/base-1.1/src/main.cpp -3,4c3,4 -< void OldAPI(int v) { -< std::cout << "OldAPI: " << v << std::endl; ---- -> void NewAPI(int v) { -> std::cout << "NewAPI: " << v << std::endl; -8c8 -< OldAPI(42); ---- -> NewAPI(42); diff --git a/diff1.patch b/diff1.patch deleted file mode 100644 index ed8c026..0000000 --- a/diff1.patch +++ /dev/null @@ -1,3 +0,0 @@ -Common subdirectories: data/sample/base-1.0/configs and data/sample/base-1.1/configs -Common subdirectories: data/sample/base-1.0/dts and data/sample/base-1.1/dts -Common subdirectories: data/sample/base-1.0/src and data/sample/base-1.1/src diff --git a/engine/__init__.py b/engine/__init__.py deleted file mode 100644 index 5d2606d..0000000 --- a/engine/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -"""auto-rebase engine package.""" - -__all__ = [ - "__version__", -] - -__version__ = "0.1.0" - diff --git a/engine/cli/auto_rebase.py b/engine/cli/auto_rebase.py deleted file mode 100644 index ea3c16a..0000000 --- a/engine/cli/auto_rebase.py +++ /dev/null @@ -1,259 +0,0 @@ -from __future__ import annotations - -import argparse -import json -import logging -from datetime import datetime -from pathlib import Path -from typing import Any - -from ..core import config as cfg -from ..core import feature_extract, base_extract, retarget as rt, validate as val, report as rep -from ..core.utils import setup_logging, write_json -from ..core.vcs import commit_and_tag, git_diff_no_index, git_apply_reject, generate_per_file_patches, apply_patch_dir_with_reject -from ..core.ai_resolve import resolve_rejects -from ..core.ai_direct_rebase import ai_direct_rebase -from ..core import utils as U - -# Load environment variables from .env file -try: - from dotenv import load_dotenv - load_dotenv() -except ImportError: - pass # python-dotenv not available, continue without it - - -def _tools_matrix() -> dict[str, Any]: - """Simplified tools matrix - all file types handled uniformly with git patches.""" - return { - "git_patches": {"available": True, "description": "Git patch processing"}, - "ai_resolution": {"available": True, "description": "AI-based conflict resolution"}, - } - - -def main() -> None: - parser = argparse.ArgumentParser(prog="auto-rebase") - sub = parser.add_subparsers(dest="cmd", required=True) - - p_init = sub.add_parser("init") - p_init.add_argument("--old-base", required=True) - p_init.add_argument("--new-base", required=True) - p_init.add_argument("--feature", required=True) - p_init.add_argument("--req-map", required=True) - p_init.add_argument("--workdir", required=True) - p_init.add_argument("--verbose", action="store_true") - - p_ef = sub.add_parser("extract-feature") - p_ef.add_argument("--out", required=True) - p_ef.add_argument("--git-patch", help="Optional path to write a combined git-style patch (unified diff)") - p_ef.add_argument("--patch-dir", help="Optional directory to write per-file patches using 'diff -u' (one .patch per file)") - p_ef.add_argument("--verbose", action="store_true") - - p_eb = sub.add_parser("extract-base") - p_eb.add_argument("--out", required=True) - p_eb.add_argument("--git-patch", help="Optional path to write a combined git-style patch (unified diff)") - p_eb.add_argument("--patch-dir", help="Optional directory to write per-file patches using 'diff -u' (one .patch per file)") - p_eb.add_argument("--verbose", action="store_true") - - p_rt = sub.add_parser("retarget") - p_rt.add_argument("--feature-patch", required=True) - p_rt.add_argument("--base-patch", required=True) - p_rt.add_argument("--new-base", required=True) - p_rt.add_argument("--out", required=True) - p_rt.add_argument("--git-patch", help="Optional path to a git-style feature patch to apply with --reject; leftover .rej will be AI-resolved") - p_rt.add_argument("--patch-dir", help="Optional directory of per-file patches to apply with --reject before semantic retarget") - p_rt.add_argument("--verbose", action="store_true") - - p_val = sub.add_parser("validate") - p_val.add_argument("--path", required=True) - p_val.add_argument("--report", required=True) - p_val.add_argument("--verbose", action="store_true") - - p_fin = sub.add_parser("finalize") - p_fin.add_argument("--path", required=True) - p_fin.add_argument("--tag", required=True) - p_fin.add_argument("--trace", required=True) - p_fin.add_argument("--verbose", action="store_true") - - p_ai_rebase = sub.add_parser("ai-rebase") - p_ai_rebase.add_argument("--feature-patches", required=True, help="Directory containing feature patch files") - p_ai_rebase.add_argument("--base-patches", required=True, help="Directory containing base patch files") - p_ai_rebase.add_argument("--new-base", required=True, help="Path to new base directory") - p_ai_rebase.add_argument("--req-map", required=True, help="Path to requirements mapping file") - p_ai_rebase.add_argument("--out", required=True, help="Output directory for rebased feature") - p_ai_rebase.add_argument("--verbose", action="store_true") - - args = parser.parse_args() - def find_manifest(start_path: Path) -> dict[str, Any]: - """Walk up from a path to find a run.json manifest and return it.""" - for p in [start_path] + list(start_path.parents): - cand = p / "run.json" - if cand.exists(): - return json.loads(cand.read_text(encoding="utf-8")) - # Also try artifacts parent convention - cand = Path("artifacts/run1/run.json") - if cand.exists(): - return json.loads(cand.read_text(encoding="utf-8")) - raise SystemExit("run.json manifest not found. Run 'init' first or ensure --out is under the workdir.") - - if args.cmd == "init": - artifacts_dir = Path(args.workdir) - setup_logging(Path(artifacts_dir) / "logs" / "run.log", args.verbose) - man = cfg.new_run_manifest( - old_base=Path(args.old_base), - new_base=Path(args.new_base), - feature=Path(args.feature), - req_map=Path(args.req_map), - workdir=artifacts_dir, - ) - cfg.persist_manifest(man, Path(args.workdir)) - print(json.dumps({"run_id": man.run_id, "workdir": args.workdir})) - return - - # For subsequent steps, derive workdir from provided paths. - setup_logging(None, getattr(args, "verbose", False)) - - if args.cmd == "extract-feature": - out = Path(args.out) - out.mkdir(parents=True, exist_ok=True) - man = find_manifest(out) - old_base = Path(man["old_base"]).resolve() - feature = Path(man["feature_old"]).resolve() - req_map = Path(man["req_map"]).resolve() - units = feature_extract.extract_feature(old_base, feature, req_map) - write_json(out / "feature_patch.json", [u for u in units]) - if getattr(args, "git_patch", None): - git_diff_no_index(old_base, feature, Path(args.git_patch)) - if getattr(args, "patch_dir", None): - generate_per_file_patches(old_base, feature, Path(args.patch_dir)) - print(str(out / "feature_patch.json")) - return - - if args.cmd == "extract-base": - out = Path(args.out) - out.mkdir(parents=True, exist_ok=True) - man = find_manifest(out) - old_base = Path(man["old_base"]).resolve() - new_base = Path(man["new_base"]).resolve() - delta = base_extract.extract_base(old_base, new_base) - write_json(out / "base_patch.json", delta) - if getattr(args, "git_patch", None): - git_diff_no_index(old_base, new_base, Path(args.git_patch)) - if getattr(args, "patch_dir", None): - generate_per_file_patches(old_base, new_base, Path(args.patch_dir)) - print(str(out / "base_patch.json")) - return - - if args.cmd == "retarget": - feature_patch = json.loads(Path(args.feature_patch).read_text(encoding="utf-8")) - base_patch = json.loads(Path(args.base_patch).read_text(encoding="utf-8")) - new_base_root = Path(args.new_base) - out_dir = Path(args.out) - out_dir.mkdir(parents=True, exist_ok=True) - # Optional git patch fast-path with .rej handling (combined or per-file) - if getattr(args, "git_patch", None) or getattr(args, "patch_dir", None): - # Prepare target tree by copying new base fully - U.copy_tree(new_base_root, out_dir) - if getattr(args, "git_patch", None): - git_apply_reject(Path(args.git_patch), out_dir, strip=1) - if getattr(args, "patch_dir", None): - apply_patch_dir_with_reject(Path(args.patch_dir), out_dir, strip=1) - rej_files = list(out_dir.rglob("*.rej")) - if rej_files: - # Use semantic retargeting to attempt resolution - results = rt.retarget(feature_patch, base_patch, new_base_root, out_dir) - # If rejects remain, attempt AI/heuristic resolution using requirement texts - # Gather requirement texts from feature_patch units - req_texts: list[str] = [] - for u in feature_patch: - req_texts.extend(u.get("requirements", [])) - remaining = resolve_rejects(rej_files, sorted(set(req_texts))) - # Summarize outcomes: mark conflicts for remaining .rej - files = results.get("files", []) - for rej in remaining: - target = rej.with_suffix("") - files.append({"file": str(target.relative_to(out_dir)), "status": "conflict", "details": ".rej remains", "req_ids": []}) - results["files"] = files - write_json(out_dir / "retarget_results.json", results) - print(str(out_dir / "retarget_results.json")) - return - else: - # All hunks applied via git apply - results = {"summary": {"auto": len(feature_patch), "semantic": 0, "conflicts": 0}, "files": [{"file": u["file_path"], "status": "applied", "details": "git apply", "req_ids": u.get("req_ids", [])} for u in feature_patch]} - write_json(out_dir / "retarget_results.json", results) - print(str(out_dir / "retarget_results.json")) - return - # Default semantic pipeline - results = rt.retarget(feature_patch, base_patch, new_base_root, out_dir) - write_json(out_dir / "retarget_results.json", results) - print(str(out_dir / "retarget_results.json")) - return - - if args.cmd == "validate": - target = Path(args.path) - report_html = Path(args.report) - report_json = report_html.with_suffix(".json") - outcomes_path = target / "retarget_results.json" - outcomes = json.loads(outcomes_path.read_text(encoding="utf-8")) if outcomes_path.exists() else {"summary": {}, "files": []} - validation = val.validate(target) - tools = _tools_matrix() - # Load simple report schema - schema_path = Path("schemas/report.schema.json") - schema = json.loads(schema_path.read_text(encoding="utf-8")) - run_id = datetime.utcnow().strftime("%Y%m%dT%H%M%S") - rep.generate(run_id, outcomes, validation, tools, report_json, report_html, schema) - print(str(report_html)) - return - - if args.cmd == "finalize": - target = Path(args.path) - tag = args.tag - trace_path = Path(args.trace) - # Build trace.json from patch req_ids; demo: synthesize from retarget_results - results_path = target / "retarget_results.json" - trace = [] - if results_path.exists(): - results = json.loads(results_path.read_text(encoding="utf-8")) - for f in results.get("files", []): - if f.get("req_ids"): - trace.append({"file": f["file"], "req_ids": f["req_ids"]}) - write_json(trace_path, trace) - trailers = { - "Req-Id": ",".join(sorted({rid for t in trace for rid in t.get("req_ids", [])})), - "Change-Type": "FeatureCustomization", - "Auto-Rebase-Run": datetime.utcnow().isoformat() + "Z", - } - commit_and_tag(target, tag, trailers) - print(str(trace_path)) - return - - if args.cmd == "ai-rebase": - feature_patches_dir = Path(args.feature_patches) - base_patches_dir = Path(args.base_patches) - new_base_root = Path(args.new_base) - req_map_path = Path(args.req_map) - output_dir = Path(args.out) - - # Perform AI-based direct rebase - results = ai_direct_rebase( - feature_patches_dir=feature_patches_dir, - base_patches_dir=base_patches_dir, - new_base_root=new_base_root, - requirements_map_path=req_map_path, - output_dir=output_dir - ) - - # Print summary - summary = results["summary"] - print(f"AI Direct Rebase Results:") - print(f" Total files processed: {summary['total_files']}") - print(f" Successfully resolved: {summary['resolved']}") - print(f" Errors: {summary['errors']}") - print(f" Output directory: {output_dir}") - print(f" Results file: {output_dir / 'ai_rebase_results.json'}") - - return - - -if __name__ == "__main__": - main() diff --git a/engine/core/ai_direct_rebase.py b/engine/core/ai_direct_rebase.py deleted file mode 100644 index 6e5de44..0000000 --- a/engine/core/ai_direct_rebase.py +++ /dev/null @@ -1,466 +0,0 @@ -from __future__ import annotations - -import json -import os -from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple - -from .utils import ensure_dir, write_json, read_text, write_text, copy_tree -from .traceability import load_requirements_map, req_ids_for_file, requirement_texts_for_file -from .ai_resolve import try_openai_resolve, heuristic_resolve - -# Load environment variables from .env file -try: - from dotenv import load_dotenv - load_dotenv() -except ImportError: - pass # python-dotenv not available, continue without it - - -# All file types are handled uniformly with git patches - - -def analyze_patch_conflicts(feature_patch: str, base_patch: str, new_base_file: str) -> Dict[str, Any]: - """Analyze conflicts between feature patch and base changes using git patch content. - - Returns analysis of what conflicts exist and how to resolve them. - """ - conflicts = { - "api_changes": [], - "parameter_changes": [], - "structural_changes": [], - "header_changes": [], - "content_changes": [], - "resolution_strategy": "ai_based" - } - - # Analyze git patch content for common conflict patterns - import re - - # Extract added/removed lines from patches - feature_added = re.findall(r'^\+.*$', feature_patch, re.MULTILINE) - feature_removed = re.findall(r'^-.*$', feature_patch, re.MULTILINE) - base_added = re.findall(r'^\+.*$', base_patch, re.MULTILINE) - base_removed = re.findall(r'^-.*$', base_patch, re.MULTILINE) - - # Check for API renames (common pattern: old function in removed, new function in added) - for removed_line in feature_removed: - for added_line in base_added: - if "API" in removed_line and "API" in added_line: - old_match = re.search(r'(\w+API)', removed_line) - new_match = re.search(r'(\w+API)', added_line) - if old_match and new_match and old_match.group(1) != new_match.group(1): - conflicts["api_changes"].append({ - "old_api": old_match.group(1), - "new_api": new_match.group(1), - "description": f"API function renamed from {old_match.group(1)} to {new_match.group(1)}" - }) - - # Check for function signature changes - for removed_line in feature_removed: - for added_line in base_added: - if "(" in removed_line and "(" in added_line: - # Extract function names - old_func = re.search(r'(\w+\([^)]*\))', removed_line) - new_func = re.search(r'(\w+\([^)]*\))', added_line) - if old_func and new_func and old_func.group(1) != new_func.group(1): - conflicts["parameter_changes"].append({ - "old_signature": old_func.group(1), - "new_signature": new_func.group(1), - "description": "Function signature changed" - }) - - # Check for header/include changes - for removed_line in feature_removed: - for added_line in base_added: - if "#include" in removed_line and "#include" in added_line: - old_header = re.search(r'#include\s*[<"]([^>"]+)[>"]', removed_line) - new_header = re.search(r'#include\s*[<"]([^>"]+)[>"]', added_line) - if old_header and new_header and old_header.group(1) != new_header.group(1): - conflicts["header_changes"].append({ - "old_header": old_header.group(1), - "new_header": new_header.group(1), - "description": "Header file changed" - }) - - # Check for structural changes (file moves, new files, deleted files) - if "new file" in base_patch.lower() or "deleted file" in base_patch.lower(): - conflicts["structural_changes"].append({ - "description": "File structure changed (new/deleted files)" - }) - - # General content changes - if len(feature_added) > 0 or len(feature_removed) > 0: - conflicts["content_changes"].append({ - "description": f"Feature adds {len(feature_added)} lines, removes {len(feature_removed)} lines" - }) - - return conflicts - - -def create_ai_prompt(feature_patch: str, base_patch: str, new_base_file: str, requirements: List[str], conflicts: Dict[str, Any]) -> str: - """Create a comprehensive AI prompt for resolving patch conflicts.""" - - req_text = "\n".join(f"- {r}" for r in requirements) if requirements else "No specific requirements" - - prompt_parts = [ - "You are an expert software engineer specializing in automated patch application and conflict resolution.", - "", - "TASK: Apply feature customizations from the feature patch to the new base file, resolving all conflicts.", - "", - "CONTEXT:", - "- We have a feature patch that was originally applied to base X", - "- The base has evolved from X to X+1 with various changes", - "- We need to apply the feature customizations to the new base X+1", - "", - "KEY PRINCIPLES:", - "1. Preserve ALL feature customizations (parameter changes, logging, new functions, etc.)", - "2. Adapt to changes in the new base (function renames, signature changes, header changes, etc.)", - "3. Maintain the same functionality but with updated code", - "4. Keep all feature-specific logic intact", - "5. Respond with ONLY the corrected file content, no explanations", - "", - f"REQUIREMENTS:\n{req_text}", - "", - "CONFLICT ANALYSIS:", - ] - - # Add conflict details - for conflict_type, items in conflicts.items(): - if conflict_type == "resolution_strategy": - continue - if items: - prompt_parts.append(f"{conflict_type.upper()}:") - for item in items: - if isinstance(item, dict): - prompt_parts.append(f" - {item.get('description', 'Unknown conflict')}") - else: - prompt_parts.append(f" - {item}") - prompt_parts.append("") - - prompt_parts.extend([ - "FILES TO ANALYZE:", - "", - "FEATURE PATCH (shows what the feature wanted to change):", - "```", - feature_patch, - "```", - "", - "BASE PATCH (shows how the base evolved from X to X+1):", - "```", - base_patch, - "```", - "", - "NEW BASE FILE (current state of base X+1):", - "```", - new_base_file, - "```", - "", - "TASK: Apply the feature changes from the feature patch to the new base file, adapting for all changes shown in the base patch." - ]) - - return "\n".join(prompt_parts) - - -def ai_resolve_file_conflicts( - feature_patch_path: Path, - base_patch_path: Path, - new_base_file_path: Path, - requirements: List[str], - output_path: Path -) -> Dict[str, Any]: - """Use AI to resolve conflicts between feature patch and base changes for a single file.""" - - try: - # Read all relevant files - feature_patch = read_text(feature_patch_path) - base_patch = read_text(base_patch_path) - new_base_file = read_text(new_base_file_path) - - # Analyze conflicts - conflicts = analyze_patch_conflicts(feature_patch, base_patch, new_base_file) - - # Create AI prompt - prompt = create_ai_prompt(feature_patch, base_patch, new_base_file, requirements, conflicts) - - # Try OpenAI resolution - api_key = os.getenv("OPENAI_API_KEY") - if api_key: - try: - from openai import OpenAI - - # Handle different OpenAI library versions - try: - client = OpenAI(api_key=api_key) - except Exception: - # Fallback for older versions - import openai - openai.api_key = api_key - client = openai - - # Handle different API versions - if hasattr(client, 'chat') and hasattr(client.chat, 'completions'): - # New API (v1.0+) - response = client.chat.completions.create( - model=os.getenv("OPENAI_MODEL", "gpt-4o-mini"), - messages=[{"role": "user", "content": prompt}], - temperature=0 - ) - resolved_content = response.choices[0].message.content or "" - else: - # Old API (pre-v1.0) - response = client.ChatCompletion.create( - model=os.getenv("OPENAI_MODEL", "gpt-4o-mini"), - messages=[{"role": "user", "content": prompt}], - temperature=0 - ) - resolved_content = response.choices[0].message.content or "" - - if resolved_content.strip(): - # Write the resolved content - write_text(output_path, resolved_content) - - return { - "status": "resolved", - "method": "openai", - "conflicts": conflicts, - "output_path": str(output_path) - } - - except Exception as e: - print(f"OpenAI resolution failed: {e}") - - # Fallback to heuristic resolution - return heuristic_resolve_file_conflicts( - feature_patch_path, base_patch_path, new_base_file_path, - requirements, output_path, conflicts - ) - - except Exception as e: - return { - "status": "error", - "method": "none", - "error": str(e), - "conflicts": conflicts - } - - -def heuristic_resolve_file_conflicts( - feature_patch_path: Path, - base_patch_path: Path, - new_base_file_path: Path, - requirements: List[str], - output_path: Path, - conflicts: Dict[str, Any] -) -> Dict[str, Any]: - """Heuristic-based conflict resolution as fallback - works with any file type.""" - - try: - new_base_content = read_text(new_base_file_path) - feature_patch = read_text(feature_patch_path) - base_patch = read_text(base_patch_path) - - # Start with new base content - resolved_content = new_base_content - - # Apply feature customizations from the patch - # This is a simplified approach - in practice, you'd parse the patch more carefully - import re - - # Extract feature additions (lines starting with +) - feature_additions = re.findall(r'^\+([^+].*)$', feature_patch, re.MULTILINE) - feature_removals = re.findall(r'^-([^-].*)$', feature_patch, re.MULTILINE) - - # Apply basic conflict resolution based on detected conflicts - if conflicts.get("api_changes"): - for api_change in conflicts["api_changes"]: - old_api = api_change["old_api"] - new_api = api_change["new_api"] - # Replace old API calls with new API calls - resolved_content = resolved_content.replace(old_api, new_api) - - if conflicts.get("header_changes"): - for header_change in conflicts["header_changes"]: - old_header = header_change["old_header"] - new_header = header_change["new_header"] - resolved_content = resolved_content.replace(old_header, new_header) - - # Apply feature-specific customizations from requirements - joined_req = " ".join(requirements).lower() - - # Look for specific patterns in feature additions and apply them - for addition in feature_additions: - addition = addition.strip() - if not addition: - continue - - # If it's a new function or variable, try to add it - if addition.startswith(("static ", "int ", "void ", "const ", "struct ")): - # Check if this addition is not already in the resolved content - if addition not in resolved_content: - # Find a good place to insert it (after includes, before main functions) - if "#include" in resolved_content: - # Insert after the last include - lines = resolved_content.split('\n') - last_include_idx = -1 - for i, line in enumerate(lines): - if line.strip().startswith("#include"): - last_include_idx = i - if last_include_idx >= 0: - lines.insert(last_include_idx + 1, addition) - resolved_content = '\n'.join(lines) - else: - # Add at the beginning - resolved_content = addition + '\n' + resolved_content - - # If it's a modification to existing code, try to apply it - elif "=" in addition or "(" in addition: - # This is likely a function call or assignment modification - # Apply basic string replacement - if addition in feature_patch: - # Find the context and apply the change - pass - - # Apply requirement-specific customizations - if "1344" in feature_patch or "width" in joined_req: - # Apply width changes - resolved_content = re.sub( - r"width\s*=\s*1280", - "width = 1344", - resolved_content - ) - - if "clamp" in feature_patch or "height" in joined_req: - # Add height clamping if not present - if "clampH" not in resolved_content and "clamp" in feature_patch: - clamp_func = "static int clampH(int h) { return h < 480 ? 480 : h; }" - if "#include" in resolved_content: - lines = resolved_content.split('\n') - last_include_idx = -1 - for i, line in enumerate(lines): - if line.strip().startswith("#include"): - last_include_idx = i - if last_include_idx >= 0: - lines.insert(last_include_idx + 1, clamp_func) - resolved_content = '\n'.join(lines) - else: - resolved_content = clamp_func + '\n' + resolved_content - - # Write resolved content - write_text(output_path, resolved_content) - - return { - "status": "resolved", - "method": "heuristic", - "conflicts": conflicts, - "output_path": str(output_path) - } - - except Exception as e: - return { - "status": "error", - "method": "heuristic", - "error": str(e), - "conflicts": conflicts - } - - -def ai_direct_rebase( - feature_patches_dir: Path, - base_patches_dir: Path, - new_base_root: Path, - requirements_map_path: Path, - output_dir: Path -) -> Dict[str, Any]: - """Perform AI-based direct rebase of feature patches onto new base. - - This function: - 1. Copies new base to output directory - 2. For each file that has both feature and base patches: - - Analyzes conflicts between feature patch and base changes - - Uses AI to resolve conflicts and apply feature customizations - 3. Returns summary of resolution results - """ - - ensure_dir(output_dir) - - # Load requirements mapping - req_mappings = load_requirements_map(requirements_map_path) - - # Copy new base to output directory - copy_tree(new_base_root, output_dir) - - # Find all feature patch files (including subdirectories) - feature_patch_files = list(feature_patches_dir.rglob("*.patch")) - - results = { - "summary": {"total_files": 0, "resolved": 0, "errors": 0, "auto": 0, "semantic": 0, "conflicts": 0}, - "files": [], - "method": "ai_direct_rebase" - } - - for feature_patch_file in feature_patch_files: - # Get corresponding base patch file (maintain relative path structure) - relative_patch_path = feature_patch_file.relative_to(feature_patches_dir) - base_patch_file = base_patches_dir / relative_patch_path - - if not base_patch_file.exists(): - # No base patch for this file, apply feature patch directly - continue - - # Get the target file path (remove .patch extension and convert to relative path) - relative_file_path = feature_patch_file.relative_to(feature_patches_dir).with_suffix("") - target_file_path = output_dir / relative_file_path - - if not target_file_path.exists(): - continue - - # Get requirements for this file - req_ids = req_ids_for_file(relative_file_path, req_mappings) - requirements = requirement_texts_for_file(relative_file_path, req_mappings) - - results["summary"]["total_files"] += 1 - - # Resolve conflicts using AI - resolution_result = ai_resolve_file_conflicts( - feature_patch_file, - base_patch_file, - target_file_path, - requirements, - target_file_path - ) - - # Record result - file_result = { - "file": str(relative_file_path), - "status": resolution_result["status"], - "method": resolution_result["method"], - "req_ids": req_ids, - "conflicts": resolution_result.get("conflicts", {}), - "details": resolution_result.get("error", "Successfully resolved") - } - - results["files"].append(file_result) - - if resolution_result["status"] == "resolved": - results["summary"]["resolved"] += 1 - results["summary"]["auto"] += 1 # AI resolution counts as auto - else: - results["summary"]["errors"] += 1 - results["summary"]["conflicts"] += 1 - - # Save results - write_json(output_dir / "ai_rebase_results.json", results) - - # Also create retarget_results.json for compatibility with validation step - retarget_results = { - "summary": { - "auto": results["summary"]["auto"], - "semantic": results["summary"]["semantic"], - "conflicts": results["summary"]["conflicts"] - }, - "files": results["files"] - } - write_json(output_dir / "retarget_results.json", retarget_results) - - return results diff --git a/engine/core/ai_resolve.py b/engine/core/ai_resolve.py deleted file mode 100644 index 763d3dc..0000000 --- a/engine/core/ai_resolve.py +++ /dev/null @@ -1,151 +0,0 @@ -from __future__ import annotations - -import os -from pathlib import Path -from typing import Sequence - -from .utils import read_text, write_text - -# Load environment variables from .env file -try: - from dotenv import load_dotenv - load_dotenv() -except ImportError: - pass # python-dotenv not available, continue without it - - -def try_openai_resolve(rej_files: Sequence[Path], requirements: list[str]) -> dict[str, str]: - """Attempt to call OpenAI to resolve rejects using requirements as guidance. - - Returns dict of filepath -> suggested replacement content. - This is best-effort: if OPENAI_API_KEY or openai package missing, returns {}. - """ - api_key = os.getenv("OPENAI_API_KEY") - if not api_key: - return {} - try: - from openai import OpenAI # type: ignore - except Exception: - return {} - - # Handle different OpenAI library versions - try: - client = OpenAI(api_key=api_key) - except Exception as e: - # Fallback for older versions or different API - try: - import openai - openai.api_key = api_key - client = openai - except Exception: - return {} - suggestions: dict[str, str] = {} - prompt_parts = [ - "You are an expert software engineer helping with automated patch application.", - "A patch failed to apply because the target file has changed (e.g., API renamed from OldAPI to NewAPI).", - "Your task: Apply the feature customizations from the rejected patch to the current file, adapting for any API changes.", - "Key principles:", - "1. Preserve all feature customizations (parameter changes, logging, etc.)", - "2. Adapt to API changes (e.g., OldAPI -> NewAPI)", - "3. Maintain the same functionality but with updated APIs", - "4. Respond with ONLY the corrected file content, no explanations" - ] - req_text = "\n".join(f"- {r}" for r in requirements) - for rej in rej_files: - # Target file path is the .rej without extension - target = rej.with_suffix("") - before = target.read_text(encoding="utf-8") if target.exists() else "" - rej_text = rej.read_text(encoding="utf-8") - prompt = "\n\n".join([ - "\n".join(prompt_parts), - f"REQUIREMENTS:\n{req_text}", - f"REJECTED PATCH (shows what feature wanted to change):\n{rej_text}", - f"CURRENT FILE CONTENT (what we need to modify):\n{before}", - "TASK: Apply the feature changes from the rejected patch to the current file, adapting for any API renames or changes." - ]) - try: - # Handle different OpenAI library versions - if hasattr(client, 'chat') and hasattr(client.chat, 'completions'): - # New API (v1.0+) - resp = client.chat.completions.create( - model=os.getenv("OPENAI_MODEL", "gpt-4o-mini"), - messages=[{"role": "user", "content": prompt}], - temperature=0 - ) - content = resp.choices[0].message.content or "" - else: - # Old API (pre-v1.0) - resp = client.ChatCompletion.create( - model=os.getenv("OPENAI_MODEL", "gpt-4o-mini"), - messages=[{"role": "user", "content": prompt}], - temperature=0 - ) - content = resp.choices[0].message.content or "" - - if content.strip(): - suggestions[str(target)] = content - except Exception: - continue - return suggestions - - -def heuristic_resolve(rej_files: Sequence[Path], requirements: list[str]) -> dict[str, str]: - """Enhanced heuristic for sample code: handles API renames and parameter changes. - - If requirement mentions 'pass 200', changes API calls to use 200 as parameter. - Also handles common API renames like OldAPI -> NewAPI. - Returns dict of filepath -> suggested replacement content. - """ - joined_req = " ".join(requirements).lower() - suggestions: dict[str, str] = {} - for rej in rej_files: - target = rej.with_suffix("") - try: - txt = target.read_text(encoding="utf-8") - except Exception: - txt = "" - - if "pass 200" in joined_req and target.name.endswith(".cpp"): - import re - - # First, try to apply the feature customization (change parameter to 200) - def repl_param(m): - return f"{m.group(1)}200{m.group(3)}" - - new = re.sub(r"(\b[A-Za-z_][A-Za-z0-9_]*\s*\(\s*)(\d+)(\s*\))", repl_param, txt) - - # If we found a change, also add the feature logging - if new != txt: - # Add feature logging before the API call - new = re.sub( - r"(int main\(\) \{\s*)(.*?)(\s*return 0;\s*\})", - r"\1// Feature customization: different value and extra log\n std::cout << \"Feature activated\" << std::endl;\n \2\3", - new, - flags=re.DOTALL - ) - - if new.strip(): - suggestions[str(target)] = new - return suggestions - - -def resolve_rejects(rej_files: Sequence[Path], requirement_texts: list[str]) -> list[Path]: - """Resolve rejects using OpenAI if available, otherwise simple heuristics. - - Applies suggested content directly to target files. Returns the list of rej files remaining. - """ - suggestions = try_openai_resolve(rej_files, requirement_texts) - if not suggestions: - suggestions = heuristic_resolve(rej_files, requirement_texts) - for path_str, content in suggestions.items(): - try: - write_text(Path(path_str), content) - # remove corresponding .rej if present - rej = Path(path_str + ".rej") - if rej.exists(): - rej.unlink() - except Exception: - continue - # Return still-existing rejects - return [p for p in rej_files if p.exists()] - diff --git a/engine/core/base_extract.py b/engine/core/base_extract.py deleted file mode 100644 index c695b5e..0000000 --- a/engine/core/base_extract.py +++ /dev/null @@ -1,68 +0,0 @@ -from __future__ import annotations - -import subprocess -from pathlib import Path -from typing import Any, Dict - - -def extract_base(old_base: Path, new_base: Path) -> Dict[str, Any]: - """Compute ฮ”B using git patches and return a simplified delta map.""" - - delta: Dict[str, Any] = { - "git_patches": {}, - } - - # Get all files that differ between old_base and new_base - try: - # Use git diff to find all changed files - result = subprocess.run( - ["git", "diff", "--no-index", "--name-only", str(old_base), str(new_base)], - capture_output=True, - text=True, - cwd=Path.cwd() - ) - - if result.returncode != 0 and result.returncode != 1: # 1 is expected for differences - return delta - - changed_files = [line.strip() for line in result.stdout.split('\n') if line.strip()] - - # For each changed file, generate a patch - for file_path in changed_files: - try: - # Skip /dev/null entries - if file_path == "/dev/null": - continue - - # Convert absolute path to relative path - if file_path.startswith(str(new_base)): - relative_path = Path(file_path).relative_to(new_base) - elif file_path.startswith(str(old_base)): - relative_path = Path(file_path).relative_to(old_base) - else: - # Try to extract relative path from the full path - relative_path = Path(file_path) - - # Generate patch for this specific file - patch_result = subprocess.run( - ["git", "diff", "--no-index", str(old_base / relative_path), str(new_base / relative_path)], - capture_output=True, - text=True, - cwd=Path.cwd() - ) - - if patch_result.returncode in [0, 1]: # 0 = no diff, 1 = differences found - patch_content = patch_result.stdout - if patch_content.strip(): # Only add if there's actual content - delta["git_patches"][str(relative_path)] = patch_content - - except Exception: - # Skip files that can't be processed - continue - - except Exception: - # Fallback: return empty delta if git diff fails - pass - - return delta - diff --git a/engine/core/config.py b/engine/core/config.py deleted file mode 100644 index eda4ac3..0000000 --- a/engine/core/config.py +++ /dev/null @@ -1,42 +0,0 @@ -from __future__ import annotations - -import json -from dataclasses import dataclass -from datetime import datetime -from pathlib import Path -from typing import Optional - -from .utils import write_json - - -@dataclass -class RunManifest: - """Metadata for a single auto-rebase run.""" - - run_id: str - old_base: str - new_base: str - feature_old: str - req_map: str - workdir: str - created_at: str - - -def new_run_manifest(old_base: Path, new_base: Path, feature: Path, req_map: Path, workdir: Path) -> RunManifest: - run_id = datetime.utcnow().strftime("%Y%m%dT%H%M%S") - return RunManifest( - run_id=run_id, - old_base=str(old_base), - new_base=str(new_base), - feature_old=str(feature), - req_map=str(req_map), - workdir=str(workdir), - created_at=datetime.utcnow().isoformat() + "Z", - ) - - -def persist_manifest(manifest: RunManifest, artifacts_dir: Path) -> Path: - out = artifacts_dir / "run.json" - write_json(out, manifest.__dict__) - return out - diff --git a/engine/core/diff_graph.py b/engine/core/diff_graph.py deleted file mode 100644 index 5e8b88a..0000000 --- a/engine/core/diff_graph.py +++ /dev/null @@ -1,26 +0,0 @@ -from __future__ import annotations - -from pathlib import Path -from typing import Dict, List, Tuple - - -class DiffGraph: - """Minimal cross-file change graph. - - Tracks file-level adds/modifies and a simple name-based move detection heuristic. - """ - - def __init__(self) -> None: - self.nodes: Dict[str, Dict] = {} - self.edges: List[Tuple[str, str, str]] = [] # (src, dst, kind) - - def add_file(self, path: str, status: str) -> None: - self.nodes[path] = {"status": status} - - def add_edge(self, src: str, dst: str, kind: str) -> None: - self.edges.append((src, dst, kind)) - - def export_anchor_map(self) -> Dict[str, str]: - """Export a trivial anchor map of old->new file paths for moves/renames.""" - return {src: dst for src, dst, kind in self.edges if kind in {"moved", "renamed"}} - diff --git a/engine/core/diff_types.py b/engine/core/diff_types.py deleted file mode 100644 index 1242ac8..0000000 --- a/engine/core/diff_types.py +++ /dev/null @@ -1,41 +0,0 @@ -from __future__ import annotations - -from pathlib import Path -from typing import Literal, Protocol, TypedDict, Optional, Any - - -class PatchUnit(TypedDict, total=False): - """A unit of change for a single file using git patches. - - - file_path: relative path of the file the patch applies to - - patch_content: git patch content for this file - - req_ids: requirement IDs attached via traceability - - notes: optional note - """ - - file_path: str - patch_content: str - req_ids: list[str] - notes: Optional[str] - requirements: list[str] - - -class Conflict(TypedDict): - file_path: str - reason: str - details: str - - -class ApplyResult(TypedDict): - file_path: str - status: Literal["applied", "partial", "conflict"] - details: str - - -class ValidationIssue(TypedDict): - file_path: str - level: Literal["info", "warning", "error"] - message: str - - -# Adapter protocol removed - all file types handled uniformly with git patches diff --git a/engine/core/feature_extract.py b/engine/core/feature_extract.py deleted file mode 100644 index 39e40eb..0000000 --- a/engine/core/feature_extract.py +++ /dev/null @@ -1,76 +0,0 @@ -from __future__ import annotations - -import subprocess -from pathlib import Path -from typing import Dict, List - -from .diff_types import PatchUnit -from .utils import list_files, rel_to -from .traceability import load_requirements_map, req_ids_for_file, requirement_texts_for_file - - -def extract_feature(old_base: Path, feature: Path, req_map_path: Path) -> List[PatchUnit]: - """Extract ฮ”F patch units from feature vs old_base using git patches.""" - - mappings = load_requirements_map(req_map_path) - units: List[PatchUnit] = [] - - # Get all files that differ between old_base and feature - try: - # Use git diff to find all changed files - result = subprocess.run( - ["git", "diff", "--no-index", "--name-only", str(old_base), str(feature)], - capture_output=True, - text=True, - cwd=Path.cwd() - ) - - if result.returncode not in [0, 1]: # 0 = no diff, 1 = differences found - return units - - changed_files = [line.strip() for line in result.stdout.split('\n') if line.strip()] - - # For each changed file, generate a patch - for file_path in changed_files: - try: - # Skip /dev/null entries - if file_path == "/dev/null": - continue - - # Convert absolute path to relative path - if file_path.startswith(str(feature)): - relative_path = Path(file_path).relative_to(feature) - elif file_path.startswith(str(old_base)): - relative_path = Path(file_path).relative_to(old_base) - else: - # Try to extract relative path from the full path - relative_path = Path(file_path) - - # Generate patch for this specific file - patch_result = subprocess.run( - ["git", "diff", "--no-index", str(old_base / relative_path), str(feature / relative_path)], - capture_output=True, - text=True, - cwd=Path.cwd() - ) - - if patch_result.returncode in [0, 1]: # 0 = no diff, 1 = differences found - patch_content = patch_result.stdout - if patch_content.strip(): # Only add if there's actual content - unit: PatchUnit = { - "file_path": str(relative_path), - "patch_content": patch_content, - "req_ids": req_ids_for_file(str(relative_path), mappings), - "requirements": requirement_texts_for_file(str(relative_path), mappings) - } - units.append(unit) - - except Exception: - # Skip files that can't be processed - continue - - except Exception: - # Fallback: return empty list if git diff fails - pass - - return units diff --git a/engine/core/file_conflict_resolver.py b/engine/core/file_conflict_resolver.py deleted file mode 100644 index ff51b23..0000000 --- a/engine/core/file_conflict_resolver.py +++ /dev/null @@ -1,412 +0,0 @@ -""" -File Conflict Resolver - Resolves individual file conflicts using OpenAI -""" - -from __future__ import annotations - -import os -import re -import yaml -from pathlib import Path -from typing import Dict, List, Optional, Tuple - -# Load environment variables from .env file -try: - from dotenv import load_dotenv - load_dotenv() -except ImportError: - pass # python-dotenv not available, continue without it - - -def load_requirements_map(requirements_file: str | Path) -> List[Dict[str, any]]: - """Load requirements map from YAML file.""" - try: - with open(requirements_file, 'r') as f: - return yaml.safe_load(f) or [] - except Exception as e: - print(f"Warning: Could not load requirements file {requirements_file}: {e}") - return [] - - -def find_requirement_for_file(file_path: str | Path, requirements_map: List[Dict[str, any]]) -> Optional[str]: - """Find the requirement text for a specific file path.""" - file_path_str = str(file_path) - file_path_obj = Path(file_path_str) - - # Remove .orig and .rej suffixes to get the base filename - base_filename = file_path_obj.name - for suffix in ['.orig', '.rej']: - if base_filename.endswith(suffix): - base_filename = base_filename[:-len(suffix)] - break - - # Get the relative path without .orig/.rej suffixes - # e.g., "/path/to/src/main.cpp.orig" -> "src/main.cpp" - base_file_path = str(file_path_obj.parent / base_filename) - - # Try exact path match first - for req in requirements_map: - req_path = req.get('path') - if req_path: - # Try exact match with the base path (without .orig/.rej) - if req_path == base_file_path: - return req.get('requirement') - # Try matching just the filename (e.g., "main.cpp" matches "main.cpp") - if Path(req_path).name == base_filename: - return req.get('requirement') - # Try relative path match (e.g., "src/main.cpp" matches "/full/path/src/main.cpp") - if base_file_path.endswith(req_path): - return req.get('requirement') - - # Try glob pattern match - for req in requirements_map: - path_glob = req.get('path_glob') - if path_glob: - import fnmatch - # Try matching against the base path (without .orig/.rej) - if fnmatch.fnmatch(base_file_path, path_glob): - return req.get('requirement') - # Try matching against just the base filename - if fnmatch.fnmatch(base_filename, path_glob): - return req.get('requirement') - # Try matching against path components - for part in file_path_obj.parts: - if fnmatch.fnmatch(part, path_glob): - return req.get('requirement') - - return None - - -def resolve_file_conflict_with_openai( - original_file_path: str | Path, - rejection_file_path: str | Path, - requirements_file: str | Path, - verbose: bool = False -) -> Dict[str, any]: - """ - Resolve a single file conflict using OpenAI based on requirements from YAML file. - - Args: - original_file_path: Path to the original file (current state) - rejection_file_path: Path to the .rej file (desired changes that failed) - requirements_file: Path to the requirements_map.yaml file - verbose: Whether to print detailed information - - Returns: - Dict with resolution results: - { - "success": bool, - "resolved_content": str | None, - "explanation": str, - "conflict_type": str, - "changes_applied": List[str], - "requirement_used": str | None - } - """ - - # Load requirements map - requirements_map = load_requirements_map(requirements_file) - - # Find requirement for the original file - requirement_text = find_requirement_for_file(original_file_path, requirements_map) - - if not requirement_text: - return { - "success": False, - "resolved_content": None, - "explanation": f"No requirement found for file: {original_file_path}", - "conflict_type": "no_requirement", - "changes_applied": [], - "requirement_used": None - } - - if verbose: - print(f"๐Ÿ“‹ Found requirement: {requirement_text}") - - # Read the files - try: - original_content = Path(original_file_path).read_text() - rejection_content = Path(rejection_file_path).read_text() - except Exception as e: - return { - "success": False, - "resolved_content": None, - "explanation": f"Failed to read files: {e}", - "conflict_type": "file_read_error", - "changes_applied": [], - "requirement_used": requirement_text - } - - # Analyze the conflict - conflict_analysis = analyze_conflict(original_content, rejection_content) - - if verbose: - print(f"๐Ÿ” Conflict Analysis:") - print(f" Type: {conflict_analysis['type']}") - print(f" Description: {conflict_analysis['description']}") - print(f" Changes: {conflict_analysis['changes']}") - - # Create OpenAI prompt - prompt = create_file_conflict_prompt( - original_content=original_content, - rejection_content=rejection_content, - requirement_text=requirement_text, - conflict_analysis=conflict_analysis - ) - - if verbose: - print(f"๐Ÿค– Sending prompt to OpenAI...") - - # Call OpenAI - try: - response = call_openai_for_file_resolution(prompt) - - if response["success"]: - resolved_content = response["content"] - - # Validate the resolution - validation = validate_resolution( - original_content, rejection_content, resolved_content, requirement_text - ) - - return { - "success": True, - "resolved_content": resolved_content, - "explanation": response["explanation"], - "conflict_type": conflict_analysis["type"], - "changes_applied": validation["changes_applied"], - "validation_score": validation["score"], - "requirement_used": requirement_text - } - else: - return { - "success": False, - "resolved_content": None, - "explanation": f"OpenAI resolution failed: {response['error']}", - "conflict_type": conflict_analysis["type"], - "changes_applied": [], - "requirement_used": requirement_text - } - - except Exception as e: - return { - "success": False, - "resolved_content": None, - "explanation": f"OpenAI call failed: {e}", - "conflict_type": conflict_analysis["type"], - "changes_applied": [], - "requirement_used": requirement_text - } - - -def analyze_conflict(original_content: str, rejection_content: str) -> Dict[str, any]: - """Analyze the type and nature of the conflict.""" - - # Extract the patch content from rejection file - patch_match = re.search(r'@@.*?@@\n(.*)', rejection_content, re.DOTALL) - if not patch_match: - return { - "type": "unknown", - "description": "Could not parse rejection file", - "changes": [] - } - - patch_content = patch_match.group(1) - lines = patch_content.strip().split('\n') - - changes = [] - for line in lines: - if line.startswith('-'): - changes.append(f"Remove: {line[1:]}") - elif line.startswith('+'): - changes.append(f"Add: {line[1:]}") - - # Determine conflict type - conflict_type = "content_change" - description = "Content modification conflict" - - if any("API" in change for change in changes): - conflict_type = "api_change" - description = "API function call or signature change" - elif any("include" in change.lower() for change in changes): - conflict_type = "header_change" - description = "Header/include file change" - elif any("main" in change.lower() for change in changes): - conflict_type = "main_function_change" - description = "Main function modification" - - return { - "type": conflict_type, - "description": description, - "changes": changes, - "patch_content": patch_content - } - - -def create_file_conflict_prompt( - original_content: str, - rejection_content: str, - requirement_text: str, - conflict_analysis: Dict[str, any] -) -> str: - """Create a detailed prompt for OpenAI to resolve the conflict.""" - - prompt = f"""You are an expert software engineer resolving a git merge conflict. - -REQUIREMENT: -{requirement_text} - -CONFLICT ANALYSIS: -- Type: {conflict_analysis['type']} -- Description: {conflict_analysis['description']} -- Changes needed: {conflict_analysis['changes']} - -CURRENT FILE CONTENT: -``` -{original_content} -``` - -DESIRED CHANGES (from rejection file): -``` -{conflict_analysis['patch_content']} -``` - -TASK: -Resolve this conflict by applying the desired changes to the current file content while respecting the requirement. The requirement takes precedence over the exact patch content. - -RULES: -1. Preserve the overall structure and functionality of the code -2. Apply the changes specified in the rejection file -3. Ensure the requirement is satisfied -4. Maintain code quality and readability -5. If there are API changes (like OldAPI -> NewAPI), update accordingly -6. If there are parameter changes (like 42 -> 200), apply them as specified in the requirement - -RESPONSE FORMAT: -Provide ONLY the resolved file content, no explanations or markdown formatting. The content should be ready to use as the final file. - -RESOLVED FILE CONTENT:""" - - return prompt - - -def call_openai_for_file_resolution(prompt: str) -> Dict[str, any]: - """Call OpenAI to resolve the file conflict.""" - - api_key = os.getenv('OPENAI_API_KEY') - if not api_key: - return { - "success": False, - "content": None, - "explanation": "OpenAI API key not found in environment", - "error": "Missing API key" - } - - try: - import openai - - # Use the older API (v0.28.1) - openai.api_key = api_key - response = openai.ChatCompletion.create( - model="gpt-4o-mini", - messages=[{"role": "user", "content": prompt}], - max_tokens=2000, - temperature=0.1 - ) - content = response.choices[0].message.content - - return { - "success": True, - "content": content.strip(), - "explanation": "Successfully resolved conflict using OpenAI", - "error": None - } - - except Exception as e: - return { - "success": False, - "content": None, - "explanation": f"OpenAI API call failed: {e}", - "error": str(e) - } - - -def validate_resolution( - original_content: str, - rejection_content: str, - resolved_content: str, - requirement_text: str -) -> Dict[str, any]: - """Validate that the resolution correctly applies the changes.""" - - changes_applied = [] - score = 0 - - # Check if requirement-specific changes are applied - if "200" in requirement_text and "200" in resolved_content: - changes_applied.append("Applied requirement value (200)") - score += 1 - - # Check if API changes are applied - if "NewAPI" in resolved_content: - changes_applied.append("Updated to NewAPI") - score += 1 - - # Check if feature logging is preserved - if "Feature activated" in resolved_content: - changes_applied.append("Preserved feature logging") - score += 1 - - # Check if the file structure is maintained - if "int main()" in resolved_content and "#include " in resolved_content: - changes_applied.append("Maintained file structure") - score += 1 - - return { - "changes_applied": changes_applied, - "score": score, - "max_score": 4 - } - - -def test_file_conflict_resolver(): - """Test the file conflict resolver with the provided example.""" - - # Test with the provided files - original_file = "/Users/dhruvildarji/Documents/git/project/AutoRebase/artifacts/run20250913_134310/feature-5.1/src/main.cpp.orig" - rejection_file = "/Users/dhruvildarji/Documents/git/project/AutoRebase/artifacts/run20250913_134310/feature-5.1/src/main.cpp.rej" - requirements_file = "/Users/dhruvildarji/Documents/git/project/AutoRebase/data/sample/requirements_map.yaml" - - print("๐Ÿงช Testing File Conflict Resolver...") - print(f"๐Ÿ“ Original file: {original_file}") - print(f"๐Ÿšซ Rejection file: {rejection_file}") - print(f"๐Ÿ“‹ Requirements file: {requirements_file}") - print() - - result = resolve_file_conflict_with_openai( - original_file_path=original_file, - rejection_file_path=rejection_file, - requirements_file=requirements_file, - verbose=True - ) - - print("๐Ÿ“Š RESULT:") - print(f" Success: {result['success']}") - print(f" Conflict Type: {result['conflict_type']}") - print(f" Explanation: {result['explanation']}") - print(f" Changes Applied: {result['changes_applied']}") - - if result['success'] and result['resolved_content']: - print("\nโœ… RESOLVED CONTENT:") - print(result['resolved_content']) - - # Save the resolved content - output_file = "/Users/dhruvildarji/Documents/git/project/AutoRebase/resolved_main.cpp" - Path(output_file).write_text(result['resolved_content']) - print(f"\n๐Ÿ’พ Saved resolved content to: {output_file}") - - return result - - -if __name__ == "__main__": - test_file_conflict_resolver() diff --git a/engine/core/report.py b/engine/core/report.py deleted file mode 100644 index 0d88721..0000000 --- a/engine/core/report.py +++ /dev/null @@ -1,80 +0,0 @@ -from __future__ import annotations - -import json -from pathlib import Path -from typing import Any, Dict - -from jinja2 import Template -from jsonschema import validate as jsonschema_validate - -from .utils import write_json - - -HTML_TEMPLATE = """ - - - - - Auto-Rebase Report - - - -

Auto-Rebase Report

-

Run ID: {{ run_id }}

-

Summary

-
    -
  • Auto-merged: {{ summary.auto }}
  • -
  • Semantic: {{ summary.semantic }}
  • -
  • Conflicts: {{ summary.conflicts }}
  • -
-

Files

- - - - {% for f in files %} - - - - - - - {% endfor %} - -
FileStatusDetailsReq IDs
{{ f.file }}{{ f.status }}{{ f.details }}{{ ','.join(f.req_ids) }}
-

Validation

-

Status: {% if validation.success %}PASS{% else %}FAIL{% endif %}

-
    - {% for i in validation.issues %} -
  • [{{ i.level }}] {{ i.file_path }} - {{ i.message }}
  • - {% endfor %} -
-

Tool Availability

-
    - {% for k, v in tools.items() %} -
  • {{ k }}: {{ v }}
  • - {% endfor %} -
- - -""" - - -def generate(run_id: str, outcomes: Dict[str, Any], validation: Dict[str, Any], tools: Dict[str, Any], report_json_path: Path, report_html_path: Path, schema: Dict[str, Any]) -> None: - """Generate report.json and report.html and validate JSON against schema.""" - - report = {"run_id": run_id, "summary": outcomes.get("summary", {}), "files": outcomes.get("files", []), "validation": validation, "tools": tools} - jsonschema_validate(report, schema) - write_json(report_json_path, report) - html = Template(HTML_TEMPLATE).render(**report) - report_html_path.parent.mkdir(parents=True, exist_ok=True) - report_html_path.write_text(html, encoding="utf-8") - diff --git a/engine/core/retarget.py b/engine/core/retarget.py deleted file mode 100644 index ae1bbc0..0000000 --- a/engine/core/retarget.py +++ /dev/null @@ -1,80 +0,0 @@ -from __future__ import annotations - -import subprocess -from pathlib import Path -from typing import Any, Dict, List - -from .diff_types import PatchUnit, ApplyResult -from .utils import ensure_dir, write_json - - -def retarget(patches: List[PatchUnit], base_delta: Dict[str, Any], new_base_root: Path, out_dir: Path) -> Dict[str, Any]: - """Apply a sequence of PatchUnits to new_base_root into out_dir using git patches. - - Returns per-file outcomes and summary stats. - """ - - ensure_dir(out_dir) - outcomes: List[Dict[str, Any]] = [] - auto = sem = conflicts = 0 - - for p in patches: - file_path = p["file_path"] - patch_content = p["patch_content"] - - # Copy the file from new_base to out_dir - target_path = out_dir / file_path - base_path = new_base_root / file_path - - if base_path.exists(): - target_path.parent.mkdir(parents=True, exist_ok=True) - target_path.write_bytes(base_path.read_bytes()) - - # Try to apply the patch using git apply - try: - # Write patch to temporary file - patch_file = out_dir / f".{file_path.replace('/', '_')}.patch" - patch_file.write_text(patch_content) - - # Apply the patch - result = subprocess.run( - ["git", "apply", "--reject", str(patch_file)], - cwd=out_dir, - capture_output=True, - text=True - ) - - # Clean up patch file - patch_file.unlink(missing_ok=True) - - if result.returncode == 0: - # Patch applied successfully - status = "applied" - details = "git apply successful" - auto += 1 - else: - # Check for reject files - reject_files = list(out_dir.rglob("*.rej")) - if reject_files: - status = "conflict" - details = f"git apply failed, {len(reject_files)} reject files" - conflicts += 1 - else: - status = "partial" - details = "git apply failed but no rejects" - sem += 1 - - except Exception as e: - status = "conflict" - details = f"Error applying patch: {str(e)}" - conflicts += 1 - - outcomes.append({ - "file": file_path, - "status": status, - "details": details, - "req_ids": p.get("req_ids", []) - }) - - summary = {"auto": auto, "semantic": sem, "conflicts": conflicts} - return {"summary": summary, "files": outcomes} diff --git a/engine/core/traceability.py b/engine/core/traceability.py deleted file mode 100644 index cf0f9a4..0000000 --- a/engine/core/traceability.py +++ /dev/null @@ -1,44 +0,0 @@ -from __future__ import annotations - -import fnmatch -from dataclasses import dataclass -from pathlib import Path -from typing import Dict, List - -import yaml - - -@dataclass -class ReqMapping: - path_glob: str - req_ids: list[str] - requirement: str | None = None - - -def load_requirements_map(path: Path) -> list[ReqMapping]: - """Load requirements mapping YAML file.""" - - data = yaml.safe_load(path.read_text(encoding="utf-8")) or [] - mappings: list[ReqMapping] = [] - for item in data: - patt = item.get("path_glob") or item.get("path") or "**/*" - mappings.append(ReqMapping(path_glob=patt, req_ids=list(item.get("req_ids", [])), requirement=item.get("requirement"))) - return mappings - - -def req_ids_for_file(rel_path: str, mappings: list[ReqMapping]) -> list[str]: - """Return all requirement IDs matching the file path via globs.""" - - ids: list[str] = [] - for m in mappings: - if fnmatch.fnmatch(rel_path, m.path_glob): - ids.extend(m.req_ids) - return sorted(set(ids)) - - -def requirement_texts_for_file(rel_path: str, mappings: list[ReqMapping]) -> list[str]: - texts: list[str] = [] - for m in mappings: - if fnmatch.fnmatch(rel_path, m.path_glob) and m.requirement: - texts.append(m.requirement) - return texts diff --git a/engine/core/utils.py b/engine/core/utils.py deleted file mode 100644 index 4a5ff0e..0000000 --- a/engine/core/utils.py +++ /dev/null @@ -1,103 +0,0 @@ -from __future__ import annotations - -import json -import logging -import os -import subprocess -from dataclasses import asdict -import shutil -from pathlib import Path -from typing import Any, Iterable, Optional - - -LOG = logging.getLogger("auto_rebase") - - -def setup_logging(log_path: Optional[Path] = None, verbose: bool = False) -> None: - """Configure logging. If log_path is provided, also log to file. - - Avoid secrets in logs; INFO default, DEBUG if verbose. - """ - - level = logging.DEBUG if verbose else logging.INFO - logging.basicConfig(level=level, format="%(asctime)s %(levelname)s %(message)s") - if log_path: - log_path.parent.mkdir(parents=True, exist_ok=True) - fh = logging.FileHandler(log_path) - fh.setLevel(level) - fh.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s")) - logging.getLogger().addHandler(fh) - - -def which(cmd: str) -> Optional[str]: - """Return full path if command exists in PATH, else None.""" - - from shutil import which as _which - - return _which(cmd) - - -def run_cmd(args: list[str], cwd: Optional[Path] = None, check: bool = True) -> tuple[int, str, str]: - """Run a subprocess and capture output. Returns (code, stdout, stderr).""" - - LOG.debug("Running: %s", " ".join(args)) - proc = subprocess.Popen(args, cwd=str(cwd) if cwd else None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) - out, err = proc.communicate() - if check and proc.returncode != 0: - raise RuntimeError(f"Command failed: {' '.join(args)}\n{err}") - return proc.returncode, out, err - - -def read_text(path: Path) -> str: - return path.read_text(encoding="utf-8") - - -def write_text(path: Path, data: str) -> None: - path.parent.mkdir(parents=True, exist_ok=True) - path.write_text(data, encoding="utf-8") - - -def write_json(path: Path, data: Any) -> None: - path.parent.mkdir(parents=True, exist_ok=True) - path.write_text(json.dumps(data, indent=2, sort_keys=True), encoding="utf-8") - - -def list_files(root: Path) -> list[Path]: - return [p for p in root.rglob("*") if p.is_file()] - - -def rel_to(path: Path, base: Path) -> str: - return str(path.relative_to(base)) - - -def safe_copy(src: Path, dst: Path) -> None: - dst.parent.mkdir(parents=True, exist_ok=True) - dst.write_bytes(src.read_bytes()) - - -def ensure_dir(path: Path) -> None: - path.mkdir(parents=True, exist_ok=True) - - -def dump_dataclass_json(path: Path, obj: Any) -> None: - write_json(path, asdict(obj)) - - -def copy_tree(src: Path, dst: Path) -> None: - """Copy a directory tree from src to dst (overwrite).""" - if dst.exists(): - # remove existing to avoid stale files; best-effort - for p in list(dst.rglob("*")): - try: - if p.is_file() or p.is_symlink(): - p.unlink() - except Exception: - pass - for p in src.rglob("*"): - rel = p.relative_to(src) - out = dst / rel - if p.is_dir(): - out.mkdir(parents=True, exist_ok=True) - else: - out.parent.mkdir(parents=True, exist_ok=True) - shutil.copy2(p, out) diff --git a/engine/core/validate.py b/engine/core/validate.py deleted file mode 100644 index c953a3e..0000000 --- a/engine/core/validate.py +++ /dev/null @@ -1,53 +0,0 @@ -from __future__ import annotations - -from pathlib import Path -from typing import Any, Dict, List - -from .diff_types import ValidationIssue -from .utils import which - - -def validate(target_root: Path, build_script: str | None = None) -> Dict[str, Any]: - """Run basic validation checks on the target directory.""" - - issues: List[ValidationIssue] = [] - - # Basic file system validation - if not target_root.exists(): - issues.append({"file_path": "", "level": "error", "message": "Target directory does not exist"}) - return {"success": False, "issues": issues} - - # Check for common issues - reject_files = list(target_root.rglob("*.rej")) - if reject_files: - for reject_file in reject_files: - issues.append({ - "file_path": str(reject_file.relative_to(target_root)), - "level": "warning", - "message": "Reject file found - patch conflicts not resolved" - }) - - # Check for empty files - empty_files = [] - for file_path in target_root.rglob("*"): - if file_path.is_file() and file_path.stat().st_size == 0: - empty_files.append(str(file_path.relative_to(target_root))) - - if empty_files: - issues.append({ - "file_path": "", - "level": "warning", - "message": f"Found {len(empty_files)} empty files: {', '.join(empty_files[:5])}" - }) - - # Build script validation (placeholder) - if build_script: - issues.append({ - "file_path": "", - "level": "info", - "message": f"Build script '{build_script}' validation skipped in simplified mode" - }) - - success = not any(i for i in issues if i["level"] == "error") - return {"success": success, "issues": issues} - diff --git a/engine/core/vcs.py b/engine/core/vcs.py deleted file mode 100644 index 08aea66..0000000 --- a/engine/core/vcs.py +++ /dev/null @@ -1,137 +0,0 @@ -from __future__ import annotations - -from pathlib import Path -from typing import Optional -import os -import tempfile -import difflib - -from .utils import which, run_cmd, list_files, rel_to, ensure_dir - - -def commit_and_tag(path: Path, tag: str, trailers: dict[str, str]) -> None: - """Create a git commit with trailers and tag it. Best-effort if git present.""" - - if not which("git"): - return - try: - run_cmd(["git", "init"], cwd=path) - run_cmd(["git", "add", "."], cwd=path) - message = "Auto-Rebase finalize\n\n" + "\n".join(f"{k}: {v}" for k, v in trailers.items()) - run_cmd(["git", "commit", "-m", message], cwd=path) - run_cmd(["git", "tag", tag], cwd=path) - except Exception: - # Best-effort only - pass - - -def git_diff_no_index(old: Path, new: Path, out_patch: Path) -> None: - """Create a unified diff between two directories using git --no-index. - - To get stable, relative paths that apply with -p1, we symlink both trees - into a temp dir as 'a' and 'b' and diff those. - """ - if not which("git"): - raise RuntimeError("git not available for diff generation") - with tempfile.TemporaryDirectory() as td: - tdp = Path(td) - a = tdp / "a" - b = tdp / "b" - try: - os.symlink(old.resolve(), a) - os.symlink(new.resolve(), b) - except Exception: - # Fallback: create directories and copy minimal structure - a.mkdir() - b.mkdir() - code, out, err = run_cmd(["git", "diff", "--no-index", "a", "b"], cwd=tdp, check=False) - # git diff exits 1 when there are differences; treat 0/1 as success - if code not in (0, 1): - raise RuntimeError(f"git diff failed: {err}") - out_patch.parent.mkdir(parents=True, exist_ok=True) - out_patch.write_text(out, encoding="utf-8") - - -def git_apply_reject(patch_path: Path, target_dir: Path, strip: int = 1) -> None: - """Apply a patch to target_dir using git apply with --reject. - - strip controls -pN path stripping (defaults to 1 for a/ and b/). - Generates .rej files for rejected hunks. - """ - if not which("git"): - raise RuntimeError("git not available for patch apply") - args = ["git", "apply", f"-p{strip}", "--reject", "--no-3way", str(patch_path)] - code, out, err = run_cmd(args, cwd=target_dir, check=False) - if code != 0: - # Even with rejects, git apply may return non-zero; continue but surface stderr in logs - pass - - -def unified_diff_text(a_path: Path, b_path: Path, rel: str | None = None) -> str: - """Return unified diff between two files using `diff -u` if available, else difflib. - - Returns empty string if files are identical. If one side is missing, uses /dev/null when shelling - out, or difflib with empty content. - """ - has_diff = bool(which("diff")) - a_exists = a_path.exists() - b_exists = b_path.exists() - if has_diff and (a_exists or b_exists): - a_arg = str(a_path) if a_exists else "/dev/null" - b_arg = str(b_path) if b_exists else "/dev/null" - code, out, err = run_cmd(["diff", "-u", a_arg, b_arg], check=False) - if code == 0: - return "" - if code in (1,): - # Replace absolute paths with relative paths in the diff output - if rel: - out = out.replace(str(a_path), f"a/{rel}") - out = out.replace(str(b_path), f"b/{rel}") - return out - # On other failures, fall back to difflib - a_txt = a_path.read_text(encoding="utf-8") if a_exists else "" - b_txt = b_path.read_text(encoding="utf-8") if b_exists else "" - fromfile = f"a/{rel or a_path.name}" - tofile = f"b/{rel or b_path.name}" - return "".join(difflib.unified_diff(a_txt.splitlines(True), b_txt.splitlines(True), fromfile=fromfile, tofile=tofile)) - - -def generate_per_file_patches(old_root: Path, new_root: Path, out_dir: Path) -> list[Path]: - """Generate per-file unified diff patches under out_dir mirroring the tree structure. - - - For files only in new_root, diff /dev/null vs new file (additions) - - For files only in old_root, diff old vs /dev/null (deletions) - - For files in both, diff their contents - Writes each patch as `/.patch` if there is a difference. - Returns list of written patch paths. - """ - out_dir.mkdir(parents=True, exist_ok=True) - old_files = {rel_to(p, old_root): p for p in list_files(old_root)} - new_files = {rel_to(p, new_root): p for p in list_files(new_root)} - rels = sorted(set(old_files.keys()) | set(new_files.keys())) - written: list[Path] = [] - for rel in rels: - a = old_files.get(rel, old_root / rel) - b = new_files.get(rel, new_root / rel) - diff_txt = unified_diff_text(a, b, rel=rel) - if not diff_txt.strip(): - continue - patch_path = out_dir / f"{rel}.patch" - ensure_dir(patch_path.parent) - patch_path.write_text(diff_txt, encoding="utf-8") - written.append(patch_path) - return written - - -def apply_patch_dir_with_reject(patch_dir: Path, target_dir: Path, strip: int = 1) -> list[Path]: - """Apply all .patch files under patch_dir (recursively) using git apply --reject. - - Returns a list of .rej files produced. Continues on errors to accumulate rejects. - """ - if not which("git"): - raise RuntimeError("git not available for patch apply") - patches = sorted(patch_dir.rglob("*.patch")) - for p in patches: - code, out, err = run_cmd(["git", "apply", f"-p{strip}", "--reject", "--no-3way", str(p)], cwd=target_dir, check=False) - # proceed regardless; .rej will indicate failures - return list(target_dir.rglob("*.rej")) diff --git a/engine/rules/coccinelle/symbol_rename.cocci b/engine/rules/coccinelle/symbol_rename.cocci deleted file mode 100644 index 89425e2..0000000 --- a/engine/rules/coccinelle/symbol_rename.cocci +++ /dev/null @@ -1,6 +0,0 @@ -@r@ -identifier old = "OldAPI", new = "NewAPI"; -@@ -- old(...) -+ new(...) - diff --git a/engine/rules/comby/examples.rules b/engine/rules/comby/examples.rules deleted file mode 100644 index b2b8d94..0000000 --- a/engine/rules/comby/examples.rules +++ /dev/null @@ -1,2 +0,0 @@ -:[pre]OldName(:[args]) -> :[pre]NewName(:[args]) - diff --git a/engine/rules/json/examples.jsonpatch b/engine/rules/json/examples.jsonpatch deleted file mode 100644 index 6ae752e..0000000 --- a/engine/rules/json/examples.jsonpatch +++ /dev/null @@ -1,4 +0,0 @@ -[ - {"op":"move","from":"/camera/rvc/timeout","path":"/camera/rvcs/timeout"} -] - diff --git a/engine/rules/yaml/examples.yq b/engine/rules/yaml/examples.yq deleted file mode 100644 index 9866e0d..0000000 --- a/engine/rules/yaml/examples.yq +++ /dev/null @@ -1,2 +0,0 @@ -.camera.rvcs.timeout = 123 - diff --git a/final_test/src/main.cpp b/final_test/src/main.cpp deleted file mode 100644 index 94e3480..0000000 --- a/final_test/src/main.cpp +++ /dev/null @@ -1,13 +0,0 @@ -#include - -void NewAPI(int v) { - std::cout << "NewAPI: " << v << std::endl; -} - -int main() { - // Feature customization: different value and extra log - std::cout << \"Feature activated\" << std::endl; - NewAPI(200); - return 0; -} - diff --git a/fix_autorebase.py b/fix_autorebase.py deleted file mode 100644 index 3f55bff..0000000 --- a/fix_autorebase.py +++ /dev/null @@ -1,119 +0,0 @@ -#!/usr/bin/env python3 -""" -Fix AutoRebase to properly handle conflicts and apply AI resolution. -""" - -import os -import shutil -from pathlib import Path -from engine.core.ai_resolve import resolve_rejects - -def fix_autorebase_run(run_dir: Path): - """Fix a specific AutoRebase run by applying AI resolution.""" - - print(f"๐Ÿ”ง Fixing AutoRebase run: {run_dir}") - - # Check main.cpp - main_cpp = run_dir / "feature-5.1" / "src" / "main.cpp" - if not main_cpp.exists(): - print("โŒ main.cpp not found") - return False - - content = main_cpp.read_text() - print(f"๐Ÿ“„ Current main.cpp content:\n{content}") - - # Check if feature customizations are present - has_feature_logging = "Feature activated" in content - has_parameter_200 = "200" in content - uses_new_api = "NewAPI" in content - - print(f"\n๐Ÿ“‹ Validation results:") - print(f" โœ… Has feature logging: {has_feature_logging}") - print(f" โœ… Has parameter 200: {has_parameter_200}") - print(f" โœ… Uses NewAPI: {uses_new_api}") - - if has_feature_logging and has_parameter_200 and uses_new_api: - print("๐ŸŽ‰ All feature customizations are present!") - return True - - print("\nโŒ Missing feature customizations. Let's fix this...") - - # Check for .rej files - rej_files = list(run_dir.rglob("*.rej")) - print(f"๐Ÿ” Found {len(rej_files)} rejection files") - - if rej_files: - print("๐Ÿค– Applying AI resolution...") - requirements = ["Feature: While calling API we need to pass 200 as input"] - remaining = resolve_rejects(rej_files, requirements) - - if not remaining: - print("โœ… AI resolution successful!") - # Check the result - new_content = main_cpp.read_text() - print(f"๐Ÿ“„ Updated main.cpp content:\n{new_content}") - return True - else: - print(f"โŒ AI resolution failed. Remaining rejects: {len(remaining)}") - return False - else: - print("โŒ No rejection files found. Creating artificial conflict...") - - # Create a .rej file artificially to trigger AI resolution - rej_file = main_cpp.with_suffix(".cpp.rej") - rej_content = """diff a/src/main.cpp b/src/main.cpp (rejected hunks) -@@ -5,7 +5,9 @@ - } - - int main() { -- OldAPI(42); -+ // Feature customization: different value and extra log -+ std::cout << "Feature activated" << std::endl; -+ OldAPI(200); - return 0; - } - -""" - rej_file.write_text(rej_content) - print(f"๐Ÿ“ Created artificial .rej file: {rej_file}") - - # Apply AI resolution - print("๐Ÿค– Applying AI resolution...") - requirements = ["Feature: While calling API we need to pass 200 as input"] - remaining = resolve_rejects([rej_file], requirements) - - if not remaining: - print("โœ… AI resolution successful!") - # Check the result - new_content = main_cpp.read_text() - print(f"๐Ÿ“„ Updated main.cpp content:\n{new_content}") - return True - else: - print(f"โŒ AI resolution failed. Remaining rejects: {len(remaining)}") - return False - -def main(): - """Fix all AutoRebase runs.""" - - # Find all runs - runs = [] - for run_dir in Path("artifacts").iterdir(): - if run_dir.is_dir() and "run" in run_dir.name: - runs.append(run_dir) - - if not runs: - print("No runs found in artifacts/") - return - - print(f"๐Ÿ” Found {len(runs)} runs to check") - - fixed_count = 0 - for run_dir in sorted(runs): - print(f"\n{'='*50}") - if fix_autorebase_run(run_dir): - fixed_count += 1 - - print(f"\n๐ŸŽฏ Summary: Fixed {fixed_count}/{len(runs)} runs") - -if __name__ == "__main__": - main() diff --git a/force_conflict_fix/src/main.cpp b/force_conflict_fix/src/main.cpp deleted file mode 100644 index 94e3480..0000000 --- a/force_conflict_fix/src/main.cpp +++ /dev/null @@ -1,13 +0,0 @@ -#include - -void NewAPI(int v) { - std::cout << "NewAPI: " << v << std::endl; -} - -int main() { - // Feature customization: different value and extra log - std::cout << \"Feature activated\" << std::endl; - NewAPI(200); - return 0; -} - diff --git a/force_conflict_test.py b/force_conflict_test.py deleted file mode 100644 index 0a2b69f..0000000 --- a/force_conflict_test.py +++ /dev/null @@ -1,97 +0,0 @@ -#!/usr/bin/env python3 -""" -Force conflict test to demonstrate the complete workflow. -""" - -import os -import shutil -import subprocess -from pathlib import Path -from engine.core.ai_resolve import resolve_rejects - -def force_conflict_test(): - """Force a conflict and test AI resolution.""" - - # Setup test environment - test_dir = Path("force_conflict_test") - if test_dir.exists(): - shutil.rmtree(test_dir) - - # Copy new base to test directory - shutil.copytree("data/sample/base-1.1", test_dir) - - print("๐Ÿš€ Force Conflict Test") - print(f"๐Ÿ“ Test directory: {test_dir}") - print() - - # Show initial state - main_cpp = test_dir / "src" / "main.cpp" - print("๐Ÿ“„ Initial state:") - print(main_cpp.read_text()) - print() - - # Create a patch that will definitely conflict - patch_content = """--- main.cpp 2025-09-13 12:12:50 -+++ main.cpp 2025-09-13 12:13:08 -@@ -5,7 +5,9 @@ - } - - int main() { -- OldAPI(42); -+ // Feature customization: different value and extra log -+ std::cout << "Feature activated" << std::endl; -+ OldAPI(200); - return 0; - } - -""" - - patch_file = test_dir / "src" / "conflict.patch" - patch_file.write_text(patch_content) - - # Apply patch with rejection - print("๐Ÿ“‹ Applying conflicting patch...") - try: - result = subprocess.run( - ["git", "apply", "--reject", "--no-3way", "-p0", "conflict.patch"], - cwd=test_dir / "src", - capture_output=True, - text=True - ) - print(f"Exit code: {result.returncode}") - if result.stdout: - print(f"STDOUT: {result.stdout}") - if result.stderr: - print(f"STDERR: {result.stderr}") - except Exception as e: - print(f"Error: {e}") - - # Check for rejection files - rej_files = list(test_dir.rglob("*.rej")) - print(f"\n๐Ÿ” Found {len(rej_files)} rejection files:") - for rej in rej_files: - print(f" - {rej}") - print(f"Content:\n{rej.read_text()}") - - if rej_files: - # Apply AI resolution - print("\n๐Ÿค– Applying AI resolution...") - requirements = ["Feature: While calling API we need to pass 200 as input"] - remaining = resolve_rejects(rej_files, requirements) - - print(f"โœ… Resolution complete. Remaining rejects: {len(remaining)}") - - # Show final result - print("\n๐ŸŽฏ Final result:") - print(main_cpp.read_text()) - else: - print("โŒ No rejection files found!") - print("Current file content:") - print(main_cpp.read_text()) - - # Cleanup - shutil.rmtree(test_dir) - print("\n๐Ÿงน Cleanup complete") - -if __name__ == "__main__": - force_conflict_test() diff --git a/manual_test/src/main.cpp b/manual_test/src/main.cpp deleted file mode 100644 index 7ab3c8d..0000000 --- a/manual_test/src/main.cpp +++ /dev/null @@ -1,11 +0,0 @@ -#include - -void NewAPI(int v) { - std::cout << "NewAPI: " << v << std::endl; -} - -int main() { - NewAPI(42); - return 0; -} - diff --git a/manual_test/src/main.cpp.rej b/manual_test/src/main.cpp.rej deleted file mode 100644 index bc43191..0000000 --- a/manual_test/src/main.cpp.rej +++ /dev/null @@ -1,12 +0,0 @@ -diff a/manual_test/src/main.cpp b/manual_test/src/main.cpp (rejected hunks) -@@ -5,7 +5,9 @@ - } - - int main() { -- OldAPI(42); -+ // Feature customization: different value and extra log -+ std::cout << "Feature activated" << std::endl; -+ OldAPI(200); - return 0; - } - diff --git a/master_prompt b/master_prompt deleted file mode 100644 index 80cbd88..0000000 --- a/master_prompt +++ /dev/null @@ -1,513 +0,0 @@ -MASTER SPEC PROMPT โ€” BUILD โ€œAUTO-REBASEโ€ (END-TO-END) - -You are an expert software engineer. Generate a production-ready monorepo called auto-rebase that automates re-targeting feature customizations whenever a Base SW (e.g., DriveOS) updates. - -High-Level Goal - -Given: - -base-OLD/ (e.g., base-1.0) - -base-NEW/ (e.g., base-1.1) - -feature-OLD/ (e.g., feature-5.0 = base-OLD + customizations) - -Produce automatically: - -feature-NEW/ (e.g., feature-5.1 = base-NEW + re-applied customizations) - -Machine-readable feature customization patchset - -Machine-readable base delta patchset - -Human report (HTML) with merge outcomes, conflicts, requirement traceability, and validation results - -Git commits/tags with commit trailers for traceability - -Languages & Stack - -Core engine & CLI: Python 3.11 - -Adapters / tooling bindings: Python subprocess + wrappers - -UI: Next.js 14 (TypeScript), TailwindCSS, minimal dashboard - -Auth/SSO: WorkOS (placeholder env vars, functions stubbed to compile) - -MCP server wrapper: Node.js TypeScript using Smithery-style template (expose CLI tools) - -Data store for runs: Convex (or fallback to local SQLite) โ€“ implement repository interface with two providers (Convex + SQLite) - -CI: GitHub Actions - -Package/deps: pin versions; provide requirements.txt and package.json - -Licensing: MIT - -External Tools (wrap with graceful fallbacks) - -git with rerere - -difftastic (syntax-aware diff) - -GumTree (AST map) โ€“ if not present, fallback to text heuristics but leave adapter scaffold - -clang-tidy/clang-apply-replacements (C/C++) - -coccinelle (semantic patches) โ€“ optional, guarded - -dtc (DTSI) - -jsonpatch (RFC-6902); jsonschema - -yq (YAML); yamale/pykwalify - -comby (structural find/replace) - -Every adapter must detect tool availability at runtime and degrade gracefully (log warning; switch to robust text merge). - -Monorepo Layout -auto-rebase/ - README.md - LICENSE - .gitignore - pyproject.toml - requirements.txt - scripts/ - install_tools.sh - demo_seed.sh - engine/ - __init__.py - cli/ - auto_rebase.py - core/ - config.py - diff_types.py - diff_graph.py - feature_extract.py - base_extract.py - retarget.py - validate.py - report.py - traceability.py - vcs.py - utils.py - adapters/ - __init__.py - c_cpp.py - json_cfg.py - yaml_cfg.py - dtsi.py - text_generic.py - gumtree.py - difftastic.py - comby.py - schema.py - rules/ - coccinelle/ - symbol_rename.cocci - comby/ - examples.rules - json/ - examples.jsonpatch - yaml/ - examples.yq - schemas/ - requirements_map.schema.json - report.schema.json - data/ - sample/base-1.0/ - sample/base-1.1/ - sample/feature-5.0/ - sample/requirements_map.yaml - artifacts/ (gitignored) - tests/ - test_end_to_end.py - test_adapters.py - test_traceability.py - fixtures/ - mcp-server/ - package.json - tsconfig.json - src/ - index.ts - tools.ts - config.ts - process.ts - README.md - web/ - package.json - next.config.js - src/ - app/ - page.tsx - api/ - runs/route.ts - components/ - UploadForm.tsx - RunTable.tsx - ReportViewer.tsx - lib/ - auth.ts - api.ts - convexClient.ts - public/ - server/ - api/ - main.py - models.py - storage/ - base.py - sqlite_store.py - convex_store.py - routers/ - runs.py - uploads.py - requirements.txt - README.md - .github/workflows/ci.yml - -Core Concepts - -ฮ”F (Feature Diff): changes from base-OLD โ†’ feature-OLD - -ฮ”B (Base Diff): changes from base-OLD โ†’ base-NEW - -ฮ”F' (Re-targeted): ฮ”F applied onto base-NEW - -Requirement mapping: file globs โ†’ requirement IDs, loaded from requirements_map.yaml - -CLI Commands (Implement now) - -auto-rebase (entrypoint engine/cli/auto_rebase.py): - -init --old-base PATH --new-base PATH --feature PATH --req-map PATH --workdir PATH - -Persists a run manifest (artifacts/run.json) - -extract-feature --out PATH - -Emits machine-readable ฮ”F: file list + patch units per adapter - -extract-base --out PATH - -Emits ฮ”B with change classification (moved/renamed/deleted/added/sig changes) - -retarget --feature-patch PATH --base-patch PATH --new-base PATH --out PATH - -3-way merge first (git); failed hunks โ†’ semantic relocate with adapters; log auto/manual decisions - -validate --path PATH --report PATH - -Build hooks (dummy; configurable script), schema checks, static checks; produce report.html + report.json - -finalize --path PATH --tag STRING --trace PATH - -Git commit + tag; add trailers: - -Req-Id: -Change-Type: FeatureCustomization -Auto-Rebase-Run: - - -All commands log to artifacts/logs/.log and return non-zero on failure. - -Engine Details -Adapters (engine/adapters/*.py) - -Each adapter must implement an interface: - -class PatchUnit(TypedDict): - file_path: str - kind: Literal["c_cpp","json","yaml","dtsi","text"] - ops: list[dict] # adapter-specific ops (e.g., jsonpatch ops) - anchors: dict | None # symbol/function/compatible anchors - req_ids: list[str] # attached via traceability - notes: str | None - -class Adapter(Protocol): - def detect_env() -> dict: ... - def extract_feature(old_base: Path, feature: Path) -> list[PatchUnit]: ... - def extract_base(old_base: Path, new_base: Path) -> dict: ... - def retarget(patch: PatchUnit, base_delta_map: dict, new_base_root: Path) -> PatchUnit | Conflict: ... - def apply(patch: PatchUnit, target_root: Path) -> ApplyResult: ... - def validate(target_root: Path) -> list[ValidationIssue]: ... - - -Implement for: - -c_cpp.py (clang-tidy fix-its, gumtree mapping, optional coccinelle) - -json_cfg.py (RFC-6902 ops + jsonschema validation) - -yaml_cfg.py (yq merges + yamale schema) - -dtsi.py (dtc compile; label/compatible anchors; overlay logic) - -text_generic.py (comby patterns; difftastic assist) - -Provide graceful fallback if external tool missing (textual smart diff with difflib + comby heuristics). - -Diff Graph (engine/core/diff_graph.py) - -Build a cross-file change graph: - -nodes: files/symbols/paths - -edges: moved/renamed/changed - -Export anchor maps (oldโ†’new locations) for adapters. - -Feature & Base Extraction - -feature_extract.py: compare trees; for each file, route to adapter; emit PatchUnit[] - -base_extract.py: compute ฮ”B; build rename/move maps; per-language classification - -Retargeting (engine/core/retarget.py) - -Phase 1: git merge-file / 3-way; enable git rerere - -Phase 2: For failed hunks, call adapter retarget() with ฮ”B maps - -Phase 3: Apply re-targeted patches into --out tree; produce per-file outcomes (applied/partial/conflict) - -Validation (engine/core/validate.py) - -Run: - -optional build script (configurable) - -adaptersโ€™ validate - -static checks (clang-tidy if C/C++; dtc if DTSI; jsonschema/yamale) - -Aggregate into ValidationSummary - -Traceability (engine/core/traceability.py) - -Load requirements_map.yaml with globs: - -- path_glob: "src/vision/**" - req_ids: ["AD-REQ-201","AD-REQ-318"] -- path_glob: "configs/rvc/*.json" - req_ids: ["AD-REQ-411"] - - -Attach IDs to PatchUnit during extraction - -Emit trace.json consumed by report & commit trailers - -Reporting (engine/core/report.py) - -Generate report.json and report.html (Jinja2 template): - -Summary % auto-merged / semantic / conflicts - -Table of files with badges - -Requirement IDs per change - -Validation results - -Tool availability matrix - -Validate report.json against schemas/report.schema.json - -VCS (engine/core/vcs.py) - -Utilities for git worktrees, rerere, tagging, trailers - -Server API (FastAPI) - -POST /runs โ†’ create run (stores metadata, paths; triggers background job via Python subprocess to call CLI) - -GET /runs โ†’ list runs - -GET /runs/{id} โ†’ status + links to artifacts (report, logs, produced tree tar) - -POST /uploads โ†’ accept zips of base-OLD, base-NEW, feature-OLD, requirements_map.yaml (store under /mnt/data/uploads//) - -Storage provider interface (server/api/storage/base.py) with two implementations: - -sqlite_store.py (default) - -convex_store.py (stubbed client; functions implemented but gated by CONVEX_URL presence) - -Provide server/api/requirements.txt separate (FastAPI, Uvicorn, Pydantic v2). - -MCP Server (Node/TypeScript) - -Expose tools that shell out to CLI: - -auto_rebase.init - -auto_rebase.extract_feature - -auto_rebase.extract_base - -auto_rebase.retarget - -auto_rebase.validate - -auto_rebase.finalize - -Each tool accepts JSON args โ†’ maps to CLI โ†’ streams logs back. Provide README to run: - -pnpm install -pnpm build -pnpm start - -Web UI (Next.js) - -Pages: - -/ dashboard: - -Upload three zips + requirements_map.yaml - -Button โ€œRun Auto-Rebaseโ€ - -Table of runs with status, % auto-merged, link to report - -Report viewer (embed /artifacts//report.html) - -Auth: - -WorkOS placeholders with env vars WORKOS_CLIENT_ID, WORKOS_API_KEY, WORKOS_REDIRECT_URI; if absent, use a dev login page - -API routes call Server API (configurable SERVER_URL) - -CI (GitHub Actions) - -Python lint + tests - -Build MCP server - -Build Next.js - -Cache deps - -On success, upload artifacts/ from sample e2e run - -Provide .github/workflows/ci.yml that: - -installs toolchain (or mocks tools absent) - -runs unit tests - -runs scripts/demo_seed.sh to execute full flow on data/sample/* and produce a report artifact - -Sample Data - -Under data/sample/, include tiny illustrative trees: - -base-1.0/ small C++ file, a DTSI snippet, a JSON config, a YAML config - -feature-5.0/ modifies those - -base-1.1/ moves/renames one function, changes a JSON key path, relocates a DTSI node, adds a YAML field -Include a minimal build script that always โ€œpassesโ€ but echoes files (for pipeline demo). - -Developer Ergonomics - -scripts/install_tools.sh detects OS; installs or skips external tools with clear logs - -pyproject.toml for black/isort/ruff configs - -README.md with: - -quickstart (local) - -running the e2e demo - -environment variables (WorkOS/Convex) - -limitations & fallbacks - -how to add a new adapter - -Testing - -tests/test_end_to_end.py: runs init โ†’ extract-feature โ†’ extract-base โ†’ retarget โ†’ validate on sample data; asserts: - -feature-NEW exists - -report.json schema-valid - -At least one auto-merge and one semantic relocate event - -tests/test_adapters.py: per-adapter unit tests on tiny fixtures - -tests/test_traceability.py: verifies mapping globs to req IDs - -Non-Functional Requirements - -Deterministic outputs where possible - -Every public function must have type hints & docstrings - -Log levels: INFO default, DEBUG via --verbose - -No secrets in logs - -Robust error messages with remediation tips - -Deliverables - -All files laid out as above, complete and runnable - -Clean, pinned dependency files - -A working demo path: - -bash scripts/install_tools.sh # optional -python -m engine.cli.auto_rebase init --old-base data/sample/base-1.0 --new-base data/sample/base-1.1 --feature data/sample/feature-5.0 --req-map data/sample/requirements_map.yaml --workdir artifacts/run1 -python -m engine.cli.auto_rebase extract-feature --out artifacts/run1/feature_patch -python -m engine.cli.auto_rebase extract-base --out artifacts/run1/base_patch -python -m engine.cli.auto_rebase retarget --feature-patch artifacts/run1/feature_patch --base-patch artifacts/run1/base_patch --new-base data/sample/base-1.1 --out artifacts/run1/feature-5.1 -python -m engine.cli.auto_rebase validate --path artifacts/run1/feature-5.1 --report artifacts/run1/report.html -python -m engine.cli.auto_rebase finalize --path artifacts/run1/feature-5.1 --tag v5.1 --trace artifacts/run1/trace.json - - -GitHub Actions passes and uploads artifacts/run1/* - -Extra: Seed Rules Examples (embed as files) - -rules/coccinelle/symbol_rename.cocci - -@r@ -identifier old = "OldAPI", new = "NewAPI"; -@@ -- old(...) -+ new(...) - - -rules/json/examples.jsonpatch - -[ - {"op":"move","from":"/camera/rvc/timeout","path":"/camera/rvcs/timeout"} -] - - -rules/comby/examples.rules - -:[pre]OldName(:[args]) -> :[pre]NewName(:[args]) - -Acceptance Criteria - -Running the demo produces: - -feature-5.1/ tree with applied changes - -report.html & report.json capturing auto vs semantic merges, conflicts, and validation - -trace.json listing requirement IDs per patch unit - -A Git tag v5.1 and commits with trailers - -Web UI can upload zips, launch a run, and display report (auth stub acceptable if WorkOS envs missing) - -MCP server exposes tools that invoke the CLI and return status - -Generate all code, configs, and docs now. If any external binary is absent in CI, mock behavior but keep interfaces intact and log warnings. \ No newline at end of file diff --git a/mcp-server/package.json b/mcp-server/package.json deleted file mode 100644 index c729997..0000000 --- a/mcp-server/package.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "auto-rebase-mcp", - "version": "0.1.0", - "license": "MIT", - "scripts": { - "build": "tsc -p tsconfig.json", - "start": "node dist/index.js" - }, - "dependencies": {}, - "devDependencies": { - "typescript": "5.6.2" - } -} - diff --git a/mcp-server/src/README.md b/mcp-server/src/README.md deleted file mode 100644 index da4f4da..0000000 --- a/mcp-server/src/README.md +++ /dev/null @@ -1,10 +0,0 @@ -MCP Server (stub) - -Expose CLI commands by spawning `python -m engine.cli.auto_rebase`. - -Usage: - -pnpm install -pnpm build -pnpm start - diff --git a/mcp-server/src/index.ts b/mcp-server/src/index.ts deleted file mode 100644 index 3aae6ef..0000000 --- a/mcp-server/src/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { spawn } from "child_process"; - -function runCli(args: string[]): Promise<{ code: number }> { - return new Promise((resolve) => { - const p = spawn("python", ["-m", "engine.cli.auto_rebase", ...args], { stdio: "inherit" }); - p.on("close", (code) => resolve({ code: code ?? 1 })); - }); -} - -async function main() { - console.log("auto-rebase MCP server stub. Expose CLI via tools."); - const code = (await runCli(["--help"]))?.code; - process.exit(0); -} - -main().catch((e) => { - console.error(e); - process.exit(1); -}); - diff --git a/mcp-server/src/tools.ts b/mcp-server/src/tools.ts deleted file mode 100644 index 5ebce5d..0000000 --- a/mcp-server/src/tools.ts +++ /dev/null @@ -1,58 +0,0 @@ -export type ToolCall = { - name: string; - args: Record; -}; - -export const tools = { - "auto_rebase.init": (args: Record) => [ - "init", - "--old-base", - args.old_base, - "--new-base", - args.new_base, - "--feature", - args.feature, - "--req-map", - args.req_map, - "--workdir", - args.workdir, - ], - "auto_rebase.extract_feature": (args: Record) => [ - "extract-feature", - "--out", - args.out, - ], - "auto_rebase.extract_base": (args: Record) => [ - "extract-base", - "--out", - args.out, - ], - "auto_rebase.retarget": (args: Record) => [ - "retarget", - "--feature-patch", - args.feature_patch, - "--base-patch", - args.base_patch, - "--new-base", - args.new_base, - "--out", - args.out, - ], - "auto_rebase.validate": (args: Record) => [ - "validate", - "--path", - args.path, - "--report", - args.report, - ], - "auto_rebase.finalize": (args: Record) => [ - "finalize", - "--path", - args.path, - "--tag", - args.tag, - "--trace", - args.trace, - ], -}; - diff --git a/mcp-server/tsconfig.json b/mcp-server/tsconfig.json deleted file mode 100644 index 53e8060..0000000 --- a/mcp-server/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "compilerOptions": { - "target": "ES2020", - "module": "commonjs", - "outDir": "dist", - "rootDir": "src", - "strict": true, - "esModuleInterop": true, - "skipLibCheck": true - }, - "include": ["src/**/*"] -} - diff --git a/resolve_conflict.py b/resolve_conflict.py deleted file mode 100755 index d29e153..0000000 --- a/resolve_conflict.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env python3 -""" -Simple CLI script to resolve file conflicts using OpenAI -""" - -import argparse -import sys -from pathlib import Path - -# Add the engine to the path -sys.path.insert(0, str(Path(__file__).parent)) - -from engine.core.file_conflict_resolver import resolve_file_conflict_with_openai - - -def main(): - parser = argparse.ArgumentParser(description="Resolve file conflicts using OpenAI") - parser.add_argument("original_file", help="Path to the original file") - parser.add_argument("rejection_file", help="Path to the .rej file") - parser.add_argument("requirements_file", help="Path to the requirements_map.yaml file") - parser.add_argument("-o", "--output", help="Output file path (default: resolved_)") - parser.add_argument("-v", "--verbose", action="store_true", help="Verbose output") - - args = parser.parse_args() - - # Determine output file - if args.output: - output_file = args.output - else: - original_path = Path(args.original_file) - output_file = f"resolved_{original_path.name}" - - print("๐Ÿ”ง File Conflict Resolver") - print(f"๐Ÿ“ Original file: {args.original_file}") - print(f"๐Ÿšซ Rejection file: {args.rejection_file}") - print(f"๐Ÿ“‹ Requirements file: {args.requirements_file}") - print(f"๐Ÿ’พ Output file: {output_file}") - print() - - # Resolve the conflict - result = resolve_file_conflict_with_openai( - original_file_path=args.original_file, - rejection_file_path=args.rejection_file, - requirements_file=args.requirements_file, - verbose=args.verbose - ) - - # Print results - print("๐Ÿ“Š RESULT:") - print(f" Success: {result['success']}") - print(f" Conflict Type: {result['conflict_type']}") - print(f" Explanation: {result['explanation']}") - print(f" Changes Applied: {result['changes_applied']}") - if result.get('requirement_used'): - print(f" Requirement Used: {result['requirement_used']}") - - if result['success'] and result['resolved_content']: - # Save the resolved content - Path(output_file).write_text(result['resolved_content']) - print(f"\nโœ… RESOLVED CONTENT SAVED TO: {output_file}") - print("\n๐Ÿ“„ RESOLVED CONTENT:") - print("-" * 50) - print(result['resolved_content']) - print("-" * 50) - - return 0 - else: - print(f"\nโŒ FAILED TO RESOLVE CONFLICT") - print(f" Error: {result['explanation']}") - return 1 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/schemas/report.schema.json b/schemas/report.schema.json deleted file mode 100644 index 613548b..0000000 --- a/schemas/report.schema.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "$schema": "https://json-schema.org/draft/2020-12/schema", - "type": "object", - "required": ["run_id", "summary", "files", "validation", "tools"], - "properties": { - "run_id": {"type": "string"}, - "summary": { - "type": "object", - "required": ["auto", "semantic", "conflicts"], - "properties": { - "auto": {"type": "number"}, - "semantic": {"type": "number"}, - "conflicts": {"type": "number"} - } - }, - "files": { - "type": "array", - "items": { - "type": "object", - "required": ["file", "status", "details", "req_ids"], - "properties": { - "file": {"type": "string"}, - "status": {"type": "string"}, - "details": {"type": "string"}, - "req_ids": {"type": "array", "items": {"type": "string"}} - } - } - }, - "validation": { - "type": "object", - "required": ["success", "issues"], - "properties": { - "success": {"type": "boolean"}, - "issues": { - "type": "array", - "items": { - "type": "object", - "required": ["file_path", "level", "message"], - "properties": { - "file_path": {"type": "string"}, - "level": {"type": "string"}, - "message": {"type": "string"} - } - } - } - } - }, - "tools": {"type": "object"} - } -} - diff --git a/schemas/requirements_map.schema.json b/schemas/requirements_map.schema.json deleted file mode 100644 index 6ce2671..0000000 --- a/schemas/requirements_map.schema.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "$schema": "https://json-schema.org/draft/2020-12/schema", - "type": "array", - "items": { - "type": "object", - "oneOf": [ - {"required": ["path_glob"]}, - {"required": ["path"]} - ], - "properties": { - "path_glob": {"type": "string"}, - "path": {"type": "string"}, - "req_ids": {"type": "array", "items": {"type": "string"}}, - "requirement": {"type": "string"} - } - } -} diff --git a/scripts/ai_direct_rebase.sh b/scripts/ai_direct_rebase.sh deleted file mode 100755 index bd9454d..0000000 --- a/scripts/ai_direct_rebase.sh +++ /dev/null @@ -1,140 +0,0 @@ -#!/bin/bash - -# AI Direct Rebase Script -# This script performs AI-based direct rebase without the traditional retarget step -# Usage: ./scripts/ai_direct_rebase.sh [workdir_name] - -set -e # Exit on any error - -# Configuration -WORKDIR_NAME=${1:-"ai_rebase_$(date +%Y%m%d_%H%M%S)"} -WORKDIR="artifacts/$WORKDIR_NAME" -OLD_BASE="data/sample-base-sw_1.0" -NEW_BASE="data/sample-base-sw_1.1" -FEATURE="data/sample-feature-sw_5.0" -REQ_MAP="data/sample/requirements_map.yaml" - -# Colors for output -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -BLUE='\033[0;34m' -NC='\033[0m' # No Color - -# Function to print colored output -print_step() { - echo -e "${BLUE}=== $1 ===${NC}" -} - -print_success() { - echo -e "${GREEN}โœ… $1${NC}" -} - -print_warning() { - echo -e "${YELLOW}โš ๏ธ $1${NC}" -} - -print_error() { - echo -e "${RED}โŒ $1${NC}" -} - -# Function to check if command exists -check_command() { - if ! command -v "$1" &> /dev/null; then - print_warning "$1 not found, but continuing..." - return 1 - fi - return 0 -} - -# Function to run command with error handling -run_cmd() { - local cmd="$1" - local description="$2" - - print_step "$description" - echo "Running: $cmd" - - if eval "$cmd"; then - print_success "$description completed" - else - print_error "$description failed" - exit 1 - fi - echo -} - -# Main execution -echo -e "${GREEN}๐Ÿš€ Starting AI Direct Rebase Workflow${NC}" -echo "Workdir: $WORKDIR" -echo "Old Base: $OLD_BASE" -echo "New Base: $NEW_BASE" -echo "Feature: $FEATURE" -echo "Requirements Map: $REQ_MAP" -echo - -# Check prerequisites -print_step "Checking Prerequisites" -check_command "python3" -check_command "git" -echo - -# Step 1: Initialize the run (to create workdir structure) -run_cmd "python -m engine.cli.auto_rebase init --old-base $OLD_BASE --new-base $NEW_BASE --feature $FEATURE --req-map $REQ_MAP --workdir $WORKDIR --verbose" \ - "Initialize AutoRebase Run" - -# Step 2: Extract feature patches -run_cmd "python -m engine.cli.auto_rebase extract-feature --out $WORKDIR/feature_patch --git-patch $WORKDIR/feature.patch --patch-dir $WORKDIR/feature_patches --verbose" \ - "Extract Feature Patches" - -# Step 3: Extract base patches -run_cmd "python -m engine.cli.auto_rebase extract-base --out $WORKDIR/base_patch --git-patch $WORKDIR/base.patch --patch-dir $WORKDIR/base_patches --verbose" \ - "Extract Base Patches" - -# Step 4: AI Direct Rebase - Apply feature patches directly to new base using AI -print_step "AI Direct Rebase" -echo "This step applies feature patches directly to the new base using AI conflict resolution" -echo "No traditional retarget step needed!" - -run_cmd "python -m engine.cli.auto_rebase ai-rebase --feature-patches $WORKDIR/feature_patches --base-patches $WORKDIR/base_patches --new-base $NEW_BASE --req-map $REQ_MAP --out $WORKDIR/feature-5.1 --verbose" \ - "AI Direct Rebase" - -# Step 5: Validate the results -run_cmd "python -m engine.cli.auto_rebase validate --path $WORKDIR/feature-5.1 --report $WORKDIR/report.html --verbose" \ - "Validate Results" - -# Step 6: Finalize with git tag -run_cmd "python -m engine.cli.auto_rebase finalize --path $WORKDIR/feature-5.1 --tag v5.1 --trace $WORKDIR/trace.json --verbose" \ - "Finalize with Git Tag" - -# Summary -print_step "AI Direct Rebase Summary" -echo "๐Ÿ“ Workdir: $WORKDIR" -echo "๐Ÿ“„ Reports:" -echo " - HTML Report: $WORKDIR/report.html" -echo " - JSON Report: $WORKDIR/report.json" -echo " - Trace: $WORKDIR/trace.json" -echo " - AI Rebase Results: $WORKDIR/feature-5.1/ai_rebase_results.json" -echo -echo "๐Ÿ“ฆ Generated Patches:" -echo " - Base patches: $WORKDIR/base_patches/" -echo " - Feature patches: $WORKDIR/feature_patches/" -echo " - Combined patches: $WORKDIR/base.patch, $WORKDIR/feature.patch" -echo -echo "๐ŸŽฏ Final Feature (AI Rebased): $WORKDIR/feature-5.1/" -echo -echo "๐Ÿค– AI Resolution Features:" -echo " - Automatic conflict detection between feature and base patches" -echo " - OpenAI-based conflict resolution (if API key available)" -echo " - Heuristic fallback for common patterns" -echo " - Requirement-aware customization application" - -# Optional: Open the HTML report -if command -v open &> /dev/null; then - echo - read -p "Open HTML report in browser? (y/n): " -n 1 -r - echo - if [[ $REPLY =~ ^[Yy]$ ]]; then - open "$WORKDIR/report.html" - fi -fi diff --git a/scripts/demo_seed.sh b/scripts/demo_seed.sh deleted file mode 100644 index 516616b..0000000 --- a/scripts/demo_seed.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -ART="artifacts/run1" -mkdir -p "$ART" - -python -m engine.cli.auto_rebase init --old-base data/sample/base-1.0 --new-base data/sample/base-1.1 --feature data/sample/feature-5.0 --req-map data/sample/requirements_map.yaml --workdir "$ART" -python -m engine.cli.auto_rebase extract-feature --out "$ART/feature_patch" -python -m engine.cli.auto_rebase extract-base --out "$ART/base_patch" -python -m engine.cli.auto_rebase retarget --feature-patch "$ART/feature_patch" --base-patch "$ART/base_patch" --new-base data/sample/base-1.1 --out "$ART/feature-5.1" -python -m engine.cli.auto_rebase validate --path "$ART/feature-5.1" --report "$ART/report.html" -python -m engine.cli.auto_rebase finalize --path "$ART/feature-5.1" --tag v5.1 --trace "$ART/trace.json" - -echo "Demo complete. See $ART" - diff --git a/scripts/install_tools.sh b/scripts/install_tools.sh deleted file mode 100644 index b2fc4f8..0000000 --- a/scripts/install_tools.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -echo "[auto-rebase] Tool installer (best-effort)." -OS="$(uname -s || echo unknown)" -echo "Detected OS: $OS" - -need() { command -v "$1" >/dev/null 2>&1 || return 0; echo "$1 present"; } - -echo "Checking optional tools..." -for t in git difftastic gumtree clang-tidy coccinelle spatch dtc yq comby; do - if command -v "$t" >/dev/null 2>&1; then - echo " - $t: present" - else - echo " - $t: missing (will fallback at runtime)" - fi -done - -echo "Done." - diff --git a/scripts/quick_rebase.sh b/scripts/quick_rebase.sh deleted file mode 100755 index 39a1a70..0000000 --- a/scripts/quick_rebase.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash - -# Quick AutoRebase Script - Minimal version -# Usage: ./scripts/quick_rebase.sh - -set -e - -WORKDIR="artifacts/quick_run" -OLD_BASE="data/sample/base-1.0" -NEW_BASE="data/sample/base-1.1" -FEATURE="data/sample/feature-5.0" -REQ_MAP="data/sample/requirements_map.yaml" - -echo "๐Ÿš€ Quick AutoRebase Run" -echo "Workdir: $WORKDIR" -echo - -# Clean up previous run -rm -rf "$WORKDIR" - -# Run the complete workflow -python -m engine.cli.auto_rebase init --old-base "$OLD_BASE" --new-base "$NEW_BASE" --feature "$FEATURE" --req-map "$REQ_MAP" --workdir "$WORKDIR" -python -m engine.cli.auto_rebase extract-feature --out "$WORKDIR/feature_patch" --git-patch "$WORKDIR/feature.patch" --patch-dir "$WORKDIR/feature_patches" -python -m engine.cli.auto_rebase extract-base --out "$WORKDIR/base_patch" --git-patch "$WORKDIR/base.patch" --patch-dir "$WORKDIR/base_patches" -python -m engine.cli.auto_rebase retarget --feature-patch "$WORKDIR/feature_patch/feature_patch.json" --base-patch "$WORKDIR/base_patch/base_patch.json" --new-base "$NEW_BASE" --out "$WORKDIR/feature-5.1" --patch-dir "$WORKDIR/feature_patches" -python -m engine.cli.auto_rebase validate --path "$WORKDIR/feature-5.1" --report "$WORKDIR/report.html" -python -m engine.cli.auto_rebase finalize --path "$WORKDIR/feature-5.1" --tag v5.1 --trace "$WORKDIR/trace.json" - -echo "โœ… Quick AutoRebase completed!" -echo "๐Ÿ“ Results in: $WORKDIR" -echo "๐Ÿ“„ Report: $WORKDIR/report.html" diff --git a/scripts/resolve_conflict.py b/scripts/resolve_conflict.py deleted file mode 100755 index d29e153..0000000 --- a/scripts/resolve_conflict.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env python3 -""" -Simple CLI script to resolve file conflicts using OpenAI -""" - -import argparse -import sys -from pathlib import Path - -# Add the engine to the path -sys.path.insert(0, str(Path(__file__).parent)) - -from engine.core.file_conflict_resolver import resolve_file_conflict_with_openai - - -def main(): - parser = argparse.ArgumentParser(description="Resolve file conflicts using OpenAI") - parser.add_argument("original_file", help="Path to the original file") - parser.add_argument("rejection_file", help="Path to the .rej file") - parser.add_argument("requirements_file", help="Path to the requirements_map.yaml file") - parser.add_argument("-o", "--output", help="Output file path (default: resolved_)") - parser.add_argument("-v", "--verbose", action="store_true", help="Verbose output") - - args = parser.parse_args() - - # Determine output file - if args.output: - output_file = args.output - else: - original_path = Path(args.original_file) - output_file = f"resolved_{original_path.name}" - - print("๐Ÿ”ง File Conflict Resolver") - print(f"๐Ÿ“ Original file: {args.original_file}") - print(f"๐Ÿšซ Rejection file: {args.rejection_file}") - print(f"๐Ÿ“‹ Requirements file: {args.requirements_file}") - print(f"๐Ÿ’พ Output file: {output_file}") - print() - - # Resolve the conflict - result = resolve_file_conflict_with_openai( - original_file_path=args.original_file, - rejection_file_path=args.rejection_file, - requirements_file=args.requirements_file, - verbose=args.verbose - ) - - # Print results - print("๐Ÿ“Š RESULT:") - print(f" Success: {result['success']}") - print(f" Conflict Type: {result['conflict_type']}") - print(f" Explanation: {result['explanation']}") - print(f" Changes Applied: {result['changes_applied']}") - if result.get('requirement_used'): - print(f" Requirement Used: {result['requirement_used']}") - - if result['success'] and result['resolved_content']: - # Save the resolved content - Path(output_file).write_text(result['resolved_content']) - print(f"\nโœ… RESOLVED CONTENT SAVED TO: {output_file}") - print("\n๐Ÿ“„ RESOLVED CONTENT:") - print("-" * 50) - print(result['resolved_content']) - print("-" * 50) - - return 0 - else: - print(f"\nโŒ FAILED TO RESOLVE CONFLICT") - print(f" Error: {result['explanation']}") - return 1 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/scripts/run_auto_rebase.sh b/scripts/run_auto_rebase.sh deleted file mode 100755 index 49298a1..0000000 --- a/scripts/run_auto_rebase.sh +++ /dev/null @@ -1,130 +0,0 @@ -#!/bin/bash - -# AutoRebase One-Play Script -# This script runs the complete AutoRebase workflow in one go -# Usage: ./scripts/run_auto_rebase.sh [workdir_name] - -set -e # Exit on any error - -# Configuration -WORKDIR_NAME=${1:-"run$(date +%Y%m%d_%H%M%S)"} -WORKDIR="artifacts/$WORKDIR_NAME" -OLD_BASE="data/sample-base-sw_1.0" -NEW_BASE="data/sample-base-sw_1.1" -FEATURE="data/sample-feature-sw_5.0" -REQ_MAP="data/sample/requirements_map.yaml" - -# Colors for output -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -BLUE='\033[0;34m' -NC='\033[0m' # No Color - -# Function to print colored output -print_step() { - echo -e "${BLUE}=== $1 ===${NC}" -} - -print_success() { - echo -e "${GREEN}โœ… $1${NC}" -} - -print_warning() { - echo -e "${YELLOW}โš ๏ธ $1${NC}" -} - -print_error() { - echo -e "${RED}โŒ $1${NC}" -} - -# Function to check if command exists -check_command() { - if ! command -v "$1" &> /dev/null; then - print_warning "$1 not found, but continuing..." - return 1 - fi - return 0 -} - -# Function to run command with error handling -run_cmd() { - local cmd="$1" - local description="$2" - - print_step "$description" - echo "Running: $cmd" - - if eval "$cmd"; then - print_success "$description completed" - else - print_error "$description failed" - exit 1 - fi - echo -} - -# Main execution -echo -e "${GREEN}๐Ÿš€ Starting AutoRebase One-Play Workflow${NC}" -echo "Workdir: $WORKDIR" -echo "Old Base: $OLD_BASE" -echo "New Base: $NEW_BASE" -echo "Feature: $FEATURE" -echo "Requirements Map: $REQ_MAP" -echo - -# Check prerequisites -print_step "Checking Prerequisites" -check_command "python3" -check_command "git" -echo - -# Step 1: Initialize the run -run_cmd "python -m engine.cli.auto_rebase init --old-base $OLD_BASE --new-base $NEW_BASE --feature $FEATURE --req-map $REQ_MAP --workdir $WORKDIR --verbose" \ - "Initialize AutoRebase Run" - -# Step 2: Extract feature patches -run_cmd "python -m engine.cli.auto_rebase extract-feature --out $WORKDIR/feature_patch --git-patch $WORKDIR/feature.patch --patch-dir $WORKDIR/feature_patches --verbose" \ - "Extract Feature Patches" - -# Step 3: Extract base patches -run_cmd "python -m engine.cli.auto_rebase extract-base --out $WORKDIR/base_patch --git-patch $WORKDIR/base.patch --patch-dir $WORKDIR/base_patches --verbose" \ - "Extract Base Patches" - -# Step 4: Retarget feature to new base -run_cmd "python -m engine.cli.auto_rebase retarget --feature-patch $WORKDIR/feature_patch/feature_patch.json --base-patch $WORKDIR/base_patch/base_patch.json --new-base $NEW_BASE --out $WORKDIR/feature-5.1 --patch-dir $WORKDIR/feature_patches --verbose" \ - "Retarget Feature to New Base" - -# Step 5: Validate the results -run_cmd "python -m engine.cli.auto_rebase validate --path $WORKDIR/feature-5.1 --report $WORKDIR/report.html --verbose" \ - "Validate Results" - -# Step 6: Finalize with git tag -run_cmd "python -m engine.cli.auto_rebase finalize --path $WORKDIR/feature-5.1 --tag v5.1 --trace $WORKDIR/trace.json --verbose" \ - "Finalize with Git Tag" - -# Summary -print_step "Workflow Summary" -echo "๐Ÿ“ Workdir: $WORKDIR" -echo "๐Ÿ“„ Reports:" -echo " - HTML Report: $WORKDIR/report.html" -echo " - JSON Report: $WORKDIR/report.json" -echo " - Trace: $WORKDIR/trace.json" -echo " - Retarget Results: $WORKDIR/feature-5.1/retarget_results.json" -echo -echo "๐Ÿ“ฆ Generated Patches:" -echo " - Base patches: $WORKDIR/base_patches/" -echo " - Feature patches: $WORKDIR/feature_patches/" -echo " - Combined patches: $WORKDIR/base.patch, $WORKDIR/feature.patch" -echo -echo "๐ŸŽฏ Final Feature: $WORKDIR/feature-5.1/" - -# Optional: Open the HTML report -if command -v open &> /dev/null; then - echo - read -p "Open HTML report in browser? (y/n): " -n 1 -r - echo - if [[ $REPLY =~ ^[Yy]$ ]]; then - open "$WORKDIR/report.html" - fi -fi diff --git a/server/api/README.md b/server/api/README.md deleted file mode 100644 index 5901880..0000000 --- a/server/api/README.md +++ /dev/null @@ -1,13 +0,0 @@ -Server API (FastAPI) - -- Run: `uvicorn server.api.main:app --reload` -- Endpoints: - - POST /runs - - GET /runs - - GET /runs/{id} - - POST /uploads - -Storage providers: -- sqlite_store.py: JSON-file backed demo -- convex_store.py: stubbed unless CONVEX_URL is set - diff --git a/server/api/main.py b/server/api/main.py deleted file mode 100644 index f6e0e44..0000000 --- a/server/api/main.py +++ /dev/null @@ -1,11 +0,0 @@ -from __future__ import annotations - -from fastapi import FastAPI - -from .routers import runs, uploads - -app = FastAPI(title="auto-rebase API") - -app.include_router(runs.router) -app.include_router(uploads.router) - diff --git a/server/api/models.py b/server/api/models.py deleted file mode 100644 index 292d2a6..0000000 --- a/server/api/models.py +++ /dev/null @@ -1,18 +0,0 @@ -from __future__ import annotations - -from dataclasses import dataclass -from datetime import datetime -from typing import Optional - - -@dataclass -class Run: - id: str - status: str - created_at: str - old_base: str - new_base: str - feature_old: str - req_map: str - artifacts_path: str - diff --git a/server/api/requirements.txt b/server/api/requirements.txt deleted file mode 100644 index fce9eec..0000000 --- a/server/api/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -fastapi==0.112.2 -uvicorn==0.30.6 -pydantic==2.9.2 -python-multipart==0.0.9 - diff --git a/server/api/routers/runs.py b/server/api/routers/runs.py deleted file mode 100644 index 43f9565..0000000 --- a/server/api/routers/runs.py +++ /dev/null @@ -1,38 +0,0 @@ -from __future__ import annotations - -from datetime import datetime -from pathlib import Path -from typing import Any, Dict - -from fastapi import APIRouter - -from ..storage.sqlite_store import SQLiteStore - - -router = APIRouter(prefix="/runs", tags=["runs"]) -store = SQLiteStore(Path("artifacts/store")) - - -@router.get("") -def list_runs() -> list[dict[str, Any]]: - return [r.__dict__ for r in store.list_runs()] - - -@router.post("") -def create_run(meta: dict[str, Any]) -> dict[str, Any]: - run_id = datetime.utcnow().strftime("%Y%m%dT%H%M%S") - rec = { - "id": run_id, - "status": "created", - "created_at": datetime.utcnow().isoformat() + "Z", - **meta, - } - store.create_run(rec) - return rec - - -@router.get("/{run_id}") -def get_run(run_id: str) -> dict[str, Any] | None: - r = store.get_run(run_id) - return r.__dict__ if r else None - diff --git a/server/api/routers/uploads.py b/server/api/routers/uploads.py deleted file mode 100644 index 135d50b..0000000 --- a/server/api/routers/uploads.py +++ /dev/null @@ -1,19 +0,0 @@ -from __future__ import annotations - -from pathlib import Path -from typing import Any - -from fastapi import APIRouter, UploadFile - - -router = APIRouter(prefix="/uploads", tags=["uploads"]) - - -@router.post("") -async def upload(file: UploadFile) -> dict[str, Any]: - root = Path("artifacts/uploads") - root.mkdir(parents=True, exist_ok=True) - out = root / file.filename - out.write_bytes(await file.read()) - return {"path": str(out)} - diff --git a/server/api/storage/base.py b/server/api/storage/base.py deleted file mode 100644 index 21f60c0..0000000 --- a/server/api/storage/base.py +++ /dev/null @@ -1,18 +0,0 @@ -from __future__ import annotations - -from abc import ABC, abstractmethod -from typing import Any, Dict, List, Optional - -from ..models import Run - - -class Storage(ABC): - @abstractmethod - def create_run(self, meta: Dict[str, Any]) -> Run: ... - - @abstractmethod - def list_runs(self) -> List[Run]: ... - - @abstractmethod - def get_run(self, run_id: str) -> Optional[Run]: ... - diff --git a/server/api/storage/convex_store.py b/server/api/storage/convex_store.py deleted file mode 100644 index aecc9d4..0000000 --- a/server/api/storage/convex_store.py +++ /dev/null @@ -1,33 +0,0 @@ -from __future__ import annotations - -from typing import Any, Dict, List, Optional - -from .base import Storage -from ..models import Run -import os - - -class ConvexStore(Storage): - """Convex provider stubbed unless CONVEX_URL is set.""" - - def __init__(self) -> None: - self.enabled = bool(os.getenv("CONVEX_URL")) - self._inmem: dict[str, Dict[str, Any]] = {} - - def create_run(self, meta: Dict[str, Any]) -> Run: - if not self.enabled: - return Run(**meta) - self._inmem[meta["id"]] = meta - return Run(**meta) - - def list_runs(self) -> List[Run]: - if not self.enabled: - return [] - return [Run(**m) for m in self._inmem.values()] - - def get_run(self, run_id: str) -> Optional[Run]: - if not self.enabled: - return None - m = self._inmem.get(run_id) - return Run(**m) if m else None - diff --git a/server/api/storage/sqlite_store.py b/server/api/storage/sqlite_store.py deleted file mode 100644 index 90c5fcb..0000000 --- a/server/api/storage/sqlite_store.py +++ /dev/null @@ -1,41 +0,0 @@ -from __future__ import annotations - -import json -from pathlib import Path -from typing import Any, Dict, List, Optional - -from .base import Storage -from ..models import Run - - -class SQLiteStore(Storage): - """Simple JSON-file backed store for demo fallback.""" - - def __init__(self, root: Path) -> None: - self.root = root - self.path = root / "runs.json" - self.root.mkdir(parents=True, exist_ok=True) - if not self.path.exists(): - self.path.write_text("[]", encoding="utf-8") - - def _load(self) -> List[Dict[str, Any]]: - return json.loads(self.path.read_text(encoding="utf-8")) - - def _save(self, lst: List[Dict[str, Any]]) -> None: - self.path.write_text(json.dumps(lst, indent=2), encoding="utf-8") - - def create_run(self, meta: Dict[str, Any]) -> Run: - lst = self._load() - lst.append(meta) - self._save(lst) - return Run(**meta) - - def list_runs(self) -> List[Run]: - return [Run(**m) for m in self._load()] - - def get_run(self, run_id: str) -> Optional[Run]: - for m in self._load(): - if m["id"] == run_id: - return Run(**m) - return None - diff --git a/test_ai_resolution.py b/test_ai_resolution.py deleted file mode 100644 index 673bce6..0000000 --- a/test_ai_resolution.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env python3 -""" -Test script to demonstrate the complete AutoRebase workflow with AI resolution. -This script forces the creation of .rej files and then applies AI resolution. -""" - -import os -import shutil -from pathlib import Path -from engine.core.ai_resolve import resolve_rejects -from engine.core.vcs import apply_patch_dir_with_reject - -def test_ai_resolution(): - """Test the complete AI resolution workflow.""" - - # Setup test environment - test_dir = Path("test_ai_workflow") - if test_dir.exists(): - shutil.rmtree(test_dir) - - # Copy new base to test directory - shutil.copytree("data/sample/base-1.1", test_dir) - - print("๐Ÿš€ Testing AI Resolution Workflow") - print(f"๐Ÿ“ Test directory: {test_dir}") - print() - - # Apply patches with rejection - print("๐Ÿ“‹ Applying feature patches...") - patch_dir = Path("artifacts/test_final_run/feature_patches") - rej_files = apply_patch_dir_with_reject(patch_dir, test_dir, strip=1) - - print(f"๐Ÿ” Found {len(rej_files)} rejection files:") - for rej in rej_files: - print(f" - {rej}") - - if not rej_files: - print("โŒ No rejection files found! This means patches applied successfully.") - print("Let's check what happened...") - - # Check the main.cpp file - main_cpp = test_dir / "src" / "main.cpp" - if main_cpp.exists(): - print(f"\n๐Ÿ“„ Content of {main_cpp}:") - print(main_cpp.read_text()) - - return - - # Show rejection content - print("\n๐Ÿ“‹ Rejection file content:") - for rej in rej_files: - print(f"\n--- {rej} ---") - print(rej.read_text()) - - # Apply AI resolution - print("\n๐Ÿค– Applying AI resolution...") - requirements = ["Feature: While calling API we need to pass 200 as input"] - remaining = resolve_rejects(rej_files, requirements) - - print(f"โœ… Resolution complete. Remaining rejects: {len(remaining)}") - - # Show final result - print("\n๐ŸŽฏ Final result:") - main_cpp = test_dir / "src" / "main.cpp" - if main_cpp.exists(): - print(f"\n๐Ÿ“„ Content of {main_cpp}:") - print(main_cpp.read_text()) - - # Cleanup - shutil.rmtree(test_dir) - print("\n๐Ÿงน Cleanup complete") - -if __name__ == "__main__": - test_ai_resolution() diff --git a/test_ai_workflow/configs/config.json b/test_ai_workflow/configs/config.json deleted file mode 100644 index a365f3a..0000000 --- a/test_ai_workflow/configs/config.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "camera": { - "rvcs": { - "timeout": 100 - } - } -} - diff --git a/test_ai_workflow/configs/settings.yaml b/test_ai_workflow/configs/settings.yaml deleted file mode 100644 index 0c0b8c6..0000000 --- a/test_ai_workflow/configs/settings.yaml +++ /dev/null @@ -1,4 +0,0 @@ -feature: - enabled: false - default_y: true - diff --git a/test_ai_workflow/dts/device.dtsi b/test_ai_workflow/dts/device.dtsi deleted file mode 100644 index 1ee7f36..0000000 --- a/test_ai_workflow/dts/device.dtsi +++ /dev/null @@ -1,13 +0,0 @@ -/dts-v1/; - -/ { - model = "Demo-Board"; - soc { - camera@0 { - compatible = "demo,camera-rvcs"; - status = "okay"; - timeout-ms = <100>; - }; - }; -}; - diff --git a/test_ai_workflow/src/main.cpp b/test_ai_workflow/src/main.cpp deleted file mode 100644 index 7ab3c8d..0000000 --- a/test_ai_workflow/src/main.cpp +++ /dev/null @@ -1,11 +0,0 @@ -#include - -void NewAPI(int v) { - std::cout << "NewAPI: " << v << std::endl; -} - -int main() { - NewAPI(42); - return 0; -} - diff --git a/test_apply/test_feature_final.patch b/test_apply/test_feature_final.patch deleted file mode 100644 index f82cf62..0000000 --- a/test_apply/test_feature_final.patch +++ /dev/null @@ -1,13 +0,0 @@ ---- src/main.cpp 2025-09-13 12:12:50 -+++ src/main.cpp 2025-09-13 12:13:08 -@@ -5,7 +5,9 @@ - } - - int main() { -- OldAPI(42); -+ // Feature customization: different value and extra log -+ std::cout << "Feature activated" << std::endl; -+ OldAPI(200); - return 0; - } - diff --git a/test_feature.patch b/test_feature.patch deleted file mode 100644 index 8238f0b..0000000 --- a/test_feature.patch +++ /dev/null @@ -1,13 +0,0 @@ ---- data/sample/base-1.0/src/main.cpp 2025-09-13 12:12:50 -+++ data/sample/feature-5.0/src/main.cpp 2025-09-13 12:13:08 -@@ -5,7 +5,9 @@ - } - - int main() { -- OldAPI(42); -+ // Feature customization: different value and extra log -+ std::cout << "Feature activated" << std::endl; -+ OldAPI(200); - return 0; - } - diff --git a/test_feature_correct.patch b/test_feature_correct.patch deleted file mode 100644 index f82cf62..0000000 --- a/test_feature_correct.patch +++ /dev/null @@ -1,13 +0,0 @@ ---- src/main.cpp 2025-09-13 12:12:50 -+++ src/main.cpp 2025-09-13 12:13:08 -@@ -5,7 +5,9 @@ - } - - int main() { -- OldAPI(42); -+ // Feature customization: different value and extra log -+ std::cout << "Feature activated" << std::endl; -+ OldAPI(200); - return 0; - } - diff --git a/test_feature_fixed.patch b/test_feature_fixed.patch deleted file mode 100644 index f82cf62..0000000 --- a/test_feature_fixed.patch +++ /dev/null @@ -1,13 +0,0 @@ ---- src/main.cpp 2025-09-13 12:12:50 -+++ src/main.cpp 2025-09-13 12:13:08 -@@ -5,7 +5,9 @@ - } - - int main() { -- OldAPI(42); -+ // Feature customization: different value and extra log -+ std::cout << "Feature activated" << std::endl; -+ OldAPI(200); - return 0; - } - diff --git a/tests/test_adapters.py b/tests/test_adapters.py deleted file mode 100644 index 91679f4..0000000 --- a/tests/test_adapters.py +++ /dev/null @@ -1,38 +0,0 @@ -from __future__ import annotations - -from pathlib import Path - -from engine.core import feature_extract, base_extract - - -ROOT = Path(__file__).resolve().parents[1] - - -def test_feature_extract_smoke(): - """Test feature extraction using git patches.""" - old_base = ROOT / "data/sample-base-sw_1.0" - feature = ROOT / "data/sample-feature-sw_5.0" - req_map = ROOT / "data/sample/requirements_map.yaml" - - units = feature_extract.extract_feature(old_base, feature, req_map) - assert isinstance(units, list) - assert len(units) >= 1 - - # Check that each unit has the expected structure - for unit in units: - assert "file_path" in unit - assert "patch_content" in unit - assert "req_ids" in unit - assert "requirements" in unit - - -def test_base_extract_smoke(): - """Test base extraction using git patches.""" - old_base = ROOT / "data/sample-base-sw_1.0" - new_base = ROOT / "data/sample-base-sw_1.1" - - delta = base_extract.extract_base(old_base, new_base) - assert isinstance(delta, dict) - assert "git_patches" in delta - assert isinstance(delta["git_patches"], dict) - diff --git a/tests/test_end_to_end.py b/tests/test_end_to_end.py deleted file mode 100644 index 0fa35d4..0000000 --- a/tests/test_end_to_end.py +++ /dev/null @@ -1,49 +0,0 @@ -from __future__ import annotations - -import json -import subprocess -from pathlib import Path - - -ROOT = Path(__file__).resolve().parents[1] - - -def run_cli(args: list[str]) -> subprocess.CompletedProcess[str]: - return subprocess.run(["python", "-m", "engine.cli.auto_rebase", *args], cwd=ROOT, text=True, capture_output=True, check=True) - - -def test_e2e_flow(tmp_path: Path): - art = ROOT / "artifacts" / "run1" - art.mkdir(parents=True, exist_ok=True) - - run_cli(["init", "--old-base", "data/sample/base-1.0", "--new-base", "data/sample/base-1.1", "--feature", "data/sample/feature-5.0", "--req-map", "data/sample/requirements_map.yaml", "--workdir", str(art)]) - - fp = art / "feature_patch" - bp = art / "base_patch" - out = art / "feature-5.1" - - run_cli(["extract-feature", "--out", str(fp)]) - run_cli(["extract-base", "--out", str(bp)]) - - feature_patch = fp / "feature_patch.json" - base_patch = bp / "base_patch.json" - assert feature_patch.exists() - assert base_patch.exists() - - run_cli(["retarget", "--feature-patch", str(feature_patch), "--base-patch", str(base_patch), "--new-base", "data/sample/base-1.1", "--out", str(out)]) - - assert (out / "retarget_results.json").exists() - - # Validate and report - report_html = out / "../report.html" - run_cli(["validate", "--path", str(out), "--report", str(report_html)]) - report_json = report_html.with_suffix(".json") - assert report_html.exists() - assert report_json.exists() - - # Load and check schema-ish keys - report = json.loads(report_json.read_text(encoding="utf-8")) - assert "summary" in report and "files" in report - # Should have at least one file outcome - assert len(report["files"]) >= 1 - diff --git a/tests/test_traceability.py b/tests/test_traceability.py deleted file mode 100644 index 284724f..0000000 --- a/tests/test_traceability.py +++ /dev/null @@ -1,17 +0,0 @@ -from __future__ import annotations - -from pathlib import Path - -from engine.core.traceability import load_requirements_map, req_ids_for_file - - -ROOT = Path(__file__).resolve().parents[1] - - -def test_mapping_globs_to_ids(): - mappings = load_requirements_map(ROOT / "data/sample/requirements_map.yaml") - ids = req_ids_for_file("src/foo.cpp", mappings) - assert "AD-REQ-201" in ids and "AD-REQ-318" in ids - ids2 = req_ids_for_file("configs/config.json", mappings) - assert "AD-REQ-411" in ids2 - diff --git a/validate_patch_result.py b/validate_patch_result.py deleted file mode 100644 index 16f57dc..0000000 --- a/validate_patch_result.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env python3 -""" -Validation script to check if patch application results are correct. -""" - -import os -import shutil -from pathlib import Path -from engine.core.ai_resolve import resolve_rejects - -def validate_and_fix_patch_result(): - """Validate patch results and fix if needed.""" - - # Check the latest run - latest_run = None - for run_dir in Path("artifacts").iterdir(): - if run_dir.is_dir() and run_dir.name.startswith("run"): - latest_run = run_dir - - # Also check for other run patterns - if not latest_run: - for run_dir in Path("artifacts").iterdir(): - if run_dir.is_dir() and "run" in run_dir.name: - latest_run = run_dir - break - - if not latest_run: - print("No runs found in artifacts/") - return - - print(f"๐Ÿ” Checking run: {latest_run}") - - # Check main.cpp - main_cpp = latest_run / "feature-5.1" / "src" / "main.cpp" - if not main_cpp.exists(): - print("โŒ main.cpp not found") - return - - content = main_cpp.read_text() - print(f"๐Ÿ“„ Current main.cpp content:\n{content}") - - # Check if feature customizations are present - has_feature_logging = "Feature activated" in content - has_parameter_200 = "200" in content - uses_new_api = "NewAPI" in content - - print(f"\n๐Ÿ“‹ Validation results:") - print(f" โœ… Has feature logging: {has_feature_logging}") - print(f" โœ… Has parameter 200: {has_parameter_200}") - print(f" โœ… Uses NewAPI: {uses_new_api}") - - if has_feature_logging and has_parameter_200 and uses_new_api: - print("๐ŸŽ‰ All feature customizations are present!") - return - - print("\nโŒ Missing feature customizations. Let's fix this...") - - # Check for .rej files - rej_files = list(latest_run.rglob("*.rej")) - print(f"๐Ÿ” Found {len(rej_files)} rejection files") - - if rej_files: - print("๐Ÿค– Applying AI resolution...") - requirements = ["Feature: While calling API we need to pass 200 as input"] - remaining = resolve_rejects(rej_files, requirements) - - if not remaining: - print("โœ… AI resolution successful!") - # Check the result - new_content = main_cpp.read_text() - print(f"๐Ÿ“„ Updated main.cpp content:\n{new_content}") - else: - print(f"โŒ AI resolution failed. Remaining rejects: {len(remaining)}") - else: - print("โŒ No rejection files found. Manual intervention needed.") - -if __name__ == "__main__": - validate_and_fix_patch_result() diff --git a/web/next.config.js b/web/next.config.js deleted file mode 100644 index ab155f2..0000000 --- a/web/next.config.js +++ /dev/null @@ -1,6 +0,0 @@ -/** @type {import('next').NextConfig} */ -const nextConfig = { - output: 'standalone', -}; -module.exports = nextConfig; - diff --git a/web/package.json b/web/package.json deleted file mode 100644 index 44118e9..0000000 --- a/web/package.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "auto-rebase-web", - "version": "0.1.0", - "private": true, - "scripts": { - "dev": "next dev", - "build": "next build", - "start": "next start" - }, - "dependencies": { - "next": "14.2.11", - "react": "18.3.1", - "react-dom": "18.3.1" - }, - "devDependencies": { - "typescript": "5.6.2" - } -} - diff --git a/web/src/app/api/runs/route.ts b/web/src/app/api/runs/route.ts deleted file mode 100644 index 83601eb..0000000 --- a/web/src/app/api/runs/route.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { NextResponse } from "next/server"; - -export async function GET() { - return NextResponse.json({ runs: [] }); -} - diff --git a/web/src/app/page.tsx b/web/src/app/page.tsx deleted file mode 100644 index 1604fe4..0000000 --- a/web/src/app/page.tsx +++ /dev/null @@ -1,9 +0,0 @@ -export default function Page() { - return ( -
-

auto-rebase dashboard

-

Upload archives and run auto-rebase (stub UI).

-
- ); -} - diff --git a/web/src/components/ReportViewer.tsx b/web/src/components/ReportViewer.tsx deleted file mode 100644 index bca0988..0000000 --- a/web/src/components/ReportViewer.tsx +++ /dev/null @@ -1,7 +0,0 @@ -type Props = { src: string }; -export default function ReportViewer({ src }: Props) { - return ( -