diff --git a/__init__.py b/__init__.py
deleted file mode 100644
index a9cf49f..0000000
--- a/__init__.py
+++ /dev/null
@@ -1,37 +0,0 @@
-"""
-REloadAI - Automated binary analysis and exploit generation with AI
-"""
-
-__version__ = "2.0.0"
-
-# CLI entrypoint
-def cli_main():
-    """Invoke the CLI entrypoint."""
-    from .cli.reloadai_cli import main
-    return main()
-
-# Core engines
-from src.modules.static_analysis.static_analyzer import BinaryAnalyzer
-from src.modules.dynamic_analysis.dynamic_analyzer import DynamicAnalyzer
-from src.modules.exploit_development.exploit_generator import ExploitGenerator
-from .core.fingerprints import ssdeep_hash, imphash
-from .core.packers import detect as detect_packer
-from .core.cfg_visualizer import CFGVisualizer
-from .core.binary_differ import BinaryDiffer
-from .core.ctf_solver import CTFSolver
-from .core.malware_generator import MalwareGenerator
-
-__all__ = [
-    "__version__",
-    "cli_main",
-    "BinaryAnalyzer",
-    "DynamicAnalyzer",
-    "ExploitGenerator",
-    "ssdeep_hash",
-    "imphash",
-    "detect_packer",
-    "CFGVisualizer",
-    "BinaryDiffer",
-    "CTFSolver",
-    "MalwareGenerator",
-]
diff --git a/reloadai/__init__.py b/reloadai/__init__.py
new file mode 100644
index 0000000..08f1522
--- /dev/null
+++ b/reloadai/__init__.py
@@ -0,0 +1,57 @@
+"""REloadAI - Automated binary analysis and exploit generation with AI"""
+
+__version__ = "2.0.0"
+
+def cli_main():
+    """Invoke the CLI entrypoint."""
+    from cli.reloadai_cli import main
+    return main()
+
+from src.modules.static_analysis.static_analyzer import (
+    BinaryAnalyzer,
+    BinaryAnalysisError,
+    SecurityError,
+)
+
+try:  # Optional heavy dependencies
+    from src.modules.dynamic_analysis.dynamic_analyzer import DynamicAnalyzer
+except Exception:  # pragma: no cover - optional
+    DynamicAnalyzer = None
+
+try:
+    from src.modules.exploit_development.exploit_generator import ExploitGenerator
+except Exception:  # pragma: no cover - optional
+    ExploitGenerator = None
+
+try:
+    from core.fingerprints import ssdeep_hash, imphash
+    from core.packers import detect as detect_packer
+    from core.cfg_visualizer import CFGVisualizer
+    from core.binary_differ import BinaryDiffer
+    from core.ctf_solver import CTFSolver
+    from core.malware_generator import MalwareGenerator
+except Exception:  # pragma: no cover - optional
+    ssdeep_hash = imphash = detect_packer = CFGVisualizer = None
+    BinaryDiffer = CTFSolver = MalwareGenerator = None
+
+__all__ = [
+    "__version__",
+    "cli_main",
+    "BinaryAnalyzer",
+    "BinaryAnalysisError",
+    "SecurityError",
+]
+if DynamicAnalyzer:
+    __all__.append("DynamicAnalyzer")
+if ExploitGenerator:
+    __all__.append("ExploitGenerator")
+if ssdeep_hash:
+    __all__.extend([
+        "ssdeep_hash",
+        "imphash",
+        "detect_packer",
+        "CFGVisualizer",
+        "BinaryDiffer",
+        "CTFSolver",
+        "MalwareGenerator",
+    ])
diff --git a/reloadai/core/__init__.py b/reloadai/core/__init__.py
new file mode 100644
index 0000000..f42fb21
--- /dev/null
+++ b/reloadai/core/__init__.py
@@ -0,0 +1,3 @@
+"""Core utilities exposed under the :mod:`reloadai.core` namespace."""
+
+__all__: list[str] = []
diff --git a/reloadai/core/analyzer.py b/reloadai/core/analyzer.py
new file mode 100644
index 0000000..38693c5
--- /dev/null
+++ b/reloadai/core/analyzer.py
@@ -0,0 +1,33 @@
+"""Compatibility wrapper for static analysis utilities."""
+from src.modules.static_analysis.static_analyzer import (
+    BinaryAnalyzer,
+    BinaryAnalysisError,
+    SecurityError,
+)
+
+
+def analyze_static(binary_path: str, deep: bool = False, openai_key: str = "") -> dict:
"""Run a radare2-based static analysis on ``binary_path``. + + Parameters + ---------- + binary_path: str + Path to the binary to inspect. + deep: bool + Enable a deeper ``aaa`` analysis in radare2. + openai_key: str + API key for optional GPT-assisted features. + """ + analyzer = BinaryAnalyzer(binary_path, openai_key) + try: + return analyzer.analyze_static_details(deep=deep) + finally: + analyzer.close() + + +__all__ = [ + "analyze_static", + "BinaryAnalyzer", + "BinaryAnalysisError", + "SecurityError", +] diff --git a/src/modules/static_analysis/static_analyzer.py b/src/modules/static_analysis/static_analyzer.py index fb4128e..114a680 100644 --- a/src/modules/static_analysis/static_analyzer.py +++ b/src/modules/static_analysis/static_analyzer.py @@ -2,6 +2,7 @@ import json import os import re +import subprocess from typing import Dict, List, Optional, Tuple from pathlib import Path import openai @@ -39,11 +40,16 @@ class BinaryAnalyzer: def __init__(self, binary_path: str, openai_key: str): self.binary_path = binary_path self.filename = Path(binary_path).name + self.analysis_tool = "radare2" try: self.r2 = r2pipe.open(binary_path) except Exception as e: log_message(f"Failed to open r2pipe for {binary_path}: {e}") - raise BinaryAnalysisError(f"r2pipe open failed: {e}") + self.r2 = None + self.analysis_tool = "ghidra" + self._init_ghidra() + if self.r2 is None: + raise BinaryAnalysisError(f"r2pipe open failed: {e}") if openai_key: openai.api_key = openai_key @@ -65,6 +71,15 @@ def __init__(self, binary_path: str, openai_key: str): } # self.log = log_message # Assigning the placeholder logger + def _init_ghidra(self) -> None: + """Placeholder for initializing a Ghidra fallback.""" + ghidra_home = os.getenv("GHIDRA_HOME") + if not ghidra_home or not Path(ghidra_home).exists(): + log_message("Ghidra not found. Set GHIDRA_HOME to enable fallback analysis.") + return + # Fallback implementation would spawn Ghidra headless analysis here. + log_message("Ghidra integration is not yet implemented.") + def close(self): """Closes the r2pipe connection.""" if self.r2: @@ -251,8 +266,37 @@ def generate_insights(self) -> List[str]: return insights def generate_learning_notes(self) -> List[Dict]: - notes = [] - # ... (add learning notes generation as in the original class, using self.results) ... + notes: List[Dict] = [] + fi = self.results.get('file_info', {}) + if fi: + notes.append({ + 'topic': 'Architecture', + 'note': f"Binary '{fi.get('name')}' targets {fi.get('arch')}-{fi.get('bits')} which affects calling conventions and gadget availability." + }) + + cs = self.results.get('checksec', {}) + explanations = { + 'NX enabled': 'Non-executable stack makes shellcode injection harder.', + 'NX disabled': 'Executable stack allows direct shellcode execution.', + 'PIE enabled': 'Addresses are randomized each run; leaks are usually required.', + 'PIE disabled': 'Fixed addresses simplify ROP chains.', + 'Stack canary found': 'Canaries detect simple stack overflows.', + 'No stack canary': 'Lack of canaries means classic BoF attacks may succeed.', + 'Full RELRO': 'GOT is read-only; overwriting entries is difficult.', + 'Partial RELRO': 'Only part of GOT is protected.', + 'No RELRO': 'GOT overwrites are viable attack vectors.' 
+        }
+        for feature, value in cs.items():
+            explanation = explanations.get(value)
+            if explanation:
+                notes.append({'topic': f'security:{feature}', 'note': explanation})
+
+        for s in self.results.get('strings', []):
+            notes.append({
+                'topic': 'string',
+                'note': f"Suspicious string '{s.get('string')}' matches pattern '{s.get('pattern')}' and may reveal credentials, commands or flags."
+            })
+
+        self.results['learning_notes'] = notes
         return notes
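
For context, a minimal usage sketch of the new reloadai.core.analyzer wrapper, assuming analyze_static_details() returns the populated results dictionary with the keys referenced above (checksec, learning_notes); the sample path and printed fields are illustrative, not part of the diff:

    from reloadai.core.analyzer import analyze_static, BinaryAnalysisError

    try:
        # Hypothetical target binary; deep=True requests the heavier radare2 "aaa" pass.
        report = analyze_static("./samples/crackme", deep=True, openai_key="")
    except BinaryAnalysisError as exc:
        print(f"analysis failed: {exc}")
    else:
        # Assumption: the returned dict mirrors self.results, so these keys may be present.
        print(report.get("checksec"))
        for note in report.get("learning_notes", []):
            print(f"[{note['topic']}] {note['note']}")

The wrapper closes the underlying r2pipe session in its finally block, so callers do not need to manage BinaryAnalyzer.close() themselves.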