diff --git a/.github/workflows/autofix.yml b/.github/workflows/autofix.yml
index 05f9dd8a..afa117c4 100644
--- a/.github/workflows/autofix.yml
+++ b/.github/workflows/autofix.yml
@@ -22,10 +22,10 @@ jobs:
         with:
           python-version: "3.11"
 
-      - name: Install formatters
+      - name: Install dependencies
         run: |
           python -m pip install -U pip
-          pip install ruff black isort
+          pip install -e ".[dev]"
 
       - name: Fix with ruff
         run: ruff check . --fix || true
diff --git a/.github/workflows/automation.yml b/.github/workflows/automation.yml
deleted file mode 100644
index faadc048..00000000
--- a/.github/workflows/automation.yml
+++ /dev/null
@@ -1,99 +0,0 @@
-name: Cortex Automation
-
-on:
-  push:
-    branches: [ main ]
-  pull_request:
-    branches: [ main ]
-
-permissions:
-  contents: read
-  pull-requests: read
-
-jobs:
-  test:
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        python-version: ['3.10', '3.11', '3.12']
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ matrix.python-version }}
-
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install -e ".[dev]"
-
-      - name: Run tests
-        env:
-          ANTHROPIC_API_KEY: "test-key-for-ci"
-          OPENAI_API_KEY: "test-key-for-ci"
-        run: |
-          python -m pytest tests/ -v --cov=cortex --cov-report=xml --cov-report=term-missing --timeout=60
-
-      - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v5
-        if: matrix.python-version == '3.11'
-        with:
-          file: ./coverage.xml
-          fail_ci_if_error: false
-
-  lint:
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Set up Python
-        uses: actions/setup-python@v6
-        with:
-          python-version: '3.11'
-
-      - name: Install linting tools
-        run: |
-          python -m pip install --upgrade pip
-          pip install ruff black mypy
-
-      - name: Lint with ruff
-        run: |
-          ruff check . --output-format=github
-
-      - name: Check formatting with black
-        run: |
-          black --check . --exclude "(venv|\.venv|build|dist)"
-
-      - name: Type check with mypy
-        run: |
-          mypy cortex --ignore-missing-imports --no-error-summary || true
-        continue-on-error: true
-
-  security:
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Set up Python
-        uses: actions/setup-python@v6
-        with:
-          python-version: '3.11'
-
-      - name: Install security tools
-        run: |
-          python -m pip install --upgrade pip
-          pip install bandit safety
-
-      - name: Run Bandit security linter
-        run: |
-          bandit -r cortex/ -ll -ii || echo "::warning::Security issues found. Please review."
-
-      - name: Check dependencies with safety
-        run: |
-          pip install -e ".[dev]"
-          safety check --full-report || echo "::warning::Vulnerable dependencies found."
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b2fe27bb..e9a10a9f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -23,10 +23,10 @@ jobs:
         with:
           python-version: "3.11"
 
-      - name: Install linting tools
+      - name: Install dependencies
        run: |
           python -m pip install -U pip
-          pip install ruff black mypy
+          pip install -e ".[dev]"
 
       - name: Lint with ruff
         run: ruff check . --output-format=github
@@ -104,18 +104,16 @@ jobs:
         with:
           python-version: "3.11"
 
-      - name: Install security tools
+      - name: Install dependencies
         run: |
           python -m pip install -U pip
-          pip install bandit safety
+          pip install -e ".[security]"
 
       - name: Run bandit security scan
         run: bandit -r cortex -ll -ii --format json --output bandit-report.json || true
 
       - name: Check for known vulnerabilities
-        run: |
-          pip install -e .
-          safety check --json --output safety-report.json || true
+        run: safety check --json --output safety-report.json || true
         continue-on-error: true
 
       - name: Upload security reports
@@ -140,10 +138,10 @@ jobs:
         with:
           python-version: "3.11"
 
-      - name: Install build tools
+      - name: Install dependencies
         run: |
           python -m pip install -U pip
-          pip install build twine
+          pip install -e ".[dev]" twine
 
       - name: Build package
         run: python -m build
diff --git a/cortex/context_memory.py b/cortex/context_memory.py
index 98c8d731..b7e9d1c2 100644
--- a/cortex/context_memory.py
+++ b/cortex/context_memory.py
@@ -97,8 +97,7 @@ def _init_database(self):
             cursor = conn.cursor()
 
             # Memory entries table
-            cursor.execute(
-                """
+            cursor.execute("""
                 CREATE TABLE IF NOT EXISTS memory_entries (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
                     timestamp TEXT NOT NULL,
@@ -112,12 +111,10 @@ def _init_database(self):
                     metadata TEXT,
                     created_at TEXT DEFAULT CURRENT_TIMESTAMP
                 )
-            """
-            )
+            """)
 
             # Patterns table
-            cursor.execute(
-                """
+            cursor.execute("""
                 CREATE TABLE IF NOT EXISTS patterns (
                     pattern_id TEXT PRIMARY KEY,
                     pattern_type TEXT NOT NULL,
@@ -129,12 +126,10 @@ def _init_database(self):
                     context TEXT,
                     created_at TEXT DEFAULT CURRENT_TIMESTAMP
                 )
-            """
-            )
+            """)
 
             # Suggestions table
-            cursor.execute(
-                """
+            cursor.execute("""
                 CREATE TABLE IF NOT EXISTS suggestions (
                     suggestion_id TEXT PRIMARY KEY,
                     suggestion_type TEXT NOT NULL,
@@ -145,20 +140,17 @@ def _init_database(self):
                     created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                     dismissed BOOLEAN DEFAULT 0
                 )
-            """
-            )
+            """)
 
             # User preferences table
-            cursor.execute(
-                """
+            cursor.execute("""
                 CREATE TABLE IF NOT EXISTS preferences (
                     key TEXT PRIMARY KEY,
                     value TEXT,
                     category TEXT,
                     updated_at TEXT DEFAULT CURRENT_TIMESTAMP
                 )
-            """
-            )
+            """)
 
             # Create indexes for performance
             cursor.execute(
@@ -408,14 +400,12 @@ def generate_suggestions(self, context: str = None) -> list[Suggestion]:
         with self._pool.get_connection() as conn:
             cursor = conn.cursor()
 
-            cursor.execute(
-                """
+            cursor.execute("""
                 SELECT * FROM memory_entries
                 WHERE timestamp > datetime('now', '-7 days')
                 ORDER BY timestamp DESC
                 LIMIT 50
-            """
-            )
+            """)
 
             recent_entries = [self._row_to_memory_entry(row) for row in cursor.fetchall()]
 
@@ -632,23 +622,19 @@ def get_statistics(self) -> dict[str, Any]:
             stats["total_entries"] = cursor.fetchone()[0]
 
             # Entries by category
-            cursor.execute(
-                """
+            cursor.execute("""
                 SELECT category, COUNT(*)
                 FROM memory_entries
                 GROUP BY category
-            """
-            )
+            """)
             stats["by_category"] = dict(cursor.fetchall())
 
             # Success rate
-            cursor.execute(
-                """
+            cursor.execute("""
                 SELECT
                     SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 100.0 / COUNT(*) as success_rate
                 FROM memory_entries
-            """
-            )
+            """)
             stats["success_rate"] = (
                 round(cursor.fetchone()[0], 2) if stats["total_entries"] > 0 else 0
             )
@@ -662,12 +648,10 @@ def get_statistics(self) -> dict[str, Any]:
             stats["active_suggestions"] = cursor.fetchone()[0]
 
             # Recent activity
-            cursor.execute(
-                """
+            cursor.execute("""
                 SELECT COUNT(*) FROM memory_entries
                 WHERE timestamp > datetime('now', '-7 days')
-            """
-            )
+            """)
             stats["recent_activity"] = cursor.fetchone()[0]
 
         return stats
diff --git a/cortex/first_run_wizard.py b/cortex/first_run_wizard.py
index bf8ad5ac..48aa5eae 100644
--- a/cortex/first_run_wizard.py
+++ b/cortex/first_run_wizard.py
@@ -237,8 +237,7 @@ def _step_welcome(self) -> StepResult:
         self._clear_screen()
         self._print_banner()
 
-        print(
-            """
+        print("""
 Welcome to Cortex Linux! 🚀
 
 Cortex is an AI-powered package manager that understands natural language.
@@ -249,8 +248,7 @@ def _step_welcome(self) -> StepResult:
   $ cortex remove unused packages
 
 This wizard will help you set up Cortex in just a few minutes.
-"""
-        )
+""")
 
         if self.interactive:
             response = self._prompt("Press Enter to continue (or 'q' to quit): ")
@@ -264,16 +262,14 @@ def _step_api_setup(self) -> StepResult:
         self._clear_screen()
         self._print_header("Step 1: API Configuration")
 
-        print(
-            """
+        print("""
 Cortex uses AI to understand your commands. You can use:
 
   1. Claude API (Anthropic) - Recommended
   2. OpenAI API
   3. Local LLM (Ollama) - Free, runs on your machine
   4. Skip for now (limited functionality)
-"""
-        )
+""")
 
         # Check for existing API keys
         existing_claude = os.environ.get("ANTHROPIC_API_KEY")
@@ -708,8 +704,7 @@ def _step_complete(self) -> StepResult:
         # Save all config
         self.save_config()
 
-        print(
-            """
+        print("""
 Cortex is ready to use! Here are some things to try:
 
 📦 Install packages:
@@ -729,8 +724,7 @@ def _step_complete(self) -> StepResult:
 📖 Get help:
    cortex help
 
-"""
-        )
+""")
 
         # Show configuration summary
         print("Configuration Summary:")
diff --git a/cortex/graceful_degradation.py b/cortex/graceful_degradation.py
index b5b607c1..073d134c 100644
--- a/cortex/graceful_degradation.py
+++ b/cortex/graceful_degradation.py
@@ -81,8 +81,7 @@ def _init_db(self):
         """Initialize the cache database."""
         self._pool = get_connection_pool(str(self.db_path), pool_size=5)
         with self._pool.get_connection() as conn:
-            conn.execute(
-                """
+            conn.execute("""
                 CREATE TABLE IF NOT EXISTS response_cache (
                     query_hash TEXT PRIMARY KEY,
                     query TEXT NOT NULL,
@@ -91,14 +90,11 @@ def _init_db(self):
                     hit_count INTEGER DEFAULT 0,
                     last_used TIMESTAMP
                 )
-            """
-            )
-            conn.execute(
-                """
+            """)
+            conn.execute("""
                 CREATE INDEX IF NOT EXISTS idx_last_used
                 ON response_cache(last_used)
-            """
-            )
+            """)
             conn.commit()
 
     def _hash_query(self, query: str) -> str:
diff --git a/cortex/installation_history.py b/cortex/installation_history.py
index 61c559fd..38716f85 100644
--- a/cortex/installation_history.py
+++ b/cortex/installation_history.py
@@ -105,8 +105,7 @@ def _init_database(self):
             cursor = conn.cursor()
 
             # Create installations table
-            cursor.execute(
-                """
+            cursor.execute("""
                 CREATE TABLE IF NOT EXISTS installations (
                     id TEXT PRIMARY KEY,
                     timestamp TEXT NOT NULL,
@@ -120,16 +119,13 @@ def _init_database(self):
                     rollback_available INTEGER,
                    duration_seconds REAL
                 )
-            """
-            )
+            """)
 
             # Create index on timestamp
-            cursor.execute(
-                """
+            cursor.execute("""
                 CREATE INDEX IF NOT EXISTS idx_timestamp
                 ON installations(timestamp)
-            """
-            )
+            """)
 
             conn.commit()
 
diff --git a/cortex/kernel_features/kv_cache_manager.py b/cortex/kernel_features/kv_cache_manager.py
index 04d0bb89..59314620 100644
--- a/cortex/kernel_features/kv_cache_manager.py
+++ b/cortex/kernel_features/kv_cache_manager.py
@@ -50,14 +50,12 @@ def __init__(self):
         CORTEX_DB.parent.mkdir(parents=True, exist_ok=True)
         self._pool = get_connection_pool(str(CORTEX_DB), pool_size=5)
         with self._pool.get_connection() as conn:
-            conn.executescript(
-                """
+            conn.executescript("""
                 CREATE TABLE IF NOT EXISTS pools (name TEXT PRIMARY KEY, config TEXT, shm_name TEXT);
                 CREATE TABLE IF NOT EXISTS entries (seq_id INTEGER, pool TEXT, created REAL, accessed REAL, count INTEGER, tokens INTEGER, size INTEGER, offset INTEGER, PRIMARY KEY(seq_id, pool));
                 CREATE TABLE IF NOT EXISTS stats (pool TEXT PRIMARY KEY, hits INTEGER DEFAULT 0, misses INTEGER DEFAULT 0);
 
-            """
-            )
+            """)
 
     def save_pool(self, cfg: CacheConfig, shm: str):
         with self._pool.get_connection() as conn:
diff --git a/cortex/kernel_features/model_lifecycle.py b/cortex/kernel_features/model_lifecycle.py
index d0460b7f..8e2d6f9e 100644
--- a/cortex/kernel_features/model_lifecycle.py
+++ b/cortex/kernel_features/model_lifecycle.py
@@ -46,15 +46,13 @@ def __init__(self):
 
     def _init_db(self):
         with sqlite3.connect(CORTEX_DB_PATH) as conn:
-            conn.execute(
-                """
+            conn.execute("""
                 CREATE TABLE IF NOT EXISTS models (
                     name TEXT PRIMARY KEY,
                     config TEXT NOT NULL,
                     created_at TEXT NOT NULL
                 )
-            """
-            )
+            """)
 
     def save_model(self, config: ModelConfig):
         with sqlite3.connect(CORTEX_DB_PATH) as conn:
diff --git a/cortex/licensing.py b/cortex/licensing.py
index 714832f1..d8d87693 100644
--- a/cortex/licensing.py
+++ b/cortex/licensing.py
@@ -201,8 +201,7 @@ def show_upgrade_prompt(feature: str, required_tier: str) -> None:
 
     price = "$20" if required_tier == FeatureTier.PRO else "$99"
 
-    print(
-        f"""
+    print(f"""
 ┌─────────────────────────────────────────────────────────┐
 │ ⚡ UPGRADE REQUIRED │
 ├─────────────────────────────────────────────────────────┤
@@ -216,8 +215,7 @@ def show_upgrade_prompt(feature: str, required_tier: str) -> None:
 │ 🌐 {PRICING_URL} │
 │ │
 └─────────────────────────────────────────────────────────┘
-"""
-    )
+""")
 
 
 def show_license_status() -> None:
@@ -230,14 +228,12 @@ def show_license_status() -> None:
         FeatureTier.ENTERPRISE: "yellow",
     }
 
-    print(
-        f"""
+    print(f"""
 ┌─────────────────────────────────────────────────────────┐
 │ CORTEX LICENSE STATUS │
 ├─────────────────────────────────────────────────────────┤
 │ Tier: {info.tier.upper():12} │
-│ Status: {"ACTIVE" if info.valid else "EXPIRED":12} │"""
-    )
+│ Status: {"ACTIVE" if info.valid else "EXPIRED":12} │""")
 
     if info.organization:
         print(f"│ Organization: {info.organization[:12]:12} │")
diff --git a/cortex/role_manager.py b/cortex/role_manager.py
index de95fe98..11269d28 100644
--- a/cortex/role_manager.py
+++ b/cortex/role_manager.py
@@ -308,12 +308,10 @@ def modifier(existing_content: str, key: str, value: str) -> str:
         try:
             # Use self.history_db defined in __init__ for consistency.
             with sqlite3.connect(self.history_db) as conn:
-                conn.execute(
-                    """
+                conn.execute("""
                     CREATE TABLE IF NOT EXISTS role_changes
                     (timestamp TEXT, key TEXT, old_value TEXT, new_value TEXT)
-                """
-                )
+                """)
                 conn.execute(
                     "INSERT INTO role_changes VALUES (?, ?, ?, ?)",
                     (
diff --git a/cortex/semantic_cache.py b/cortex/semantic_cache.py
index 21ef935b..4dd8d75d 100644
--- a/cortex/semantic_cache.py
+++ b/cortex/semantic_cache.py
@@ -94,8 +94,7 @@ def _init_database(self) -> None:
         with self._pool.get_connection() as conn:
             cur = conn.cursor()
 
-            cur.execute(
-                """
+            cur.execute("""
                 CREATE TABLE IF NOT EXISTS llm_cache_entries (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
                     provider TEXT NOT NULL,
@@ -109,29 +108,22 @@ def _init_database(self) -> None:
                     last_accessed TEXT NOT NULL,
                     hit_count INTEGER NOT NULL DEFAULT 0
                 )
-            """
-            )
-            cur.execute(
-                """
+            """)
+            cur.execute("""
                 CREATE UNIQUE INDEX IF NOT EXISTS idx_llm_cache_unique
                 ON llm_cache_entries(provider, model, system_hash, prompt_hash)
-            """
-            )
-            cur.execute(
-                """
+            """)
+            cur.execute("""
                 CREATE INDEX IF NOT EXISTS idx_llm_cache_lru
                 ON llm_cache_entries(last_accessed)
-            """
-            )
-            cur.execute(
-                """
+            """)
+            cur.execute("""
                 CREATE TABLE IF NOT EXISTS llm_cache_stats (
                     id INTEGER PRIMARY KEY CHECK (id = 1),
                     hits INTEGER NOT NULL DEFAULT 0,
                     misses INTEGER NOT NULL DEFAULT 0
                 )
-            """
-            )
+            """)
             cur.execute("INSERT OR IGNORE INTO llm_cache_stats(id, hits, misses) VALUES (1, 0, 0)")
             conn.commit()
 
diff --git a/cortex/transaction_history.py b/cortex/transaction_history.py
index 6bc22843..eaa6ecd0 100644
--- a/cortex/transaction_history.py
+++ b/cortex/transaction_history.py
@@ -153,8 +153,7 @@ def __init__(self, db_path: Path | None = None):
     def _init_db(self):
         """Initialize the database schema."""
         with sqlite3.connect(self.db_path) as conn:
-            conn.execute(
-                """
+            conn.execute("""
                 CREATE TABLE IF NOT EXISTS transactions (
                     id TEXT PRIMARY KEY,
                     transaction_type TEXT NOT NULL,
@@ -171,22 +170,17 @@ def _init_db(self):
                     is_rollback_safe INTEGER,
                     rollback_warning TEXT
                 )
-            """
-            )
+            """)
 
-            conn.execute(
-                """
+            conn.execute("""
                 CREATE INDEX IF NOT EXISTS idx_timestamp
                 ON transactions(timestamp DESC)
-            """
-            )
+            """)
 
-            conn.execute(
-                """
+            conn.execute("""
                 CREATE INDEX IF NOT EXISTS idx_status
                 ON transactions(status)
-            """
-            )
+            """)
 
             conn.commit()
 
diff --git a/cortex/uninstall_impact.py b/cortex/uninstall_impact.py
index b83cd02e..ac861ee4 100644
--- a/cortex/uninstall_impact.py
+++ b/cortex/uninstall_impact.py
@@ -738,7 +738,8 @@ def analyze(self, package_name: str) -> ImpactResult:
         # Check if essential (only for installed packages)
         if is_installed and pkg_info and pkg_info.is_essential:
             result.warnings.append(
-                f"⚠️ '{package_name}' is marked as ESSENTIAL. " "Removing it may break your system!"
+                f"⚠️ '{package_name}' is marked as ESSENTIAL. "
+                "Removing it may break your system!"
             )
             result.severity = ImpactSeverity.CRITICAL
             result.safe_to_remove = False
diff --git a/daemon/scripts/setup_daemon.py b/daemon/scripts/setup_daemon.py
index bee1a12d..b7899197 100755
--- a/daemon/scripts/setup_daemon.py
+++ b/daemon/scripts/setup_daemon.py
@@ -34,8 +34,7 @@ def init_audit_db() -> bool:
         cursor = conn.cursor()
 
         # Create events table if it doesn't exist
-        cursor.execute(
-            """
+        cursor.execute("""
             CREATE TABLE IF NOT EXISTS events (
                 id INTEGER PRIMARY KEY AUTOINCREMENT,
                 timestamp TEXT NOT NULL,
@@ -43,8 +42,7 @@ def init_audit_db() -> bool:
                 details TEXT,
                 success INTEGER DEFAULT 1
             )
-        """
-        )
+        """)
 
         conn.commit()
         conn.close()
diff --git a/tests/test_context_memory.py b/tests/test_context_memory.py
index 8e40f5b7..16033203 100644
--- a/tests/test_context_memory.py
+++ b/tests/test_context_memory.py
@@ -41,13 +41,11 @@ def test_initialization(self):
         conn = sqlite3.connect(self.temp_db.name)
         cursor = conn.cursor()
 
-        cursor.execute(
-            """
+        cursor.execute("""
             SELECT name FROM sqlite_master
             WHERE type='table'
             ORDER BY name
-        """
-        )
+        """)
         tables = [row[0] for row in cursor.fetchall()]
 
         self.assertIn("memory_entries", tables)
diff --git a/tests/test_role_management.py b/tests/test_role_management.py
index 711df2af..348fcfaf 100644
--- a/tests/test_role_management.py
+++ b/tests/test_role_management.py
@@ -73,15 +73,13 @@ def test_get_system_context_fact_gathering(temp_cortex_dir):
     db_path = temp_cortex_dir / "history.db"
     with sqlite3.connect(db_path) as conn:
         cursor = conn.cursor()
-        cursor.execute(
-            """
+        cursor.execute("""
            CREATE TABLE installations (
                 packages TEXT,
                 status TEXT,
                 timestamp DATETIME
            )
-        """
-        )
+        """)
         # Insert a JSON array of packages as a successful installation.
         cursor.execute(
             "INSERT INTO installations (packages, status, timestamp) VALUES (?, ?, ?)",
diff --git a/tests/test_shell_env_analyzer.py b/tests/test_shell_env_analyzer.py
index 9115b4ed..a781b9dd 100644
--- a/tests/test_shell_env_analyzer.py
+++ b/tests/test_shell_env_analyzer.py
@@ -45,8 +45,7 @@ def temp_dir():
 def bash_config(temp_dir):
     """Create a sample .bashrc file."""
     bashrc = temp_dir / ".bashrc"
-    bashrc.write_text(
-        """# Sample bashrc
+    bashrc.write_text("""# Sample bashrc
 export PATH="/usr/local/bin:$PATH"
 export EDITOR="vim"
 export NODE_ENV="development"
@@ -54,8 +53,7 @@ def bash_config(temp_dir):
 
 # Another PATH modification
 export PATH="$HOME/bin:$PATH"
-"""
-    )
+""")
     return bashrc
 
 
@@ -63,13 +61,11 @@ def bash_config(temp_dir):
 def zsh_config(temp_dir):
     """Create a sample .zshrc file."""
     zshrc = temp_dir / ".zshrc"
-    zshrc.write_text(
-        """# Sample zshrc
+    zshrc.write_text("""# Sample zshrc
 export PATH="/opt/homebrew/bin:$PATH"
 export EDITOR="nvim"
 export ZSH_THEME="robbyrussell"
-"""
-    )
+""")
     return zshrc
 
 
@@ -79,14 +75,12 @@ def fish_config(temp_dir):
     fish_dir = temp_dir / ".config" / "fish"
     fish_dir.mkdir(parents=True)
     config_fish = fish_dir / "config.fish"
-    config_fish.write_text(
-        """# Sample fish config
+    config_fish.write_text("""# Sample fish config
 set -gx PATH /usr/local/bin $PATH
 set -gx EDITOR vim
 set -x NODE_ENV production
 set fish_greeting ""
-"""
-    )
+""")
     return config_fish
 
 
@@ -206,12 +200,10 @@ def test_parse_bash_export(self, bash_config):
     def test_parse_bash_quoted_values(self, temp_dir):
         """Test parsing quoted values in bash."""
         bashrc = temp_dir / ".bashrc"
-        bashrc.write_text(
-            """export SINGLE='single quoted'
+        bashrc.write_text("""export SINGLE='single quoted'
 export DOUBLE="double quoted"
 export UNQUOTED=unquoted
-"""
-        )
+""")
 
         parser = ShellConfigParser(shell=Shell.BASH)
         sources = parser.parse_file(bashrc)
@@ -617,12 +609,10 @@ def test_full_audit_workflow(self, temp_dir):
         home = temp_dir / "home"
         home.mkdir()
         bashrc = home / ".bashrc"
-        bashrc.write_text(
-            """export PATH="/custom/bin:$PATH"
+        bashrc.write_text("""export PATH="/custom/bin:$PATH"
 export EDITOR="vim"
 export EDITOR="nano"
-"""
-        )
+""")
 
         with patch.object(Path, "home", return_value=home):
             analyzer = ShellEnvironmentAnalyzer(shell=Shell.BASH)
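
Note (not part of the patch): every Python hunk above makes the same mechanical change — the SQL literal is passed directly to execute("""...""") instead of splitting the call across extra lines for the opening parenthesis and the triple quote. A minimal, self-contained sketch of the resulting idiom, using plain sqlite3 with an in-memory database rather than the repo's pooled connections:

    # Standalone illustration of the execute("""...""") style the patch converges on.
    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("""
        CREATE TABLE IF NOT EXISTS example_entries (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            timestamp TEXT NOT NULL
        )
    """)
    conn.commit()
    conn.close()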
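Several hunks also rely on a get_connection_pool(path, pool_size=N) helper whose pool objects expose get_connection() as a context manager; that helper is not shown in this patch. A hypothetical, simplified implementation consistent with how the hunks use it (the names and behaviour here are assumptions, not the repo's actual code) might look like:

    # Hypothetical sketch of a pooled-connection helper matching the usage above.
    import sqlite3
    import threading
    from contextlib import contextmanager
    from queue import Queue


    class _ConnectionPool:
        def __init__(self, db_path: str, pool_size: int = 5):
            self._connections = Queue(maxsize=pool_size)
            for _ in range(pool_size):
                # check_same_thread=False lets pooled connections be shared across threads.
                self._connections.put(sqlite3.connect(db_path, check_same_thread=False))

        @contextmanager
        def get_connection(self):
            conn = self._connections.get()
            try:
                yield conn
            finally:
                self._connections.put(conn)


    _pools: dict[str, _ConnectionPool] = {}
    _pools_lock = threading.Lock()


    def get_connection_pool(db_path: str, pool_size: int = 5) -> _ConnectionPool:
        # Reuse a single pool per database path.
        with _pools_lock:
            if db_path not in _pools:
                _pools[db_path] = _ConnectionPool(db_path, pool_size)
            return _pools[db_path]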