diff --git a/.github/README.md b/.github/README.md
index 621ee40..86d9f69 100644
--- a/.github/README.md
+++ b/.github/README.md
@@ -2,7 +2,10 @@
[](https://codecov.io/gh/ccBittorrent/ccbt)
[](https://ccbittorrent.readthedocs.io/en/reports/bandit/)
-[](../pyproject.toml)
+[](https://github.com/ccBitTorrent/ccbt/actions/workflows/test.yml)
+[](https://github.com/ccBitTorrent/ccbt/actions/workflows/test.yml)
+[](https://github.com/ccBitTorrent/ccbt/actions/workflows/test.yml)
+
[](https://ccbittorrent.readthedocs.io/en/license/)
[](https://ccbittorrent.readthedocs.io/en/contributing/)
[](https://ccbittorrent.readthedocs.io/en/getting-started/)
diff --git a/.github/workflows/build-documentation.yml b/.github/workflows/build-documentation.yml
index e3ef4eb..11982dc 100644
--- a/.github/workflows/build-documentation.yml
+++ b/.github/workflows/build-documentation.yml
@@ -1,6 +1,21 @@
name: Build Documentation
on:
+ push:
+ branches: [main]
+ paths:
+ - 'docs/**'
+ - 'dev/mkdocs.yml'
+ - '.readthedocs.yaml'
+ - 'dev/requirements-rtd.txt'
+ - 'ccbt/**'
+ pull_request:
+ branches: [main]
+ paths:
+ - 'docs/**'
+ - 'dev/mkdocs.yml'
+ - '.readthedocs.yaml'
+ - 'dev/requirements-rtd.txt'
workflow_dispatch:
# Can be triggered manually from any branch for testing
# Documentation is automatically published to Read the Docs when changes are pushed
@@ -94,16 +109,13 @@ jobs:
- name: Generate coverage report
run: |
- uv run pytest -c dev/pytest.ini tests/ --cov=ccbt --cov-report=html:site/reports/htmlcov || echo "⚠️ Coverage report generation failed, continuing..."
+ uv run pytest -c dev/pytest.ini tests/ --cov=ccbt --cov-report=html:site/reports/htmlcov
continue-on-error: true
- - name: Generate Bandit reports
+ - name: Generate Bandit report
run: |
uv run python tests/scripts/ensure_bandit_dir.py
- # Generate main bandit report
- uv run bandit -r ccbt/ -f json -o docs/reports/bandit/bandit-report.json --severity-level medium -x tests,benchmarks,dev,dist,docs,htmlcov,site,.venv,.pre-commit-cache,.pre-commit-home,.pytest_cache,.ruff_cache,.hypothesis,.github,.ccbt,.cursor,.benchmarks || echo "⚠️ Bandit report generation failed"
- # Generate all severity levels report
- uv run bandit -r ccbt/ -f json -o docs/reports/bandit/bandit-report-all.json --severity-level all -x tests,benchmarks,dev,dist,docs,htmlcov,site,.venv,.pre-commit-cache,.pre-commit-home,.pytest_cache,.ruff_cache,.hypothesis,.github,.ccbt,.cursor,.benchmarks || echo "⚠️ Bandit all report generation failed"
+ uv run bandit -r ccbt/ -f json -o docs/reports/bandit/bandit-report.json --severity-level medium -x tests,benchmarks,dev,dist,docs,htmlcov,site,.venv,.pre-commit-cache,.pre-commit-home,.pytest_cache,.ruff_cache,.hypothesis,.github,.ccbt,.cursor,.benchmarks
continue-on-error: true
- name: Ensure report files exist in documentation location
@@ -140,12 +152,19 @@ jobs:
- name: Build documentation
run: |
+ # Ensure coverage directory exists right before build (in case it was cleaned)
+ mkdir -p site/reports/htmlcov
+ if [ ! -f site/reports/htmlcov/index.html ]; then
+ echo '
Coverage Report
Coverage report not available. Run tests to generate coverage data.
' > site/reports/htmlcov/index.html
+ fi
+
# Use the patched build script which includes all necessary patches:
# - i18n plugin fixes (alternates attribute, Locale validation for 'arc')
# - git-revision-date-localized plugin fix for 'arc' locale
+ # - Autorefs plugin patch to suppress multiple primary URLs warnings
+ # - Coverage plugin patch to suppress missing directory warnings
# - All patches are applied before mkdocs is imported
# Set MKDOCS_STRICT=true to enable strict mode in CI
- # Reports are ensured to exist in previous step to avoid warnings
MKDOCS_STRICT=true uv run python dev/build_docs_patched_clean.py
- name: Upload documentation artifact
@@ -155,6 +174,34 @@ jobs:
path: site/
retention-days: 7
+ - name: Trigger Read the Docs build
+ if: env.RTD_API_TOKEN != ''
+ env:
+ RTD_API_TOKEN: ${{ secrets.RTD_API_TOKEN }}
+ RTD_PROJECT_SLUG: ${{ secrets.RTD_PROJECT_SLUG || 'ccbittorrent' }}
+ BRANCH_NAME: ${{ github.ref_name }}
+ run: |
+ echo "Triggering Read the Docs build for branch: $BRANCH_NAME"
+ curl -X POST \
+ -H "Authorization: Token $RTD_API_TOKEN" \
+ -H "Content-Type: application/json" \
+ "https://readthedocs.org/api/v3/projects/$RTD_PROJECT_SLUG/versions/$BRANCH_NAME/builds/" \
+ -d "{}" || echo "⚠️ Failed to trigger Read the Docs build. This may be expected if the branch is not configured in Read the Docs."
+ continue-on-error: true
+
+ - name: Read the Docs build info
+ if: env.RTD_API_TOKEN == ''
+ run: |
+ echo "ℹ️ Read the Docs API token not configured."
+ echo " To enable automatic Read the Docs builds from any branch:"
+ echo " 1. Get your Read the Docs API token from https://readthedocs.org/accounts/token/"
+ echo " 2. Add it as a GitHub secret named RTD_API_TOKEN"
+ echo " 3. Optionally set RTD_PROJECT_SLUG secret (defaults to 'ccbittorrent')"
+ echo ""
+ echo " Note: Read the Docs will only build branches configured in your project settings."
+ echo " By default, only 'main' and 'dev' branches are built automatically."
+
# Note: Documentation is automatically published to Read the Docs
- # when changes are pushed to the repository. No GitHub Pages deployment needed.
+ # when changes are pushed to the repository for configured branches (main/dev by default).
+ # To build other branches, configure them in Read the Docs project settings or use the API trigger above.
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 894dd8c..fd31cc7 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -38,6 +38,10 @@ jobs:
- name: Run Ruff formatting check
run: |
uv run ruff --config dev/ruff.toml format --check ccbt/
+
+ - name: Run compatibility linter
+ run: |
+ uv run python dev/compatibility_linter.py ccbt/
type-check:
name: type-check
diff --git a/.gitignore b/.gitignore
index b51a23d..7cc6e5b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,7 +12,8 @@ MagicMock
.coverage_html
.cursor
scripts
-compatibility_tests/
+compatibility_tests/
+lint_outputs/
# Byte-compiled / optimized / DLL files
__pycache__/
diff --git a/ccbt/cli/advanced_commands.py b/ccbt/cli/advanced_commands.py
index 1cc7453..f85576c 100644
--- a/ccbt/cli/advanced_commands.py
+++ b/ccbt/cli/advanced_commands.py
@@ -11,7 +11,7 @@
import tempfile
import time
from pathlib import Path
-from typing import Any
+from typing import Any, Optional
import click
from rich.console import Console
@@ -36,7 +36,7 @@ class OptimizationPreset:
def _apply_optimizations(
preset: str = OptimizationPreset.BALANCED,
save_to_file: bool = False,
- config_file: str | None = None,
+ config_file: Optional[str] = None,
) -> dict[str, Any]:
"""Apply performance optimizations based on system capabilities.
@@ -248,7 +248,7 @@ def performance(
optimize: bool,
preset: str,
save: bool,
- config_file: str | None,
+ config_file: Optional[str],
benchmark: bool,
profile: bool,
) -> None:
diff --git a/ccbt/cli/checkpoints.py b/ccbt/cli/checkpoints.py
index 6a020be..b9f4de5 100644
--- a/ccbt/cli/checkpoints.py
+++ b/ccbt/cli/checkpoints.py
@@ -9,7 +9,7 @@
import asyncio
import time
from pathlib import Path
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional
from rich.progress import Progress, SpinnerColumn, TextColumn, TimeElapsedColumn
from rich.table import Table
@@ -236,7 +236,7 @@ def backup_checkpoint(
def restore_checkpoint(
config_manager: ConfigManager,
backup_file: str,
- info_hash: str | None,
+ info_hash: Optional[str],
console: Console,
) -> None:
"""Restore a checkpoint from a backup file."""
diff --git a/ccbt/cli/config_commands.py b/ccbt/cli/config_commands.py
index f2423c3..59557db 100644
--- a/ccbt/cli/config_commands.py
+++ b/ccbt/cli/config_commands.py
@@ -15,6 +15,7 @@
import logging
import os
from pathlib import Path
+from typing import Optional, Union
import click
import toml
@@ -26,7 +27,7 @@
logger = logging.getLogger(__name__)
-def _find_project_root(start_path: Path | None = None) -> Path | None:
+def _find_project_root(start_path: Optional[Path] = None) -> Optional[Path]:
"""Find the project root directory by looking for pyproject.toml or .git.
Walks up the directory tree from start_path (or current directory) until
@@ -56,7 +57,7 @@ def _find_project_root(start_path: Path | None = None) -> Path | None:
def _should_skip_project_local_write(
- config_file: Path | None, explicit_config_file: str | Path | None
+ config_file: Optional[Path], explicit_config_file: Optional[Union[str, Path]]
) -> bool:
"""Check if we should skip writing to project-local ccbt.toml during tests.
@@ -130,9 +131,9 @@ def config():
@click.option("--config", "config_file", type=click.Path(exists=True), default=None)
def show_config(
format_: str,
- section: str | None,
- key: str | None,
- config_file: str | None,
+ section: Optional[str],
+ key: Optional[str],
+ config_file: Optional[str],
):
"""Show current configuration in the desired format."""
cm = ConfigManager(config_file)
@@ -174,7 +175,7 @@ def show_config(
@config.command("get")
@click.argument("key")
@click.option("--config", "config_file", type=click.Path(exists=True), default=None)
-def get_value(key: str, config_file: str | None):
+def get_value(key: str, config_file: Optional[str]):
"""Get a specific configuration value by dotted path."""
cm = ConfigManager(config_file)
data = cm.config.model_dump(mode="json")
@@ -223,9 +224,9 @@ def set_value(
value: str,
global_flag: bool,
local_flag: bool,
- config_file: str | None,
- restart_daemon_flag: bool | None,
- no_restart_daemon_flag: bool | None,
+ config_file: Optional[str],
+ restart_daemon_flag: Optional[bool],
+ no_restart_daemon_flag: Optional[bool],
):
"""Set a configuration value and persist to TOML file.
@@ -325,12 +326,12 @@ def parse_value(raw: str):
help=_("Skip daemon restart even if needed"),
)
def reset_config(
- section: str | None,
- key: str | None,
+ section: Optional[str],
+ key: Optional[str],
confirm: bool,
- config_file: str | None,
- restart_daemon_flag: bool | None,
- no_restart_daemon_flag: bool | None,
+ config_file: Optional[str],
+ restart_daemon_flag: Optional[bool],
+ no_restart_daemon_flag: Optional[bool],
):
"""Reset configuration to defaults (optionally for a section/key)."""
if not confirm:
@@ -399,7 +400,7 @@ def reset_config(
@config.command("validate")
@click.option("--config", "config_file", type=click.Path(exists=True), default=None)
-def validate_config_cmd(config_file: str | None):
+def validate_config_cmd(config_file: Optional[str]):
"""Validate configuration file and print result."""
try:
ConfigManager(config_file)
@@ -414,10 +415,10 @@ def validate_config_cmd(config_file: str | None):
@click.option("--backup", is_flag=True, help=_("Create backup before migration"))
@click.option("--config", "config_file", type=click.Path(exists=True), default=None)
def migrate_config_cmd(
- from_version: str | None, # noqa: ARG001
- to_version: str | None, # noqa: ARG001
+ from_version: Optional[str], # noqa: ARG001
+ to_version: Optional[str], # noqa: ARG001
backup: bool,
- config_file: str | None,
+ config_file: Optional[str],
):
"""Migrate configuration between versions (no-op placeholder)."""
# For now, this is a placeholder that just validates and echoes
diff --git a/ccbt/cli/config_commands_extended.py b/ccbt/cli/config_commands_extended.py
index 924d19a..35f7415 100644
--- a/ccbt/cli/config_commands_extended.py
+++ b/ccbt/cli/config_commands_extended.py
@@ -49,6 +49,7 @@
import logging
import os
from pathlib import Path
+from typing import Optional
import click
import toml
@@ -130,7 +131,7 @@ def config_extended():
help="Specific model to generate schema for (e.g., Config, NetworkConfig)",
)
@click.option("--output", "-o", type=click.Path(), help="Output file path")
-def schema_cmd(format_: str, model: str | None, output: str | None):
+def schema_cmd(format_: str, model: Optional[str], output: Optional[str]):
"""Generate JSON schema for configuration models."""
try:
if model:
@@ -209,10 +210,10 @@ def schema_cmd(format_: str, model: str | None, output: str | None):
def template_cmd(
template_name: str,
apply: bool,
- output: str | None,
- config_file: str | None,
- restart_daemon_flag: bool | None,
- no_restart_daemon_flag: bool | None,
+ output: Optional[str],
+ config_file: Optional[str],
+ restart_daemon_flag: Optional[bool],
+ no_restart_daemon_flag: Optional[bool],
):
"""Manage configuration templates."""
try:
@@ -333,10 +334,10 @@ def template_cmd(
def profile_cmd(
profile_name: str,
apply: bool,
- output: str | None,
- config_file: str | None,
- restart_daemon_flag: bool | None,
- no_restart_daemon_flag: bool | None,
+ output: Optional[str],
+ config_file: Optional[str],
+ restart_daemon_flag: Optional[bool],
+ no_restart_daemon_flag: Optional[bool],
):
"""Manage configuration profiles."""
try:
@@ -448,7 +449,7 @@ def profile_cmd(
help="Compress backup",
)
@click.option("--config", "config_file", type=click.Path(exists=True), default=None)
-def backup_cmd(description: str, compress: bool, config_file: str | None):
+def backup_cmd(description: str, compress: bool, config_file: Optional[str]):
"""Create configuration backup."""
try:
cm = ConfigManager(config_file)
@@ -488,7 +489,7 @@ def backup_cmd(description: str, compress: bool, config_file: str | None):
help="Skip confirmation prompt",
)
@click.option("--config", "config_file", type=click.Path(), default=None)
-def restore_cmd(backup_file: str, confirm: bool, config_file: str | None):
+def restore_cmd(backup_file: str, confirm: bool, config_file: Optional[str]):
"""Restore configuration from backup."""
try:
if not confirm:
@@ -578,7 +579,7 @@ def list_backups_cmd(format_: str):
type=click.Path(),
help="Output file path",
)
-def diff_cmd(config1: str, config2: str, format_: str, output: str | None):
+def diff_cmd(config1: str, config2: str, format_: str, output: Optional[str]):
"""Compare two configuration files."""
try:
# ConfigDiff instance is not required; use classmethod compare_files
@@ -696,10 +697,10 @@ def capabilities_summary_cmd():
)
def auto_tune_cmd(
apply: bool,
- output: str | None,
- config_file: str | None,
- restart_daemon_flag: bool | None,
- no_restart_daemon_flag: bool | None,
+ output: Optional[str],
+ config_file: Optional[str],
+ restart_daemon_flag: Optional[bool],
+ no_restart_daemon_flag: Optional[bool],
):
"""Auto-tune configuration based on system capabilities."""
try:
@@ -792,7 +793,7 @@ def auto_tune_cmd(
help="Output file path",
)
@click.option("--config", "config_file", type=click.Path(exists=True), default=None)
-def export_cmd(format_: str, output: str, config_file: str | None):
+def export_cmd(format_: str, output: str, config_file: Optional[str]):
"""Export configuration to file."""
try:
cm = ConfigManager(config_file)
@@ -857,11 +858,11 @@ def export_cmd(format_: str, output: str, config_file: str | None):
)
def import_cmd(
import_file: str,
- format_: str | None,
- output: str | None,
- config_file: str | None,
- restart_daemon_flag: bool | None,
- no_restart_daemon_flag: bool | None,
+ format_: Optional[str],
+ output: Optional[str],
+ config_file: Optional[str],
+ restart_daemon_flag: Optional[bool],
+ no_restart_daemon_flag: Optional[bool],
):
"""Import configuration from file."""
try:
@@ -967,7 +968,7 @@ def import_cmd(
is_flag=True,
help="Show detailed validation results",
)
-def validate_cmd(config_file: str | None, detailed: bool):
+def validate_cmd(config_file: Optional[str], detailed: bool):
"""Validate configuration file."""
try:
cm = ConfigManager(config_file)
diff --git a/ccbt/cli/config_utils.py b/ccbt/cli/config_utils.py
index 93419ec..e12e53a 100644
--- a/ccbt/cli/config_utils.py
+++ b/ccbt/cli/config_utils.py
@@ -6,7 +6,7 @@
from __future__ import annotations
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional
from rich.console import Console
from rich.prompt import Confirm
@@ -249,7 +249,7 @@ async def _restart_daemon_async(force: bool = False) -> bool:
def restart_daemon_if_needed(
_config_manager: ConfigManager,
requires_restart: bool,
- auto_restart: bool | None = None,
+ auto_restart: Optional[bool] = None,
force: bool = False,
) -> bool:
"""Restart daemon if needed and running.
diff --git a/ccbt/cli/create_torrent.py b/ccbt/cli/create_torrent.py
index 81f84a5..37383b5 100644
--- a/ccbt/cli/create_torrent.py
+++ b/ccbt/cli/create_torrent.py
@@ -7,6 +7,7 @@
import logging
from pathlib import Path
+from typing import Optional
import click
from rich.console import Console
@@ -89,15 +90,15 @@
def create_torrent(
_ctx: click.Context,
source: Path,
- output: Path | None,
+ output: Optional[Path],
format_v2: bool,
format_hybrid: bool,
format_v1: bool,
tracker: tuple[str, ...],
web_seed: tuple[str, ...],
- comment: str | None,
+ comment: Optional[str],
created_by: str,
- piece_length: int | None,
+ piece_length: Optional[int],
private: bool,
_verbose: int = 0, # ARG001: Unused parameter (Click count=True)
) -> None:
diff --git a/ccbt/cli/daemon_commands.py b/ccbt/cli/daemon_commands.py
index f330800..334ca37 100644
--- a/ccbt/cli/daemon_commands.py
+++ b/ccbt/cli/daemon_commands.py
@@ -10,7 +10,7 @@
import sys
import time
import warnings
-from typing import Any
+from typing import Any, Optional
import click
from rich.console import Console
@@ -139,8 +139,8 @@ def daemon():
)
def start(
foreground: bool,
- config: str | None,
- port: int | None,
+ config: Optional[str],
+ port: Optional[int],
regenerate_api_key: bool,
verbose: int,
vv: bool,
@@ -553,7 +553,7 @@ def run_splash():
async def _run_daemon_foreground(
- _daemon_config: DaemonConfig, config_file: str | None
+ _daemon_config: DaemonConfig, config_file: Optional[str]
) -> None:
"""Run daemon in foreground mode."""
from ccbt.daemon.main import DaemonMain
@@ -569,7 +569,7 @@ async def _run_daemon_foreground(
def _wait_for_daemon(
daemon_config: DaemonConfig,
timeout: float = 15.0,
- splash_manager: Any | None = None,
+ splash_manager: Optional[Any] = None,
) -> bool:
"""Wait for daemon to be ready.
@@ -635,11 +635,11 @@ async def _check_daemon_loop() -> bool:
def _wait_for_daemon_with_progress(
daemon_config: DaemonConfig,
timeout: float = 15.0,
- progress: Progress | None = None,
- task: int | None = None,
- verbosity: Any | None = None,
- daemon_pid: int | None = None,
- splash_manager: Any | None = None,
+ progress: Optional[Any] = None, # Optional[Progress]
+ task: Optional[int] = None,
+ verbosity: Optional[Any] = None,
+ daemon_pid: Optional[int] = None,
+ splash_manager: Optional[Any] = None,
) -> bool:
"""Wait for daemon to be ready with progress indicator.
@@ -723,7 +723,7 @@ async def _check_daemon_stage() -> tuple[bool, int, str]:
# Fallback: try to get PID from file (may not exist yet)
initial_pid = daemon_manager.get_pid()
- def _is_process_alive(pid: int | None) -> bool:
+ def _is_process_alive(pid: Optional[int]) -> bool:
"""Check if process is actually running.
Args:
diff --git a/ccbt/cli/downloads.py b/ccbt/cli/downloads.py
index 106d420..92eaf7f 100644
--- a/ccbt/cli/downloads.py
+++ b/ccbt/cli/downloads.py
@@ -8,7 +8,7 @@
import asyncio
import contextlib
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.cli.interactive import InteractiveCLI
from ccbt.cli.progress import ProgressManager
@@ -27,9 +27,9 @@ async def start_interactive_download(
torrent_data: dict[str, Any],
console: Console,
resume: bool = False,
- queue_priority: str | None = None,
- files_selection: tuple[int, ...] | None = None,
- file_priorities: tuple[str, ...] | None = None,
+ queue_priority: Optional[str] = None,
+ files_selection: Optional[tuple[int, ...]] = None,
+ file_priorities: Optional[tuple[str, ...]] = None,
) -> None:
"""Start an interactive download session with user prompts.
@@ -129,9 +129,9 @@ async def start_basic_download(
torrent_data: dict[str, Any],
console: Console,
resume: bool = False,
- queue_priority: str | None = None,
- files_selection: tuple[int, ...] | None = None,
- file_priorities: tuple[str, ...] | None = None,
+ queue_priority: Optional[str] = None,
+ files_selection: Optional[tuple[int, ...]] = None,
+ file_priorities: Optional[tuple[str, ...]] = None,
) -> None:
"""Start a basic download session without interactive prompts.
diff --git a/ccbt/cli/filter_commands.py b/ccbt/cli/filter_commands.py
index cc4db16..30feeb6 100644
--- a/ccbt/cli/filter_commands.py
+++ b/ccbt/cli/filter_commands.py
@@ -4,6 +4,7 @@
import asyncio
import ipaddress
+from typing import Optional
import click
from rich.console import Console
@@ -205,7 +206,7 @@ async def _list_rules() -> None:
help="Filter mode (uses default if not specified)",
)
@click.pass_context
-def filter_load(ctx, file_path: str, mode: str | None) -> None:
+def filter_load(ctx, file_path: str, mode: Optional[str]) -> None:
"""Load filter rules from file."""
console = Console()
diff --git a/ccbt/cli/interactive.py b/ccbt/cli/interactive.py
index 0808e6e..04fb276 100644
--- a/ccbt/cli/interactive.py
+++ b/ccbt/cli/interactive.py
@@ -18,7 +18,7 @@
import logging
import time
from pathlib import Path
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.i18n import _
@@ -29,7 +29,7 @@
def _agent_debug_log(
hypothesis_id: str,
message: str,
- data: dict[str, Any] | None = None,
+ data: Optional[dict[str, Any]] = None,
) -> None:
payload = {
"sessionId": "debug-session",
@@ -115,10 +115,6 @@ def _agent_debug_log(
logger = logging.getLogger(__name__)
if TYPE_CHECKING: # pragma: no cover - TYPE_CHECKING imports not executed at runtime
- from rich.progress import (
- Progress,
- )
-
from ccbt.session.session import AsyncSessionManager
@@ -130,7 +126,7 @@ def __init__(
executor: UnifiedCommandExecutor,
adapter: SessionAdapter,
console: Console,
- session: AsyncSessionManager | None = None,
+ session: Optional[AsyncSessionManager] = None,
):
"""Initialize interactive CLI interface.
@@ -151,9 +147,9 @@ def __init__(
# Daemon mode - no direct session access
self.session = None
self.running = False
- self.current_torrent: dict[str, Any] | None = None
+ self.current_torrent: Optional[dict[str, Any]] = None
self.layout = Layout()
- self.live_display: Live | None = None
+ self.live_display: Optional[Any] = None # Optional[Live]
# Statistics
self.stats = {
@@ -165,12 +161,12 @@ def __init__(
}
# Track current torrent info-hash (hex) for control commands
- self.current_info_hash_hex: str | None = None
+ self.current_info_hash_hex: Optional[str] = None
self._last_peers: list[dict[str, Any]] = []
# Download progress widgets
- self._download_progress: Progress | None = None
- self._download_task: int | None = None
+ self._download_progress: Optional[Any] = None # Optional[Progress]
+ self._download_task: Optional[int] = None
self.progress_manager = ProgressManager(self.console)
# Commands
diff --git a/ccbt/cli/ipfs_commands.py b/ccbt/cli/ipfs_commands.py
index e5ee303..ee3ecfe 100644
--- a/ccbt/cli/ipfs_commands.py
+++ b/ccbt/cli/ipfs_commands.py
@@ -6,6 +6,7 @@
import json
import logging
from pathlib import Path
+from typing import Any, Optional
import click
from rich.console import Console
@@ -24,7 +25,7 @@
logger = logging.getLogger(__name__)
-async def _get_ipfs_protocol() -> IPFSProtocol | None:
+async def _get_ipfs_protocol() -> Optional[Any]: # Optional[IPFSProtocol]
"""Get IPFS protocol instance from session manager.
Note: If daemon is running, this will check via IPC but cannot return
@@ -147,7 +148,7 @@ async def _add() -> None:
"--output", "-o", type=click.Path(path_type=Path), help="Output file path"
)
@click.option("--json", "json_output", is_flag=True, help="Output as JSON")
-def ipfs_get(cid: str, output: Path | None, json_output: bool) -> None:
+def ipfs_get(cid: str, output: Optional[Path], json_output: bool) -> None:
"""Get content from IPFS by CID."""
console = Console()
@@ -240,7 +241,7 @@ async def _unpin() -> None:
@click.argument("cid", type=str, required=False)
@click.option("--all", "all_stats", is_flag=True, help="Show stats for all content")
@click.option("--json", "json_output", is_flag=True, help="Output as JSON")
-def ipfs_stats(cid: str | None, all_stats: bool, json_output: bool) -> None:
+def ipfs_stats(cid: Optional[str], all_stats: bool, json_output: bool) -> None:
"""Show IPFS content statistics."""
console = Console()
diff --git a/ccbt/cli/main.py b/ccbt/cli/main.py
index 7c65c32..5075544 100644
--- a/ccbt/cli/main.py
+++ b/ccbt/cli/main.py
@@ -18,7 +18,7 @@
import logging
import time
from pathlib import Path
-from typing import Any
+from typing import Any, Optional
import click
from rich.console import Console
@@ -358,7 +358,7 @@ async def _route_to_daemon_if_running(
logger.debug(_("No daemon config or API key found - will create local session"))
return False
- client: IPCClient | None = None
+ client: Optional[Any] = None # Optional[IPCClient]
try:
# CRITICAL FIX: Create client and verify connection before attempting operation
# Explicitly use host/port from config to ensure consistency with daemon
@@ -662,7 +662,7 @@ async def _route_to_daemon_if_running(
logger.debug(_("Error closing IPC client: %s"), e)
-async def _get_executor() -> tuple[Any | None, bool]:
+async def _get_executor() -> tuple[Optional[Any], bool]:
"""Get command executor (daemon or local).
Returns:
@@ -800,7 +800,9 @@ async def _get_executor() -> tuple[Any | None, bool]:
return (executor, True)
-async def _check_daemon_and_get_client() -> tuple[bool, IPCClient | None]:
+async def _check_daemon_and_get_client() -> tuple[
+ bool, Optional[Any]
+]: # Optional[IPCClient]
"""Check if daemon is running and return IPC client if available.
Returns:
@@ -1234,7 +1236,7 @@ def _apply_nat_overrides(cfg: Config, options: dict[str, Any]) -> None:
def _apply_protocol_v2_overrides(cfg: Config, options: dict[str, Any]) -> None:
"""Apply Protocol v2-related CLI overrides."""
- # v2_only flag sets all v2 options
+ # v2_only flag sets all v2 options (takes precedence)
if options.get("v2_only"):
cfg.network.protocol_v2.enable_protocol_v2 = True
cfg.network.protocol_v2.prefer_protocol_v2 = True
@@ -1433,6 +1435,10 @@ def cli(ctx, config, verbose, debug):
)
@click.option("--unchoke-interval", type=float, help=_("Unchoke interval (s)"))
@click.option("--metrics-interval", type=float, help=_("Metrics interval (s)"))
+@click.option("--enable-v2", "enable_v2", is_flag=True, help=_("Enable Protocol v2 (BEP 52)"))
+@click.option("--disable-v2", "disable_v2", is_flag=True, help=_("Disable Protocol v2 (BEP 52)"))
+@click.option("--prefer-v2", "prefer_v2", is_flag=True, help=_("Prefer Protocol v2 when available"))
+@click.option("--v2-only", "v2_only", is_flag=True, help=_("Use Protocol v2 only (disable v1)"))
@click.pass_context
def download(
ctx,
@@ -1769,6 +1775,10 @@ async def _add_torrent_to_daemon():
)
@click.option("--unchoke-interval", type=float, help=_("Unchoke interval (s)"))
@click.option("--metrics-interval", type=float, help=_("Metrics interval (s)"))
+@click.option("--enable-v2", "enable_v2", is_flag=True, help=_("Enable Protocol v2 (BEP 52)"))
+@click.option("--disable-v2", "disable_v2", is_flag=True, help=_("Disable Protocol v2 (BEP 52)"))
+@click.option("--prefer-v2", "prefer_v2", is_flag=True, help=_("Prefer Protocol v2 when available"))
+@click.option("--v2-only", "v2_only", is_flag=True, help=_("Use Protocol v2 only (disable v1)"))
@click.pass_context
def magnet(
ctx,
@@ -2237,7 +2247,7 @@ def config(ctx):
@click.option("--set", "locale_code", help=_("Set locale (e.g., 'en', 'es', 'fr')"))
@click.option("--list", "list_locales", is_flag=True, help=_("List available locales"))
@click.pass_context
-def language(ctx, locale_code: str | None, list_locales: bool) -> None:
+def language(ctx, locale_code: Optional[str], list_locales: bool) -> None:
"""Manage language/locale settings."""
from pathlib import Path
diff --git a/ccbt/cli/monitoring_commands.py b/ccbt/cli/monitoring_commands.py
index bc63c52..7a248c6 100644
--- a/ccbt/cli/monitoring_commands.py
+++ b/ccbt/cli/monitoring_commands.py
@@ -5,7 +5,7 @@
import asyncio
import contextlib
import logging
-from typing import TYPE_CHECKING, Any
+from typing import Any, Optional
import click
from rich.console import Console
@@ -13,9 +13,6 @@
from ccbt.i18n import _
from ccbt.monitoring import get_alert_manager
-if TYPE_CHECKING:
- from ccbt.session.session import AsyncSessionManager
-
logger = logging.getLogger(__name__)
# Exception messages
@@ -43,7 +40,7 @@
help="Disable splash screen (useful for debugging)",
)
def dashboard(
- refresh: float, rules: str | None, no_daemon: bool, no_splash: bool
+ refresh: float, rules: Optional[str], no_daemon: bool, no_splash: bool
) -> None:
"""Start terminal monitoring dashboard (Textual)."""
console = Console()
@@ -73,7 +70,9 @@ def dashboard(
console=console,
)
- session: AsyncSessionManager | DaemonInterfaceAdapter | None = None
+ session: Optional[Any] = (
+ None # Optional[AsyncSessionManager | DaemonInterfaceAdapter]
+ )
if no_daemon:
# User explicitly requested local session
@@ -222,13 +221,13 @@ def alerts(
remove_rule: bool,
clear_active: bool,
test_rule: bool,
- load: str | None,
- save: str | None,
- name: str | None,
- metric: str | None,
- condition: str | None,
+ load: Optional[str],
+ save: Optional[str],
+ name: Optional[str],
+ metric: Optional[str],
+ condition: Optional[str],
severity: str,
- value: str | None,
+ value: Optional[str],
) -> None:
"""Manage alert rules (add/list/remove/test/clear)."""
console = Console()
@@ -416,9 +415,9 @@ def alerts(
)
def metrics(
format_: str,
- output: str | None,
+ output: Optional[str],
duration: float,
- interval: float | None,
+ interval: Optional[float],
include_system: bool,
include_performance: bool,
) -> None:
diff --git a/ccbt/cli/overrides.py b/ccbt/cli/overrides.py
index c6d870a..f21241e 100644
--- a/ccbt/cli/overrides.py
+++ b/ccbt/cli/overrides.py
@@ -488,11 +488,14 @@ def _apply_utp_overrides(cfg: Config, options: dict[str, Any]) -> None:
def _apply_protocol_v2_overrides(cfg: Config, options: dict[str, Any]) -> None:
+ """Apply Protocol v2-related CLI overrides."""
+ # v2_only flag sets all v2 options (takes precedence)
if options.get("v2_only"):
cfg.network.protocol_v2.enable_protocol_v2 = True
cfg.network.protocol_v2.prefer_protocol_v2 = True
cfg.network.protocol_v2.support_hybrid = False
- if not options.get("v2_only"):
+ else:
+ # Individual flags (only if v2_only is not set)
if options.get("enable_v2"):
cfg.network.protocol_v2.enable_protocol_v2 = True
if options.get("disable_v2"):
diff --git a/ccbt/cli/progress.py b/ccbt/cli/progress.py
index 24e8e18..c67ef34 100644
--- a/ccbt/cli/progress.py
+++ b/ccbt/cli/progress.py
@@ -13,7 +13,7 @@
from __future__ import annotations
import contextlib
-from typing import TYPE_CHECKING, Any, Callable, Iterator, Mapping
+from typing import TYPE_CHECKING, Any, Callable, Iterator, Mapping, Optional, Union
from rich.progress import (
BarColumn,
@@ -46,7 +46,7 @@ def __init__(self, console: Console):
self.active_progress: dict[str, Progress] = {}
self.progress_tasks: dict[str, Any] = {}
- def create_progress(self, _description: str | None = None) -> Progress:
+ def create_progress(self, _description: Optional[str] = None) -> Progress:
"""Create a new progress bar with i18n support.
Args:
@@ -67,7 +67,7 @@ def create_progress(self, _description: str | None = None) -> Progress:
)
def create_download_progress(
- self, _torrent: TorrentInfo | Mapping[str, Any]
+ self, _torrent: Union[TorrentInfo, Mapping[str, Any]]
) -> Progress:
"""Create download progress bar with i18n support."""
return Progress(
@@ -83,7 +83,7 @@ def create_download_progress(
)
def create_upload_progress(
- self, _torrent: TorrentInfo | Mapping[str, Any]
+ self, _torrent: Union[TorrentInfo, Mapping[str, Any]]
) -> Progress:
"""Create upload progress bar with i18n support."""
return Progress(
@@ -389,7 +389,7 @@ def create_success_progress(self, _torrent: TorrentInfo) -> Progress:
)
def create_operation_progress(
- self, _description: str | None = None, show_speed: bool = False
+ self, _description: Optional[str] = None, show_speed: bool = False
) -> Progress:
"""Create a generic operation progress bar.
@@ -415,7 +415,9 @@ def create_operation_progress(
return Progress(*columns, console=self.console)
- def create_multi_task_progress(self, _description: str | None = None) -> Progress:
+ def create_multi_task_progress(
+ self, _description: Optional[str] = None
+ ) -> Progress:
"""Create a progress bar for multiple parallel tasks.
Args:
@@ -437,7 +439,7 @@ def create_multi_task_progress(self, _description: str | None = None) -> Progres
)
def create_indeterminate_progress(
- self, _description: str | None = None
+ self, _description: Optional[str] = None
) -> Progress:
"""Create an indeterminate progress bar (no known total).
@@ -460,7 +462,7 @@ def create_indeterminate_progress(
def with_progress(
self,
description: str,
- total: int | None = None,
+ total: Optional[int] = None,
progress_type: str = "operation",
) -> Iterator[tuple[Progress, int]]:
"""Context manager for automatic progress tracking.
@@ -507,7 +509,7 @@ def with_progress(
def create_progress_callback(
self, progress: Progress, task_id: int
- ) -> Callable[[float, dict[str, Any] | None], None]:
+ ) -> Callable[[float, Optional[dict[str, Any]]], None]:
"""Create a progress callback for async operations.
Args:
@@ -519,7 +521,7 @@ def create_progress_callback(
"""
- def callback(completed: float, fields: dict[str, Any] | None = None) -> None:
+ def callback(completed: float, fields: Optional[dict[str, Any]] = None) -> None:
"""Update progress with completed amount and optional fields."""
progress.update(task_id, completed=completed)
if fields:
diff --git a/ccbt/cli/proxy_commands.py b/ccbt/cli/proxy_commands.py
index cb6f4bf..63e1980 100644
--- a/ccbt/cli/proxy_commands.py
+++ b/ccbt/cli/proxy_commands.py
@@ -5,6 +5,7 @@
import asyncio
import os
from pathlib import Path # noqa: TC003 - Used at runtime for path operations
+from typing import Optional
import click
from rich.console import Console
@@ -17,7 +18,7 @@
from ccbt.proxy.exceptions import ProxyError
-def _should_skip_project_local_write(config_file: Path | None) -> bool:
+def _should_skip_project_local_write(config_file: Optional[Path]) -> bool:
"""Check if we should skip writing to project-local ccbt.toml during tests.
Args:
@@ -95,12 +96,12 @@ def proxy_set(
host: str,
port: int,
proxy_type: str,
- username: str | None,
- password: str | None,
+ username: Optional[str],
+ password: Optional[str],
for_trackers: bool,
for_peers: bool,
for_webseeds: bool,
- bypass_list: str | None,
+ bypass_list: Optional[str],
) -> None:
"""Set proxy configuration."""
console = Console()
diff --git a/ccbt/cli/resume.py b/ccbt/cli/resume.py
index 0792684..b833995 100644
--- a/ccbt/cli/resume.py
+++ b/ccbt/cli/resume.py
@@ -7,7 +7,7 @@
from __future__ import annotations
import asyncio
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.cli.interactive import InteractiveCLI
@@ -16,12 +16,9 @@
from ccbt.cli.progress import ProgressManager
from ccbt.i18n import _
-if TYPE_CHECKING:
- from ccbt.session.session import AsyncSessionManager
-
async def resume_download(
- session: AsyncSessionManager | None,
+ session: Optional[Any], # Optional[AsyncSessionManager]
info_hash_bytes: bytes,
checkpoint: Any,
interactive: bool,
diff --git a/ccbt/cli/ssl_commands.py b/ccbt/cli/ssl_commands.py
index 20bcadf..57ac5e7 100644
--- a/ccbt/cli/ssl_commands.py
+++ b/ccbt/cli/ssl_commands.py
@@ -5,6 +5,7 @@
import logging
import os
from pathlib import Path
+from typing import Optional
import click
from rich.console import Console
@@ -18,7 +19,7 @@
console = Console()
-def _should_skip_project_local_write(config_file: Path | None) -> bool:
+def _should_skip_project_local_write(config_file: Optional[Path]) -> bool:
"""Check if we should skip writing to project-local ccbt.toml during tests.
Args:
diff --git a/ccbt/cli/task_detector.py b/ccbt/cli/task_detector.py
index b7f80c6..7a8f0f6 100644
--- a/ccbt/cli/task_detector.py
+++ b/ccbt/cli/task_detector.py
@@ -6,7 +6,7 @@
from __future__ import annotations
from dataclasses import dataclass
-from typing import Any, ClassVar
+from typing import Any, ClassVar, Optional
@dataclass
@@ -79,7 +79,7 @@ def is_long_running(self, command_name: str) -> bool:
return task_info.expected_duration >= self.threshold
return False
- def get_task_info(self, command_name: str) -> TaskInfo | None:
+ def get_task_info(self, command_name: str) -> Optional[Any]: # Optional[TaskInfo]
"""Get task information for a command.
Args:
@@ -148,7 +148,7 @@ def register_command(
)
@staticmethod
- def from_command(ctx: dict[str, Any] | None = None) -> TaskDetector:
+ def from_command(ctx: Optional[dict[str, Any]] = None) -> TaskDetector:
"""Create TaskDetector from Click context.
Args:
diff --git a/ccbt/cli/tonic_commands.py b/ccbt/cli/tonic_commands.py
index 0a0b6fe..66e277f 100644
--- a/ccbt/cli/tonic_commands.py
+++ b/ccbt/cli/tonic_commands.py
@@ -10,6 +10,7 @@
import asyncio
import logging
from pathlib import Path
+from typing import Optional
import click
from rich.console import Console
@@ -74,12 +75,12 @@ def tonic() -> None:
def tonic_create(
ctx,
folder_path: str,
- output_path: str | None,
+ output_path: Optional[str],
sync_mode: str,
- source_peers: str | None,
- allowlist_path: str | None,
- git_ref: str | None,
- announce: str | None,
+ source_peers: Optional[str],
+ allowlist_path: Optional[str],
+ git_ref: Optional[str],
+ announce: Optional[str],
generate_link: bool,
) -> None:
"""Generate .tonic file from folder."""
@@ -114,8 +115,8 @@ def tonic_create(
def tonic_link(
_ctx,
folder_path: str,
- tonic_file: str | None,
- sync_mode: str | None,
+ tonic_file: Optional[str],
+ sync_mode: Optional[str],
) -> None:
"""Generate tonic?: link from folder or .tonic file."""
console = Console()
@@ -138,7 +139,7 @@ def tonic_link(
allowlist_hash = parsed_data.get("allowlist_hash")
# Flatten trackers
- tracker_list: list[str] | None = None
+ tracker_list: Optional[list[str]] = None
if trackers:
tracker_list = [url for tier in trackers for url in tier]
@@ -192,7 +193,7 @@ def tonic_link(
def tonic_sync(
_ctx,
tonic_input: str,
- output_dir: str | None,
+ output_dir: Optional[str],
check_interval: float,
) -> None:
"""Start syncing folder from .tonic file or tonic?: link."""
@@ -331,8 +332,8 @@ def tonic_allowlist_add(
_ctx,
allowlist_path: str,
peer_id: str,
- public_key: str | None,
- alias: str | None,
+ public_key: Optional[str],
+ alias: Optional[str],
) -> None:
"""Add peer to allowlist."""
console = Console()
@@ -493,14 +494,14 @@ def tonic_mode_set(
_ctx,
folder_path: str,
sync_mode: str,
- source_peers: str | None,
+ source_peers: Optional[str],
) -> None:
"""Set synchronization mode for folder."""
console = Console()
try:
# Parse source peers
- source_peers_list: list[str] | None = None
+ source_peers_list: Optional[list[str]] = None
if source_peers:
source_peers_list = [
p.strip() for p in source_peers.split(",") if p.strip()
diff --git a/ccbt/cli/tonic_generator.py b/ccbt/cli/tonic_generator.py
index 568fa37..b017801 100644
--- a/ccbt/cli/tonic_generator.py
+++ b/ccbt/cli/tonic_generator.py
@@ -9,6 +9,7 @@
import asyncio
import logging
from pathlib import Path
+from typing import Optional, Union
import click
from rich.console import Console
@@ -27,17 +28,17 @@
async def generate_tonic_from_folder(
- folder_path: str | Path,
- output_path: str | Path | None = None,
+ folder_path: Union[str, Path],
+ output_path: Optional[Union[str, Path]] = None,
sync_mode: str = "best_effort",
- source_peers: list[str] | None = None,
- allowlist_path: str | Path | None = None,
- git_ref: str | None = None,
- announce: str | None = None,
- announce_list: list[list[str]] | None = None,
- comment: str | None = None,
+ source_peers: Optional[list[str]] = None,
+ allowlist_path: Optional[Union[str, Path]] = None,
+ git_ref: Optional[str] = None,
+ announce: Optional[str] = None,
+ announce_list: Optional[list[list[str]]] = None,
+ comment: Optional[str] = None,
generate_link: bool = False,
-) -> tuple[bytes, str | None]:
+) -> tuple[bytes, Optional[str]]:
"""Generate .tonic file from folder.
Args:
@@ -118,7 +119,7 @@ async def generate_tonic_from_folder(
progress.update(task, completed=True)
# Get git refs if git versioning enabled
- git_refs: list[str] | None = None
+ git_refs: Optional[list[str]] = None
git_versioning = GitVersioning(folder_path=folder)
if git_versioning.is_git_repo():
if git_ref:
@@ -133,7 +134,7 @@ async def generate_tonic_from_folder(
git_refs = recent_refs
# Get allowlist hash if allowlist provided
- allowlist_hash: bytes | None = None
+ allowlist_hash: Optional[bytes] = None
if allowlist_path:
allowlist = XetAllowlist(allowlist_path=allowlist_path)
await allowlist.load()
@@ -184,7 +185,7 @@ async def generate_tonic_from_folder(
)
# Generate link if requested
- tonic_link: str | None = None
+ tonic_link: Optional[str] = None
if generate_link:
tonic_link = generate_tonic_link(
info_hash=info_hash,
@@ -245,19 +246,19 @@ async def generate_tonic_from_folder(
def tonic_generate(
_ctx,
folder_path: str,
- output_path: str | None,
+ output_path: Optional[str],
sync_mode: str,
- source_peers: str | None,
- allowlist_path: str | None,
- git_ref: str | None,
- announce: str | None,
+ source_peers: Optional[str],
+ allowlist_path: Optional[str],
+ git_ref: Optional[str],
+ announce: Optional[str],
generate_link: bool,
) -> None:
"""Generate .tonic file from folder."""
console = Console()
# Parse source peers
- source_peers_list: list[str] | None = None
+ source_peers_list: Optional[list[str]] = None
if source_peers:
source_peers_list = [p.strip() for p in source_peers.split(",") if p.strip()]
diff --git a/ccbt/cli/torrent_config_commands.py b/ccbt/cli/torrent_config_commands.py
index 53a6c47..21aedba 100644
--- a/ccbt/cli/torrent_config_commands.py
+++ b/ccbt/cli/torrent_config_commands.py
@@ -8,7 +8,7 @@
from __future__ import annotations
import asyncio
-from typing import Any, cast
+from typing import Any, Optional, Union, cast
import click
from rich.console import Console
@@ -25,7 +25,7 @@
async def _get_torrent_session(
- info_hash_hex: str, session_manager: AsyncSessionManager | None = None
+ info_hash_hex: str, session_manager: Optional[AsyncSessionManager] = None
) -> Any:
"""Get torrent session by info hash.
@@ -50,7 +50,7 @@ async def _get_torrent_session(
return session_manager.torrents.get(info_hash)
-def _parse_value(raw: str) -> bool | int | float | str:
+def _parse_value(raw: str) -> Union[bool, int, float, str]:
"""Parse string value to appropriate type.
Args:
@@ -430,7 +430,7 @@ async def _list_options() -> None:
async def _reset_torrent_options(
- info_hash: str, key: str | None, save_checkpoint: bool
+ info_hash: str, key: Optional[str], save_checkpoint: bool
) -> None:
"""Reset per-torrent configuration options (async implementation).
@@ -551,7 +551,7 @@ async def _reset_torrent_options(
)
@click.pass_context
def torrent_config_reset(
- _ctx: click.Context, info_hash: str, key: str | None, save_checkpoint: bool
+ _ctx: click.Context, info_hash: str, key: Optional[str], save_checkpoint: bool
) -> None:
"""Reset per-torrent configuration options.
diff --git a/ccbt/cli/utp_commands.py b/ccbt/cli/utp_commands.py
index d2f6516..b9f3e81 100644
--- a/ccbt/cli/utp_commands.py
+++ b/ccbt/cli/utp_commands.py
@@ -11,6 +11,7 @@
from __future__ import annotations
import logging
+from typing import Optional
import click
from rich.console import Console
@@ -133,7 +134,7 @@ def utp_config_group() -> None:
@utp_config_group.command("get")
@click.argument("key", required=False)
-def utp_config_get(key: str | None) -> None:
+def utp_config_get(key: Optional[str]) -> None:
"""Get uTP configuration value(s).
Args:
diff --git a/ccbt/cli/verbosity.py b/ccbt/cli/verbosity.py
index 657e82b..20dfdfa 100644
--- a/ccbt/cli/verbosity.py
+++ b/ccbt/cli/verbosity.py
@@ -7,7 +7,7 @@
import logging
from enum import IntEnum
-from typing import Any, ClassVar
+from typing import Any, ClassVar, Optional
from ccbt.utils.logging_config import get_logger
@@ -128,7 +128,7 @@ def is_trace(self) -> bool:
return self.level == VerbosityLevel.TRACE
-def get_verbosity_from_ctx(ctx: dict[str, Any] | None) -> VerbosityManager:
+def get_verbosity_from_ctx(ctx: Optional[dict[str, Any]]) -> VerbosityManager:
"""Get verbosity manager from Click context.
Args:
@@ -151,7 +151,7 @@ def log_with_verbosity(
level: int,
message: str,
*args: Any,
- exc_info: bool | None = None,
+ exc_info: Optional[bool] = None,
**kwargs: Any,
) -> None:
"""Log a message respecting verbosity level.
diff --git a/ccbt/cli/xet_commands.py b/ccbt/cli/xet_commands.py
index 9e8c819..1d938d5 100644
--- a/ccbt/cli/xet_commands.py
+++ b/ccbt/cli/xet_commands.py
@@ -6,6 +6,7 @@
import json
import logging
from pathlib import Path
+from typing import Any, Optional
import click
from rich.console import Console
@@ -20,7 +21,7 @@
logger = logging.getLogger(__name__)
-async def _get_xet_protocol() -> XetProtocol | None:
+async def _get_xet_protocol() -> Optional[Any]: # Optional[XetProtocol]
"""Get Xet protocol instance from session manager.
Note: If daemon is running, this will check via IPC but cannot return
@@ -98,7 +99,7 @@ def xet() -> None:
@xet.command("enable")
@click.option("--config", "config_file", type=click.Path(), default=None)
@click.pass_context
-def xet_enable(_ctx, config_file: str | None) -> None:
+def xet_enable(_ctx, config_file: Optional[str]) -> None:
"""Enable Xet protocol in configuration."""
console = Console()
from ccbt.cli.main import _get_config_from_context
@@ -140,7 +141,7 @@ def xet_enable(_ctx, config_file: str | None) -> None:
@xet.command("disable")
@click.option("--config", "config_file", type=click.Path(), default=None)
@click.pass_context
-def xet_disable(_ctx, config_file: str | None) -> None:
+def xet_disable(_ctx, config_file: Optional[str]) -> None:
"""Disable Xet protocol in configuration."""
console = Console()
from ccbt.cli.main import _get_config_from_context
@@ -178,7 +179,7 @@ def xet_disable(_ctx, config_file: str | None) -> None:
@xet.command("status")
@click.option("--config", "config_file", type=click.Path(), default=None)
@click.pass_context
-def xet_status(_ctx, config_file: str | None) -> None:
+def xet_status(_ctx, config_file: Optional[str]) -> None:
"""Show Xet protocol status and configuration."""
console = Console()
from ccbt.cli.main import _get_config_from_context
@@ -253,7 +254,7 @@ async def _show_runtime_status() -> None:
@click.option("--config", "config_file", type=click.Path(), default=None)
@click.option("--json", "json_output", is_flag=True, help="Output in JSON format")
@click.pass_context
-def xet_stats(_ctx, config_file: str | None, json_output: bool) -> None:
+def xet_stats(_ctx, config_file: Optional[str], json_output: bool) -> None:
"""Show Xet deduplication cache statistics."""
console = Console()
from ccbt.cli.main import _get_config_from_context
@@ -320,7 +321,7 @@ async def _show_stats() -> None:
@click.option("--limit", type=int, default=10, help="Limit number of chunks to show")
@click.pass_context
def xet_cache_info(
- _ctx, config_file: str | None, json_output: bool, limit: int
+ _ctx, config_file: Optional[str], json_output: bool, limit: int
) -> None:
"""Show detailed information about cached chunks."""
console = Console()
@@ -454,7 +455,7 @@ async def _show_cache_info() -> None:
)
@click.pass_context
def xet_cleanup(
- _ctx, config_file: str | None, dry_run: bool, max_age_days: int
+ _ctx, config_file: Optional[str], dry_run: bool, max_age_days: int
) -> None:
"""Clean up unused chunks from the deduplication cache."""
console = Console()
diff --git a/ccbt/config/config.py b/ccbt/config/config.py
index 08fd49b..3b2676c 100644
--- a/ccbt/config/config.py
+++ b/ccbt/config/config.py
@@ -14,7 +14,7 @@
import os
import sys
from pathlib import Path
-from typing import Any, Callable, cast
+from typing import Any, Callable, Optional, Union, cast
import toml
@@ -79,13 +79,13 @@ def _safe_get_plugins():
IS_MACOS = sys.platform == "darwin"
# Global configuration instance
-_config_manager: ConfigManager | None = None
+_config_manager: Optional[ConfigManager] = None
class ConfigManager:
"""Manages configuration loading, validation, and hot-reload."""
- def __init__(self, config_file: str | Path | None = None):
+ def __init__(self, config_file: Optional[Union[str, Path]] = None):
"""Initialize configuration manager.
Args:
@@ -93,8 +93,8 @@ def __init__(self, config_file: str | Path | None = None):
"""
# internal
- self._hot_reload_task: asyncio.Task | None = None
- self._encryption_key: bytes | None = None
+ self._hot_reload_task: Optional[asyncio.Task] = None
+ self._encryption_key: Optional[bytes] = None
self.config_file = self._find_config_file(config_file)
self.config = self._load_config()
@@ -106,8 +106,8 @@ def __init__(self, config_file: str | Path | None = None):
def _find_config_file(
self,
- config_file: str | Path | None,
- ) -> Path | None:
+ config_file: Optional[Union[str, Path]],
+ ) -> Optional[Path]:
"""Find configuration file in standard locations."""
if config_file:
return Path(config_file)
@@ -560,7 +560,7 @@ def _get_env_config(self) -> dict[str, Any]:
def _parse_env_value(
raw: str, path: str
- ) -> bool | int | float | str | list[str]:
+ ) -> Union[bool, int, float, str, list[str]]:
# Handle list values (comma-separated strings)
if path == "security.encryption_allowed_ciphers":
return [item.strip() for item in raw.split(",") if item.strip()]
@@ -685,7 +685,7 @@ def save_config(self) -> None:
config_str = self.export(fmt="toml", encrypt_passwords=True)
self.config_file.write_text(config_str, encoding="utf-8")
- def _get_encryption_key(self) -> bytes | None:
+ def _get_encryption_key(self) -> Optional[bytes]:
"""Get or create encryption key for proxy passwords.
Returns:
@@ -919,7 +919,7 @@ def get_schema(self) -> dict[str, Any]:
return ConfigSchema.generate_full_schema()
- def get_section_schema(self, section_name: str) -> dict[str, Any] | None:
+ def get_section_schema(self, section_name: str) -> Optional[dict[str, Any]]:
"""Get schema for a specific configuration section.
Args:
@@ -944,7 +944,7 @@ def list_options(self) -> list[dict[str, Any]]:
return ConfigDiscovery.list_all_options()
- def get_option_metadata(self, key_path: str) -> dict[str, Any] | None:
+ def get_option_metadata(self, key_path: str) -> Optional[dict[str, Any]]:
"""Get metadata for a specific configuration option.
Args:
@@ -973,7 +973,9 @@ def validate_option(self, key_path: str, value: Any) -> tuple[bool, str]:
return ConfigValidator.validate_option(key_path, value)
- def apply_profile(self, profile: OptimizationProfile | str | None = None) -> None:
+ def apply_profile(
+ self, profile: Optional[Union[OptimizationProfile, str]] = None
+ ) -> None:
"""Apply optimization profile to configuration.
Args:
@@ -1134,7 +1136,7 @@ def get_config() -> Config:
return _config_manager.config
-def init_config(config_file: str | Path | None = None) -> ConfigManager:
+def init_config(config_file: Optional[Union[str, Path]] = None) -> ConfigManager:
"""Initialize the global configuration manager."""
return ConfigManager(config_file)
diff --git a/ccbt/config/config_backup.py b/ccbt/config/config_backup.py
index 01a2fcc..8d570a2 100644
--- a/ccbt/config/config_backup.py
+++ b/ccbt/config/config_backup.py
@@ -11,7 +11,7 @@
import logging
from datetime import datetime, timezone
from pathlib import Path
-from typing import Any
+from typing import Any, Optional, Union
from ccbt.config.config_migration import ConfigMigrator
@@ -21,7 +21,7 @@
class ConfigBackup:
"""Configuration backup and restore system."""
- def __init__(self, backup_dir: Path | str | None = None):
+ def __init__(self, backup_dir: Optional[Union[Path, str]] = None):
"""Initialize backup system.
Args:
@@ -36,10 +36,10 @@ def __init__(self, backup_dir: Path | str | None = None):
def create_backup(
self,
- config_file: Path | str,
- description: str | None = None,
+ config_file: Union[Path, str],
+ description: Optional[str] = None,
compress: bool = True,
- ) -> tuple[bool, Path | None, list[str]]:
+ ) -> tuple[bool, Optional[Path], list[str]]:
"""Create a configuration backup.
Args:
@@ -110,8 +110,8 @@ def create_backup(
def restore_backup(
self,
- backup_file: Path | str,
- target_file: Path | str | None = None,
+ backup_file: Union[Path, str],
+ target_file: Optional[Union[Path, str]] = None,
create_backup: bool = True,
) -> tuple[bool, list[str]]:
"""Restore configuration from backup.
@@ -209,9 +209,9 @@ def list_backups(self) -> list[dict[str, Any]]:
def auto_backup(
self,
- config_file: Path | str,
+ config_file: Union[Path, str],
max_backups: int = 10,
- ) -> tuple[bool, Path | None, list[str]]:
+ ) -> tuple[bool, Optional[Path], list[str]]:
"""Create automatic backup before configuration changes.
Args:
@@ -276,7 +276,7 @@ def _cleanup_auto_backups(self, max_backups: int) -> None:
except Exception as e:
logger.warning("Failed to cleanup auto backups: %s", e)
- def validate_backup(self, backup_file: Path | str) -> tuple[bool, list[str]]:
+ def validate_backup(self, backup_file: Union[Path, str]) -> tuple[bool, list[str]]:
"""Validate a backup file.
Args:
diff --git a/ccbt/config/config_capabilities.py b/ccbt/config/config_capabilities.py
index 094036e..5da594a 100644
--- a/ccbt/config/config_capabilities.py
+++ b/ccbt/config/config_capabilities.py
@@ -11,7 +11,7 @@
import subprocess
import sys
import time
-from typing import Any
+from typing import Any, Optional
import psutil
@@ -30,7 +30,7 @@ def __init__(self, cache_ttl: int = 300):
self._cache: dict[str, tuple[Any, float]] = {}
self._platform = platform.system().lower()
- def _get_cached(self, key: str) -> Any | None:
+ def _get_cached(self, key: str) -> Optional[Any]:
"""Get cached value if not expired.
Args:
diff --git a/ccbt/config/config_conditional.py b/ccbt/config/config_conditional.py
index 13a99a8..5447a9c 100644
--- a/ccbt/config/config_conditional.py
+++ b/ccbt/config/config_conditional.py
@@ -8,7 +8,7 @@
import copy
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.config.config_capabilities import SystemCapabilities
@@ -21,7 +21,7 @@
class ConditionalConfig:
"""Applies conditional configuration based on system capabilities."""
- def __init__(self, capabilities: SystemCapabilities | None = None):
+ def __init__(self, capabilities: Optional[SystemCapabilities] = None):
"""Initialize conditional configuration.
Args:
diff --git a/ccbt/config/config_diff.py b/ccbt/config/config_diff.py
index f462c19..a3b783d 100644
--- a/ccbt/config/config_diff.py
+++ b/ccbt/config/config_diff.py
@@ -9,7 +9,7 @@
import json
import logging
from pathlib import Path
-from typing import Any
+from typing import Any, Optional, Union
logger = logging.getLogger(__name__)
@@ -115,7 +115,7 @@ def merge_configs(
def apply_changes(
base_config: dict[str, Any],
changes: dict[str, Any],
- change_types: dict[str, str] | None = None,
+ change_types: Optional[dict[str, str]] = None,
) -> dict[str, Any]:
"""Apply specific changes to a configuration.
@@ -437,8 +437,8 @@ def _generate_text_report(diff: dict[str, Any]) -> str:
@staticmethod
def compare_files(
- file1: Path | str,
- file2: Path | str,
+ file1: Union[Path, str],
+ file2: Union[Path, str],
ignore_metadata: bool = True,
) -> dict[str, Any]:
"""Compare two configuration files.
diff --git a/ccbt/config/config_migration.py b/ccbt/config/config_migration.py
index 491665a..4b60638 100644
--- a/ccbt/config/config_migration.py
+++ b/ccbt/config/config_migration.py
@@ -9,7 +9,7 @@
import json
import logging
from pathlib import Path
-from typing import Any, ClassVar
+from typing import Any, ClassVar, Optional, Union
from ccbt.models import Config
@@ -57,7 +57,7 @@ def detect_version(config_data: dict[str, Any]) -> str:
@staticmethod
def migrate_config(
config_data: dict[str, Any],
- target_version: str | None = None,
+ target_version: Optional[str] = None,
) -> tuple[dict[str, Any], list[str]]:
"""Migrate configuration to target version.
@@ -220,9 +220,9 @@ def _migrate_0_9_0_to_1_0_0(config_data: dict[str, Any]) -> dict[str, Any]:
@staticmethod
def migrate_file(
- config_file: Path | str,
+ config_file: Union[Path, str],
backup: bool = True,
- target_version: str | None = None,
+ target_version: Optional[str] = None,
) -> tuple[bool, list[str]]:
"""Migrate a configuration file.
@@ -305,8 +305,8 @@ def validate_migrated_config(config_data: dict[str, Any]) -> tuple[bool, list[st
@staticmethod
def rollback_migration(
- config_file: Path | str,
- backup_file: Path | str | None = None,
+ config_file: Union[Path, str],
+ backup_file: Optional[Union[Path, str]] = None,
) -> tuple[bool, list[str]]:
"""Rollback a migration using backup file.
diff --git a/ccbt/config/config_schema.py b/ccbt/config/config_schema.py
index 00de3a8..449516c 100644
--- a/ccbt/config/config_schema.py
+++ b/ccbt/config/config_schema.py
@@ -8,7 +8,7 @@
import json
import logging
-from typing import Any
+from typing import Any, Optional
from pydantic import BaseModel, ValidationError
@@ -48,7 +48,7 @@ def generate_full_schema() -> dict[str, Any]:
return ConfigSchema.generate_schema(Config)
@staticmethod
- def get_schema_for_section(section_name: str) -> dict[str, Any] | None:
+ def get_schema_for_section(section_name: str) -> Optional[dict[str, Any]]:
"""Get schema for a specific configuration section.
Args:
@@ -126,7 +126,7 @@ def get_all_options() -> dict[str, Any]:
}
@staticmethod
- def get_option_metadata(key_path: str) -> dict[str, Any] | None:
+ def get_option_metadata(key_path: str) -> Optional[dict[str, Any]]:
"""Get metadata for specific configuration option.
Args:
diff --git a/ccbt/config/config_templates.py b/ccbt/config/config_templates.py
index 1eade7d..b8ee601 100644
--- a/ccbt/config/config_templates.py
+++ b/ccbt/config/config_templates.py
@@ -9,7 +9,7 @@
import json
import logging
from pathlib import Path
-from typing import Any, ClassVar
+from typing import Any, ClassVar, Optional, Union
from ccbt.models import Config
@@ -915,7 +915,7 @@ def list_templates() -> list[dict[str, Any]]:
]
@staticmethod
- def get_template(template_name: str) -> dict[str, Any] | None:
+ def get_template(template_name: str) -> Optional[dict[str, Any]]:
"""Get a specific configuration template.
Args:
@@ -1167,7 +1167,7 @@ def list_profiles() -> list[dict[str, Any]]:
]
@staticmethod
- def get_profile(profile_name: str) -> dict[str, Any] | None:
+ def get_profile(profile_name: str) -> Optional[dict[str, Any]]:
"""Get a specific configuration profile.
Args:
@@ -1236,7 +1236,7 @@ def create_custom_profile(
description: str,
templates: list[str],
overrides: dict[str, Any],
- profile_file: Path | str | None = None,
+ profile_file: Optional[Union[Path, str]] = None,
) -> dict[str, Any]:
"""Create a custom configuration profile.
@@ -1278,7 +1278,7 @@ def create_custom_profile(
return profile
@staticmethod
- def load_custom_profile(profile_file: Path | str) -> dict[str, Any]:
+ def load_custom_profile(profile_file: Union[Path, str]) -> dict[str, Any]:
"""Load a custom profile from file.
Args:
diff --git a/ccbt/consensus/__init__.py b/ccbt/consensus/__init__.py
index e1a08c3..9818543 100644
--- a/ccbt/consensus/__init__.py
+++ b/ccbt/consensus/__init__.py
@@ -25,3 +25,9 @@
"RaftState",
"RaftStateType",
]
+
+
+
+
+
+
diff --git a/ccbt/consensus/byzantine.py b/ccbt/consensus/byzantine.py
index 0a85db1..427a2c3 100644
--- a/ccbt/consensus/byzantine.py
+++ b/ccbt/consensus/byzantine.py
@@ -6,7 +6,7 @@
from __future__ import annotations
import logging
-from typing import Any
+from typing import Any, Optional
logger = logging.getLogger(__name__)
@@ -31,7 +31,7 @@ def __init__(
node_id: str,
fault_threshold: float = 0.33,
weighted_voting: bool = False,
- node_weights: dict[str, float] | None = None,
+ node_weights: Optional[dict[str, float]] = None,
):
"""Initialize Byzantine consensus.
@@ -55,7 +55,7 @@ def __init__(
def propose(
self,
proposal: dict[str, Any],
- signature: bytes | None = None,
+ signature: Optional[bytes] = None,
) -> dict[str, Any]:
"""Create a proposal with optional signature.
@@ -77,7 +77,7 @@ def vote(
self,
proposal: dict[str, Any],
vote: bool,
- signature: bytes | None = None,
+ signature: Optional[bytes] = None,
) -> dict[str, Any]:
"""Create a vote on a proposal.
@@ -132,7 +132,7 @@ def verify_signature(
def check_byzantine_threshold(
self,
votes: dict[str, bool],
- weights: dict[str, float] | None = None,
+ weights: Optional[dict[str, float]] = None,
) -> tuple[bool, float]:
"""Check if consensus threshold is met with Byzantine fault tolerance.
diff --git a/ccbt/consensus/raft.py b/ccbt/consensus/raft.py
index cac0e5b..53ca732 100644
--- a/ccbt/consensus/raft.py
+++ b/ccbt/consensus/raft.py
@@ -12,7 +12,7 @@
import time
from enum import Enum
from pathlib import Path
-from typing import Any, Callable
+from typing import Any, Callable, Optional, Union
from ccbt.consensus.raft_state import RaftState
@@ -45,10 +45,10 @@ class RaftNode:
def __init__(
self,
node_id: str,
- state_path: Path | str | None = None,
+ state_path: Optional[Union[Path, str]] = None,
election_timeout: float = 1.0,
heartbeat_interval: float = 0.1,
- apply_command_callback: Callable[[dict[str, Any]], None] | None = None,
+ apply_command_callback: Optional[Callable[[dict[str, Any]], None]] = None,
):
"""Initialize Raft node.
@@ -70,7 +70,7 @@ def __init__(
self.state = RaftState()
self.role = RaftRole.FOLLOWER
- self.leader_id: str | None = None
+ self.leader_id: Optional[str] = None
self.peers: set[str] = set()
self.election_timeout = election_timeout
@@ -79,17 +79,17 @@ def __init__(
# Timers
self.last_heartbeat = time.time()
- self.election_deadline: float | None = None
+ self.election_deadline: Optional[float] = None
# Running state
self.running = False
- self._election_task: asyncio.Task | None = None
- self._heartbeat_task: asyncio.Task | None = None
- self._apply_task: asyncio.Task | None = None
+ self._election_task: Optional[asyncio.Task] = None
+ self._heartbeat_task: Optional[asyncio.Task] = None
+ self._apply_task: Optional[asyncio.Task] = None
# RPC handlers (would be network calls in production)
- self.send_vote_request: Callable[[str, dict[str, Any]], Any] | None = None
- self.send_append_entries: Callable[[str, dict[str, Any]], Any] | None = None
+ self.send_vote_request: Optional[Callable[[str, dict[str, Any]], Any]] = None
+ self.send_append_entries: Optional[Callable[[str, dict[str, Any]], Any]] = None
async def start(self) -> None:
"""Start Raft node."""
diff --git a/ccbt/consensus/raft_state.py b/ccbt/consensus/raft_state.py
index a4649fb..bcab30c 100644
--- a/ccbt/consensus/raft_state.py
+++ b/ccbt/consensus/raft_state.py
@@ -9,7 +9,7 @@
import logging
import time
from dataclasses import dataclass, field
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from pathlib import Path
@@ -40,7 +40,7 @@ class RaftState:
"""
current_term: int = 0
- voted_for: str | None = None
+ voted_for: Optional[str] = None
log: list[LogEntry] = field(default_factory=list)
commit_index: int = -1
last_applied: int = -1
@@ -160,7 +160,7 @@ def append_entry(self, term: int, command: dict[str, Any]) -> LogEntry:
self.log.append(entry)
return entry
- def get_entry(self, index: int) -> LogEntry | None:
+ def get_entry(self, index: int) -> Optional[LogEntry]:
"""Get log entry by index.
Args:
diff --git a/ccbt/core/magnet.py b/ccbt/core/magnet.py
index a5f08a7..3ac1e15 100644
--- a/ccbt/core/magnet.py
+++ b/ccbt/core/magnet.py
@@ -10,7 +10,7 @@
import urllib.parse
from dataclasses import dataclass
-from typing import Any
+from typing import Any, Optional
@dataclass
@@ -18,11 +18,11 @@ class MagnetInfo:
"""Information extracted from a magnet link (BEP 9 + BEP 53)."""
info_hash: bytes
- display_name: str | None
+ display_name: Optional[str]
trackers: list[str]
web_seeds: list[str]
- selected_indices: list[int] | None = None # BEP 53: so parameter
- prioritized_indices: dict[int, int] | None = None # BEP 53: x.pe parameter
+ selected_indices: Optional[list[int]] = None # BEP 53: so parameter
+ prioritized_indices: Optional[dict[int, int]] = None # BEP 53: x.pe parameter
def _hex_or_base32_to_bytes(btih: str) -> bytes:
@@ -243,9 +243,9 @@ def parse_magnet(uri: str) -> MagnetInfo:
def build_minimal_torrent_data(
info_hash: bytes,
- name: str | None,
+ name: Optional[str],
trackers: list[str],
- web_seeds: list[str] | None = None,
+ web_seeds: Optional[list[str]] = None,
) -> dict[str, Any]:
"""Create a minimal `torrent_data` placeholder using known info.
@@ -371,7 +371,7 @@ def build_minimal_torrent_data(
def validate_and_normalize_indices(
- indices: list[int] | None,
+ indices: Optional[list[int]],
num_files: int,
parameter_name: str = "indices",
) -> list[int]:
@@ -695,11 +695,11 @@ async def apply_magnet_file_selection(
def generate_magnet_link(
info_hash: bytes,
- display_name: str | None = None,
- trackers: list[str] | None = None,
- web_seeds: list[str] | None = None,
- selected_indices: list[int] | None = None,
- prioritized_indices: dict[int, int] | None = None,
+ display_name: Optional[str] = None,
+ trackers: Optional[list[str]] = None,
+ web_seeds: Optional[list[str]] = None,
+ selected_indices: Optional[list[int]] = None,
+ prioritized_indices: Optional[dict[int, int]] = None,
use_base32: bool = False,
) -> str:
"""Generate a magnet URI with optional file indices (BEP 53).
diff --git a/ccbt/core/tonic.py b/ccbt/core/tonic.py
index 9147da3..71abe4f 100644
--- a/ccbt/core/tonic.py
+++ b/ccbt/core/tonic.py
@@ -11,7 +11,7 @@
import hashlib
import time
from pathlib import Path
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional, Union
from ccbt.core.bencode import decode, encode
from ccbt.utils.exceptions import TorrentError
@@ -32,7 +32,7 @@ class TonicFile:
def __init__(self) -> None:
"""Initialize the tonic file handler."""
- def parse(self, tonic_path: str | Path) -> dict[str, Any]:
+ def parse(self, tonic_path: Union[str, Path]) -> dict[str, Any]:
"""Parse a .tonic file from a local path.
Args:
@@ -106,13 +106,13 @@ def create(
self,
folder_name: str,
xet_metadata: XetTorrentMetadata,
- git_refs: list[str] | None = None,
+ git_refs: Optional[list[str]] = None,
sync_mode: str = "best_effort",
- source_peers: list[str] | None = None,
- allowlist_hash: bytes | None = None,
- announce: str | None = None,
- announce_list: list[list[str]] | None = None,
- comment: str | None = None,
+ source_peers: Optional[list[str]] = None,
+ allowlist_hash: Optional[bytes] = None,
+ announce: Optional[str] = None,
+ announce_list: Optional[list[list[str]]] = None,
+ comment: Optional[str] = None,
) -> bytes:
"""Create a bencoded .tonic file.
@@ -293,7 +293,7 @@ def get_file_tree(self, tonic_data: dict[str, Any]) -> dict[str, Any]:
return {}
def _convert_tree_keys(
- self, tree: dict[bytes, Any] | dict[str, Any]
+ self, tree: Union[dict[bytes, Any], dict[str, Any]]
) -> dict[str, Any]:
"""Convert tree keys from bytes to strings recursively.
@@ -389,7 +389,7 @@ def get_info_hash(self, tonic_data: dict[str, Any]) -> bytes:
info_bencoded = encode(info_bytes_dict)
return hashlib.sha256(info_bencoded).digest()
- def _read_from_file(self, file_path: str | Path) -> bytes:
+ def _read_from_file(self, file_path: Union[str, Path]) -> bytes:
"""Read tonic data from a local file.
Args:
diff --git a/ccbt/core/tonic_link.py b/ccbt/core/tonic_link.py
index fc3b1a7..5ddec27 100644
--- a/ccbt/core/tonic_link.py
+++ b/ccbt/core/tonic_link.py
@@ -10,7 +10,7 @@
import base64
import urllib.parse
from dataclasses import dataclass
-from typing import Any
+from typing import Any, Optional
@dataclass
@@ -18,12 +18,12 @@ class TonicLinkInfo:
"""Information extracted from a tonic?: link."""
info_hash: bytes # 32-byte SHA-256 hash
- display_name: str | None = None
- trackers: list[str] | None = None
- git_refs: list[str] | None = None
- sync_mode: str | None = None
- source_peers: list[str] | None = None
- allowlist_hash: bytes | None = None
+ display_name: Optional[str] = None
+ trackers: Optional[list[str]] = None
+ git_refs: Optional[list[str]] = None
+ sync_mode: Optional[str] = None
+ source_peers: Optional[list[str]] = None
+ allowlist_hash: Optional[bytes] = None
def _hex_or_base32_to_bytes(value: str) -> bytes:
@@ -166,12 +166,12 @@ def parse_tonic_link(uri: str) -> TonicLinkInfo:
def generate_tonic_link(
info_hash: bytes,
- display_name: str | None = None,
- trackers: list[str] | None = None,
- git_refs: list[str] | None = None,
- sync_mode: str | None = None,
- source_peers: list[str] | None = None,
- allowlist_hash: bytes | None = None,
+ display_name: Optional[str] = None,
+ trackers: Optional[list[str]] = None,
+ git_refs: Optional[list[str]] = None,
+ sync_mode: Optional[str] = None,
+ source_peers: Optional[list[str]] = None,
+ allowlist_hash: Optional[bytes] = None,
use_base32: bool = False,
) -> str:
"""Generate a tonic?: link from provided parameters.
@@ -251,7 +251,7 @@ def generate_tonic_link(
def build_minimal_tonic_data(
info_hash: bytes,
- name: str | None,
+ name: Optional[str],
trackers: list[str],
sync_mode: str = "best_effort",
) -> dict[str, Any]:
diff --git a/ccbt/core/torrent.py b/ccbt/core/torrent.py
index 13e5f99..1367ccb 100644
--- a/ccbt/core/torrent.py
+++ b/ccbt/core/torrent.py
@@ -12,7 +12,7 @@
import os
import urllib.request
from pathlib import Path
-from typing import Any
+from typing import Any, Union
from ccbt.core.bencode import decode, encode
from ccbt.models import FileInfo, TorrentInfo
@@ -75,7 +75,7 @@ class TorrentParser:
def __init__(self) -> None:
"""Initialize the torrent parser."""
- def parse(self, torrent_path: str | Path) -> TorrentInfo:
+ def parse(self, torrent_path: Union[str, Path]) -> TorrentInfo:
"""Parse a torrent file from a local path or URL.
Args:
@@ -113,12 +113,12 @@ def parse(self, torrent_path: str | Path) -> TorrentInfo:
msg = f"Failed to parse torrent: {e}"
raise TorrentError(msg) from e
- def _is_url(self, path: str | Path) -> bool:
+ def _is_url(self, path: Union[str, Path]) -> bool:
"""Check if path is a URL."""
path_str = str(path)
return path_str.startswith(("http://", "https://"))
- def _read_from_file(self, file_path: str | Path) -> bytes:
+ def _read_from_file(self, file_path: Union[str, Path]) -> bytes:
"""Read torrent data from a local file."""
path = Path(file_path)
if not path.exists():
@@ -357,7 +357,7 @@ def _extract_file_info(self, info: dict[bytes, Any]) -> list[FileInfo]:
if b"symlink path" in info:
symlink_path = info[b"symlink path"].decode("utf-8")
- file_sha1 = info.get(b"sha1") # bytes | None, 20 bytes if present
+ file_sha1 = info.get(b"sha1") # Optional[bytes], 20 bytes if present
return [
FileInfo(
@@ -386,7 +386,7 @@ def _extract_file_info(self, info: dict[bytes, Any]) -> list[FileInfo]:
if b"symlink path" in file_info:
symlink_path = file_info[b"symlink path"].decode("utf-8")
- file_sha1 = file_info.get(b"sha1") # bytes | None, 20 bytes if present
+ file_sha1 = file_info.get(b"sha1") # Optional[bytes], 20 bytes if present
files.append(
FileInfo(
diff --git a/ccbt/core/torrent_attributes.py b/ccbt/core/torrent_attributes.py
index 047ce16..f61564b 100644
--- a/ccbt/core/torrent_attributes.py
+++ b/ccbt/core/torrent_attributes.py
@@ -33,6 +33,7 @@
import platform
from enum import IntFlag
from pathlib import Path
+from typing import Optional, Union
logger = logging.getLogger(__name__)
@@ -47,7 +48,7 @@ class FileAttribute(IntFlag):
HIDDEN = 1 << 3 # Hidden file (bit 3)
-def parse_attributes(attr_str: str | None) -> FileAttribute:
+def parse_attributes(attr_str: Optional[str]) -> FileAttribute:
"""Parse attribute string into FileAttribute flags.
Args:
@@ -84,7 +85,7 @@ def parse_attributes(attr_str: str | None) -> FileAttribute:
return flags
-def is_padding_file(attributes: str | None) -> bool:
+def is_padding_file(attributes: Optional[str]) -> bool:
"""Check if attributes indicate a padding file.
Args:
@@ -98,8 +99,8 @@ def is_padding_file(attributes: str | None) -> bool:
def validate_symlink(
- attributes: str | None,
- symlink_path: str | None,
+ attributes: Optional[str],
+ symlink_path: Optional[str],
) -> bool:
"""Validate symlink attributes and path are consistent.
@@ -125,7 +126,7 @@ def validate_symlink(
return True
-def should_skip_file(attributes: str | None) -> bool:
+def should_skip_file(attributes: Optional[str]) -> bool:
"""Determine if file should be skipped (padding files).
Args:
@@ -139,9 +140,9 @@ def should_skip_file(attributes: str | None) -> bool:
def apply_file_attributes(
- file_path: str | Path,
- attributes: str | None,
- symlink_path: str | None = None,
+ file_path: Union[str, Path],
+ attributes: Optional[str],
+ symlink_path: Optional[str] = None,
) -> None:
"""Apply file attributes to a file on disk.
@@ -227,7 +228,7 @@ def apply_file_attributes(
logger.warning("Failed to set hidden attribute on %s: %s", file_path, e)
-def verify_file_sha1(file_path: str | Path, expected_sha1: bytes) -> bool:
+def verify_file_sha1(file_path: Union[str, Path], expected_sha1: bytes) -> bool:
"""Verify file SHA-1 hash matches expected value.
Args:
@@ -273,7 +274,7 @@ def verify_file_sha1(file_path: str | Path, expected_sha1: bytes) -> bool:
return matches
-def get_attribute_display_string(attributes: str | None) -> str:
+def get_attribute_display_string(attributes: Optional[str]) -> str:
"""Get human-readable display string for attributes.
Args:
diff --git a/ccbt/core/torrent_v2.py b/ccbt/core/torrent_v2.py
index 1eb8e2a..193c7ca 100644
--- a/ccbt/core/torrent_v2.py
+++ b/ccbt/core/torrent_v2.py
@@ -13,7 +13,7 @@
import math
from dataclasses import dataclass, field
from pathlib import Path
-from typing import Any
+from typing import Any, Optional
from ccbt.core.bencode import encode
from ccbt.models import FileInfo, TorrentInfo
@@ -32,8 +32,8 @@ class FileTreeNode:
name: str
length: int = 0
- pieces_root: bytes | None = None
- children: dict[str, FileTreeNode] | None = None
+ pieces_root: Optional[bytes] = None
+ children: Optional[dict[str, FileTreeNode]] = None
def __post_init__(self) -> None:
"""Validate node structure."""
@@ -99,13 +99,13 @@ class TorrentV2Info:
name: str
info_hash_v2: bytes # 32 bytes SHA-256
- info_hash_v1: bytes | None = None # 20 bytes SHA-1 for hybrid torrents
+ info_hash_v1: Optional[bytes] = None # 20 bytes SHA-1 for hybrid torrents
announce: str = ""
- announce_list: list[list[str]] | None = None
- comment: str | None = None
- created_by: str | None = None
- creation_date: int | None = None
- encoding: str | None = None
+ announce_list: Optional[list[list[str]]] = None
+ comment: Optional[str] = None
+ created_by: Optional[str] = None
+ creation_date: Optional[int] = None
+ encoding: Optional[str] = None
is_private: bool = False
# v2-specific fields
@@ -149,7 +149,7 @@ def traverse(node: FileTreeNode, path: str = "") -> None:
return paths
- def get_piece_layer(self, pieces_root: bytes) -> PieceLayer | None:
+ def get_piece_layer(self, pieces_root: bytes) -> Optional[PieceLayer]:
"""Get piece layer for a given pieces_root hash."""
return self.piece_layers.get(pieces_root)
@@ -497,7 +497,7 @@ def _calculate_info_hash_v2(info_dict: dict[bytes, Any]) -> bytes:
raise TorrentError(msg) from e
-def _calculate_info_hash_v1(info_dict: dict[bytes, Any]) -> bytes | None:
+def _calculate_info_hash_v1(info_dict: dict[bytes, Any]) -> Optional[bytes]:
"""Calculate SHA-1 info hash for hybrid torrent (v1 part).
Args:
@@ -810,7 +810,7 @@ def parse_hybrid(
def _build_file_tree(
self,
files: list[tuple[str, int]],
- base_path: Path | None = None,
+ base_path: Optional[Path] = None,
) -> dict[str, FileTreeNode]:
"""Build v2 file tree structure from file list.
@@ -874,7 +874,7 @@ def _build_file_tree_node(
self,
name: str,
files: list[tuple[str, int]],
- ) -> FileTreeNode | None:
+ ) -> Optional[FileTreeNode]:
"""Build a FileTreeNode from a list of files.
Args:
@@ -906,7 +906,7 @@ def _build_file_tree_node(
# Build directory structure
# Group files by first path component
children_dict: dict[str, list[tuple[str, int]]] = {}
- single_file_at_root: tuple[str, int] | None = None
+ single_file_at_root: Optional[tuple[str, int]] = None
for file_path, file_length in files:
if not file_path or file_path == "/": # pragma: no cover
@@ -1310,7 +1310,7 @@ def _piece_layers_to_dict(
return result
def _collect_files_from_path(
- self, source: Path, base_path: Path | None = None
+ self, source: Path, base_path: Optional[Path] = None
) -> list[tuple[str, int]]:
"""Collect all files from source path with their sizes.
@@ -1386,12 +1386,12 @@ def _collect_files_from_path(
def generate_v2_torrent(
self,
source: Path,
- output: Path | None = None,
- trackers: list[str] | None = None,
- web_seeds: list[str] | None = None,
- comment: str | None = None,
+ output: Optional[Path] = None,
+ trackers: Optional[list[str]] = None,
+ web_seeds: Optional[list[str]] = None,
+ comment: Optional[str] = None,
created_by: str = "ccBitTorrent",
- piece_length: int | None = None,
+ piece_length: Optional[int] = None,
private: bool = False,
) -> bytes:
"""Generate a v2-only torrent file.
@@ -1531,12 +1531,12 @@ def generate_v2_torrent(
def generate_hybrid_torrent(
self,
source: Path,
- output: Path | None = None,
- trackers: list[str] | None = None,
- web_seeds: list[str] | None = None,
- comment: str | None = None,
+ output: Optional[Path] = None,
+ trackers: Optional[list[str]] = None,
+ web_seeds: Optional[list[str]] = None,
+ comment: Optional[str] = None,
created_by: str = "ccBitTorrent",
- piece_length: int | None = None,
+ piece_length: Optional[int] = None,
private: bool = False,
) -> bytes:
"""Generate a hybrid torrent (v1 + v2).
diff --git a/ccbt/daemon/daemon_manager.py b/ccbt/daemon/daemon_manager.py
index 5dbc9ea..a2f153f 100644
--- a/ccbt/daemon/daemon_manager.py
+++ b/ccbt/daemon/daemon_manager.py
@@ -16,7 +16,7 @@
import sys
import time
from pathlib import Path
-from typing import Any
+from typing import Any, Optional, Union
from ccbt.utils.logging_config import get_logger
@@ -88,8 +88,8 @@ class DaemonManager:
def __init__(
self,
- pid_file: str | Path | None = None,
- state_dir: str | Path | None = None,
+ pid_file: Optional[str | Path] = None,
+ state_dir: Optional[str | Path] = None,
):
"""Initialize daemon manager.
@@ -201,7 +201,7 @@ def ensure_single_instance(self) -> bool:
return True
- def get_pid(self) -> int | None:
+ def get_pid(self) -> Optional[int]:
"""Get daemon PID from file with validation and retry logic.
Returns:
@@ -585,7 +585,7 @@ def remove_pid(self) -> None:
def start(
self,
- script_path: str | None = None,
+ script_path: Optional[str] = None,
foreground: bool = False,
) -> int:
"""Start daemon process.
@@ -622,7 +622,7 @@ def start(
# CRITICAL FIX: Capture stderr to a log file for background mode
# This allows debugging daemon startup failures
log_file = self.state_dir / "daemon_startup.log"
- log_fd: int | Any = subprocess.DEVNULL
+ log_fd: Union[int, Any] = subprocess.DEVNULL
try:
log_fd = open(log_file, "a", encoding="utf-8")
except Exception:
@@ -765,7 +765,7 @@ def stop(self, timeout: float = 30.0, force: bool = False) -> bool:
self.remove_pid()
return False
- def restart(self, script_path: str | None = None) -> int:
+ def restart(self, script_path: Optional[str] = None) -> int:
"""Restart daemon process.
Args:
diff --git a/ccbt/daemon/debug_utils.py b/ccbt/daemon/debug_utils.py
index 06c5059..53f0964 100644
--- a/ccbt/daemon/debug_utils.py
+++ b/ccbt/daemon/debug_utils.py
@@ -9,15 +9,15 @@
import time
import traceback
from pathlib import Path
-from typing import Any
+from typing import Any, Optional
# Global debug state
_debug_enabled = False
-_debug_log_file: Path | None = None
+_debug_log_file: Optional[Path] = None
_debug_lock = threading.Lock()
-def enable_debug_logging(log_file: Path | None = None) -> None:
+def enable_debug_logging(log_file: Optional[Path] = None) -> None:
"""Enable comprehensive debug logging to file.
Args:
diff --git a/ccbt/daemon/ipc_client.py b/ccbt/daemon/ipc_client.py
index 0cd1a92..96330b7 100644
--- a/ccbt/daemon/ipc_client.py
+++ b/ccbt/daemon/ipc_client.py
@@ -12,7 +12,7 @@
import json
import logging
import os
-from typing import Any
+from typing import Any, Optional
import aiohttp
@@ -80,8 +80,8 @@ class IPCClient:
def __init__(
self,
- api_key: str | None = None,
- base_url: str | None = None,
+ api_key: Optional[str] = None,
+ base_url: Optional[str] = None,
key_manager: Any = None, # Ed25519KeyManager
timeout: float = 30.0,
):
@@ -99,12 +99,12 @@ def __init__(
self.base_url = base_url or self._get_default_url()
self.timeout = aiohttp.ClientTimeout(total=timeout)
- self._session: aiohttp.ClientSession | None = None
- self._session_loop: asyncio.AbstractEventLoop | None = (
+ self._session: Optional[aiohttp.ClientSession] = None
+ self._session_loop: Optional[asyncio.AbstractEventLoop] = (
None # Track loop session was created with
)
- self._websocket: aiohttp.ClientWebSocketResponse | None = None
- self._websocket_task: asyncio.Task | None = None
+ self._websocket: Optional[aiohttp.ClientWebSocketResponse] = None
+ self._websocket_task: Optional[asyncio.Task] = None
@property
def session(self) -> aiohttp.ClientSession:
@@ -288,7 +288,7 @@ async def _ensure_session(self) -> aiohttp.ClientSession:
return self._session
def _get_headers(
- self, method: str = "GET", path: str = "", body: bytes | None = None
+ self, method: str = "GET", path: str = "", body: Optional[bytes] = None
) -> dict[str, str]:
"""Get request headers with authentication.
@@ -333,7 +333,7 @@ async def _get_json(
self,
endpoint: str,
*,
- params: dict[str, Any] | None = None,
+ params: Optional[dict[str, Any]] = None,
requires_auth: bool = True,
) -> Any:
"""Issue authenticated GET requests and return JSON payload."""
@@ -428,7 +428,7 @@ async def get_status(self) -> StatusResponse:
async def add_torrent(
self,
path_or_magnet: str,
- output_dir: str | None = None,
+ output_dir: Optional[str] = None,
resume: bool = False,
) -> str:
"""Add torrent or magnet.
@@ -550,7 +550,9 @@ async def list_torrents(self) -> list[TorrentStatusResponse]:
response = TorrentListResponse(**data)
return response.torrents
- async def get_torrent_status(self, info_hash: str) -> TorrentStatusResponse | None:
+ async def get_torrent_status(
+ self, info_hash: str
+ ) -> Optional[TorrentStatusResponse]:
"""Get torrent status.
Args:
@@ -601,7 +603,7 @@ async def get_torrent_option(
self,
info_hash: str,
key: str,
- ) -> Any | None:
+ ) -> Optional[Any]:
"""Get a per-torrent configuration option value.
Args:
@@ -649,7 +651,7 @@ async def get_torrent_config(
async def reset_torrent_options(
self,
info_hash: str,
- key: str | None = None,
+ key: Optional[str] = None,
) -> bool:
"""Reset per-torrent configuration options.
@@ -1377,7 +1379,7 @@ async def discover_nat(self) -> dict[str, Any]:
async def map_nat_port(
self,
internal_port: int,
- external_port: int | None = None,
+ external_port: Optional[int] = None,
protocol: str = "tcp",
) -> dict[str, Any]:
"""Map a port via NAT.
@@ -1484,7 +1486,7 @@ async def list_scrape_results(self) -> ScrapeListResponse:
data = await resp.json()
return ScrapeListResponse(**data)
- async def get_scrape_result(self, info_hash: str) -> ScrapeResult | None:
+ async def get_scrape_result(self, info_hash: str) -> Optional[ScrapeResult]:
"""Get cached scrape result for a torrent.
Args:
@@ -1541,11 +1543,11 @@ async def get_ipfs_protocol(self) -> ProtocolInfo:
async def add_xet_folder(
self,
folder_path: str,
- tonic_file: str | None = None,
- tonic_link: str | None = None,
- sync_mode: str | None = None,
- source_peers: list[str] | None = None,
- check_interval: float | None = None,
+ tonic_file: Optional[str] = None,
+ tonic_link: Optional[str] = None,
+ sync_mode: Optional[str] = None,
+ source_peers: Optional[list[str]] = None,
+ check_interval: Optional[float] = None,
) -> dict[str, Any]:
"""Add XET folder for synchronization.
@@ -1955,7 +1957,7 @@ async def force_announce(self, info_hash: str) -> dict[str, Any]:
resp.raise_for_status()
return await resp.json()
- async def export_session_state(self, path: str | None = None) -> dict[str, Any]:
+ async def export_session_state(self, path: Optional[str] = None) -> dict[str, Any]:
"""Export session state to a file.
Args:
@@ -2003,7 +2005,7 @@ async def resume_from_checkpoint(
self,
info_hash: str,
checkpoint: dict[str, Any],
- torrent_path: str | None = None,
+ torrent_path: Optional[str] = None,
) -> dict[str, Any]:
"""Resume download from checkpoint.
@@ -2150,7 +2152,7 @@ async def set_per_peer_rate_limit(
async def get_per_peer_rate_limit(
self, info_hash: str, peer_key: str
- ) -> int | None:
+ ) -> Optional[int]:
"""Get per-peer upload rate limit for a specific peer.
Args:
@@ -2210,7 +2212,9 @@ async def get_metrics(self) -> str:
resp.raise_for_status()
return await resp.text()
- async def get_rate_samples(self, seconds: int | None = None) -> RateSamplesResponse:
+ async def get_rate_samples(
+ self, seconds: Optional[int] = None
+ ) -> RateSamplesResponse:
"""Get recent upload/download rate samples for graphing.
Args:
@@ -2272,7 +2276,7 @@ async def get_peer_metrics(self) -> GlobalPeerMetricsResponse:
async def get_torrent_dht_metrics(
self,
info_hash: str,
- ) -> DHTQueryMetricsResponse | None:
+ ) -> Optional[DHTQueryMetricsResponse]:
"""Get DHT query effectiveness metrics for a torrent."""
try:
data = await self._get_json(f"/metrics/torrents/{info_hash}/dht")
@@ -2285,7 +2289,7 @@ async def get_torrent_dht_metrics(
async def get_torrent_peer_quality(
self,
info_hash: str,
- ) -> PeerQualityMetricsResponse | None:
+ ) -> Optional[PeerQualityMetricsResponse]:
"""Get peer quality metrics for a torrent."""
try:
data = await self._get_json(f"/metrics/torrents/{info_hash}/peer-quality")
@@ -2386,7 +2390,7 @@ async def get_aggressive_discovery_status(
async def get_swarm_health_matrix(
self,
limit: int = 6,
- seconds: int | None = None,
+ seconds: Optional[int] = None,
) -> SwarmHealthMatrixResponse:
"""Get swarm health matrix combining performance, peer, and piece metrics.
@@ -2545,10 +2549,10 @@ async def connect_websocket(self) -> bool:
async def subscribe_events(
self,
- event_types: list[EventType] | None = None,
- info_hash: str | None = None,
- priority_filter: str | None = None,
- rate_limit: float | None = None,
+ event_types: Optional[list[EventType]] = None,
+ info_hash: Optional[str] = None,
+ priority_filter: Optional[str] = None,
+ rate_limit: Optional[float] = None,
) -> bool:
"""Subscribe to event types with optional filtering.
@@ -2584,7 +2588,7 @@ async def subscribe_events(
logger.exception("Error subscribing to events")
return False
- async def receive_event(self, timeout: float = 1.0) -> WebSocketEvent | None:
+ async def receive_event(self, timeout: float = 1.0) -> Optional[WebSocketEvent]:
"""Receive event from WebSocket.
Args:
@@ -2832,7 +2836,7 @@ async def is_daemon_running(self) -> bool:
return False
@staticmethod
- def get_daemon_pid() -> int | None:
+ def get_daemon_pid() -> Optional[int]:
"""Read daemon PID from file with validation and retry logic.
Returns:
diff --git a/ccbt/daemon/ipc_protocol.py b/ccbt/daemon/ipc_protocol.py
index bf6db4e..b689402 100644
--- a/ccbt/daemon/ipc_protocol.py
+++ b/ccbt/daemon/ipc_protocol.py
@@ -8,7 +8,7 @@
from __future__ import annotations
from enum import Enum
-from typing import Any
+from typing import Any, Optional
from pydantic import BaseModel, Field
@@ -86,7 +86,7 @@ class TorrentAddRequest(BaseModel):
"""Request to add a torrent."""
path_or_magnet: str = Field(..., description="Torrent file path or magnet URI")
- output_dir: str | None = Field(None, description="Output directory override")
+ output_dir: Optional[str] = Field(None, description="Output directory override")
resume: bool = Field(False, description="Resume from checkpoint if available")
@@ -105,7 +105,7 @@ class TorrentStatusResponse(BaseModel):
downloaded: int = Field(0, description="Downloaded bytes")
uploaded: int = Field(0, description="Uploaded bytes")
is_private: bool = Field(False, description="Whether torrent is private (BEP 27)")
- output_dir: str | None = Field(
+ output_dir: Optional[str] = Field(
None, description="Output directory where files are saved"
)
pieces_completed: int = Field(0, description="Number of completed pieces")
@@ -129,7 +129,7 @@ class PeerInfo(BaseModel):
download_rate: float = Field(0.0, description="Download rate from peer (bytes/sec)")
upload_rate: float = Field(0.0, description="Upload rate to peer (bytes/sec)")
choked: bool = Field(False, description="Whether peer is choked")
- client: str | None = Field(None, description="Peer client name")
+ client: Optional[str] = Field(None, description="Peer client name")
class PeerListResponse(BaseModel):
@@ -159,7 +159,7 @@ class TrackerInfo(BaseModel):
peers: int = Field(0, description="Number of peers from last scrape")
downloaders: int = Field(0, description="Number of downloaders from last scrape")
last_update: float = Field(0.0, description="Last update timestamp")
- error: str | None = Field(None, description="Error message if any")
+ error: Optional[str] = Field(None, description="Error message if any")
class TrackerListResponse(BaseModel):
@@ -293,7 +293,7 @@ class AllPeersRateLimitResponse(BaseModel):
class ExportStateRequest(BaseModel):
"""Request to export session state."""
- path: str | None = Field(
+ path: Optional[str] = Field(
None, description="Export path (optional, defaults to state dir)"
)
@@ -309,7 +309,7 @@ class ResumeCheckpointRequest(BaseModel):
info_hash: str = Field(..., description="Torrent info hash (hex)")
checkpoint: dict[str, Any] = Field(..., description="Checkpoint data")
- torrent_path: str | None = Field(
+ torrent_path: Optional[str] = Field(
None, description="Optional explicit torrent file path"
)
@@ -319,7 +319,9 @@ class ErrorResponse(BaseModel):
error: str = Field(..., description="Error message")
code: str = Field(..., description="Error code")
- details: dict[str, Any] | None = Field(None, description="Additional error details")
+ details: Optional[dict[str, Any]] = Field(
+ None, description="Additional error details"
+ )
class TorrentCancelRequest(BaseModel):
@@ -342,15 +344,15 @@ class WebSocketSubscribeRequest(BaseModel):
default_factory=list,
description="Event types to subscribe to (empty = all events)",
)
- info_hash: str | None = Field(
+ info_hash: Optional[str] = Field(
None,
description="Filter events to specific torrent (optional)",
)
- priority_filter: str | None = Field(
+ priority_filter: Optional[str] = Field(
None,
description="Filter by priority: 'critical', 'high', 'normal', 'low'",
)
- rate_limit: float | None = Field(
+ rate_limit: Optional[float] = Field(
None,
description="Maximum events per second (throttling)",
)
@@ -360,7 +362,7 @@ class WebSocketMessage(BaseModel):
"""WebSocket message."""
action: str = Field(..., description="Message action")
- data: dict[str, Any] | None = Field(None, description="Message data")
+ data: Optional[dict[str, Any]] = Field(None, description="Message data")
class WebSocketAuthMessage(BaseModel):
@@ -387,7 +389,7 @@ class FileInfo(BaseModel):
selected: bool = Field(..., description="Whether file is selected")
priority: str = Field(..., description="File priority")
progress: float = Field(0.0, ge=0.0, le=1.0, description="Download progress")
- attributes: str | None = Field(None, description="File attributes")
+ attributes: Optional[str] = Field(None, description="File attributes")
class FileListResponse(BaseModel):
@@ -451,9 +453,9 @@ class NATStatusResponse(BaseModel):
"""NAT status response."""
enabled: bool = Field(..., description="Whether NAT traversal is enabled")
- method: str | None = Field(None, description="NAT method (UPnP, NAT-PMP, etc.)")
- external_ip: str | None = Field(None, description="External IP address")
- mapped_port: int | None = Field(None, description="Mapped port")
+ method: Optional[str] = Field(None, description="NAT method (UPnP, NAT-PMP, etc.)")
+ external_ip: Optional[str] = Field(None, description="External IP address")
+ mapped_port: Optional[int] = Field(None, description="Mapped port")
mappings: list[dict[str, Any]] = Field(
default_factory=list, description="Active port mappings"
)
@@ -463,15 +465,15 @@ class NATMapRequest(BaseModel):
"""Request to map a port."""
internal_port: int = Field(..., description="Internal port")
- external_port: int | None = Field(None, description="External port (optional)")
+ external_port: Optional[int] = Field(None, description="External port (optional)")
protocol: str = Field("tcp", description="Protocol (tcp/udp)")
class ExternalIPResponse(BaseModel):
"""External IP address response."""
- external_ip: str | None = Field(None, description="External IP address")
- method: str | None = Field(
+ external_ip: Optional[str] = Field(None, description="External IP address")
+ method: Optional[str] = Field(
None, description="Method used to obtain IP (UPnP, NAT-PMP, etc.)"
)
@@ -480,7 +482,7 @@ class ExternalPortResponse(BaseModel):
"""External port mapping response."""
internal_port: int = Field(..., description="Internal port")
- external_port: int | None = Field(None, description="External port (if mapped)")
+ external_port: Optional[int] = Field(None, description="External port (if mapped)")
protocol: str = Field("tcp", description="Protocol (tcp/udp)")
@@ -533,14 +535,14 @@ class BlacklistAddRequest(BaseModel):
"""Request to add IP to blacklist."""
ip: str = Field(..., description="IP address to blacklist")
- reason: str | None = Field(None, description="Reason for blacklisting")
+ reason: Optional[str] = Field(None, description="Reason for blacklisting")
class WhitelistAddRequest(BaseModel):
"""Request to add IP to whitelist."""
ip: str = Field(..., description="IP address to whitelist")
- reason: str | None = Field(None, description="Reason for whitelisting")
+ reason: Optional[str] = Field(None, description="Reason for whitelisting")
class IPFilterStatsResponse(BaseModel):
@@ -673,7 +675,7 @@ class GlobalPeerMetrics(BaseModel):
0, description="Total bytes downloaded from peer"
)
total_bytes_uploaded: int = Field(0, description="Total bytes uploaded to peer")
- client: str | None = Field(None, description="Peer client name")
+ client: Optional[str] = Field(None, description="Peer client name")
choked: bool = Field(False, description="Whether peer is choked")
connection_duration: float = Field(
0.0, description="Connection duration in seconds"
@@ -928,8 +930,8 @@ class PeerEventData(BaseModel):
info_hash: str = Field(..., description="Torrent info hash (hex)")
peer_ip: str = Field(..., description="Peer IP address")
peer_port: int = Field(..., description="Peer port")
- peer_id: str | None = Field(None, description="Peer ID (hex)")
- client: str | None = Field(None, description="Peer client name")
+ peer_id: Optional[str] = Field(None, description="Peer ID (hex)")
+ client: Optional[str] = Field(None, description="Peer client name")
download_rate: float = Field(0.0, description="Download rate from peer (bytes/sec)")
upload_rate: float = Field(0.0, description="Upload rate to peer (bytes/sec)")
pieces_available: int = Field(0, description="Number of pieces available from peer")
@@ -941,7 +943,7 @@ class FileSelectionEventData(BaseModel):
info_hash: str = Field(..., description="Torrent info hash (hex)")
file_index: int = Field(..., description="File index")
selected: bool = Field(..., description="Whether file is selected")
- priority: str | None = Field(None, description="File priority")
+ priority: Optional[str] = Field(None, description="File priority")
progress: float = Field(0.0, ge=0.0, le=1.0, description="File download progress")
@@ -959,6 +961,6 @@ class ServiceEventData(BaseModel):
"""Data for service/component events."""
service_name: str = Field(..., description="Service name")
- component_name: str | None = Field(None, description="Component name (optional)")
+ component_name: Optional[str] = Field(None, description="Component name (optional)")
status: str = Field(..., description="Service/component status")
- error: str | None = Field(None, description="Error message if any")
+ error: Optional[str] = Field(None, description="Error message if any")
diff --git a/ccbt/daemon/ipc_server.py b/ccbt/daemon/ipc_server.py
index 69b967f..ffcef7d 100644
--- a/ccbt/daemon/ipc_server.py
+++ b/ccbt/daemon/ipc_server.py
@@ -13,7 +13,7 @@
import os
import ssl
import time
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
import aiohttp
from aiohttp import web
@@ -163,8 +163,8 @@ def __init__(
self.websocket_heartbeat_interval = websocket_heartbeat_interval
self.app = web.Application() # type: ignore[attr-defined]
- self.runner: web.AppRunner | None = None # type: ignore[attr-defined]
- self.site: web.TCPSite | None = None # type: ignore[attr-defined]
+ self.runner: Optional[web.AppRunner] = None # type: ignore[attr-defined]
+ self.site: Optional[web.TCPSite] = None # type: ignore[attr-defined]
self._start_time = time.time()
# WebSocket connections
@@ -2131,7 +2131,7 @@ async def _handle_aggressive_discovery_status(self, request: Request) -> Respons
async def _handle_add_torrent(self, request: Request) -> Response:
"""Handle POST /api/v1/torrents/add."""
- info_hash_hex: str | None = None
+ info_hash_hex: Optional[str] = None
path_or_magnet: str = "unknown"
try:
# Parse JSON request body with error handling
diff --git a/ccbt/daemon/main.py b/ccbt/daemon/main.py
index 2342f5a..468b1a7 100644
--- a/ccbt/daemon/main.py
+++ b/ccbt/daemon/main.py
@@ -10,7 +10,7 @@
import asyncio
import contextlib
import sys
-from typing import TYPE_CHECKING, Any, Callable, Coroutine
+from typing import TYPE_CHECKING, Any, Callable, Coroutine, Optional
if TYPE_CHECKING:
from pathlib import Path
@@ -80,7 +80,7 @@ class DaemonMain:
def __init__(
self,
- config_file: str | Path | None = None,
+ config_file: Optional[str | Path] = None,
foreground: bool = False,
):
"""Initialize daemon main.
@@ -108,11 +108,11 @@ def __init__(
state_dir=daemon_state_dir,
)
- self.session_manager: AsyncSessionManager | None = None
- self.ipc_server: IPCServer | None = None
+ self.session_manager: Optional[AsyncSessionManager] = None
+ self.ipc_server: Optional[IPCServer] = None
self._shutdown_event = asyncio.Event()
- self._auto_save_task: asyncio.Task | None = None
+ self._auto_save_task: Optional[asyncio.Task] = None
self._stopping = False # Flag to prevent double-calling stop()
@property
@@ -199,7 +199,7 @@ async def start(self) -> None:
# This ensures API key, Ed25519 keys, and TLS are ready before NAT manager starts
# Security initialization must happen before any network components
daemon_config = self.config.daemon
- api_key: str | None = None
+ api_key: Optional[str] = None
key_manager = None
tls_enabled = False
@@ -386,7 +386,7 @@ async def on_torrent_complete_callback(info_hash: bytes, name: str) -> None:
from typing import cast
self.session_manager.on_torrent_complete = cast( # type: ignore[assignment]
- "Callable[[bytes, str], None] | Callable[[bytes, str], Coroutine[Any, Any, None]] | None",
+ "Optional[Callable[[bytes, str], None] | Callable[[bytes, str], Coroutine[Any, Any, None]]]",
on_torrent_complete_callback,
)
@@ -669,7 +669,7 @@ async def run(self) -> None:
# CRITICAL FIX: Initialize keep_alive to None to ensure it's always in scope
# This prevents NameError if exception occurs before task creation
- keep_alive: asyncio.Task | None = None
+ keep_alive: Optional[asyncio.Task] = None
# CRITICAL: Create a background task to keep the event loop alive
# This ensures the loop never exits even if all other tasks complete
diff --git a/ccbt/daemon/state_manager.py b/ccbt/daemon/state_manager.py
index 61820ae..0d6ccde 100644
--- a/ccbt/daemon/state_manager.py
+++ b/ccbt/daemon/state_manager.py
@@ -12,7 +12,7 @@
import os
import time
from pathlib import Path
-from typing import Any
+from typing import Any, Optional
try:
import msgpack
@@ -36,7 +36,7 @@
class StateManager:
"""Manages daemon state persistence using msgpack format."""
- def __init__(self, state_dir: str | Path | None = None):
+ def __init__(self, state_dir: Optional[str | Path] = None):
"""Initialize state manager.
Args:
@@ -109,7 +109,7 @@ async def save_state(self, session_manager: Any) -> None:
logger.exception("Error saving state")
raise
- async def load_state(self) -> DaemonState | None:
+ async def load_state(self) -> Optional[DaemonState]:
"""Load state from msgpack file.
Returns:
@@ -380,7 +380,7 @@ async def validate_state(self, state: DaemonState) -> bool:
async def _migrate_state(
self, state: DaemonState, from_version: str
- ) -> DaemonState | None:
+ ) -> Optional[DaemonState]:
"""Migrate state from an older version to current version.
Args:
diff --git a/ccbt/daemon/state_models.py b/ccbt/daemon/state_models.py
index 80a034f..675b5d6 100644
--- a/ccbt/daemon/state_models.py
+++ b/ccbt/daemon/state_models.py
@@ -6,7 +6,7 @@
from __future__ import annotations
import time
-from typing import Any
+from typing import Any, Optional
from pydantic import BaseModel, Field
@@ -44,12 +44,14 @@ class TorrentState(BaseModel):
total_size: int = Field(0, description="Total size in bytes")
downloaded: int = Field(0, description="Downloaded bytes")
uploaded: int = Field(0, description="Uploaded bytes")
- torrent_file_path: str | None = Field(None, description="Path to torrent file")
- magnet_uri: str | None = Field(None, description="Magnet URI if added via magnet")
- per_torrent_options: dict[str, Any] | None = Field(
+ torrent_file_path: Optional[str] = Field(None, description="Path to torrent file")
+ magnet_uri: Optional[str] = Field(
+ None, description="Magnet URI if added via magnet"
+ )
+ per_torrent_options: Optional[dict[str, Any]] = Field(
None, description="Per-torrent configuration options"
)
- rate_limits: dict[str, int] | None = Field(
+ rate_limits: Optional[dict[str, int]] = Field(
None, description="Per-torrent rate limits: {down_kib: int, up_kib: int}"
)
diff --git a/ccbt/daemon/utils.py b/ccbt/daemon/utils.py
index 1bfd16a..3828b93 100644
--- a/ccbt/daemon/utils.py
+++ b/ccbt/daemon/utils.py
@@ -8,7 +8,7 @@
from __future__ import annotations
import secrets
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional
from ccbt.utils.logging_config import get_logger
@@ -50,7 +50,7 @@ def validate_api_key(api_key: str) -> bool:
return False
-def migrate_api_key_to_ed25519(key_dir: Path | str | None = None) -> bool:
+def migrate_api_key_to_ed25519(key_dir: Optional[Path | str] = None) -> bool:
"""Migrate from api_key to Ed25519 keys.
Generates Ed25519 keys if they don't exist and api_key does.
diff --git a/ccbt/discovery/bloom_filter.py b/ccbt/discovery/bloom_filter.py
index 5f48bdc..530cff1 100644
--- a/ccbt/discovery/bloom_filter.py
+++ b/ccbt/discovery/bloom_filter.py
@@ -10,6 +10,7 @@
import hashlib
import logging
import struct
+from typing import Optional
logger = logging.getLogger(__name__)
@@ -83,7 +84,7 @@ def __init__(
self,
size: int = 1024 * 8, # 1KB default
hash_count: int = 3,
- bit_array: bytearray | None = None,
+ bit_array: Optional[bytearray] = None,
):
"""Initialize bloom filter.
@@ -249,7 +250,7 @@ def intersection(self, other: BloomFilter) -> BloomFilter:
return result
- def false_positive_rate(self, expected_items: int | None = None) -> float:
+ def false_positive_rate(self, expected_items: Optional[int] = None) -> float:
"""Calculate false positive rate.
Args:
diff --git a/ccbt/discovery/dht.py b/ccbt/discovery/dht.py
index 594d2c4..59eb25e 100644
--- a/ccbt/discovery/dht.py
+++ b/ccbt/discovery/dht.py
@@ -1,7 +1,5 @@
"""Enhanced DHT (BEP 5) client with full Kademlia implementation.
-from __future__ import annotations
-
Provides high-performance peer discovery using Kademlia routing table,
iterative lookups, token verification, and continuous refresh.
"""
@@ -15,7 +13,7 @@
import socket
import time
from dataclasses import dataclass, field
-from typing import Any, Callable
+from typing import Any, Callable, Optional, Union
from ccbt.config.config import get_config
from ccbt.core.bencode import BencodeDecoder, BencodeEncoder
@@ -44,8 +42,8 @@ class DHTNode:
failed_queries: int = 0
successful_queries: int = 0
# IPv6 support
- ipv6: str | None = None
- port6: int | None = None
+ ipv6: Optional[str] = None
+ port6: Optional[int] = None
has_ipv6: bool = False
additional_addresses: list[tuple[str, int]] = field(default_factory=list)
@@ -283,7 +281,9 @@ def remove_node(self, node_id: bytes) -> None:
bucket.remove(node)
del self.nodes[node_id]
- def mark_node_bad(self, node_id: bytes, response_time: float | None = None) -> None:
+ def mark_node_bad(
+ self, node_id: bytes, response_time: Optional[float] = None
+ ) -> None:
"""Mark a node as bad and update quality metrics.
Args:
@@ -337,7 +337,7 @@ def mark_node_bad(self, node_id: bytes, response_time: float | None = None) -> N
node.quality_score = node.success_rate * time_factor
def mark_node_good(
- self, node_id: bytes, response_time: float | None = None
+ self, node_id: bytes, response_time: Optional[float] = None
) -> None:
"""Mark a node as good and update quality metrics.
@@ -459,8 +459,8 @@ def __init__(
# Network
self.bind_ip = bind_ip
self.bind_port = bind_port
- self.socket: asyncio.DatagramProtocol | None = None
- self.transport: asyncio.DatagramTransport | None = None
+ self.socket: Optional[asyncio.DatagramProtocol] = None
+ self.transport: Optional[asyncio.DatagramTransport] = None
# Routing table
self.routing_table = KademliaRoutingTable(self.node_id)
@@ -513,18 +513,18 @@ def __init__(
self.query_timeout = self.config.network.dht_timeout
# Peer manager reference for health tracking (optional)
- self.peer_manager: Any | None = None
+ self.peer_manager: Optional[Any] = None
# Adaptive timeout calculator (lazy initialization)
- self._timeout_calculator: Any | None = None
+ self._timeout_calculator: Optional[Any] = None
# Tokens for announce_peer
self.tokens: dict[bytes, DHTToken] = {}
self.token_secret = os.urandom(20)
# Background tasks
- self._refresh_task: asyncio.Task | None = None
- self._cleanup_task: asyncio.Task | None = None
+ self._refresh_task: Optional[asyncio.Task] = None
+ self._cleanup_task: Optional[asyncio.Task] = None
# Callbacks with info_hash filtering
# Maps info_hash -> list of callbacks, or None for global callbacks
@@ -534,7 +534,7 @@ def __init__(
] = {}
# BEP 27: Callback to check if a torrent is private
- self.is_private_torrent: Callable[[bytes], bool] | None = None
+ self.is_private_torrent: Optional[Callable[[bytes], bool]] = None
def _generate_node_id(self) -> bytes:
"""Generate a random node ID."""
@@ -987,7 +987,7 @@ async def _query_node_for_peers(
self,
node: DHTNode,
info_hash: bytes,
- ) -> dict[bytes, Any] | None:
+ ) -> Optional[dict[bytes, Any]]:
"""Query a single node for peers.
Args:
@@ -1050,7 +1050,7 @@ async def get_peers(
max_peers: int = 50,
alpha: int = 3, # Parallel queries (BEP 5)
k: int = 8, # Bucket size
- max_depth: int | None = None, # Override max depth (default: 10)
+ max_depth: Optional[int] = None, # Override max depth (default: 10)
) -> list[tuple[str, int]]:
"""Get peers for an info hash using proper Kademlia iterative lookup (BEP 5).
@@ -1537,8 +1537,8 @@ async def announce_peer(self, info_hash: bytes, port: int) -> int:
async def get_data(
self,
key: bytes,
- _public_key: bytes | None = None,
- ) -> bytes | None:
+ _public_key: Optional[bytes] = None,
+ ) -> Optional[bytes]:
"""Get data from DHT using BEP 44 get_mutable query.
Args:
@@ -1559,7 +1559,7 @@ async def get_data(
async def put_data(
self,
key: bytes,
- value: bytes | dict[bytes, bytes],
+ value: Union[bytes, dict[bytes, bytes]],
) -> int:
"""Put data to DHT using BEP 44 put_mutable query.
@@ -1628,7 +1628,7 @@ async def query_infohash_index(
self,
query: str,
max_results: int = 50,
- public_key: bytes | None = None,
+ public_key: Optional[bytes] = None,
) -> list:
"""Query the infohash index (BEP 51).
@@ -1684,7 +1684,7 @@ async def _send_query(
addr: tuple[str, int],
query: str,
args: dict[bytes, Any],
- ) -> dict[bytes, Any] | None:
+ ) -> Optional[dict[bytes, Any]]:
"""Send a DHT query and wait for response, tracking response time for quality metrics."""
# Calculate adaptive timeout based on peer health
query_timeout = self._calculate_adaptive_query_timeout()
@@ -1709,7 +1709,7 @@ async def _send_query(
# Track response time for quality metrics
start_time = time.time()
- response_time: float | None = None
+ response_time: Optional[float] = None
# Wait for response
try:
@@ -1985,7 +1985,7 @@ def _invoke_peer_callbacks(
def add_peer_callback(
self,
callback: Callable[[list[tuple[str, int]]], None],
- info_hash: bytes | None = None,
+ info_hash: Optional[bytes] = None,
) -> None:
"""Add callback for new peers.
@@ -2015,7 +2015,7 @@ def add_peer_callback(
def remove_peer_callback(
self,
callback: Callable[[list[tuple[str, int]]], None],
- info_hash: bytes | None = None,
+ info_hash: Optional[bytes] = None,
) -> None:
"""Remove peer callback.
@@ -2063,7 +2063,7 @@ def error_received(self, exc: Exception) -> None:
# Global DHT client instance
-_dht_client: AsyncDHTClient | None = None
+_dht_client: Optional[AsyncDHTClient] = None
def get_dht_client() -> AsyncDHTClient:
diff --git a/ccbt/discovery/dht_indexing.py b/ccbt/discovery/dht_indexing.py
index 75b0822..7de72fa 100644
--- a/ccbt/discovery/dht_indexing.py
+++ b/ccbt/discovery/dht_indexing.py
@@ -10,7 +10,7 @@
import logging
import time
from dataclasses import dataclass, field
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional
from ccbt.discovery.dht_storage import (
DHTMutableData,
@@ -69,7 +69,7 @@ async def store_infohash_sample(
public_key: bytes,
private_key: bytes,
salt: bytes = b"",
- dht_client: AsyncDHTClient | None = None,
+ dht_client: Optional[AsyncDHTClient] = None,
) -> bytes:
"""Store an infohash sample in the index (BEP 51) using BEP 44.
@@ -201,8 +201,8 @@ async def store_infohash_sample(
async def query_index(
query: str,
max_results: int = 50,
- dht_client: AsyncDHTClient | None = None,
- public_key: bytes | None = None,
+ dht_client: Optional[AsyncDHTClient] = None,
+ public_key: Optional[bytes] = None,
) -> list[DHTInfohashSample]:
"""Query the index for matching infohash samples (BEP 51) using BEP 44.
@@ -317,7 +317,7 @@ async def query_index(
def update_index_entry(
key: bytes, # noqa: ARG001
sample: DHTInfohashSample,
- existing_entry: DHTIndexEntry | None = None,
+ existing_entry: Optional[DHTIndexEntry] = None,
max_samples: int = 8,
) -> DHTIndexEntry:
"""Update an index entry with a new sample (BEP 51).
diff --git a/ccbt/discovery/dht_multiaddr.py b/ccbt/discovery/dht_multiaddr.py
index 8e0eb73..9d640e7 100644
--- a/ccbt/discovery/dht_multiaddr.py
+++ b/ccbt/discovery/dht_multiaddr.py
@@ -9,7 +9,7 @@
import ipaddress
import logging
from enum import Enum
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING: # pragma: no cover
from ccbt.discovery.dht import DHTNode
@@ -125,7 +125,7 @@ def encode_multi_address_node(node: DHTNode) -> dict[bytes, Any]:
def decode_multi_address_node(
- data: dict[bytes, Any], node_id: bytes | None = None
+ data: dict[bytes, Any], node_id: Optional[bytes] = None
) -> DHTNode:
"""Decode a DHT node from response with multiple addresses (BEP 45).
@@ -286,8 +286,8 @@ def validate_address(ip: str, port: int) -> bool:
async def discover_node_addresses(
known_addresses: list[tuple[str, int]],
max_results: int = 4,
- node_id: bytes | None = None,
- dht_client: Any | None = None,
+ node_id: Optional[bytes] = None,
+ dht_client: Optional[Any] = None,
) -> list[tuple[str, int]]:
"""Discover additional addresses for a node from known addresses and DHT.
diff --git a/ccbt/discovery/dht_storage.py b/ccbt/discovery/dht_storage.py
index a039af7..05ef80d 100644
--- a/ccbt/discovery/dht_storage.py
+++ b/ccbt/discovery/dht_storage.py
@@ -11,7 +11,7 @@
import time
from dataclasses import dataclass, field
from enum import Enum
-from typing import Any
+from typing import Any, Optional, Union
try:
from cryptography.hazmat.primitives import hashes as crypto_hashes
@@ -276,7 +276,7 @@ def verify_mutable_data_signature(
def encode_storage_value(
- data: DHTImmutableData | DHTMutableData,
+ data: Union[DHTImmutableData, DHTMutableData],
) -> dict[bytes, Any]:
"""Encode storage value for DHT message (BEP 44).
@@ -325,7 +325,7 @@ def encode_storage_value(
def decode_storage_value(
value_dict: dict[bytes, Any],
key_type: DHTStorageKeyType,
-) -> DHTImmutableData | DHTMutableData:
+) -> Union[DHTImmutableData, DHTMutableData]:
"""Decode storage value from DHT message (BEP 44).
Args:
@@ -389,7 +389,7 @@ class DHTStorageCacheEntry:
"""Cache entry for stored DHT data."""
key: bytes
- value: DHTImmutableData | DHTMutableData
+ value: Union[DHTImmutableData, DHTMutableData]
stored_at: float = field(default_factory=time.time)
expires_at: float = field(default_factory=lambda: time.time() + 3600.0)
@@ -407,7 +407,7 @@ def __init__(self, default_ttl: int = 3600):
self.cache: dict[bytes, DHTStorageCacheEntry] = {}
self.default_ttl = default_ttl
- def get(self, key: bytes) -> DHTImmutableData | DHTMutableData | None:
+ def get(self, key: bytes) -> Optional[Union[DHTImmutableData, DHTMutableData]]:
"""Get cached value.
Args:
@@ -431,8 +431,8 @@ def get(self, key: bytes) -> DHTImmutableData | DHTMutableData | None:
def put(
self,
key: bytes,
- value: DHTImmutableData | DHTMutableData,
- ttl: int | None = None,
+ value: Union[DHTImmutableData, DHTMutableData],
+ ttl: Optional[int] = None,
) -> None:
"""Store value in cache.
diff --git a/ccbt/discovery/distributed_tracker.py b/ccbt/discovery/distributed_tracker.py
index 6f23887..fc92bf2 100644
--- a/ccbt/discovery/distributed_tracker.py
+++ b/ccbt/discovery/distributed_tracker.py
@@ -8,7 +8,7 @@
import hashlib
import logging
import time
-from typing import Any
+from typing import Any, Optional
from ccbt.models import PeerInfo
@@ -46,7 +46,7 @@ def __init__(
self.sync_interval = sync_interval
# Tracker data: info_hash -> list of (ip, port, peer_id)
- self.tracker_data: dict[bytes, list[tuple[str, int, bytes | None]]] = {}
+ self.tracker_data: dict[bytes, list[tuple[str, int, Optional[bytes]]]] = {}
self.last_sync = 0.0
async def announce(
@@ -54,7 +54,7 @@ async def announce(
info_hash: bytes,
peer_ip: str,
peer_port: int,
- peer_id: bytes | None = None,
+ peer_id: Optional[bytes] = None,
) -> None:
"""Announce peer for torrent.
diff --git a/ccbt/discovery/flooding.py b/ccbt/discovery/flooding.py
index 101a346..f0f5cfe 100644
--- a/ccbt/discovery/flooding.py
+++ b/ccbt/discovery/flooding.py
@@ -8,7 +8,7 @@
import hashlib
import logging
import time
-from typing import Any, Callable
+from typing import Any, Callable, Optional
logger = logging.getLogger(__name__)
@@ -30,7 +30,7 @@ def __init__(
self,
node_id: str,
max_hops: int = 10,
- message_callback: Callable[[dict[str, Any], str, int], None] | None = None,
+ message_callback: Optional[Callable[[dict[str, Any], str, int], None]] = None,
):
"""Initialize controlled flooding.
@@ -65,7 +65,7 @@ async def flood_message(
self,
message: dict[str, Any],
priority: int = 0,
- target_peers: list[str] | None = None,
+ target_peers: Optional[list[str]] = None,
) -> None:
"""Flood a message to peers.
diff --git a/ccbt/discovery/gossip.py b/ccbt/discovery/gossip.py
index e998a85..c986e37 100644
--- a/ccbt/discovery/gossip.py
+++ b/ccbt/discovery/gossip.py
@@ -11,7 +11,7 @@
import logging
import random
import time
-from typing import Any, Callable
+from typing import Any, Callable, Optional
logger = logging.getLogger(__name__)
@@ -37,7 +37,7 @@ def __init__(
fanout: int = 3,
interval: float = 5.0,
message_ttl: float = 300.0, # 5 minutes
- peer_callback: Callable[[str], list[str]] | None = None,
+ peer_callback: Optional[Callable[[str], list[str]]] = None,
):
"""Initialize gossip protocol.
@@ -61,8 +61,8 @@ def __init__(
self.received_messages: set[str] = set() # For deduplication
self.running = False
- self._gossip_task: asyncio.Task | None = None
- self._cleanup_task: asyncio.Task | None = None
+ self._gossip_task: Optional[asyncio.Task] = None
+ self._cleanup_task: Optional[asyncio.Task] = None
async def start(self) -> None:
"""Start gossip protocol."""
diff --git a/ccbt/discovery/lpd.py b/ccbt/discovery/lpd.py
index e2d8970..77accc5 100644
--- a/ccbt/discovery/lpd.py
+++ b/ccbt/discovery/lpd.py
@@ -10,7 +10,7 @@
import logging
import socket
import struct
-from typing import Callable
+from typing import Callable, Optional
logger = logging.getLogger(__name__)
@@ -38,7 +38,7 @@ def __init__(
listen_port: int,
multicast_address: str = LPD_MULTICAST_ADDRESS,
multicast_port: int = LPD_MULTICAST_PORT,
- peer_callback: Callable[[str, int], None] | None = None,
+ peer_callback: Optional[Callable[[str, int], None]] = None,
):
"""Initialize Local Peer Discovery.
@@ -54,9 +54,9 @@ def __init__(
self.multicast_port = multicast_port
self.peer_callback = peer_callback
self.running = False
- self._socket: socket.socket | None = None
- self._listen_task: asyncio.Task | None = None
- self._announce_task: asyncio.Task | None = None
+ self._socket: Optional[socket.socket] = None
+ self._listen_task: Optional[asyncio.Task] = None
+ self._announce_task: Optional[asyncio.Task] = None
self._announce_interval = 300.0 # 5 minutes (BEP 14 recommendation)
async def start(self) -> None:
diff --git a/ccbt/discovery/pex.py b/ccbt/discovery/pex.py
index 045a244..2049b43 100644
--- a/ccbt/discovery/pex.py
+++ b/ccbt/discovery/pex.py
@@ -14,7 +14,7 @@
import time
from collections import defaultdict, deque
from dataclasses import dataclass, field
-from typing import Awaitable, Callable
+from typing import Awaitable, Callable, Optional
from ccbt.config import get_config
@@ -25,7 +25,7 @@ class PexPeer:
ip: str
port: int
- peer_id: bytes | None = None
+ peer_id: Optional[bytes] = None
added_time: float = field(default_factory=time.time)
source: str = "pex" # Source of this peer (pex, tracker, dht, etc.)
reliability_score: float = 1.0
@@ -36,7 +36,7 @@ class PexSession:
"""PEX session with a single peer."""
peer_key: str
- ut_pex_id: int | None = None
+ ut_pex_id: Optional[int] = None
last_pex_time: float = 0.0
pex_interval: float = 30.0
is_supported: bool = False
@@ -67,19 +67,19 @@ def __init__(self):
self.throttle_interval = 10.0
# Background tasks
- self._pex_task: asyncio.Task | None = None
- self._cleanup_task: asyncio.Task | None = None
+ self._pex_task: Optional[asyncio.Task] = None
+ self._cleanup_task: Optional[asyncio.Task] = None
# Callback for sending PEX messages via extension protocol
# Signature: (peer_key: str, peer_data: bytes, is_added: bool) -> bool
- self.send_pex_callback: Callable[[str, bytes, bool], Awaitable[bool]] | None = (
- None
- )
+ self.send_pex_callback: Optional[
+ Callable[[str, bytes, bool], Awaitable[bool]]
+ ] = None
# Callback to get connected peers for PEX messages
- self.get_connected_peers_callback: (
- Callable[[], Awaitable[list[tuple[str, int]]]] | None
- ) = None
+ self.get_connected_peers_callback: Optional[
+ Callable[[], Awaitable[list[tuple[str, int]]]]
+ ] = None
# Track peers we've already sent to each session (to avoid duplicates)
self.peers_sent_to_session: dict[str, set[tuple[str, int]]] = defaultdict(set)
diff --git a/ccbt/discovery/tracker.py b/ccbt/discovery/tracker.py
index 507166f..bec687c 100644
--- a/ccbt/discovery/tracker.py
+++ b/ccbt/discovery/tracker.py
@@ -16,7 +16,7 @@
import urllib.parse
import urllib.request
from dataclasses import dataclass
-from typing import Any, Callable
+from typing import Any, Callable, Optional, Union
import aiohttp
@@ -103,11 +103,11 @@ class TrackerResponse:
peers: (
list[PeerInfo] | list[dict[str, Any]]
) # Support both formats for backward compatibility
- complete: int | None = None
- incomplete: int | None = None
- download_url: str | None = None
- tracker_id: str | None = None
- warning_message: str | None = None
+ complete: Optional[int] = None
+ incomplete: Optional[int] = None
+ download_url: Optional[str] = None
+ tracker_id: Optional[str] = None
+ warning_message: Optional[str] = None
@dataclass
@@ -142,16 +142,16 @@ class TrackerSession:
url: str
last_announce: float = 0.0
interval: int = 1800
- min_interval: int | None = None
- tracker_id: str | None = None
+ min_interval: Optional[int] = None
+ tracker_id: Optional[str] = None
failure_count: int = 0
last_failure: float = 0.0
backoff_delay: float = 1.0
performance: TrackerPerformance = None # type: ignore[assignment]
# Statistics from last tracker response (announce or scrape)
- last_complete: int | None = None # Number of seeders (complete peers)
- last_incomplete: int | None = None # Number of leechers (incomplete peers)
- last_downloaded: int | None = None # Total number of completed downloads
+ last_complete: Optional[int] = None # Number of seeders (complete peers)
+ last_incomplete: Optional[int] = None # Number of leechers (incomplete peers)
+ last_downloaded: Optional[int] = None # Total number of completed downloads
last_scrape_time: float = 0.0 # Timestamp of last scrape/announce with statistics
def __post_init__(self):
@@ -163,7 +163,7 @@ def __post_init__(self):
class AsyncTrackerClient:
"""High-performance async client for communicating with BitTorrent trackers."""
- def __init__(self, peer_id_prefix: bytes | None = None):
+ def __init__(self, peer_id_prefix: Optional[bytes] = None):
"""Initialize the async tracker client.
Args:
@@ -184,7 +184,7 @@ def __init__(self, peer_id_prefix: bytes | None = None):
self.user_agent = get_user_agent()
# HTTP session
- self.session: aiohttp.ClientSession | None = None
+ self.session: Optional[aiohttp.ClientSession] = None
# Tracker sessions
self.sessions: dict[str, TrackerSession] = {}
@@ -193,7 +193,7 @@ def __init__(self, peer_id_prefix: bytes | None = None):
self.health_manager = TrackerHealthManager()
# Background tasks
- self._announce_task: asyncio.Task | None = None
+ self._announce_task: Optional[asyncio.Task] = None
# Session metrics
self._session_metrics: dict[str, dict[str, Any]] = {}
@@ -203,9 +203,9 @@ def __init__(self, peer_id_prefix: bytes | None = None):
# CRITICAL FIX: Immediate peer connection callback
# This allows sessions to connect peers immediately when tracker responses arrive
# instead of waiting for the announce loop to process them
- self.on_peers_received: (
- Callable[[list[PeerInfo] | list[dict[str, Any]], str], None] | None
- ) = None
+ self.on_peers_received: Optional[
+ Callable[[Union[list[PeerInfo], list[dict[str, Any]]], str], None]
+ ] = None
async def _call_immediate_connection(
self, peers: list[dict[str, Any]], tracker_url: str
@@ -475,7 +475,9 @@ async def stop(self) -> None:
self.logger.info("Async tracker client stopped")
- def get_healthy_trackers(self, exclude_urls: set[str] | None = None) -> list[str]:
+ def get_healthy_trackers(
+ self, exclude_urls: Optional[set[str]] = None
+ ) -> list[str]:
"""Get list of healthy trackers for use in announces.
Args:
@@ -487,7 +489,9 @@ def get_healthy_trackers(self, exclude_urls: set[str] | None = None) -> list[str
"""
return self.health_manager.get_healthy_trackers(exclude_urls)
- def get_fallback_trackers(self, exclude_urls: set[str] | None = None) -> list[str]:
+ def get_fallback_trackers(
+ self, exclude_urls: Optional[set[str]] = None
+ ) -> list[str]:
"""Get fallback trackers when no healthy trackers are available.
Args:
@@ -784,9 +788,9 @@ async def announce(
port: int = 6881,
uploaded: int = 0,
downloaded: int = 0,
- left: int | None = None,
+ left: Optional[int] = None,
event: str = "started",
- ) -> TrackerResponse | None:
+ ) -> Optional[TrackerResponse]:
"""Announce to the tracker and get peer list asynchronously.
Args:
@@ -977,7 +981,7 @@ async def announce(
# Track performance: start time
start_time = time.time()
- response_time: float | None = None
+ response_time: Optional[float] = None
# Emit tracker announce started event
try:
@@ -1506,7 +1510,7 @@ async def announce_to_multiple(
port: int = 6881,
uploaded: int = 0,
downloaded: int = 0,
- left: int | None = None,
+ left: Optional[int] = None,
event: str = "started",
) -> list[TrackerResponse]:
"""Announce to multiple trackers concurrently.
@@ -1708,9 +1712,9 @@ async def _announce_to_tracker(
port: int,
uploaded: int,
downloaded: int,
- left: int | None,
+ left: Optional[int],
event: str,
- ) -> TrackerResponse | None:
+ ) -> Optional[TrackerResponse]:
"""Announce to a single tracker.
Returns:
@@ -2595,7 +2599,7 @@ async def scrape(self, torrent_data: dict[str, Any]) -> dict[str, Any]:
self.logger.exception("HTTP scrape failed")
return {}
- def _build_scrape_url(self, info_hash: bytes, announce_url: str) -> str | None:
+ def _build_scrape_url(self, info_hash: bytes, announce_url: str) -> Optional[str]:
"""Build scrape URL from tracker URL.
Args:
@@ -2842,7 +2846,7 @@ def __init__(self):
}
# Background cleanup task
- self._cleanup_task: asyncio.Task | None = None
+ self._cleanup_task: Optional[asyncio.Task] = None
self._running = False
async def start(self):
@@ -2927,7 +2931,9 @@ def record_tracker_result(
else:
metrics.record_failure()
- def get_healthy_trackers(self, exclude_urls: set[str] | None = None) -> list[str]:
+ def get_healthy_trackers(
+ self, exclude_urls: Optional[set[str]] = None
+ ) -> list[str]:
"""Get list of healthy trackers, optionally excluding some URLs."""
if exclude_urls is None:
exclude_urls = set()
@@ -2942,7 +2948,9 @@ def get_healthy_trackers(self, exclude_urls: set[str] | None = None) -> list[str
return [url for url, _ in healthy]
- def get_fallback_trackers(self, exclude_urls: set[str] | None = None) -> list[str]:
+ def get_fallback_trackers(
+ self, exclude_urls: Optional[set[str]] = None
+ ) -> list[str]:
"""Get fallback trackers that aren't already in use."""
if exclude_urls is None:
exclude_urls = set()
@@ -2977,7 +2985,7 @@ def get_tracker_stats(self) -> dict[str, Any]:
class TrackerClient:
"""Synchronous tracker client for backward compatibility."""
- def __init__(self, peer_id_prefix: bytes | None = None):
+ def __init__(self, peer_id_prefix: Optional[bytes] = None):
"""Initialize the tracker client.
Args:
@@ -3248,7 +3256,7 @@ def announce(
port: int = 6881,
uploaded: int = 0,
downloaded: int = 0,
- left: int | None = None,
+ left: Optional[int] = None,
event: str = "started",
) -> dict[str, Any]:
"""Announce to the tracker and get peer list.
diff --git a/ccbt/discovery/tracker_udp_client.py b/ccbt/discovery/tracker_udp_client.py
index 16ec539..a9ab3c6 100644
--- a/ccbt/discovery/tracker_udp_client.py
+++ b/ccbt/discovery/tracker_udp_client.py
@@ -13,7 +13,7 @@
import time
from dataclasses import dataclass
from enum import Enum
-from typing import Any, Callable
+from typing import Any, Callable, Optional
from ccbt.config.config import get_config
@@ -45,16 +45,16 @@ class TrackerResponse:
action: TrackerAction
transaction_id: int
- connection_id: int | None = None
- interval: int | None = None
- leechers: int | None = None
- seeders: int | None = None
- peers: list[dict[str, Any]] | None = None
- error_message: str | None = None
+ connection_id: Optional[int] = None
+ interval: Optional[int] = None
+ leechers: Optional[int] = None
+ seeders: Optional[int] = None
+ peers: Optional[list[dict[str, Any]]] = None
+ error_message: Optional[str] = None
# Scrape-specific fields
- complete: int | None = None # Seeders in scrape response
- downloaded: int | None = None # Completed downloads in scrape response
- incomplete: int | None = None # Leechers in scrape response
+ complete: Optional[int] = None # Seeders in scrape response
+ downloaded: Optional[int] = None # Completed downloads in scrape response
+ incomplete: Optional[int] = None # Leechers in scrape response
@dataclass
@@ -64,22 +64,22 @@ class TrackerSession:
url: str
host: str
port: int
- connection_id: int | None = None
+ connection_id: Optional[int] = None
connection_time: float = 0.0
last_announce: float = 0.0
# Interval suggested by tracker for next announce (seconds)
- interval: int | None = None
+ interval: Optional[int] = None
retry_count: int = 0
backoff_delay: float = 1.0
max_retries: int = 3
is_connected: bool = False
- last_response_time: float | None = None
+ last_response_time: Optional[float] = None
class AsyncUDPTrackerClient:
"""High-performance async UDP tracker client."""
- def __init__(self, peer_id: bytes | None = None, test_mode: bool = False):
+ def __init__(self, peer_id: Optional[bytes] = None, test_mode: bool = False):
"""Initialize UDP tracker client.
Args:
@@ -99,15 +99,15 @@ def __init__(self, peer_id: bytes | None = None, test_mode: bool = False):
self.sessions: dict[str, TrackerSession] = {}
# UDP socket
- self.socket: asyncio.DatagramProtocol | None = None
- self.transport: asyncio.DatagramTransport | None = None
+ self.socket: Optional[asyncio.DatagramProtocol] = None
+ self.transport: Optional[asyncio.DatagramTransport] = None
self.transaction_counter = 0
# Pending requests
self.pending_requests: dict[int, asyncio.Future] = {}
# Background tasks
- self._cleanup_task: asyncio.Task | None = None
+ self._cleanup_task: Optional[asyncio.Task] = None
# CRITICAL FIX: Add lock to prevent concurrent socket operations
# Windows requires serialized access to UDP sockets to prevent WinError 10022
@@ -132,9 +132,9 @@ def __init__(self, peer_id: bytes | None = None, test_mode: bool = False):
# CRITICAL FIX: Immediate peer connection callback
# This allows sessions to connect peers immediately when tracker responses arrive
# instead of waiting for the announce loop to process them
- self.on_peers_received: Callable[[list[dict[str, Any]], str], None] | None = (
- None
- )
+ self.on_peers_received: Optional[
+ Callable[[list[dict[str, Any]], str], None]
+ ] = None
# Test mode: bypass socket validation for testing
self._test_mode: bool = test_mode
@@ -155,12 +155,14 @@ async def announce_to_tracker_full(
self,
url: str,
torrent_data: dict[str, Any],
- port: int | None = None,
+ port: Optional[int] = None,
uploaded: int = 0,
downloaded: int = 0,
left: int = 0,
event: TrackerEvent = TrackerEvent.STARTED,
- ) -> tuple[list[dict[str, Any]], int | None, int | None, int | None] | None:
+ ) -> Optional[
+ tuple[list[dict[str, Any]], Optional[int], Optional[int], Optional[int]]
+ ]:
"""Announce to tracker with full response (public API wrapper).
Args:
@@ -668,7 +670,7 @@ async def announce(
torrent_data: dict[str, Any],
uploaded: int = 0,
downloaded: int = 0,
- left: int | None = None,
+ left: Optional[int] = None,
event: TrackerEvent = TrackerEvent.STARTED,
) -> list[dict[str, Any]]:
"""Announce to UDP trackers and get peer list.
@@ -765,7 +767,7 @@ async def _announce_to_tracker(
self,
url: str,
torrent_data: dict[str, Any],
- port: int | None = None,
+ port: Optional[int] = None,
uploaded: int = 0,
downloaded: int = 0,
left: int = 0,
@@ -876,12 +878,14 @@ async def _announce_to_tracker_full(
self,
url: str,
torrent_data: dict[str, Any],
- port: int | None = None,
+ port: Optional[int] = None,
uploaded: int = 0,
downloaded: int = 0,
left: int = 0,
event: TrackerEvent = TrackerEvent.STARTED,
- ) -> tuple[list[dict[str, Any]], int | None, int | None, int | None] | None:
+ ) -> Optional[
+ tuple[list[dict[str, Any]], Optional[int], Optional[int], Optional[int]]
+ ]:
"""Announce to a single UDP tracker and return full response info.
Returns:
@@ -1421,7 +1425,7 @@ async def _send_announce(
self,
session: TrackerSession,
torrent_data: dict[str, Any],
- port: int | None = None,
+ port: Optional[int] = None,
uploaded: int = 0,
downloaded: int = 0,
left: int = 0,
@@ -1716,12 +1720,14 @@ async def _send_announce_full(
self,
session: TrackerSession,
torrent_data: dict[str, Any],
- port: int | None = None,
+ port: Optional[int] = None,
uploaded: int = 0,
downloaded: int = 0,
left: int = 0,
event: TrackerEvent = TrackerEvent.STARTED,
- ) -> tuple[list[dict[str, Any]], int | None, int | None, int | None] | None:
+ ) -> Optional[
+ tuple[list[dict[str, Any]], Optional[int], Optional[int], Optional[int]]
+ ]:
"""Send announce request to tracker and return full response info.
Returns:
@@ -1974,7 +1980,7 @@ async def _wait_for_response(
self,
transaction_id: int,
timeout: float,
- ) -> TrackerResponse | None:
+ ) -> Optional[TrackerResponse]:
"""Wait for UDP tracker response."""
future = asyncio.Future()
self.pending_requests[transaction_id] = future
diff --git a/ccbt/discovery/xet_bloom.py b/ccbt/discovery/xet_bloom.py
index 1866cc7..201ffc4 100644
--- a/ccbt/discovery/xet_bloom.py
+++ b/ccbt/discovery/xet_bloom.py
@@ -7,6 +7,7 @@
from __future__ import annotations
import logging
+from typing import Optional
from ccbt.discovery.bloom_filter import BloomFilter
@@ -29,7 +30,7 @@ def __init__(
size: int = 1024 * 8, # 1KB default
hash_count: int = 3,
chunk_size: int = 1000,
- bloom_filter: BloomFilter | None = None,
+ bloom_filter: Optional[BloomFilter] = None,
):
"""Initialize XET chunk bloom filter.
diff --git a/ccbt/discovery/xet_cas.py b/ccbt/discovery/xet_cas.py
index 7ff332d..3d01dbe 100644
--- a/ccbt/discovery/xet_cas.py
+++ b/ccbt/discovery/xet_cas.py
@@ -9,7 +9,7 @@
import asyncio
import logging
import time
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.models import PeerInfo
from ccbt.peer.peer import Handshake
@@ -47,11 +47,11 @@ class P2PCASClient:
def __init__(
self,
- dht_client: Any | None = None, # type: ignore[assignment]
- tracker_client: Any | None = None, # type: ignore[assignment]
+ dht_client: Optional[Any] = None, # type: ignore[assignment]
+ tracker_client: Optional[Any] = None, # type: ignore[assignment]
key_manager: Any = None, # Ed25519KeyManager
- bloom_filter: Any | None = None, # XetChunkBloomFilter
- catalog: Any | None = None, # XetChunkCatalog
+ bloom_filter: Optional[Any] = None, # XetChunkBloomFilter
+ catalog: Optional[Any] = None, # XetChunkCatalog
):
"""Initialize P2P CAS with DHT and tracker clients.
@@ -416,8 +416,8 @@ async def download_chunk(
self,
chunk_hash: bytes,
peer: PeerInfo,
- torrent_data: dict[str, Any] | None = None,
- connection_manager: Any | None = None, # type: ignore[assignment]
+ torrent_data: Optional[dict[str, Any]] = None,
+ connection_manager: Optional[Any] = None, # type: ignore[assignment]
) -> bytes:
"""Download chunk from peer using BitTorrent protocol extension.
@@ -650,7 +650,7 @@ async def download_chunk(
cleanup_error,
) # pragma: no cover - Same context
- def _extract_peer_from_dht(self, dht_result: Any) -> PeerInfo | None: # type: ignore[return]
+ def _extract_peer_from_dht(self, dht_result: Any) -> Optional[PeerInfo]: # type: ignore[return]
"""Extract PeerInfo from DHT result.
Args:
@@ -681,7 +681,7 @@ def _extract_peer_from_dht(self, dht_result: Any) -> PeerInfo | None: # type: i
return None
- def _extract_peer_from_dht_value(self, value: Any) -> PeerInfo | None: # type: ignore[return]
+ def _extract_peer_from_dht_value(self, value: Any) -> Optional[PeerInfo]: # type: ignore[return]
"""Extract PeerInfo from DHT stored value (BEP 44).
Args:
@@ -747,7 +747,7 @@ def register_local_chunk(self, chunk_hash: bytes, local_path: str) -> None:
local_path,
)
- def get_local_chunk_path(self, chunk_hash: bytes) -> str | None:
+ def get_local_chunk_path(self, chunk_hash: bytes) -> Optional[str]:
"""Get local path for a chunk if available.
Args:
diff --git a/ccbt/discovery/xet_catalog.py b/ccbt/discovery/xet_catalog.py
index 6972dae..476be96 100644
--- a/ccbt/discovery/xet_catalog.py
+++ b/ccbt/discovery/xet_catalog.py
@@ -10,7 +10,7 @@
import logging
import time
from pathlib import Path
-from typing import Any
+from typing import Any, Optional
logger = logging.getLogger(__name__)
@@ -31,7 +31,7 @@ class XetChunkCatalog:
def __init__(
self,
- catalog_path: Path | str | None = None,
+ catalog_path: Optional[Path | str] = None,
sync_interval: float = 300.0, # 5 minutes
):
"""Initialize chunk catalog.
@@ -51,7 +51,7 @@ def __init__(
async def add_chunk(
self,
chunk_hash: bytes,
- peer_info: tuple[str, int] | None = None,
+ peer_info: Optional[tuple[str, int]] = None,
) -> None:
"""Add chunk to catalog.
@@ -78,7 +78,7 @@ async def add_chunk(
async def remove_chunk(
self,
chunk_hash: bytes,
- peer_info: tuple[str, int] | None = None,
+ peer_info: Optional[tuple[str, int]] = None,
) -> None:
"""Remove chunk from catalog.
@@ -154,8 +154,8 @@ async def get_peers_by_chunks(
async def query_catalog(
self,
- chunk_hashes: list[bytes] | None = None,
- peer_info: tuple[str, int] | None = None,
+ chunk_hashes: Optional[list[bytes]] = None,
+ peer_info: Optional[tuple[str, int]] = None,
) -> dict[bytes, set[tuple[str, int]]]:
"""Query catalog for chunk-to-peer mappings.
diff --git a/ccbt/discovery/xet_gossip.py b/ccbt/discovery/xet_gossip.py
index 2d05833..efb9725 100644
--- a/ccbt/discovery/xet_gossip.py
+++ b/ccbt/discovery/xet_gossip.py
@@ -6,7 +6,7 @@
from __future__ import annotations
import logging
-from typing import Any, Callable
+from typing import Any, Callable, Optional
from ccbt.discovery.gossip import GossipProtocol
@@ -30,7 +30,7 @@ def __init__(
node_id: str,
fanout: int = 3,
interval: float = 5.0,
- peer_callback: Callable[[str], list[str]] | None = None,
+ peer_callback: Optional[Callable[[str], list[str]]] = None,
):
"""Initialize XET gossip manager.
@@ -80,8 +80,8 @@ def remove_peer(self, peer_id: str) -> None:
async def propagate_chunk_update(
self,
chunk_hash: bytes,
- peer_ip: str | None = None,
- peer_port: int | None = None,
+ peer_ip: Optional[str] = None,
+ peer_port: Optional[int] = None,
) -> None:
"""Propagate chunk update via gossip.
@@ -107,8 +107,8 @@ async def propagate_chunk_update(
async def propagate_folder_update(
self,
update_data: dict[str, Any],
- peer_ip: str | None = None,
- peer_port: int | None = None,
+ peer_ip: Optional[str] = None,
+ peer_port: Optional[int] = None,
) -> None:
"""Propagate folder update via gossip.
diff --git a/ccbt/discovery/xet_multicast.py b/ccbt/discovery/xet_multicast.py
index af3fd2b..08ac19e 100644
--- a/ccbt/discovery/xet_multicast.py
+++ b/ccbt/discovery/xet_multicast.py
@@ -12,7 +12,7 @@
import socket
import struct
import time
-from typing import Any, Callable
+from typing import Any, Callable, Optional
logger = logging.getLogger(__name__)
@@ -34,8 +34,8 @@ def __init__(
self,
multicast_address: str = "239.255.255.250",
multicast_port: int = 6882,
- chunk_callback: Callable[[bytes, str, int], None] | None = None,
- update_callback: Callable[[dict[str, Any], str, int], None] | None = None,
+ chunk_callback: Optional[Callable[[bytes, str, int], None]] = None,
+ update_callback: Optional[Callable[[dict[str, Any], str, int], None]] = None,
):
"""Initialize XET multicast broadcaster.
@@ -51,8 +51,8 @@ def __init__(
self.chunk_callback = chunk_callback
self.update_callback = update_callback
self.running = False
- self._socket: socket.socket | None = None
- self._listen_task: asyncio.Task | None = None
+ self._socket: Optional[socket.socket] = None
+ self._listen_task: Optional[asyncio.Task] = None
async def start(self) -> None:
"""Start multicast broadcaster."""
@@ -126,8 +126,8 @@ async def stop(self) -> None:
async def broadcast_chunk_announcement(
self,
chunk_hash: bytes,
- peer_ip: str | None = None,
- peer_port: int | None = None,
+ peer_ip: Optional[str] = None,
+ peer_port: Optional[int] = None,
) -> None:
"""Broadcast chunk announcement.
@@ -177,8 +177,8 @@ async def broadcast_chunk_announcement(
async def broadcast_update(
self,
update_data: dict[str, Any],
- peer_ip: str | None = None,
- peer_port: int | None = None,
+ peer_ip: Optional[str] = None,
+ peer_port: Optional[int] = None,
) -> None:
"""Broadcast folder update.
diff --git a/ccbt/executor/base.py b/ccbt/executor/base.py
index e6f4910..db7dd80 100644
--- a/ccbt/executor/base.py
+++ b/ccbt/executor/base.py
@@ -7,7 +7,7 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from ccbt.executor.session_adapter import SessionAdapter
@@ -21,7 +21,7 @@ class CommandContext:
"""
adapter: SessionAdapter
- config: Any | None = None
+ config: Optional[Any] = None
metadata: dict[str, Any] = field(default_factory=dict)
@@ -39,7 +39,7 @@ class CommandResult:
success: bool
data: Any = None
- error: str | None = None
+ error: Optional[str] = None
metadata: dict[str, Any] = field(default_factory=dict)
diff --git a/ccbt/executor/manager.py b/ccbt/executor/manager.py
index 2427e8b..6b69a41 100644
--- a/ccbt/executor/manager.py
+++ b/ccbt/executor/manager.py
@@ -8,7 +8,7 @@
import logging
import weakref
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from ccbt.daemon.ipc_client import IPCClient
@@ -26,7 +26,7 @@ class ExecutorManager:
duplicate executors and session reference mismatches.
"""
- _instance: ExecutorManager | None = None
+ _instance: Optional[ExecutorManager] = None
_lock: Any = None # threading.Lock, but avoid import if not needed
def __init__(self) -> None:
@@ -91,8 +91,8 @@ def _cleanup_dead_references(self) -> None:
def get_executor(
self,
- session_manager: AsyncSessionManager | None = None,
- ipc_client: IPCClient | None = None,
+ session_manager: Optional[AsyncSessionManager] = None,
+ ipc_client: Optional[IPCClient] = None,
) -> UnifiedCommandExecutor:
"""Get or create executor for session manager or IPC client.
@@ -248,8 +248,8 @@ def get_executor(
def remove_executor(
self,
- session_manager: AsyncSessionManager | None = None,
- ipc_client: IPCClient | None = None,
+ session_manager: Optional[AsyncSessionManager] = None,
+ ipc_client: Optional[IPCClient] = None,
) -> None:
"""Remove executor for session manager or IPC client.
diff --git a/ccbt/executor/nat_executor.py b/ccbt/executor/nat_executor.py
index f73d081..7912ad6 100644
--- a/ccbt/executor/nat_executor.py
+++ b/ccbt/executor/nat_executor.py
@@ -5,7 +5,7 @@
from __future__ import annotations
-from typing import Any
+from typing import Any, Optional
from ccbt.executor.base import CommandExecutor, CommandResult
from ccbt.executor.session_adapter import LocalSessionAdapter
@@ -69,7 +69,7 @@ async def _discover_nat(self) -> CommandResult:
async def _map_nat_port(
self,
internal_port: int,
- external_port: int | None = None,
+ external_port: Optional[int] = None,
protocol: str = "tcp",
) -> CommandResult:
"""Map a port via NAT."""
diff --git a/ccbt/executor/registry.py b/ccbt/executor/registry.py
index 1d5145d..b315ab5 100644
--- a/ccbt/executor/registry.py
+++ b/ccbt/executor/registry.py
@@ -5,7 +5,7 @@
from __future__ import annotations
-from typing import Any, Callable
+from typing import Any, Callable, Optional
class CommandRegistry:
@@ -28,7 +28,7 @@ def register(self, command: str, handler: Callable[..., Any]) -> None:
"""
self._handlers[command] = handler
- def get(self, command: str) -> Callable[..., Any] | None:
+ def get(self, command: str) -> Optional[Callable[..., Any]]:
"""Get command handler.
Args:
diff --git a/ccbt/executor/session_adapter.py b/ccbt/executor/session_adapter.py
index 5c2b6d9..b512da7 100644
--- a/ccbt/executor/session_adapter.py
+++ b/ccbt/executor/session_adapter.py
@@ -7,7 +7,7 @@
import logging
from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
try:
import aiohttp
@@ -58,7 +58,7 @@ class SessionAdapter(ABC):
async def add_torrent(
self,
path_or_magnet: str,
- output_dir: str | None = None,
+ output_dir: Optional[str] = None,
resume: bool = False,
) -> str:
"""Add torrent or magnet.
@@ -95,7 +95,9 @@ async def list_torrents(self) -> list[TorrentStatusResponse]:
"""
@abstractmethod
- async def get_torrent_status(self, info_hash: str) -> TorrentStatusResponse | None:
+ async def get_torrent_status(
+ self, info_hash: str
+ ) -> Optional[TorrentStatusResponse]:
"""Get torrent status.
Args:
@@ -329,7 +331,7 @@ async def discover_nat(self) -> dict[str, Any]:
async def map_nat_port(
self,
internal_port: int,
- external_port: int | None = None,
+ external_port: Optional[int] = None,
protocol: str = "tcp",
) -> dict[str, Any]:
"""Map a port via NAT.
@@ -469,11 +471,11 @@ async def remove_tracker(self, info_hash: str, tracker_url: str) -> dict[str, An
async def add_xet_folder(
self,
folder_path: str,
- tonic_file: str | None = None,
- tonic_link: str | None = None,
- sync_mode: str | None = None,
- source_peers: list[str] | None = None,
- check_interval: float | None = None,
+ tonic_file: Optional[str] = None,
+ tonic_link: Optional[str] = None,
+ sync_mode: Optional[str] = None,
+ source_peers: Optional[list[str]] = None,
+ check_interval: Optional[float] = None,
) -> str:
"""Add XET folder for synchronization.
@@ -512,7 +514,7 @@ async def list_xet_folders(self) -> list[dict[str, Any]]:
"""
@abstractmethod
- async def get_xet_folder_status(self, folder_key: str) -> dict[str, Any] | None:
+ async def get_xet_folder_status(self, folder_key: str) -> Optional[dict[str, Any]]:
"""Get XET folder status.
Args:
@@ -667,7 +669,7 @@ async def set_per_peer_rate_limit(
@abstractmethod
async def get_per_peer_rate_limit(
self, info_hash: str, peer_key: str
- ) -> int | None:
+ ) -> Optional[int]:
"""Get per-peer upload rate limit for a specific peer.
Args:
@@ -696,7 +698,7 @@ async def resume_from_checkpoint(
self,
info_hash: bytes,
checkpoint: Any,
- torrent_path: str | None = None,
+ torrent_path: Optional[str] = None,
) -> str:
"""Resume download from checkpoint.
@@ -712,7 +714,7 @@ async def resume_from_checkpoint(
"""
@abstractmethod
- async def get_scrape_result(self, info_hash: str) -> Any | None:
+ async def get_scrape_result(self, info_hash: str) -> Optional[Any]:
"""Get cached scrape result for a torrent.
Args:
@@ -747,7 +749,7 @@ async def get_torrent_option(
self,
info_hash: str,
key: str,
- ) -> Any | None:
+ ) -> Optional[Any]:
"""Get a per-torrent configuration option value.
Args:
@@ -778,7 +780,7 @@ async def get_torrent_config(
async def reset_torrent_options(
self,
info_hash: str,
- key: str | None = None,
+ key: Optional[str] = None,
) -> bool:
"""Reset per-torrent configuration options.
@@ -824,7 +826,7 @@ def __init__(self, session_manager: Any):
async def add_torrent(
self,
path_or_magnet: str,
- output_dir: str | None = None,
+ output_dir: Optional[str] = None,
resume: bool = False,
) -> str:
"""Add torrent or magnet."""
@@ -872,7 +874,9 @@ async def list_torrents(self) -> list[TorrentStatusResponse]:
)
return torrents
- async def get_torrent_status(self, info_hash: str) -> TorrentStatusResponse | None:
+ async def get_torrent_status(
+ self, info_hash: str
+ ) -> Optional[TorrentStatusResponse]:
"""Get torrent status."""
from ccbt.daemon.ipc_protocol import TorrentStatusResponse
@@ -1060,7 +1064,7 @@ async def set_file_priority(
async def verify_files(
self,
info_hash: str,
- progress_callback: Any | None = None,
+ progress_callback: Optional[Any] = None,
) -> dict[str, Any]:
"""Verify torrent files.
@@ -1580,7 +1584,7 @@ async def discover_nat(self) -> dict[str, Any]:
async def map_nat_port(
self,
internal_port: int,
- external_port: int | None = None,
+ external_port: Optional[int] = None,
protocol: str = "tcp",
) -> dict[str, Any]:
"""Map a port via NAT."""
@@ -1807,11 +1811,11 @@ async def remove_tracker(self, info_hash: str, tracker_url: str) -> dict[str, An
async def add_xet_folder(
self,
folder_path: str,
- tonic_file: str | None = None,
- tonic_link: str | None = None,
- sync_mode: str | None = None,
- source_peers: list[str] | None = None,
- check_interval: float | None = None,
+ tonic_file: Optional[str] = None,
+ tonic_link: Optional[str] = None,
+ sync_mode: Optional[str] = None,
+ source_peers: Optional[list[str]] = None,
+ check_interval: Optional[float] = None,
) -> str:
"""Add XET folder for synchronization."""
return await self.session_manager.add_xet_folder(
@@ -1831,7 +1835,7 @@ async def list_xet_folders(self) -> list[dict[str, Any]]:
"""List all registered XET folders."""
return await self.session_manager.list_xet_folders()
- async def get_xet_folder_status(self, folder_key: str) -> dict[str, Any] | None:
+ async def get_xet_folder_status(self, folder_key: str) -> Optional[dict[str, Any]]:
"""Get XET folder status."""
folder = await self.session_manager.get_xet_folder(folder_key)
if not folder:
@@ -1909,7 +1913,7 @@ async def set_per_peer_rate_limit(
async def get_per_peer_rate_limit(
self, info_hash: str, peer_key: str
- ) -> int | None:
+ ) -> Optional[int]:
"""Get per-peer upload rate limit."""
return await self.session_manager.get_per_peer_rate_limit(info_hash, peer_key)
@@ -1921,7 +1925,7 @@ async def resume_from_checkpoint(
self,
info_hash: bytes,
checkpoint: Any,
- torrent_path: str | None = None,
+ torrent_path: Optional[str] = None,
) -> str:
"""Resume download from checkpoint."""
return await self.session_manager.resume_from_checkpoint(
@@ -1930,7 +1934,7 @@ async def resume_from_checkpoint(
torrent_path=torrent_path,
)
- async def get_scrape_result(self, info_hash: str) -> Any | None:
+ async def get_scrape_result(self, info_hash: str) -> Optional[Any]:
"""Get cached scrape result for a torrent."""
# Access scrape_cache via scrape_cache_lock
if not hasattr(self.session_manager, "scrape_cache") or not hasattr(
@@ -1973,7 +1977,7 @@ async def get_torrent_option(
self,
info_hash: str,
key: str,
- ) -> Any | None:
+ ) -> Optional[Any]:
"""Get a per-torrent configuration option value."""
try:
info_hash_bytes = bytes.fromhex(info_hash)
@@ -2019,7 +2023,7 @@ async def get_torrent_config(
async def reset_torrent_options(
self,
info_hash: str,
- key: str | None = None,
+ key: Optional[str] = None,
) -> bool:
"""Reset per-torrent configuration options."""
try:
@@ -2174,7 +2178,7 @@ async def set_all_peers_rate_limit(self, upload_limit_kib: int) -> int:
async def add_torrent(
self,
path_or_magnet: str,
- output_dir: str | None = None,
+ output_dir: Optional[str] = None,
resume: bool = False,
) -> str:
"""Add torrent or magnet."""
@@ -2221,7 +2225,7 @@ async def get_torrent_option(
self,
info_hash: str,
key: str,
- ) -> Any | None:
+ ) -> Optional[Any]:
"""Get a per-torrent configuration option value."""
return await self.ipc_client.get_torrent_option(info_hash, key)
@@ -2235,7 +2239,7 @@ async def get_torrent_config(
async def reset_torrent_options(
self,
info_hash: str,
- key: str | None = None,
+ key: Optional[str] = None,
) -> bool:
"""Reset per-torrent configuration options."""
return await self.ipc_client.reset_torrent_options(info_hash, key=key)
@@ -2266,7 +2270,7 @@ async def resume_from_checkpoint(
self,
info_hash: bytes,
checkpoint: Any,
- torrent_path: str | None = None,
+ torrent_path: Optional[str] = None,
) -> str:
"""Resume download from checkpoint.
@@ -2343,7 +2347,9 @@ async def list_torrents(self) -> list[TorrentStatusResponse]:
"""List all torrents."""
return await self.ipc_client.list_torrents()
- async def get_torrent_status(self, info_hash: str) -> TorrentStatusResponse | None:
+ async def get_torrent_status(
+ self, info_hash: str
+ ) -> Optional[TorrentStatusResponse]:
"""Get torrent status."""
return await self.ipc_client.get_torrent_status(info_hash)
@@ -2431,7 +2437,7 @@ async def discover_nat(self) -> dict[str, Any]:
async def map_nat_port(
self,
internal_port: int,
- external_port: int | None = None,
+ external_port: Optional[int] = None,
protocol: str = "tcp",
) -> dict[str, Any]:
"""Map a port via NAT."""
@@ -2455,7 +2461,7 @@ async def list_scrape_results(self) -> ScrapeListResponse:
"""List all cached scrape results."""
return await self.ipc_client.list_scrape_results()
- async def get_scrape_result(self, info_hash: str) -> Any | None:
+ async def get_scrape_result(self, info_hash: str) -> Optional[Any]:
"""Get cached scrape result for a torrent."""
try:
return await self.ipc_client.get_scrape_result(info_hash)
@@ -2537,11 +2543,11 @@ async def get_peers_for_torrent(self, info_hash: str) -> list[dict[str, Any]]:
async def add_xet_folder(
self,
folder_path: str,
- tonic_file: str | None = None,
- tonic_link: str | None = None,
- sync_mode: str | None = None,
- source_peers: list[str] | None = None,
- check_interval: float | None = None,
+ tonic_file: Optional[str] = None,
+ tonic_link: Optional[str] = None,
+ sync_mode: Optional[str] = None,
+ source_peers: Optional[list[str]] = None,
+ check_interval: Optional[float] = None,
) -> str:
"""Add XET folder for synchronization."""
result = await self.ipc_client.add_xet_folder(
@@ -2569,7 +2575,7 @@ async def list_xet_folders(self) -> list[dict[str, Any]]:
return result["folders"]
return result if isinstance(result, list) else []
- async def get_xet_folder_status(self, folder_key: str) -> dict[str, Any] | None:
+ async def get_xet_folder_status(self, folder_key: str) -> Optional[dict[str, Any]]:
"""Get XET folder status."""
result = await self.ipc_client.get_xet_folder_status(folder_key)
if not result:
diff --git a/ccbt/executor/torrent_executor.py b/ccbt/executor/torrent_executor.py
index fa96e62..2ba8eec 100644
--- a/ccbt/executor/torrent_executor.py
+++ b/ccbt/executor/torrent_executor.py
@@ -6,7 +6,7 @@
from __future__ import annotations
import asyncio
-from typing import Any
+from typing import Any, Optional
from ccbt.executor.base import CommandExecutor, CommandResult
@@ -114,7 +114,7 @@ async def execute(
async def _add_torrent(
self,
path_or_magnet: str,
- output_dir: str | None = None,
+ output_dir: Optional[str] = None,
resume: bool = False,
) -> CommandResult:
"""Add torrent or magnet."""
@@ -350,7 +350,7 @@ async def _resume_from_checkpoint(
self,
info_hash: bytes,
checkpoint: Any,
- torrent_path: str | None = None,
+ torrent_path: Optional[str] = None,
) -> CommandResult:
"""Resume download from checkpoint."""
try:
@@ -726,7 +726,7 @@ async def _get_torrent_config(
async def _reset_torrent_options(
self,
info_hash: str,
- key: str | None = None,
+ key: Optional[str] = None,
) -> CommandResult:
"""Reset per-torrent configuration options.
diff --git a/ccbt/executor/xet_executor.py b/ccbt/executor/xet_executor.py
index cb3e8fc..bbaf264 100644
--- a/ccbt/executor/xet_executor.py
+++ b/ccbt/executor/xet_executor.py
@@ -6,7 +6,7 @@
from __future__ import annotations
from dataclasses import asdict
-from typing import Any
+from typing import Any, Optional
from ccbt.executor.base import CommandExecutor, CommandResult
@@ -83,12 +83,12 @@ async def execute(
async def _create_tonic(
self,
folder_path: str,
- output_path: str | None = None,
+ output_path: Optional[str] = None,
sync_mode: str = "best_effort",
- source_peers: list[str] | None = None,
- allowlist_path: str | None = None,
- git_ref: str | None = None,
- announce: str | None = None,
+ source_peers: Optional[list[str]] = None,
+ allowlist_path: Optional[str] = None,
+ git_ref: Optional[str] = None,
+ announce: Optional[str] = None,
) -> CommandResult:
"""Create .tonic file from folder."""
try:
@@ -116,8 +116,8 @@ async def _create_tonic(
async def _generate_link(
self,
- folder_path: str | None = None,
- tonic_file: str | None = None,
+ folder_path: Optional[str] = None,
+ tonic_file: Optional[str] = None,
) -> CommandResult:
"""Generate tonic?: link."""
try:
@@ -137,7 +137,7 @@ async def _generate_link(
source_peers = parsed_data.get("source_peers")
allowlist_hash = parsed_data.get("allowlist_hash")
- tracker_list: list[str] | None = None
+ tracker_list: Optional[list[str]] = None
if trackers:
tracker_list = [url for tier in trackers for url in tier]
@@ -168,7 +168,7 @@ async def _generate_link(
async def _sync_folder(
self,
tonic_input: str,
- output_dir: str | None = None,
+ output_dir: Optional[str] = None,
check_interval: float = 5.0,
) -> CommandResult:
"""Start syncing folder from .tonic file or tonic?: link."""
@@ -236,7 +236,7 @@ async def _allowlist_add(
self,
allowlist_path: str,
peer_id: str,
- public_key: str | None = None,
+ public_key: Optional[str] = None,
) -> CommandResult:
"""Add peer to allowlist."""
try:
@@ -430,7 +430,7 @@ async def _set_sync_mode(
self,
folder_path: str,
sync_mode: str,
- source_peers: list[str] | None = None,
+ source_peers: Optional[list[str]] = None,
) -> CommandResult:
"""Set synchronization mode for folder."""
try:
@@ -574,11 +574,11 @@ async def _get_config(self) -> CommandResult:
async def _add_xet_folder_session(
self,
folder_path: str,
- tonic_file: str | None = None,
- tonic_link: str | None = None,
- sync_mode: str | None = None,
- source_peers: list[str] | None = None,
- check_interval: float | None = None,
+ tonic_file: Optional[str] = None,
+ tonic_link: Optional[str] = None,
+ sync_mode: Optional[str] = None,
+ source_peers: Optional[list[str]] = None,
+ check_interval: Optional[float] = None,
) -> CommandResult:
"""Add XET folder session via session manager."""
try:
diff --git a/ccbt/extensions/dht.py b/ccbt/extensions/dht.py
index ed079a3..1e81e26 100644
--- a/ccbt/extensions/dht.py
+++ b/ccbt/extensions/dht.py
@@ -14,7 +14,7 @@
import time
from dataclasses import dataclass
from enum import IntEnum
-from typing import Any
+from typing import Any, Optional
from ccbt.core import bencode
from ccbt.models import PeerInfo
@@ -65,7 +65,7 @@ def __eq__(self, other):
class DHTExtension:
"""DHT (Distributed Hash Table) implementation."""
- def __init__(self, node_id: bytes | None = None):
+ def __init__(self, node_id: Optional[bytes] = None):
"""Initialize DHT implementation."""
self.node_id = node_id or self._generate_node_id()
self.nodes: dict[bytes, DHTNode] = {}
@@ -335,7 +335,7 @@ async def handle_dht_message(
peer_ip: str,
peer_port: int,
data: bytes,
- ) -> bytes | None:
+ ) -> Optional[bytes]:
"""Handle incoming DHT message."""
try:
message = self._decode_dht_message(data)
@@ -441,7 +441,7 @@ async def _handle_response(
# Announcement was successful
token = message["a"]["token"]
info_hash = message.get("a", {}).get("info_hash")
- info_hash_bytes: bytes | None = None
+ info_hash_bytes: Optional[bytes] = None
# Store token for this info_hash if available
if info_hash:
diff --git a/ccbt/extensions/manager.py b/ccbt/extensions/manager.py
index a061988..ef18510 100644
--- a/ccbt/extensions/manager.py
+++ b/ccbt/extensions/manager.py
@@ -12,7 +12,7 @@
import time
from dataclasses import dataclass
from enum import Enum
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.extensions.compact import CompactPeerLists
from ccbt.extensions.dht import DHTExtension
@@ -46,7 +46,7 @@ class ExtensionState:
capabilities: dict[str, Any]
last_activity: float
error_count: int = 0
- last_error: str | None = None
+ last_error: Optional[str] = None
class ExtensionManager:
@@ -255,11 +255,11 @@ async def stop(self) -> None:
),
)
- def get_extension(self, name: str) -> Any | None:
+ def get_extension(self, name: str) -> Optional[Any]:
"""Get extension by name."""
return self.extensions.get(name)
- def get_extension_state(self, name: str) -> ExtensionState | None:
+ def get_extension_state(self, name: str) -> Optional[ExtensionState]:
"""Get extension state."""
return self.extension_states.get(name)
@@ -384,7 +384,7 @@ async def handle_dht_message(
peer_ip: str,
peer_port: int,
data: bytes,
- ) -> bytes | None:
+ ) -> Optional[bytes]:
"""Handle DHT message."""
if not self.is_extension_active("dht"):
return None
@@ -406,7 +406,7 @@ async def download_piece_from_webseed(
self,
webseed_id: str,
piece_info: PieceInfo,
- ) -> bytes | None:
+ ) -> Optional[bytes]:
"""Download piece from WebSeed."""
if not self.is_extension_active("webseed"):
return None
@@ -423,7 +423,7 @@ async def download_piece_from_webseed(
else:
return data
- def add_webseed(self, url: str, name: str | None = None) -> str:
+ def add_webseed(self, url: str, name: Optional[str] = None) -> str:
"""Add WebSeed."""
if not self.is_extension_active("webseed"):
msg = "WebSeed extension not active"
@@ -448,7 +448,7 @@ async def handle_ssl_message(
peer_id: str,
message_type: int, # noqa: ARG002 - Required by interface signature
data: bytes,
- ) -> bytes | None:
+ ) -> Optional[bytes]:
"""Handle SSL Extension message.
Args:
@@ -510,7 +510,7 @@ async def handle_xet_message(
peer_id: str,
message_type: int, # noqa: ARG002 - Required by interface signature
data: bytes,
- ) -> bytes | None:
+ ) -> Optional[bytes]:
"""Handle Xet Extension message.
Args:
@@ -714,7 +714,7 @@ def get_all_statistics(self) -> dict[str, Any]:
# Singleton pattern removed - ExtensionManager is now managed via AsyncSessionManager.extension_manager
# This ensures proper lifecycle management and prevents conflicts between multiple session managers
# Deprecated singleton kept for backward compatibility
-_extension_manager: ExtensionManager | None = (
+_extension_manager: Optional[ExtensionManager] = (
None # Deprecated - use session_manager.extension_manager
)
diff --git a/ccbt/extensions/protocol.py b/ccbt/extensions/protocol.py
index 1a79f09..1f2c2d1 100644
--- a/ccbt/extensions/protocol.py
+++ b/ccbt/extensions/protocol.py
@@ -12,7 +12,7 @@
import time
from dataclasses import dataclass
from enum import IntEnum
-from typing import Any, Callable
+from typing import Any, Callable, Optional
from ccbt.utils.events import Event, EventType, emit_event
@@ -30,7 +30,7 @@ class ExtensionInfo:
name: str
version: str
message_id: int
- handler: Callable | None = None
+ handler: Optional[Callable] = None
class ExtensionProtocol:
@@ -47,7 +47,7 @@ def register_extension(
self,
name: str,
version: str,
- handler: Callable | None = None,
+ handler: Optional[Callable] = None,
) -> int:
"""Register a new extension."""
if name in self.extensions:
@@ -82,7 +82,7 @@ def unregister_extension(self, name: str) -> None:
del self.extensions[name]
- def get_extension_info(self, name: str) -> ExtensionInfo | None:
+ def get_extension_info(self, name: str) -> Optional[ExtensionInfo]:
"""Get extension information."""
return self.extensions.get(name)
@@ -315,7 +315,7 @@ def get_peer_extension_info(
self,
peer_id: str,
extension_name: str,
- ) -> dict[str, Any] | None:
+ ) -> Optional[dict[str, Any]]:
"""Get peer extension information."""
peer_extensions = self.peer_extensions.get(peer_id, {})
return peer_extensions.get(extension_name)
diff --git a/ccbt/extensions/ssl.py b/ccbt/extensions/ssl.py
index 0920ef0..0abc56a 100644
--- a/ccbt/extensions/ssl.py
+++ b/ccbt/extensions/ssl.py
@@ -12,7 +12,7 @@
import time
from dataclasses import dataclass
from enum import IntEnum
-from typing import Any
+from typing import Any, Optional
from ccbt.utils.events import Event, EventType, emit_event
@@ -33,7 +33,7 @@ class SSLNegotiationState:
peer_id: str
state: str # "idle", "requested", "accepted", "rejected"
timestamp: float
- request_id: int | None = None
+ request_id: Optional[int] = None
class SSLExtension:
@@ -297,7 +297,7 @@ async def handle_response(
),
)
- def get_negotiation_state(self, peer_id: str) -> SSLNegotiationState | None:
+ def get_negotiation_state(self, peer_id: str) -> Optional[SSLNegotiationState]:
"""Get SSL negotiation state for peer.
Args:
diff --git a/ccbt/extensions/webseed.py b/ccbt/extensions/webseed.py
index 2bed82c..908d67f 100644
--- a/ccbt/extensions/webseed.py
+++ b/ccbt/extensions/webseed.py
@@ -13,7 +13,7 @@
import asyncio
import time
from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from urllib.parse import urlparse
import aiohttp
@@ -29,7 +29,7 @@ class WebSeedInfo:
"""WebSeed information."""
url: str
- name: str | None = None
+ name: Optional[str] = None
is_active: bool = True
last_accessed: float = 0.0
bytes_downloaded: int = 0
@@ -45,7 +45,7 @@ def __init__(self):
import logging
self.webseeds: dict[str, WebSeedInfo] = {}
- self.session: aiohttp.ClientSession | None = None
+ self.session: Optional[aiohttp.ClientSession] = None
self.timeout = aiohttp.ClientTimeout(total=30.0)
self.logger = logging.getLogger(__name__)
@@ -63,7 +63,7 @@ async def start(self) -> None:
timeout=self.timeout, connector=connector
)
- def _create_connector(self) -> aiohttp.BaseConnector | None:
+ def _create_connector(self) -> Optional[aiohttp.BaseConnector]:
"""Create appropriate connector (proxy or direct).
Returns:
@@ -138,7 +138,7 @@ async def stop(self) -> None:
finally:
self.session = None
- def add_webseed(self, url: str, name: str | None = None) -> str:
+ def add_webseed(self, url: str, name: Optional[str] = None) -> str:
"""Add WebSeed URL."""
webseed_id = url
self.webseeds[webseed_id] = WebSeedInfo(
@@ -195,7 +195,7 @@ def remove_webseed(self, webseed_id: str) -> None:
# No event loop running, skip event emission
pass
- def get_webseed(self, webseed_id: str) -> WebSeedInfo | None:
+ def get_webseed(self, webseed_id: str) -> Optional[WebSeedInfo]:
"""Get WebSeed information."""
return self.webseeds.get(webseed_id)
@@ -208,7 +208,7 @@ async def download_piece(
webseed_id: str,
piece_info: PieceInfo,
_piece_data: bytes,
- ) -> bytes | None:
+ ) -> Optional[bytes]:
"""Download piece from WebSeed."""
if webseed_id not in self.webseeds:
return None
@@ -327,7 +327,7 @@ async def download_piece_range(
webseed_id: str,
start_byte: int,
length: int,
- ) -> bytes | None:
+ ) -> Optional[bytes]:
"""Download specific byte range from WebSeed."""
if webseed_id not in self.webseeds:
return None
@@ -401,7 +401,7 @@ async def download_piece_range(
return None
- def get_best_webseed(self) -> str | None:
+ def get_best_webseed(self) -> Optional[str]:
"""Get best WebSeed based on success rate and activity."""
if not self.webseeds:
return None
@@ -427,7 +427,7 @@ def get_best_webseed(self) -> str | None:
return best_webseed_id
- def get_webseed_statistics(self, webseed_id: str) -> dict[str, Any] | None:
+ def get_webseed_statistics(self, webseed_id: str) -> Optional[dict[str, Any]]:
"""Get WebSeed statistics."""
webseed = self.webseeds.get(webseed_id)
if not webseed:
diff --git a/ccbt/extensions/xet.py b/ccbt/extensions/xet.py
index 8c9633f..198a046 100644
--- a/ccbt/extensions/xet.py
+++ b/ccbt/extensions/xet.py
@@ -13,7 +13,7 @@
import time
from dataclasses import dataclass
from enum import IntEnum
-from typing import Any, Callable
+from typing import Any, Callable, Optional
from ccbt.utils.events import Event, EventType, emit_event
@@ -56,7 +56,7 @@ class XetExtension:
def __init__(
self,
- folder_sync_handshake: Any | None = None, # XetHandshakeExtension
+ folder_sync_handshake: Optional[Any] = None, # XetHandshakeExtension
):
"""Initialize Xet Extension.
@@ -68,10 +68,10 @@ def __init__(
tuple[str, int], XetChunkRequest
] = {} # (peer_id, request_id) -> request
self.request_counter = 0
- self.chunk_provider: Callable[[bytes], bytes | None] | None = None
+ self.chunk_provider: Optional[Callable[[bytes], Optional[bytes]]] = None
self.folder_sync_handshake = folder_sync_handshake
- def set_chunk_provider(self, provider: Callable[[bytes], bytes | None]) -> None:
+ def set_chunk_provider(self, provider: Callable[[bytes], Optional[bytes]]) -> None:
"""Set function to provide chunks by hash.
Args:
@@ -424,7 +424,7 @@ def encode_version_request(self) -> bytes:
# Pack:
return struct.pack("!B", XetMessageType.FOLDER_VERSION_REQUEST)
- def encode_version_response(self, git_ref: str | None) -> bytes:
+ def encode_version_response(self, git_ref: Optional[str]) -> bytes:
"""Encode folder version response message.
Args:
@@ -444,7 +444,7 @@ def encode_version_response(self, git_ref: str | None) -> bytes:
)
return struct.pack("!BB", XetMessageType.FOLDER_VERSION_RESPONSE, 0)
- def decode_version_response(self, data: bytes) -> str | None:
+ def decode_version_response(self, data: bytes) -> Optional[str]:
"""Decode folder version response message.
Args:
@@ -479,7 +479,7 @@ def decode_version_response(self, data: bytes) -> str | None:
return ref_bytes.decode("utf-8")
def encode_update_notify(
- self, file_path: str, chunk_hash: bytes, git_ref: str | None = None
+ self, file_path: str, chunk_hash: bytes, git_ref: Optional[str] = None
) -> bytes:
"""Encode folder update notification message.
@@ -510,7 +510,7 @@ def encode_update_notify(
return b"".join(parts)
- def decode_update_notify(self, data: bytes) -> tuple[str, bytes, str | None]:
+ def decode_update_notify(self, data: bytes) -> tuple[str, bytes, Optional[str]]:
"""Decode folder update notification message.
Args:
@@ -548,7 +548,7 @@ def decode_update_notify(self, data: bytes) -> tuple[str, bytes, str | None]:
chunk_hash = data[offset : offset + 32]
offset += 32
- git_ref: str | None = None
+ git_ref: Optional[str] = None
if len(data) > offset:
has_ref = data[offset]
offset += 1
diff --git a/ccbt/extensions/xet_handshake.py b/ccbt/extensions/xet_handshake.py
index 881b1d2..10b6455 100644
--- a/ccbt/extensions/xet_handshake.py
+++ b/ccbt/extensions/xet_handshake.py
@@ -11,7 +11,7 @@
from __future__ import annotations
import logging
-from typing import Any
+from typing import Any, Optional
logger = logging.getLogger(__name__)
@@ -21,10 +21,10 @@ class XetHandshakeExtension:
def __init__(
self,
- allowlist_hash: bytes | None = None,
+ allowlist_hash: Optional[bytes] = None,
sync_mode: str = "best_effort",
- git_ref: str | None = None,
- key_manager: Any | None = None, # Ed25519KeyManager
+ git_ref: Optional[str] = None,
+ key_manager: Optional[Any] = None, # Ed25519KeyManager
) -> None:
"""Initialize XET handshake extension.
@@ -89,7 +89,7 @@ def encode_handshake(self) -> dict[str, Any]:
def decode_handshake(
self, peer_id: str, data: dict[str, Any]
- ) -> dict[str, Any] | None:
+ ) -> Optional[dict[str, Any]]:
"""Decode XET folder sync handshake from peer.
Args:
@@ -140,7 +140,7 @@ def decode_handshake(
return handshake_info
def verify_peer_allowlist(
- self, peer_id: str, peer_allowlist_hash: bytes | None
+ self, peer_id: str, peer_allowlist_hash: Optional[bytes]
) -> bool:
"""Verify peer's allowlist hash matches expected.
@@ -215,7 +215,7 @@ def verify_peer_identity(
self.logger.exception("Error verifying peer identity")
return False
- def negotiate_sync_mode(self, peer_id: str, peer_sync_mode: str) -> str | None:
+ def negotiate_sync_mode(self, peer_id: str, peer_sync_mode: str) -> Optional[str]:
"""Negotiate sync mode with peer.
Args:
@@ -262,7 +262,7 @@ def negotiate_sync_mode(self, peer_id: str, peer_sync_mode: str) -> str | None:
return self.sync_mode
return peer_sync_mode
- def get_peer_git_ref(self, peer_id: str) -> str | None:
+ def get_peer_git_ref(self, peer_id: str) -> Optional[str]:
"""Get git ref from peer handshake.
Args:
@@ -277,7 +277,9 @@ def get_peer_git_ref(self, peer_id: str) -> str | None:
return handshake.get("git_ref")
return None
- def compare_git_refs(self, local_ref: str | None, peer_ref: str | None) -> bool:
+ def compare_git_refs(
+ self, local_ref: Optional[str], peer_ref: Optional[str]
+ ) -> bool:
"""Compare git refs to check if versions match.
Args:
@@ -296,7 +298,7 @@ def compare_git_refs(self, local_ref: str | None, peer_ref: str | None) -> bool:
return local_ref == peer_ref
- def get_peer_handshake_info(self, peer_id: str) -> dict[str, Any] | None:
+ def get_peer_handshake_info(self, peer_id: str) -> Optional[dict[str, Any]]:
"""Get stored handshake information for a peer.
Args:
diff --git a/ccbt/extensions/xet_metadata.py b/ccbt/extensions/xet_metadata.py
index 6a3b455..f2b3da7 100644
--- a/ccbt/extensions/xet_metadata.py
+++ b/ccbt/extensions/xet_metadata.py
@@ -9,7 +9,7 @@
import asyncio
import logging
import struct
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.extensions.xet import XetExtension, XetMessageType
@@ -36,9 +36,11 @@ def __init__(self, extension: XetExtension) -> None:
self.metadata_state: dict[str, dict[str, Any]] = {}
# Metadata provider callback
- self.metadata_provider: Callable[[bytes], bytes | None] | None = None
+ self.metadata_provider: Optional[Callable[[bytes], Optional[bytes]]] = None
- def set_metadata_provider(self, provider: Callable[[bytes], bytes | None]) -> None:
+ def set_metadata_provider(
+ self, provider: Callable[[bytes], Optional[bytes]]
+ ) -> None:
"""Set function to provide metadata by info_hash.
Args:
diff --git a/ccbt/i18n/__init__.py b/ccbt/i18n/__init__.py
index fb6efaa..f8df228 100644
--- a/ccbt/i18n/__init__.py
+++ b/ccbt/i18n/__init__.py
@@ -10,12 +10,13 @@
import logging
import os
from pathlib import Path
+from typing import Optional
# Default locale
DEFAULT_LOCALE = "en"
# Translation instance (lazy-loaded)
-_translation: gettext.NullTranslations | None = None
+_translation: Optional[gettext.NullTranslations] = None
logger = logging.getLogger(__name__)
diff --git a/ccbt/i18n/manager.py b/ccbt/i18n/manager.py
index ec1bfbb..2c6dcd3 100644
--- a/ccbt/i18n/manager.py
+++ b/ccbt/i18n/manager.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import logging
-from typing import Any
+from typing import Any, Optional
from ccbt.i18n import _is_valid_locale, get_locale, set_locale
@@ -13,7 +13,7 @@
class TranslationManager:
"""Manages translations with config integration."""
- def __init__(self, config: Any | None = None) -> None:
+ def __init__(self, config: Optional[Any] = None) -> None:
"""Initialize translation manager.
Args:
diff --git a/ccbt/interface/commands/executor.py b/ccbt/interface/commands/executor.py
index e77d0ab..dc87aff 100644
--- a/ccbt/interface/commands/executor.py
+++ b/ccbt/interface/commands/executor.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import io
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional, Union
if TYPE_CHECKING:
from ccbt.session.session import AsyncSessionManager
@@ -192,8 +192,8 @@ async def execute_command(
async def execute_click_command(
self,
command_path: str,
- args: list[str] | None = None,
- ctx_obj: dict[str, Any] | None = None,
+ args: Optional[list[str]] = None,
+ ctx_obj: Optional[dict[str, Any]] = None,
) -> tuple[bool, str, Any]:
"""Execute a Click command group command.
diff --git a/ccbt/interface/daemon_session_adapter.py b/ccbt/interface/daemon_session_adapter.py
index 0c69b7e..0ab0349 100644
--- a/ccbt/interface/daemon_session_adapter.py
+++ b/ccbt/interface/daemon_session_adapter.py
@@ -7,7 +7,7 @@
import asyncio
import logging
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any, Callable, Optional, Union
if TYPE_CHECKING:
from ccbt.daemon.ipc_client import IPCClient
@@ -50,7 +50,7 @@ def __init__(self, ipc_client: IPCClient):
self._cache_lock = asyncio.Lock()
# WebSocket subscription
- self._websocket_task: asyncio.Task | None = None
+ self._websocket_task: Optional[asyncio.Task] = None
self._event_callbacks: dict[EventType, list[Callable[[dict[str, Any]], None]]] = {}
self._websocket_connected = False
@@ -58,29 +58,29 @@ def __init__(self, ipc_client: IPCClient):
self._widget_callbacks: list[Any] = [] # List of widget instances with event handler methods
# Callbacks (matching AsyncSessionManager interface)
- self.on_torrent_added: Callable[[bytes, str], None] | None = None
- self.on_torrent_removed: Callable[[bytes], None] | None = None
- self.on_torrent_complete: Callable[[bytes, str], None] | None = None
+ self.on_torrent_added: Optional[Callable[[bytes, str], None]] = None
+ self.on_torrent_removed: Optional[Callable[[bytes], None]] = None
+ self.on_torrent_complete: Optional[Callable[[bytes, str], None]] = None
# New async hooks for WebSocket-driven UI updates
- self.on_global_stats: Callable[[dict[str, Any]], None] | None = None
- self.on_torrent_list_delta: Callable[[dict[str, Any]], None] | None = None
- self.on_peer_metrics: Callable[[dict[str, Any]], None] | None = None
- self.on_tracker_event: Callable[[dict[str, Any]], None] | None = None
- self.on_metadata_event: Callable[[dict[str, Any]], None] | None = None
+ self.on_global_stats: Optional[Callable[[dict[str, Any]], None]] = None
+ self.on_torrent_list_delta: Optional[Callable[[dict[str, Any]], None]] = None
+ self.on_peer_metrics: Optional[Callable[[dict[str, Any]], None]] = None
+ self.on_tracker_event: Optional[Callable[[dict[str, Any]], None]] = None
+ self.on_metadata_event: Optional[Callable[[dict[str, Any]], None]] = None
# XET folder callbacks
- self.on_xet_folder_added: Callable[[str, str], None] | None = None
- self.on_xet_folder_removed: Callable[[str], None] | None = None
+ self.on_xet_folder_added: Optional[Callable[[str, str], None]] = None
+ self.on_xet_folder_removed: Optional[Callable[[str], None]] = None
# Properties matching AsyncSessionManager
self.torrents: dict[bytes, Any] = {} # Will be populated from cached status
self.xet_folders: dict[str, Any] = {} # Will be populated from cached status
self.lock = asyncio.Lock() # Compatibility with AsyncSessionManager
- self.dht_client: Any | None = None # Not available via IPC
- self.metrics: Any | None = None # Not directly available
- self.peer_service: Any | None = None # Not directly available
- self.security_manager: Any | None = None # Not directly available
- self.nat_manager: Any | None = None # Not directly available
- self.tcp_server: Any | None = None # Not directly available
+ self.dht_client: Optional[Any] = None # Not available via IPC
+ self.metrics: Optional[Any] = None # Not directly available
+ self.peer_service: Optional[Any] = None # Not directly available
+ self.security_manager: Optional[Any] = None # Not directly available
+ self.nat_manager: Optional[Any] = None # Not directly available
+ self.tcp_server: Optional[Any] = None # Not directly available
self.logger = logger
@@ -299,7 +299,7 @@ async def _websocket_event_loop(self) -> None:
async def _handle_websocket_event(self, event: WebSocketEvent) -> None:
"""Handle WebSocket event and update cache."""
try:
- async def _dispatch(callback: Callable[..., Any] | None, *args: Any) -> None:
+ async def _dispatch(callback: Optional[Callable[..., Any]], *args: Any) -> None:
"""Invoke optional callback, awaiting if it returns coroutine."""
if not callback:
return
@@ -629,7 +629,7 @@ async def get_status(self) -> dict[str, Any]:
async with self._cache_lock:
return dict(self._cached_torrents)
- async def get_torrent_status(self, info_hash_hex: str) -> dict[str, Any] | None:
+ async def get_torrent_status(self, info_hash_hex: str) -> Optional[dict[str, Any]]:
"""Get status of a specific torrent."""
try:
# CRITICAL: Use executor adapter (consistent with CLI)
@@ -656,7 +656,7 @@ async def get_torrent_status(self, info_hash_hex: str) -> dict[str, Any] | None:
async def add_torrent(
self,
- path: str | dict[str, Any],
+ path: Union[str, dict[str, Any]],
resume: bool = False,
) -> str:
"""Add a torrent file or torrent data to the session."""
@@ -806,11 +806,11 @@ async def get_peers_for_torrent(self, info_hash_hex: str) -> list[dict[str, Any]
async def add_xet_folder(
self,
folder_path: str,
- tonic_file: str | None = None,
- tonic_link: str | None = None,
- sync_mode: str | None = None,
- source_peers: list[str] | None = None,
- check_interval: float | None = None,
+ tonic_file: Optional[str] = None,
+ tonic_link: Optional[str] = None,
+ sync_mode: Optional[str] = None,
+ source_peers: Optional[list[str]] = None,
+ check_interval: Optional[float] = None,
) -> str:
"""Add XET folder for synchronization."""
try:
@@ -877,7 +877,7 @@ async def remove_xet_folder(self, folder_key: str) -> bool:
self.logger.debug("Error removing XET folder: %s", e)
return False
- async def get_xet_folder(self, folder_key: str) -> Any | None:
+ async def get_xet_folder(self, folder_key: str) -> Optional[Any]:
"""Get XET folder by key."""
await self._refresh_xet_folders_cache()
async with self._cache_lock:
@@ -889,7 +889,7 @@ async def list_xet_folders(self) -> list[dict[str, Any]]:
async with self._cache_lock:
return list(self.xet_folders.values())
- async def get_xet_folder_status(self, folder_key: str) -> dict[str, Any] | None:
+ async def get_xet_folder_status(self, folder_key: str) -> Optional[dict[str, Any]]:
"""Get XET folder status."""
try:
# Get adapter from executor
@@ -1026,11 +1026,11 @@ async def _peers_update_loop(self) -> None:
await asyncio.sleep(3.0)
@property
- def dht(self) -> Any | None:
+ def dht(self) -> Optional[Any]:
"""Get DHT instance (not available via IPC)."""
return None
- def parse_magnet_link(self, magnet_uri: str) -> dict[str, Any] | None:
+ def parse_magnet_link(self, magnet_uri: str) -> Optional[dict[str, Any]]:
"""Parse magnet link and return torrent data.
Args:
diff --git a/ccbt/interface/data_provider.py b/ccbt/interface/data_provider.py
index b608075..5710521 100644
--- a/ccbt/interface/data_provider.py
+++ b/ccbt/interface/data_provider.py
@@ -10,7 +10,7 @@
import logging
import time
from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from ccbt.daemon.ipc_client import IPCClient
@@ -78,7 +78,7 @@ async def get_global_stats(self) -> dict[str, Any]:
pass
@abstractmethod
- async def get_torrent_status(self, info_hash_hex: str) -> dict[str, Any] | None:
+ async def get_torrent_status(self, info_hash_hex: str) -> Optional[dict[str, Any]]:
"""Get status for a specific torrent.
Args:
@@ -137,7 +137,7 @@ async def get_torrent_trackers(self, info_hash_hex: str) -> list[dict[str, Any]]
- peers: Number of peers from last scrape
- downloaders: Number of downloaders from last scrape
- last_update: Last update timestamp (float)
- - error: Error message if any (str | None)
+ - error: Error message if any (Optional[str])
"""
pass
@@ -293,10 +293,10 @@ async def get_per_torrent_performance(self, info_hash_hex: str) -> dict[str, Any
async def get_swarm_health_samples(
self,
- info_hash_hex: str | None = None,
+ info_hash_hex: Optional[str] = None,
limit: int = 6,
include_history: bool = False,
- history_seconds: int | None = None,
+ history_seconds: Optional[int] = None,
) -> list[dict[str, Any]]:
"""Get swarm health samples for global or per-torrent views.
@@ -474,7 +474,7 @@ class DaemonDataProvider(DataProvider):
Never access daemon session internals directly.
"""
- def __init__(self, ipc_client: IPCClient, executor: Any | None = None, adapter: Any | None = None) -> None:
+ def __init__(self, ipc_client: IPCClient, executor: Optional[Any] = None, adapter: Optional[Any] = None) -> None:
"""Initialize daemon data provider.
Args:
@@ -489,7 +489,7 @@ def __init__(self, ipc_client: IPCClient, executor: Any | None = None, adapter:
self._cache_ttl = 1.0 # 1.0 second TTL - balanced for responsiveness and reduced redundant requests
self._cache_lock = asyncio.Lock()
- def get_adapter(self) -> Any | None:
+ def get_adapter(self) -> Optional[Any]:
"""Get the DaemonInterfaceAdapter instance for widget registration.
Returns:
@@ -498,7 +498,7 @@ def get_adapter(self) -> Any | None:
return self._adapter
async def _get_cached(
- self, key: str, fetch_func: Any, ttl: float | None = None
+ self, key: str, fetch_func: Any, ttl: Optional[float] = None
) -> Any: # pragma: no cover
"""Get cached value or fetch if expired.
@@ -521,7 +521,7 @@ async def _get_cached(
self._cache[key] = (value, time.time())
return value
- def invalidate_cache(self, key: str | None = None) -> None: # pragma: no cover
+ def invalidate_cache(self, key: Optional[str] = None) -> None: # pragma: no cover
"""Invalidate cache entry or all cache if key is None.
Args:
@@ -548,7 +548,7 @@ async def _invalidate() -> None:
elif key in self._cache:
del self._cache[key]
- def invalidate_on_event(self, event_type: str, info_hash: str | None = None) -> None:
+ def invalidate_on_event(self, event_type: str, info_hash: Optional[str] = None) -> None:
"""Invalidate cache based on event type.
Args:
@@ -612,7 +612,7 @@ async def _fetch() -> dict[str, Any]:
}
return await self._get_cached("global_stats", _fetch)
- async def get_torrent_status(self, info_hash_hex: str) -> dict[str, Any] | None:
+ async def get_torrent_status(self, info_hash_hex: str) -> Optional[dict[str, Any]]:
"""Get torrent status from daemon."""
try:
status = await self._client.get_torrent_status(info_hash_hex)
@@ -1508,7 +1508,7 @@ def __init__(self, session: AsyncSessionManager) -> None:
self._cache_lock = asyncio.Lock()
async def _get_cached(
- self, key: str, fetch_func: Any, ttl: float | None = None
+ self, key: str, fetch_func: Any, ttl: Optional[float] = None
) -> Any: # pragma: no cover
"""Get cached value or fetch if expired."""
ttl = ttl or self._cache_ttl
@@ -1527,7 +1527,7 @@ async def _fetch() -> dict[str, Any]:
return await self._session.get_global_stats()
return await self._get_cached("global_stats", _fetch)
- async def get_torrent_status(self, info_hash_hex: str) -> dict[str, Any] | None:
+ async def get_torrent_status(self, info_hash_hex: str) -> Optional[dict[str, Any]]:
"""Get torrent status from local session."""
try:
status = await self._session.get_status()
@@ -1570,7 +1570,7 @@ async def get_torrent_files(self, info_hash_hex: str) -> list[dict[str, Any]]:
torrent_data = torrent_session.torrent_data
# Extract file_info from torrent_data
- file_info: dict[str, Any] | None = None
+ file_info: Optional[dict[str, Any]] = None
if isinstance(torrent_data, dict):
file_info = torrent_data.get("file_info")
elif hasattr(torrent_data, "file_info"):
@@ -2325,7 +2325,7 @@ async def get_per_torrent_performance(self, info_hash_hex: str) -> dict[str, Any
return {}
-def create_data_provider(session: AsyncSessionManager, executor: Any | None = None) -> DataProvider:
+def create_data_provider(session: AsyncSessionManager, executor: Optional[Any] = None) -> DataProvider:
"""Create appropriate data provider based on session type.
Args:
diff --git a/ccbt/interface/metrics/graph_series.py b/ccbt/interface/metrics/graph_series.py
index 4bf386a..fda4983 100644
--- a/ccbt/interface/metrics/graph_series.py
+++ b/ccbt/interface/metrics/graph_series.py
@@ -30,7 +30,7 @@ class GraphMetricSeries:
unit: str = "KiB/s"
color: str = "green"
style: str = "solid"
- description: str | None = None
+ description: Optional[str] = None
category: SeriesCategory = SeriesCategory.SPEED
source_path: Tuple[str, ...] = ("global_stats",)
scale: float = 1.0
diff --git a/ccbt/interface/reactive_updates.py b/ccbt/interface/reactive_updates.py
index b79a8b1..d2cc1b1 100644
--- a/ccbt/interface/reactive_updates.py
+++ b/ccbt/interface/reactive_updates.py
@@ -10,7 +10,7 @@
import time
from collections import deque
from enum import IntEnum
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any, Callable, Optional
if TYPE_CHECKING:
from ccbt.interface.data_provider import DataProvider
@@ -40,7 +40,7 @@ def __init__(
event_type: str,
data: dict[str, Any],
priority: UpdatePriority = UpdatePriority.NORMAL,
- timestamp: float | None = None,
+ timestamp: Optional[float] = None,
) -> None:
"""Initialize update event.
@@ -88,7 +88,7 @@ def __init__(
self._last_update_times: dict[str, float] = {}
# Processing task
- self._processing_task: asyncio.Task | None = None
+ self._processing_task: Optional[asyncio.Task] = None
self._running = False
# Lock for thread safety
@@ -241,7 +241,7 @@ async def _process_updates(self) -> None: # pragma: no cover
while self._running:
try:
# Process events in priority order (CRITICAL -> HIGH -> NORMAL -> LOW)
- event: UpdateEvent | None = None
+ event: Optional[UpdateEvent] = None
for priority in [
UpdatePriority.CRITICAL,
diff --git a/ccbt/interface/screens/base.py b/ccbt/interface/screens/base.py
index fba97b9..9407da9 100644
--- a/ccbt/interface/screens/base.py
+++ b/ccbt/interface/screens/base.py
@@ -5,7 +5,7 @@
import asyncio
import contextlib
import logging
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
if TYPE_CHECKING:
from textual.screen import ModalScreen, Screen
@@ -125,7 +125,7 @@ def __init__(self, message: str, *args: Any, **kwargs: Any):
"""
super().__init__(*args, **kwargs)
self.message = message
- self.result: bool | None = None
+ self.result: Optional[bool] = None
def compose(self) -> ComposeResult: # pragma: no cover
"""Compose the confirmation dialog."""
@@ -203,7 +203,7 @@ def __init__(self, title: str, message: str, placeholder: str = "", *args: Any,
self.title = title
self.message = message
self.placeholder = placeholder
- self.result: str | None = None
+ self.result: Optional[str] = None
def compose(self) -> ComposeResult: # pragma: no cover
"""Compose the input dialog."""
@@ -315,12 +315,12 @@ def __init__(
self.metrics_collector = get_metrics_collector()
self.alert_manager = get_alert_manager()
self.plugin_manager = get_plugin_manager()
- self._refresh_task: asyncio.Task | None = None
- self._refresh_interval_id: Any | None = None
+ self._refresh_task: Optional[asyncio.Task] = None
+ self._refresh_interval_id: Optional[Any] = None
# Command executor for executing CLI commands (will be set in on_mount to avoid circular import)
- self._command_executor: Any | None = None
+ self._command_executor: Optional[Any] = None
# Status bar reference (will be set in on_mount if available)
- self.statusbar: Static | None = None
+ self.statusbar: Optional[Static] = None
async def on_mount(self) -> None: # type: ignore[override] # pragma: no cover
"""Mount the screen and start refresh interval."""
@@ -401,7 +401,7 @@ async def action_quit(self) -> None: # pragma: no cover
"""Quit the monitoring screen."""
await self.action_back()
- def _get_metrics_plugin(self) -> Any | None: # pragma: no cover
+ def _get_metrics_plugin(self) -> Optional[Any]: # pragma: no cover
"""Get MetricsPlugin instance if available.
Tries multiple methods:
diff --git a/ccbt/interface/screens/config/global_config.py b/ccbt/interface/screens/config/global_config.py
index de4a4fe..be5ff0a 100644
--- a/ccbt/interface/screens/config/global_config.py
+++ b/ccbt/interface/screens/config/global_config.py
@@ -4,7 +4,7 @@
import asyncio
import logging
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
if TYPE_CHECKING:
from textual.app import ComposeResult
@@ -139,7 +139,7 @@ async def on_mount(self) -> None: # type: ignore[override] # pragma: no cover
)
)
- def _extract_row_key_value(self, row_key: Any) -> str | None:
+ def _extract_row_key_value(self, row_key: Any) -> Optional[str]:
"""Extract the actual value from a RowKey object.
Args:
@@ -528,7 +528,7 @@ def __init__(
self.section_name = section_name
self._editors: dict[str, ConfigValueEditor] = {}
self._original_config: Any = None
- self._section_schema: dict[str, Any] | None = None
+ self._section_schema: Optional[dict[str, Any]] = None
def compose(self) -> ComposeResult: # pragma: no cover
"""Compose the config detail screen."""
diff --git a/ccbt/interface/screens/config/torrent_config.py b/ccbt/interface/screens/config/torrent_config.py
index 6d34595..b13433c 100644
--- a/ccbt/interface/screens/config/torrent_config.py
+++ b/ccbt/interface/screens/config/torrent_config.py
@@ -8,7 +8,7 @@
import asyncio
import logging
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
from rich.panel import Panel
from rich.table import Table
@@ -164,7 +164,7 @@ async def on_mount(self) -> None: # type: ignore[override] # pragma: no cover
await self._update_stats(stats_widget, None)
async def _update_stats(
- self, stats_widget: Static, selected_ih: str | None
+ self, stats_widget: Static, selected_ih: Optional[str]
) -> None: # pragma: no cover
"""Update stats panel with selected torrent information."""
if selected_ih:
diff --git a/ccbt/interface/screens/config/widget_factory.py b/ccbt/interface/screens/config/widget_factory.py
index 178f2eb..db2f395 100644
--- a/ccbt/interface/screens/config/widget_factory.py
+++ b/ccbt/interface/screens/config/widget_factory.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Union
from ccbt.config.config_schema import ConfigSchema
@@ -28,10 +28,10 @@ def create_config_widget(
option_key: str,
current_value: Any,
section_name: str,
- option_metadata: dict[str, Any] | None = None,
+ option_metadata: Optional[dict[str, Any]] = None,
*args: Any,
**kwargs: Any,
-) -> Checkbox | Select | ConfigValueEditor:
+) -> Union[Checkbox, Select, ConfigValueEditor]:
"""Create appropriate widget for a configuration option.
Args:
diff --git a/ccbt/interface/screens/config/widgets.py b/ccbt/interface/screens/config/widgets.py
index f238c8a..006d863 100644
--- a/ccbt/interface/screens/config/widgets.py
+++ b/ccbt/interface/screens/config/widgets.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from textual.widgets import Input, Static
@@ -23,7 +23,7 @@ def __init__(
current_value: Any,
value_type: str = "string",
description: str = "",
- constraints: dict[str, Any] | None = None,
+ constraints: Optional[dict[str, Any]] = None,
*args: Any,
**kwargs: Any,
): # pragma: no cover
@@ -47,7 +47,7 @@ def __init__(
self.description = description
self.constraints = normalized_constraints
self._original_value = current_value
- self._validation_error: str | None = None
+ self._validation_error: Optional[str] = None
# Format initial value for display
if value_type == "bool":
@@ -102,7 +102,7 @@ def get_parsed_value(self) -> Any: # pragma: no cover
return value_str
def validate_value(
- self, value: str | None = None
+ self, value: Optional[str] = None
) -> tuple[bool, str]: # pragma: no cover
"""Validate the current value or a provided value.
diff --git a/ccbt/interface/screens/dialogs.py b/ccbt/interface/screens/dialogs.py
index a425f5b..7c07807 100644
--- a/ccbt/interface/screens/dialogs.py
+++ b/ccbt/interface/screens/dialogs.py
@@ -5,7 +5,7 @@
import asyncio
import logging
from pathlib import Path
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
from ccbt.i18n import _
@@ -427,7 +427,7 @@ def __init__(
)
# Torrent data (loaded after step 1)
- self.torrent_data: dict[str, Any] | None = (
+ self.torrent_data: Optional[dict[str, Any]] = (
None # pragma: no cover - AddTorrentScreen initialization
)
@@ -1186,9 +1186,9 @@ def __init__(
self.info_hash_hex = info_hash_hex
self.session = session
self.dashboard = dashboard
- self._status_widget: Static | None = None
- self._progress_widget: Static | None = None
- self._check_task: Any | None = None
+ self._status_widget: Optional[Static] = None
+ self._progress_widget: Optional[Static] = None
+ self._check_task: Optional[Any] = None
self._cancelled = False
self._all_files_selected = True # Default to selecting all files
@@ -1408,7 +1408,7 @@ def __init__(
self.info_hash_hex = info_hash_hex
self.session = session
self.dashboard = dashboard
- self._file_table: DataTable | None = None
+ self._file_table: Optional[DataTable] = None
self._selected_files: set[int] = set()
def compose(self) -> ComposeResult: # pragma: no cover
diff --git a/ccbt/interface/screens/language_selection_screen.py b/ccbt/interface/screens/language_selection_screen.py
index cb94619..242cbb8 100644
--- a/ccbt/interface/screens/language_selection_screen.py
+++ b/ccbt/interface/screens/language_selection_screen.py
@@ -7,7 +7,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
from ccbt.i18n import _
@@ -76,8 +76,8 @@ class LanguageSelectionScreen(ModalScreen): # type: ignore[misc]
def __init__(
self,
- data_provider: DataProvider | None = None,
- command_executor: CommandExecutor | None = None,
+ data_provider: Optional[DataProvider] = None,
+ command_executor: Optional[CommandExecutor] = None,
*args: Any,
**kwargs: Any,
) -> None:
@@ -90,8 +90,8 @@ def __init__(
super().__init__(*args, **kwargs)
self._data_provider = data_provider
self._command_executor = command_executor
- self._language_selector: Any | None = None
- self._selected_locale: str | None = None
+ self._language_selector: Optional[Any] = None
+ self._selected_locale: Optional[str] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the language selection screen."""
diff --git a/ccbt/interface/screens/monitoring/ipfs.py b/ccbt/interface/screens/monitoring/ipfs.py
index ab343d5..2b1ba44 100644
--- a/ccbt/interface/screens/monitoring/ipfs.py
+++ b/ccbt/interface/screens/monitoring/ipfs.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
if TYPE_CHECKING:
from textual.app import ComposeResult
@@ -306,7 +306,7 @@ async def _refresh_data(self) -> None: # pragma: no cover
)
async def _refresh_ipfs_performance_metrics(
- self, widget: Static, protocol: Any | None
+ self, widget: Static, protocol: Optional[Any]
) -> None: # pragma: no cover
"""Refresh IPFS performance metrics."""
try:
@@ -355,7 +355,7 @@ async def _refresh_ipfs_performance_metrics(
except Exception:
widget.update("")
- async def _get_ipfs_protocol(self) -> Any | None: # pragma: no cover
+ async def _get_ipfs_protocol(self) -> Optional[Any]: # pragma: no cover
"""Get IPFS protocol instance from session."""
try:
from ccbt.protocols.base import ProtocolType
diff --git a/ccbt/interface/screens/monitoring/xet.py b/ccbt/interface/screens/monitoring/xet.py
index 8b434c0..79e984f 100644
--- a/ccbt/interface/screens/monitoring/xet.py
+++ b/ccbt/interface/screens/monitoring/xet.py
@@ -3,7 +3,7 @@
from __future__ import annotations
from pathlib import Path
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
if TYPE_CHECKING:
from textual.app import ComposeResult
@@ -262,7 +262,7 @@ def format_bytes(b: int) -> str:
except Exception:
widget.update("")
- async def _get_xet_protocol(self) -> Any | None: # pragma: no cover
+ async def _get_xet_protocol(self) -> Optional[Any]: # pragma: no cover
"""Get Xet protocol instance from session."""
try:
from ccbt.protocols.base import ProtocolType
diff --git a/ccbt/interface/screens/per_peer_tab.py b/ccbt/interface/screens/per_peer_tab.py
index a08fe17..99d6159 100644
--- a/ccbt/interface/screens/per_peer_tab.py
+++ b/ccbt/interface/screens/per_peer_tab.py
@@ -7,7 +7,7 @@
import asyncio
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.i18n import _
@@ -102,11 +102,11 @@ def __init__(
super().__init__(*args, **kwargs)
self._data_provider = data_provider
self._command_executor = command_executor
- self._global_peers_table: DataTable | None = None
- self._peer_detail_table: DataTable | None = None
- self._summary_widget: Static | None = None
- self._selected_peer_key: str | None = None
- self._update_task: Any | None = None
+ self._global_peers_table: Optional[DataTable] = None
+ self._peer_detail_table: Optional[DataTable] = None
+ self._summary_widget: Optional[Static] = None
+ self._selected_peer_key: Optional[str] = None
+ self._update_task: Optional[Any] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the per-peer tab content."""
diff --git a/ccbt/interface/screens/per_torrent_files.py b/ccbt/interface/screens/per_torrent_files.py
index cee3fb5..0bcd252 100644
--- a/ccbt/interface/screens/per_torrent_files.py
+++ b/ccbt/interface/screens/per_torrent_files.py
@@ -6,7 +6,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
if TYPE_CHECKING:
from ccbt.interface.commands.executor import CommandExecutor
@@ -104,7 +104,7 @@ def __init__(
self._data_provider = data_provider
self._command_executor = command_executor
self._info_hash = info_hash
- self._files_table: DataTable | None = None
+ self._files_table: Optional[DataTable] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the files screen."""
@@ -374,7 +374,7 @@ def __init__(
"""
super().__init__(*args, **kwargs)
self._current_priority = current_priority
- self._selected_priority: str | None = None
+ self._selected_priority: Optional[str] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the priority selection dialog."""
diff --git a/ccbt/interface/screens/per_torrent_info.py b/ccbt/interface/screens/per_torrent_info.py
index 3fedf51..337bd71 100644
--- a/ccbt/interface/screens/per_torrent_info.py
+++ b/ccbt/interface/screens/per_torrent_info.py
@@ -9,7 +9,7 @@
import os
import platform
import subprocess
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
if TYPE_CHECKING:
from ccbt.interface.commands.executor import CommandExecutor
@@ -92,9 +92,9 @@ def __init__(
self._data_provider = data_provider
self._command_executor = command_executor
self._info_hash = info_hash
- self._info_widget: Static | None = None
- self._health_bar: PieceAvailabilityHealthBar | None = None
- self._dht_aggressive_switch: Switch | None = None
+ self._info_widget: Optional[Static] = None
+ self._health_bar: Optional[PieceAvailabilityHealthBar] = None
+ self._dht_aggressive_switch: Optional[Switch] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the info screen."""
diff --git a/ccbt/interface/screens/per_torrent_peers.py b/ccbt/interface/screens/per_torrent_peers.py
index 1c9defe..674319e 100644
--- a/ccbt/interface/screens/per_torrent_peers.py
+++ b/ccbt/interface/screens/per_torrent_peers.py
@@ -6,7 +6,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
if TYPE_CHECKING:
from ccbt.interface.commands.executor import CommandExecutor
@@ -73,7 +73,7 @@ def __init__(
self._data_provider = data_provider
self._command_executor = command_executor
self._info_hash = info_hash
- self._peers_table: DataTable | None = None
+ self._peers_table: Optional[DataTable] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the peers screen."""
diff --git a/ccbt/interface/screens/per_torrent_tab.py b/ccbt/interface/screens/per_torrent_tab.py
index 4fc1da5..bdb2b2c 100644
--- a/ccbt/interface/screens/per_torrent_tab.py
+++ b/ccbt/interface/screens/per_torrent_tab.py
@@ -6,7 +6,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.i18n import _
@@ -88,7 +88,7 @@ def __init__(
self,
data_provider: DataProvider,
command_executor: CommandExecutor,
- selected_info_hash: str | None = None,
+ selected_info_hash: Optional[str] = None,
*args: Any,
**kwargs: Any,
) -> None:
@@ -102,11 +102,11 @@ def __init__(
super().__init__(*args, **kwargs)
self._data_provider = data_provider
self._command_executor = command_executor
- self._selected_info_hash: str | None = selected_info_hash
- self._sub_tabs: Tabs | None = None
- self._content_area: Container | None = None
- self._loading_sub_tab: str | None = None # Guard to prevent concurrent loading
- self._active_sub_tab_id: str | None = None
+ self._selected_info_hash: Optional[str] = selected_info_hash
+ self._sub_tabs: Optional[Tabs] = None
+ self._content_area: Optional[Container] = None
+ self._loading_sub_tab: Optional[str] = None # Guard to prevent concurrent loading
+ self._active_sub_tab_id: Optional[str] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the per-torrent tab with nested sub-tabs."""
@@ -230,7 +230,7 @@ def _on_torrent_selected(self, event: Any) -> None: # pragma: no cover
# Fallback to call_later
self.call_later(self._load_sub_tab_content, "sub-tab-files") # type: ignore[attr-defined]
- def set_selected_info_hash(self, info_hash: str | None) -> None: # pragma: no cover
+ def set_selected_info_hash(self, info_hash: Optional[str]) -> None: # pragma: no cover
"""Update the selected torrent info hash externally.
Args:
@@ -577,7 +577,7 @@ async def refresh_active_sub_tab(self) -> None: # pragma: no cover
# Reload the sub-tab content to ensure it's up-to-date
await self._load_sub_tab_content(self._active_sub_tab_id)
- def get_selected_info_hash(self) -> str | None: # pragma: no cover
+ def get_selected_info_hash(self) -> Optional[str]: # pragma: no cover
"""Get the currently selected torrent info hash.
Returns:
diff --git a/ccbt/interface/screens/per_torrent_trackers.py b/ccbt/interface/screens/per_torrent_trackers.py
index 43b0bb0..0b9be15 100644
--- a/ccbt/interface/screens/per_torrent_trackers.py
+++ b/ccbt/interface/screens/per_torrent_trackers.py
@@ -6,7 +6,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
if TYPE_CHECKING:
from ccbt.interface.commands.executor import CommandExecutor
@@ -94,7 +94,7 @@ def __init__(
self._data_provider = data_provider
self._command_executor = command_executor
self._info_hash = info_hash
- self._trackers_table: DataTable | None = None
+ self._trackers_table: Optional[DataTable] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the trackers screen."""
diff --git a/ccbt/interface/screens/preferences_tab.py b/ccbt/interface/screens/preferences_tab.py
index 86eef96..d268bd7 100644
--- a/ccbt/interface/screens/preferences_tab.py
+++ b/ccbt/interface/screens/preferences_tab.py
@@ -6,7 +6,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.i18n import _
@@ -60,7 +60,7 @@ class PreferencesTabContent(Container): # type: ignore[misc]
def __init__(
self,
command_executor: CommandExecutor,
- session: Any | None = None,
+ session: Optional[Any] = None,
*args: Any,
**kwargs: Any,
) -> None:
@@ -73,9 +73,9 @@ def __init__(
super().__init__(*args, **kwargs)
self._command_executor = command_executor
self._session = session
- self._sub_tabs: Tabs | None = None
- self._content_area: Container | None = None
- self._active_sub_tab_id: str | None = None
+ self._sub_tabs: Optional[Tabs] = None
+ self._content_area: Optional[Container] = None
+ self._active_sub_tab_id: Optional[str] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the preferences tab with nested sub-tabs."""
diff --git a/ccbt/interface/screens/tabbed_base.py b/ccbt/interface/screens/tabbed_base.py
index 76c24b1..ee6b00d 100644
--- a/ccbt/interface/screens/tabbed_base.py
+++ b/ccbt/interface/screens/tabbed_base.py
@@ -12,7 +12,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
if TYPE_CHECKING:
from ccbt.session.session import AsyncSessionManager
@@ -79,7 +79,7 @@ class PerTorrentTabScreen(MonitoringScreen): # type: ignore[misc]
def __init__(
self,
session: AsyncSessionManager,
- selected_info_hash: str | None = None,
+ selected_info_hash: Optional[str] = None,
*args: Any,
**kwargs: Any,
) -> None:
diff --git a/ccbt/interface/screens/theme_selection_screen.py b/ccbt/interface/screens/theme_selection_screen.py
index 1a19591..c32e128 100644
--- a/ccbt/interface/screens/theme_selection_screen.py
+++ b/ccbt/interface/screens/theme_selection_screen.py
@@ -6,7 +6,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
from ccbt.i18n import _
@@ -99,7 +99,7 @@ def __init__(
) -> None:
"""Initialize theme selection screen."""
super().__init__(*args, **kwargs)
- self._selected_theme: str | None = None
+ self._selected_theme: Optional[str] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the theme selection screen."""
diff --git a/ccbt/interface/screens/torrents_tab.py b/ccbt/interface/screens/torrents_tab.py
index 1d82002..ba576ab 100644
--- a/ccbt/interface/screens/torrents_tab.py
+++ b/ccbt/interface/screens/torrents_tab.py
@@ -8,7 +8,7 @@
import asyncio
import contextlib
import logging
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
from ccbt.i18n import _
from ccbt.interface.widgets.core_widgets import GlobalTorrentMetricsPanel
@@ -111,7 +111,7 @@ def __init__(
self,
data_provider: DataProvider,
command_executor: CommandExecutor,
- selected_hash_callback: Any | None = None,
+ selected_hash_callback: Optional[Any] = None,
*args: Any,
**kwargs: Any,
) -> None:
@@ -126,10 +126,10 @@ def __init__(
self._data_provider = data_provider
self._command_executor = command_executor
self._selected_hash_callback = selected_hash_callback
- self._torrents_table: DataTable | None = None
- self._search_input: Input | None = None
- self._metrics_panel: GlobalTorrentMetricsPanel | None = None
- self._empty_message: Static | None = None
+ self._torrents_table: Optional[DataTable] = None
+ self._search_input: Optional[Input] = None
+ self._metrics_panel: Optional[GlobalTorrentMetricsPanel] = None
+ self._empty_message: Optional[Static] = None
self._filter_text = ""
def compose(self) -> Any: # pragma: no cover
@@ -271,7 +271,7 @@ async def refresh_torrents(self) -> None: # pragma: no cover
logger.warning("GlobalTorrentsScreen: Missing data provider, cannot refresh")
return
- stats: dict[str, Any] | None = None
+ stats: Optional[dict[str, Any]] = None
swarm_samples: list[dict[str, Any]] | None = None
try:
@@ -569,8 +569,8 @@ def __init__(
self,
data_provider: DataProvider,
command_executor: CommandExecutor,
- filter_status: str | None = None,
- selected_hash_callback: Any | None = None,
+ filter_status: Optional[str] = None,
+ selected_hash_callback: Optional[Any] = None,
*args: Any,
**kwargs: Any,
) -> None:
@@ -587,7 +587,7 @@ def __init__(
self._command_executor = command_executor
self._filter_status = filter_status
self._selected_hash_callback = selected_hash_callback
- self._torrents_table: DataTable | None = None
+ self._torrents_table: Optional[DataTable] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the filtered torrents screen."""
@@ -827,7 +827,7 @@ def __init__(
self,
data_provider: DataProvider,
command_executor: CommandExecutor,
- selected_hash_callback: Any | None = None,
+ selected_hash_callback: Optional[Any] = None,
*args: Any,
**kwargs: Any,
) -> None:
@@ -842,9 +842,9 @@ def __init__(
self._data_provider = data_provider
self._command_executor = command_executor
self._selected_hash_callback = selected_hash_callback
- self._sub_tabs: Tabs | None = None
- self._content_area: Container | None = None
- self._active_sub_tab_id: str | None = None
+ self._sub_tabs: Optional[Tabs] = None
+ self._content_area: Optional[Container] = None
+ self._active_sub_tab_id: Optional[str] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the torrents tab with nested sub-tabs."""
diff --git a/ccbt/interface/screens/utility/file_selection.py b/ccbt/interface/screens/utility/file_selection.py
index 6723086..835d0bd 100644
--- a/ccbt/interface/screens/utility/file_selection.py
+++ b/ccbt/interface/screens/utility/file_selection.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import asyncio
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
if TYPE_CHECKING:
from ccbt.session.session import AsyncSessionManager
@@ -108,8 +108,8 @@ def __init__(
self.info_hash_bytes = bytes.fromhex(
info_hash_hex
) # pragma: no cover - UI initialization
- self.file_manager: Any | None = None # pragma: no cover - UI initialization
- self._refresh_task: asyncio.Task | None = (
+ self.file_manager: Optional[Any] = None # pragma: no cover - UI initialization
+ self._refresh_task: Optional[asyncio.Task] = (
None # pragma: no cover - UI initialization
)
diff --git a/ccbt/interface/splash/animation_adapter.py b/ccbt/interface/splash/animation_adapter.py
index fdf3d63..557ae59 100644
--- a/ccbt/interface/splash/animation_adapter.py
+++ b/ccbt/interface/splash/animation_adapter.py
@@ -7,7 +7,7 @@
from __future__ import annotations
import asyncio
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from rich.console import Console
@@ -25,8 +25,8 @@ class MessageOverlay:
def __init__(
self,
- console: Any | None = None,
- textual_widget: Any | None = None,
+ console: Optional[Any] = None,
+ textual_widget: Optional[Any] = None,
position: str = "bottom_right",
max_lines: int = 1,
) -> None:
@@ -84,8 +84,8 @@ class AnimationAdapter:
def __init__(
self,
- console: Any | None = None,
- textual_widget: Any | None = None,
+ console: Optional[Any] = None,
+ textual_widget: Optional[Any] = None,
) -> None:
"""Initialize animation adapter.
@@ -104,8 +104,8 @@ async def render_with_template(
self,
template_name: str,
transition: Transition,
- bg_config: BackgroundConfig | None = None,
- update_callback: Any | None = None,
+ bg_config: Optional[BackgroundConfig] = None,
+ update_callback: Optional[Any] = None,
) -> None:
"""Render animation with template, transition, and background.
@@ -144,8 +144,8 @@ async def render_with_text(
self,
text: str,
transition: Transition,
- bg_config: BackgroundConfig | None = None,
- update_callback: Any | None = None,
+ bg_config: Optional[BackgroundConfig] = None,
+ update_callback: Optional[Any] = None,
) -> None:
"""Render animation with text, transition, and background.
@@ -186,7 +186,7 @@ def clear_messages(self) -> None:
def render_frame_with_overlay(
self,
frame_content: Any,
- messages: list[str] | None = None,
+ messages: Optional[list[str]] = None,
) -> Any:
"""Render frame (overlay removed - returns frame as-is).
@@ -202,9 +202,9 @@ def render_frame_with_overlay(
async def run_sequence(
self,
transitions: list[Transition],
- template_name: str | None = None,
- text: str | None = None,
- bg_config: BackgroundConfig | None = None,
+ template_name: Optional[str] = None,
+ text: Optional[str] = None,
+ bg_config: Optional[BackgroundConfig] = None,
) -> None:
"""Run a sequence of transitions.
diff --git a/ccbt/interface/splash/animation_config.py b/ccbt/interface/splash/animation_config.py
index e68dae2..b6da4ad 100644
--- a/ccbt/interface/splash/animation_config.py
+++ b/ccbt/interface/splash/animation_config.py
@@ -7,7 +7,7 @@
from __future__ import annotations
from dataclasses import dataclass, field
-from typing import Any
+from typing import Any, Optional, Union
@dataclass
@@ -18,19 +18,19 @@ class BackgroundConfig:
bg_type: str = "none" # none, solid, gradient, pattern, stars, waves, particles, flower
# Color configuration
- bg_color_start: str | list[str] | None = None # Single color or gradient start
- bg_color_finish: str | list[str] | None = None # Single color or gradient end
- bg_color_palette: list[str] | None = None # Full color palette for animated backgrounds
+ bg_color_start: Optional[Union[str, list[str]]] = None # Single color or gradient start
+ bg_color_finish: Optional[Union[str, list[str]]] = None # Single color or gradient end
+ bg_color_palette: Optional[list[str]] = None # Full color palette for animated backgrounds
# Text color (separate from background)
- text_color: str | list[str] | None = None # Text color (overrides main color_start for text)
+ text_color: Optional[Union[str, list[str]]] = None # Text color (overrides main color_start for text)
# Animation
bg_animate: bool = False # Whether background should animate
bg_direction: str = "left_to_right" # Animation direction
bg_speed: float = 2.0 # Background animation speed (for pattern movement)
bg_animation_speed: float = 1.0 # Background color animation speed (for palette cycling)
- bg_duration: float | None = None # Background animation duration (None = match logo)
+ bg_duration: Optional[float] = None # Background animation duration (None = match logo)
# Pattern-specific options
bg_pattern_char: str = "·" # Character for pattern backgrounds
@@ -77,9 +77,9 @@ class AnimationConfig:
logo_text: str = ""
# Color configuration
- color_start: str | list[str] | None = None # Single color or palette start
- color_finish: str | list[str] | None = None # Single color or palette end
- color_palette: list[str] | None = None # Full color palette
+ color_start: Optional[Union[str, list[str]]] = None # Single color or palette start
+ color_finish: Optional[Union[str, list[str]]] = None # Single color or palette end
+ color_palette: Optional[list[str]] = None # Full color palette
# Direction/flow
direction: str = "left_to_right" # left_to_right, right_to_left, top_to_bottom,
@@ -89,7 +89,7 @@ class AnimationConfig:
duration: float = 3.0
speed: float = 8.0
steps: int = 30
- sequence_total_duration: float | None = None # Total duration of entire sequence for adaptive timing
+ sequence_total_duration: Optional[float] = None # Total duration of entire sequence for adaptive timing
# Style-specific options
reveal_char: str = "█"
@@ -102,8 +102,8 @@ class AnimationConfig:
# New animation options
snake_length: int = 10
snake_thickness: int = 1 # Thickness of snake perpendicular to direction
- arc_center_x: int | None = None
- arc_center_y: int | None = None
+ arc_center_x: Optional[int] = None
+ arc_center_y: Optional[int] = None
whitespace_pattern: str = "|/—\\"
slide_direction: str = "left" # For letter_slide_in
diff --git a/ccbt/interface/splash/animation_executor.py b/ccbt/interface/splash/animation_executor.py
index 2b82084..2ee3ea6 100644
--- a/ccbt/interface/splash/animation_executor.py
+++ b/ccbt/interface/splash/animation_executor.py
@@ -6,17 +6,17 @@
from __future__ import annotations
import asyncio
-from typing import Any
+from typing import Any, Optional
from ccbt.interface.splash.animation_config import AnimationConfig, BackgroundConfig
from ccbt.interface.splash.animation_helpers import AnimationController
-from typing import Any
+from typing import Any, Optional
class AnimationExecutor:
"""Executes animations from AnimationConfig objects."""
- def __init__(self, controller: AnimationController | None = None) -> None:
+ def __init__(self, controller: Optional[AnimationController] = None) -> None:
"""Initialize animation executor.
Args:
diff --git a/ccbt/interface/splash/animation_helpers.py b/ccbt/interface/splash/animation_helpers.py
index ed7a007..f1591ba 100644
--- a/ccbt/interface/splash/animation_helpers.py
+++ b/ccbt/interface/splash/animation_helpers.py
@@ -8,7 +8,7 @@
import asyncio
import math
import random
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional, Union
if TYPE_CHECKING:
from rich.console import Group
@@ -69,7 +69,7 @@ class ColorPalette:
class FrameRenderer:
"""Renders ASCII art frames with Rich styling."""
- def __init__(self, console: Console | None = None, splash_screen: Any = None) -> None:
+ def __init__(self, console: Optional[Console] = None, splash_screen: Any = None) -> None:
"""Initialize frame renderer.
Args:
@@ -179,7 +179,7 @@ def render_multi_color_frame(
class BackgroundRenderer:
"""Renders animated backgrounds for splash screens."""
- def __init__(self, console: Console | None = None) -> None:
+ def __init__(self, console: Optional[Console] = None) -> None:
"""Initialize background renderer.
Args:
@@ -199,7 +199,7 @@ def generate_background(
width: int,
height: int,
bg_type: str = "none",
- bg_color: str | list[str] | None = None,
+ bg_color: Optional[Union[str, list[str]]] = None,
bg_pattern_char: str = "·",
bg_pattern_density: float = 0.1,
bg_star_count: int = 50,
@@ -489,7 +489,7 @@ def _generate_perspective_grid(
width: int,
height: int,
density: float,
- vanishing_point: int | None,
+ vanishing_point: Optional[int],
time_offset: float,
) -> list[str]:
"""Generate a faux 3D perspective grid background."""
@@ -576,7 +576,7 @@ class AnimationController:
def __init__(
self,
- frame_renderer: FrameRenderer | None = None,
+ frame_renderer: Optional[FrameRenderer] = None,
default_frame_duration: float = 0.016, # 60 FPS for ultra-smooth animations
) -> None:
"""Initialize animation controller.
@@ -590,7 +590,7 @@ def __init__(
self.background_renderer = BackgroundRenderer(self.renderer.console)
self.default_duration = default_frame_duration
- def _calculate_frame_duration(self, total_duration: float, num_frames: int | None = None) -> float:
+ def _calculate_frame_duration(self, total_duration: float, num_frames: Optional[int] = None) -> float:
"""Calculate frame duration based on total animation duration.
Args:
@@ -611,7 +611,7 @@ def _calculate_frame_duration(self, total_duration: float, num_frames: int | Non
# Clamp between 0.008 (120 FPS max) and 0.033 (30 FPS min) for ultra-fluid animations
return max(0.008, min(0.033, frame_duration))
- def _adapt_speed_to_duration(self, base_speed: float, duration: float, sequence_duration: float | None = None) -> float:
+ def _adapt_speed_to_duration(self, base_speed: float, duration: float, sequence_duration: Optional[float] = None) -> float:
"""Adapt animation speed based on duration.
Args:
@@ -695,7 +695,7 @@ def render_with_background(
logo_lines: list[Text],
bg_config: Any,
time_offset: float = 0.0,
- text_color: str | list[str] | None = None,
+ text_color: Optional[Union[str, list[str]]] = None,
) -> Group:
"""Render logo lines with background.
@@ -856,7 +856,7 @@ async def animate_columns_reveal(
color: str = "white",
steps: int = 30,
column_groups: int = 1,
- duration: float | None = None,
+ duration: Optional[float] = None,
) -> None:
"""Reveal text column by column or in column groups.
@@ -948,7 +948,7 @@ async def animate_columns_color(
self,
text: str,
direction: str = "left_to_right",
- color_palette: list[str] | None = None,
+ color_palette: Optional[list[str]] = None,
speed: float = 8.0,
duration: float = 3.0,
column_groups: int = 1,
@@ -1380,7 +1380,7 @@ async def animate_row_groups_color(
self,
text: str,
direction: str = "left_to_right",
- color_palette: list[str] | None = None,
+ color_palette: Optional[list[str]] = None,
speed: float = 8.0,
duration: float = 3.0,
group_by: str = "spaces",
@@ -1848,7 +1848,7 @@ async def animate_row_transition(
async def play_frames(
self,
frames: list[str],
- frame_duration: float | None = None,
+ frame_duration: Optional[float] = None,
color: str = "white",
clear_between: bool = True,
) -> None:
@@ -1874,7 +1874,7 @@ async def play_frames(
async def play_multi_color_frames(
self,
frames: list[list[tuple[str, str]]],
- frame_duration: float | None = None,
+ frame_duration: Optional[float] = None,
clear_between: bool = True,
) -> None:
"""Play a sequence of multi-color frames.
@@ -2077,7 +2077,7 @@ async def animate_color_per_direction(
self,
text: str,
direction: str = "left",
- color_palette: list[str] | None = None,
+ color_palette: Optional[list[str]] = None,
speed: float = 8.0,
duration: float = 3.0,
) -> None:
@@ -2182,7 +2182,7 @@ async def reveal_animation(
color: str = "white",
steps: int = 30,
reveal_char: str = "█",
- duration: float | None = None,
+ duration: Optional[float] = None,
) -> None:
"""Reveal text animation from different directions.
@@ -2395,7 +2395,7 @@ async def letter_by_letter_animation(
async def flag_effect(
self,
text: str,
- color_palette: list[str] | None = None,
+ color_palette: Optional[list[str]] = None,
wave_speed: float = 2.0,
wave_amplitude: float = 2.0,
duration: float = 3.0,
@@ -2622,7 +2622,7 @@ async def glitch_effect(
def _get_color_from_palette(
self,
- color_input: str | list[str] | None,
+ color_input: Optional[Union[str, list[str]]],
position: int = 0,
total_positions: int = 1,
default: str = "white",
@@ -2657,7 +2657,7 @@ def _get_color_from_palette(
def _get_color_at_position(
self,
- color_input: str | list[str] | None,
+ color_input: Optional[Union[str, list[str]]],
char_idx: int,
line_idx: int,
max_width: int,
@@ -2697,8 +2697,8 @@ def _get_color_at_position(
async def rainbow_to_color(
self,
text: str,
- target_color: str | list[str],
- color_palette: list[str] | None = None,
+ target_color: Union[str, list[str]],
+ color_palette: Optional[list[str]] = None,
duration: float = 3.0,
) -> None:
"""Transition from rainbow colors to a single target color.
@@ -2787,8 +2787,8 @@ async def column_swipe(
self,
text: str,
direction: str = "left_to_right",
- color_start: str | list[str] = "white",
- color_finish: str | list[str] = "cyan",
+ color_start: Union[str, list[str]] = "white",
+ color_finish: Union[str, list[str]] = "cyan",
duration: float = 3.0,
) -> None:
"""Swipe color across columns.
@@ -2878,8 +2878,8 @@ async def arc_reveal(
direction: str = "top_down",
color: str = "white",
steps: int = 30,
- arc_center_x: int | None = None,
- arc_center_y: int | None = None,
+ arc_center_x: Optional[int] = None,
+ arc_center_y: Optional[int] = None,
) -> None:
"""Reveal text in an arc pattern.
@@ -3618,8 +3618,8 @@ async def letter_reveal_by_position(
def _get_background_color(
self,
- bg_color_input: str | list[str] | None,
- position: tuple[int, int] | None = None,
+ bg_color_input: Optional[Union[str, list[str]]],
+ position: Optional[tuple[int, int]] = None,
time_offset: float = 0.0,
animation_speed: float = 1.0,
default: str = "dim white",
@@ -3662,7 +3662,7 @@ async def whitespace_background_animation(
self,
text: str,
pattern: str = "|/—\\",
- bg_color: str | list[str] = "dim white",
+ bg_color: Union[str, list[str]] = "dim white",
text_color: str = "white",
duration: float = 3.0,
animation_speed: float = 2.0,
@@ -3775,8 +3775,8 @@ async def animate_background_with_logo(
text: str,
bg_config: BackgroundConfig,
logo_animation_style: str = "rainbow",
- logo_color_start: str | list[str] | None = None,
- logo_color_finish: str | list[str] | None = None,
+ logo_color_start: Optional[Union[str, list[str]]] = None,
+ logo_color_finish: Optional[Union[str, list[str]]] = None,
duration: float = 5.0,
) -> None:
"""Animate background with logo using specified animation style.
@@ -3935,10 +3935,10 @@ async def animate_color_transition(
self,
text: str,
bg_config: BackgroundConfig,
- logo_color_start: str | list[str],
- logo_color_finish: str | list[str],
- bg_color_start: str | list[str] | None = None,
- bg_color_finish: str | list[str] | None = None,
+ logo_color_start: Union[str, list[str]],
+ logo_color_finish: Union[str, list[str]],
+ bg_color_start: Optional[Union[str, list[str]]] = None,
+ bg_color_finish: Optional[Union[str, list[str]]] = None,
duration: float = 6.0,
) -> None:
"""Animate color transition for both background and logo.
@@ -4158,10 +4158,10 @@ async def animate_color_transition(
def _interpolate_color_palette(
self,
- color_start: str | list[str],
- color_finish: str | list[str],
+ color_start: Union[str, list[str]],
+ color_finish: Union[str, list[str]],
progress: float,
- ) -> str | list[str]:
+ ) -> Union[str, list[str]]:
"""Interpolate between two color palettes.
Args:
@@ -4260,11 +4260,11 @@ async def animate_background_with_reveal(
self,
text: str,
bg_config: BackgroundConfig,
- logo_color: str | list[str] = "white",
+ logo_color: Union[str, list[str]] = "white",
direction: str = "top_down",
reveal_type: str = "reveal", # "reveal" or "disappear"
duration: float = 4.0,
- update_callback: Any | None = None,
+ update_callback: Optional[Any] = None,
) -> None:
"""Animate background with logo reveal/disappear effect.
@@ -4311,7 +4311,7 @@ async def animate_background_with_reveal(
steps = max(1, int(duration * adaptive_fps))
frame_duration = self._calculate_frame_duration(duration, num_frames=steps)
- static_bg_lines: list[str] | None = None
+ static_bg_lines: Optional[list[str]] = None
if not bg_config.bg_animate:
bg_color_base = (
bg_config.bg_color_palette
@@ -4509,10 +4509,10 @@ async def animate_background_with_fade(
self,
text: str,
bg_config: BackgroundConfig,
- logo_color: str | list[str] = "white",
+ logo_color: Union[str, list[str]] = "white",
fade_type: str = "fade_in", # "fade_in" or "fade_out"
duration: float = 3.0,
- update_callback: Any | None = None,
+ update_callback: Optional[Any] = None,
) -> None:
"""Animate background with logo fade in/out effect.
@@ -4690,10 +4690,10 @@ async def animate_background_with_glitch(
self,
text: str,
bg_config: BackgroundConfig,
- logo_color: str | list[str] = "white",
+ logo_color: Union[str, list[str]] = "white",
glitch_intensity: float = 0.15,
duration: float = 3.0,
- update_callback: Any | None = None,
+ update_callback: Optional[Any] = None,
) -> None:
"""Animate background with logo glitch effect.
@@ -4847,10 +4847,10 @@ async def animate_background_with_rainbow(
text: str,
bg_config: BackgroundConfig,
logo_color_palette: list[str],
- bg_color_palette: list[str] | None = None,
+ bg_color_palette: Optional[list[str]] = None,
direction: str = "left_to_right",
duration: float = 4.0,
- update_callback: Any | None = None,
+ update_callback: Optional[Any] = None,
) -> None:
"""Animate background with rainbow logo effect.
diff --git a/ccbt/interface/splash/animation_registry.py b/ccbt/interface/splash/animation_registry.py
index a57044c..a91af4c 100644
--- a/ccbt/interface/splash/animation_registry.py
+++ b/ccbt/interface/splash/animation_registry.py
@@ -6,7 +6,7 @@
from __future__ import annotations
from dataclasses import dataclass
-from typing import Any
+from typing import Any, Optional
from ccbt.interface.splash.animation_config import (
BackgroundConfig,
@@ -27,9 +27,9 @@ class AnimationMetadata:
max_duration: float = 2.5
weight: float = 1.0 # Weight for random selection
description: str = ""
- color_palettes: list[list[str]] | None = None
- background_types: list[str] | None = None
- directions: list[str] | None = None
+ color_palettes: Optional[list[list[str]]] = None
+ background_types: Optional[list[str]] = None
+ directions: Optional[list[str]] = None
class AnimationRegistry:
@@ -51,7 +51,7 @@ def register(
"""
self._animations[metadata.name] = metadata
- def get(self, name: str) -> AnimationMetadata | None:
+ def get(self, name: str) -> Optional[AnimationMetadata]:
"""Get animation metadata by name.
Args:
@@ -70,7 +70,7 @@ def list(self) -> list[str]:
"""
return list(self._animations.keys())
- def select_random(self, exclude: list[str] | None = None) -> AnimationMetadata | None:
+ def select_random(self, exclude: Optional[list[str]] = None) -> Optional[AnimationMetadata]:
"""Select a random animation based on weights.
Args:
@@ -342,7 +342,7 @@ def register_animation(metadata: AnimationMetadata) -> None:
_registry.register(metadata)
-def get_animation(name: str) -> AnimationMetadata | None:
+def get_animation(name: str) -> Optional[AnimationMetadata]:
"""Get animation metadata from the global registry.
Args:
@@ -354,7 +354,7 @@ def get_animation(name: str) -> AnimationMetadata | None:
return _registry.get(name)
-def select_random_animation(exclude: list[str] | None = None) -> AnimationMetadata | None:
+def select_random_animation(exclude: Optional[list[str]] = None) -> Optional[AnimationMetadata]:
"""Select a random animation from the global registry.
Args:
diff --git a/ccbt/interface/splash/color_matching.py b/ccbt/interface/splash/color_matching.py
index 8c1d790..36e77ca 100644
--- a/ccbt/interface/splash/color_matching.py
+++ b/ccbt/interface/splash/color_matching.py
@@ -6,7 +6,7 @@
from __future__ import annotations
import random
-from typing import Any
+from typing import Any, Optional
from ccbt.interface.splash.animation_config import (
OCEAN_PALETTE,
@@ -79,7 +79,7 @@ def find_matching_color(
target_color: str,
palette: list[str],
min_similarity: float = 0.5,
-) -> str | None:
+) -> Optional[str]:
"""Find a color in a palette that matches the target color.
Args:
@@ -240,8 +240,8 @@ def generate_random_duration(min_duration: float = 1.5, max_duration: float = 2.
def select_matching_palettes(
- current_palette: list[str] | None = None,
- available_palettes: list[list[str]] | None = None,
+ current_palette: Optional[list[str]] = None,
+ available_palettes: Optional[list[list[str]]] = None,
) -> tuple[list[str], list[str]]:
"""Select two palettes that transition smoothly.
diff --git a/ccbt/interface/splash/color_themes.py b/ccbt/interface/splash/color_themes.py
index 91bdd81..8f06277 100644
--- a/ccbt/interface/splash/color_themes.py
+++ b/ccbt/interface/splash/color_themes.py
@@ -2,6 +2,8 @@
from __future__ import annotations
+from typing import Optional
+
from ccbt.interface.splash.animation_config import (
OCEAN_PALETTE,
RAINBOW_PALETTE,
@@ -66,7 +68,7 @@
}
-def get_color_template(name: str) -> list[str] | None:
+def get_color_template(name: str) -> Optional[list[str]]:
"""Return a copy of a registered color template."""
palette = COLOR_TEMPLATES.get(name)
if palette is None:
diff --git a/ccbt/interface/splash/message_overlay.py b/ccbt/interface/splash/message_overlay.py
index b9caa58..4043be6 100644
--- a/ccbt/interface/splash/message_overlay.py
+++ b/ccbt/interface/splash/message_overlay.py
@@ -6,7 +6,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional, Union
if TYPE_CHECKING:
from rich.console import Console
@@ -22,8 +22,8 @@ class MessageOverlay:
def __init__(
self,
- console: Console | None = None,
- textual_widget: Static | None = None,
+ console: Optional[Console] = None,
+ textual_widget: Optional[Static] = None,
position: str = "bottom_right",
max_lines: int = 1,
clear_on_update: bool = True,
@@ -45,7 +45,7 @@ def __init__(
self.messages: list[str] = []
self._last_rendered: str = ""
- def add_message(self, message: str, clear: bool | None = None) -> None:
+ def add_message(self, message: str, clear: Optional[bool] = None) -> None:
"""Add a message to the overlay.
Args:
@@ -93,8 +93,8 @@ def _update_display(self) -> None:
def render_overlay(
self,
frame_content: Any,
- width: int | None = None,
- height: int | None = None,
+ width: Optional[int] = None,
+ height: Optional[int] = None,
) -> Any:
"""Render overlay on top of frame content.
@@ -173,11 +173,11 @@ class LoggingMessageOverlay(MessageOverlay):
def __init__(
self,
- console: Console | None = None,
- textual_widget: Static | None = None,
+ console: Optional[Console] = None,
+ textual_widget: Optional[Static] = None,
position: str = "bottom_right",
max_lines: int = 10, # Show last 10 log messages
- log_levels: list[str] | None = None,
+ log_levels: Optional[list[str]] = None,
) -> None:
"""Initialize logging message overlay.
@@ -191,7 +191,7 @@ def __init__(
# Initialize with clear_on_update=False to preserve messages between updates
super().__init__(console, textual_widget, position, max_lines, clear_on_update=False)
self.log_levels = log_levels # None = capture all levels
- self._log_handler: logging.Handler | None = None
+ self._log_handler: Optional[logging.Handler] = None
self._log_buffer: list[tuple[str, str]] = [] # List of (level, message) tuples
def capture_log_message(self, level: str, message: str) -> None:
diff --git a/ccbt/interface/splash/sequence_generator.py b/ccbt/interface/splash/sequence_generator.py
index 37b1293..76a62af 100644
--- a/ccbt/interface/splash/sequence_generator.py
+++ b/ccbt/interface/splash/sequence_generator.py
@@ -6,7 +6,7 @@
from __future__ import annotations
import random
-from typing import Any
+from typing import Any, Optional
from ccbt.interface.splash.animation_config import (
AnimationConfig,
@@ -63,7 +63,7 @@ def generate(
"""
sequence = AnimationSequence()
current_duration = 0.0
- current_palette: list[str] | None = None
+ current_palette: Optional[list[str]] = None
used_animations: list[str] = []
# Generate segments until we reach target duration
diff --git a/ccbt/interface/splash/splash_manager.py b/ccbt/interface/splash/splash_manager.py
index ae6b25e..674c183 100644
--- a/ccbt/interface/splash/splash_manager.py
+++ b/ccbt/interface/splash/splash_manager.py
@@ -7,7 +7,7 @@
import asyncio
import threading
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from rich.console import Console
@@ -23,9 +23,9 @@ class SplashManager:
def __init__(
self,
- console: Any | None = None,
- textual_widget: Any | None = None,
- verbosity: VerbosityManager | None = None,
+ console: Optional[Any] = None,
+ textual_widget: Optional[Any] = None,
+ verbosity: Optional[VerbosityManager] = None,
) -> None:
"""Initialize splash manager.
@@ -37,10 +37,10 @@ def __init__(
self.console = console
self.textual_widget = textual_widget
self.verbosity = verbosity or VerbosityManager(0) # NORMAL by default
- self._splash_screen: SplashScreen | None = None
- self._adapter: AnimationAdapter | None = None
+ self._splash_screen: Optional[SplashScreen] = None
+ self._adapter: Optional[AnimationAdapter] = None
self._stop_event = threading.Event() # Event to signal splash to stop
- self._running_task: asyncio.Task[None] | None = None # Track running task for cancellation
+ self._running_task: Optional[asyncio.Task[None]] = None # Track running task for cancellation
def should_show_splash(self) -> bool:
"""Check if splash screen should be shown.
@@ -59,7 +59,7 @@ def should_show_splash(self) -> bool:
def create_splash_screen(
self,
duration: float = 90.0,
- logo_text: str | None = None,
+ logo_text: Optional[str] = None,
) -> SplashScreen:
"""Create a splash screen instance.
@@ -95,7 +95,7 @@ def create_adapter(self) -> AnimationAdapter:
async def show_splash_for_task(
self,
task_name: str,
- task_duration: float | None = None,
+ task_duration: Optional[float] = None,
max_duration: float = 90.0,
show_progress: bool = True,
) -> None:
@@ -223,8 +223,8 @@ def stop_splash(self) -> None:
@staticmethod
def from_cli_context(
- ctx: dict[str, Any] | None = None,
- console: Any | None = None,
+ ctx: Optional[dict[str, Any]] = None,
+ console: Optional[Any] = None,
) -> SplashManager:
"""Create SplashManager from CLI context.
@@ -243,7 +243,7 @@ def from_cli_context(
@staticmethod
def from_verbosity_count(
verbosity_count: int = 0,
- console: Any | None = None,
+ console: Optional[Any] = None,
) -> SplashManager:
"""Create SplashManager from verbosity count.
@@ -260,10 +260,10 @@ def from_verbosity_count(
async def show_splash_if_needed(
task_name: str,
- verbosity: VerbosityManager | None = None,
- console: Any | None = None,
+ verbosity: Optional[VerbosityManager] = None,
+ console: Optional[Any] = None,
duration: float = 90.0,
-) -> SplashManager | None:
+) -> Optional[SplashManager]:
"""Show splash screen if verbosity allows.
Convenience function to show splash screen for a task.
diff --git a/ccbt/interface/splash/splash_screen.py b/ccbt/interface/splash/splash_screen.py
index ae549fa..3c1b655 100644
--- a/ccbt/interface/splash/splash_screen.py
+++ b/ccbt/interface/splash/splash_screen.py
@@ -6,7 +6,7 @@
from __future__ import annotations
import asyncio
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from rich.console import Console
@@ -59,9 +59,9 @@ class SplashScreen:
def __init__(
self,
- console: Console | None = None,
- textual_widget: Static | None = None,
- logo_text: str | None = None,
+ console: Optional[Console] = None,
+ textual_widget: Optional[Static] = None,
+ logo_text: Optional[str] = None,
duration: float = 90.0,
use_random_sequence: bool = True,
) -> None:
@@ -919,7 +919,7 @@ def _build_animation_sequence(self) -> AnimationSequence:
return sequence
- def _resolve_template(self, template_key: str | None) -> list[str] | None:
+ def _resolve_template(self, template_key: Optional[str]) -> Optional[list[str]]:
"""Return a copy of the requested color template, if available."""
if not template_key:
return None
@@ -1070,8 +1070,8 @@ def __rich__(self) -> Any:
async def run_splash_screen(
- console: Console | None = None,
- textual_widget: Static | None = None,
+ console: Optional[Console] = None,
+ textual_widget: Optional[Static] = None,
duration: float = 90.0,
) -> None:
"""Run splash screen animation.
diff --git a/ccbt/interface/splash/templates.py b/ccbt/interface/splash/templates.py
index 339cacf..145dc47 100644
--- a/ccbt/interface/splash/templates.py
+++ b/ccbt/interface/splash/templates.py
@@ -6,7 +6,7 @@
from __future__ import annotations
from dataclasses import dataclass
-from typing import Any
+from typing import Any, Optional
@dataclass
@@ -22,8 +22,8 @@ class Template:
name: str
content: str
- normalized_lines: list[str] | None = None
- metadata: dict[str, Any] | None = None
+ normalized_lines: Optional[list[str]] = None
+ metadata: Optional[dict[str, Any]] = None
def __post_init__(self) -> None:
"""Initialize template after creation."""
@@ -81,7 +81,7 @@ def normalize(self) -> list[str]:
return lines
- def validate(self) -> tuple[bool, str | None]:
+ def validate(self) -> tuple[bool, Optional[str]]:
"""Validate template content.
Returns:
@@ -147,7 +147,7 @@ def register(self, template: Template) -> None:
self._templates[template.name] = template
- def get(self, name: str) -> Template | None:
+ def get(self, name: str) -> Optional[Template]:
"""Get a template by name.
Args:
@@ -208,7 +208,7 @@ def register_template(template: Template) -> None:
_registry.register(template)
-def get_template(name: str) -> Template | None:
+def get_template(name: str) -> Optional[Template]:
"""Get a template from the global registry.
Args:
diff --git a/ccbt/interface/splash/textual_renderable.py b/ccbt/interface/splash/textual_renderable.py
index 45e2f2e..a426021 100644
--- a/ccbt/interface/splash/textual_renderable.py
+++ b/ccbt/interface/splash/textual_renderable.py
@@ -6,7 +6,7 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from rich.console import Console, RenderableType
@@ -33,7 +33,7 @@ def __init__(
"""
self.frame_content = frame_content
self.overlay_content = overlay_content
- self._cached_renderable: Any | None = None
+ self._cached_renderable: Optional[Any] = None
def update_frame(self, frame_content: Any) -> None:
"""Update the frame content without recreating structure.
@@ -129,7 +129,7 @@ def __init__(
"""
self.messages = messages
self.title = title
- self._cached_panel: Any | None = None
+ self._cached_panel: Optional[Any] = None
def update_messages(self, messages: list[str]) -> None:
"""Update messages without recreating box structure.
diff --git a/ccbt/interface/splash/transitions.py b/ccbt/interface/splash/transitions.py
index 0313df6..1109a21 100644
--- a/ccbt/interface/splash/transitions.py
+++ b/ccbt/interface/splash/transitions.py
@@ -8,7 +8,7 @@
import asyncio
import random
from abc import ABC, abstractmethod
-from typing import Any
+from typing import Any, Optional, Union
from ccbt.interface.splash.color_matching import (
generate_random_duration,
@@ -23,7 +23,7 @@ class Transition(ABC):
def __init__(
self,
- duration: float | None = None,
+ duration: Optional[float] = None,
min_duration: float = 1.5,
max_duration: float = 2.5,
) -> None:
@@ -70,12 +70,12 @@ class ColorTransition(Transition):
def __init__(
self,
- logo_color_start: str | list[str],
- logo_color_finish: str | list[str],
- bg_color_start: str | list[str] | None = None,
- bg_color_finish: str | list[str] | None = None,
- bg_config: BackgroundConfig | None = None,
- duration: float | None = None,
+ logo_color_start: Union[str, list[str]],
+ logo_color_finish: Union[str, list[str]],
+ bg_color_start: Optional[Union[str, list[str]]] = None,
+ bg_color_finish: Optional[Union[str, list[str]]] = None,
+ bg_config: Optional[BackgroundConfig] = None,
+ duration: Optional[float] = None,
min_duration: float = 1.5,
max_duration: float = 2.5,
ensure_smooth: bool = True,
@@ -128,7 +128,7 @@ async def execute(
self,
controller: Any,
text: str,
- update_callback: Any | None = None,
+ update_callback: Optional[Any] = None,
) -> None:
"""Execute color transition with precise timing.
@@ -156,7 +156,7 @@ class FadeTransition(Transition):
def __init__(
self,
fade_type: str = "in", # "in", "out", "in_out"
- duration: float | None = None,
+ duration: Optional[float] = None,
min_duration: float = 1.5,
max_duration: float = 2.5,
) -> None:
@@ -205,7 +205,7 @@ def __init__(
self,
direction: str = "left",
slide_type: str = "in",
- duration: float | None = None,
+ duration: Optional[float] = None,
min_duration: float = 1.5,
max_duration: float = 2.5,
) -> None:
@@ -267,7 +267,7 @@ def __init__(
text2: str,
color1: str = "white",
color2: str = "white",
- duration: float | None = None,
+ duration: Optional[float] = None,
min_duration: float = 1.5,
max_duration: float = 2.5,
) -> None:
diff --git a/ccbt/interface/terminal_dashboard.py b/ccbt/interface/terminal_dashboard.py
index 0bbe51e..9f16294 100644
--- a/ccbt/interface/terminal_dashboard.py
+++ b/ccbt/interface/terminal_dashboard.py
@@ -11,7 +11,7 @@
import logging
import time
from pathlib import Path
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
if (
TYPE_CHECKING
@@ -461,7 +461,7 @@ class TerminalDashboard(App): # type: ignore[misc]
"""
def __init__(
- self, session: Any, refresh_interval: float = 1.0, splash_manager: Any | None = None
+ self, session: Any, refresh_interval: float = 1.0, splash_manager: Optional[Any] = None
): # pragma: no cover
"""Initialize terminal dashboard.
@@ -501,8 +501,8 @@ def __init__(
self.alert_manager = get_alert_manager()
self.metrics_collector = get_metrics_collector()
- self._poll_task: asyncio.Task | None = None
- self._filter_input: Input | None = None
+ self._poll_task: Optional[asyncio.Task] = None
+ self._filter_input: Optional[Input] = None
self._filter_text: str = ""
self._last_status: dict[str, dict[str, Any]] = {}
self._compact = False
@@ -519,19 +519,19 @@ def __init__(
else:
logger.warning("TerminalDashboard: Data provider does not have IPC client!")
# Reactive update manager for WebSocket events
- self._reactive_manager: Any | None = None
+ self._reactive_manager: Optional[Any] = None
# Widget references will be set in on_mount after compose
- self.overview: Overview | None = None
- self.overview_footer: Overview | None = None
- self.speeds: SpeedSparklines | None = None
- self.torrents: TorrentsTable | None = None
- self.peers: PeersTable | None = None
- self.details: Static | None = None
- self.statusbar: Static | None = None
- self.alerts: Static | None = None
- self.logs: RichLog | None = None
+ self.overview: Optional[Overview] = None
+ self.overview_footer: Optional[Overview] = None
+ self.speeds: Optional[SpeedSparklines] = None
+ self.torrents: Optional[TorrentsTable] = None
+ self.peers: Optional[PeersTable] = None
+ self.details: Optional[Static] = None
+ self.statusbar: Optional[Static] = None
+ self.alerts: Optional[Static] = None
+ self.logs: Optional[RichLog] = None
# New tabbed interface widgets
- self.graphs_section: GraphsSectionContainer | None = None
+ self.graphs_section: Optional[GraphsSectionContainer] = None
def _format_bindings_display(self) -> Any: # pragma: no cover
"""Format all key bindings grouped by category for display."""
@@ -1014,7 +1014,7 @@ def on_torrent_completed(data: dict[str, Any]) -> None:
self._reactive_manager.subscribe_to_adapter(adapter)
# Helper to refresh per-torrent tab when a specific info hash is impacted
- async def _refresh_per_torrent_tab(info_hash: str | None) -> None:
+ async def _refresh_per_torrent_tab(info_hash: Optional[str]) -> None:
if not info_hash:
return
try:
@@ -4008,7 +4008,7 @@ async def _scan_for_daemon_port(
api_key: str,
ports_to_try: list[int],
timeout_per_port: float = 1.0,
-) -> tuple[int | None, Any | None]:
+) -> tuple[Optional[int], Optional[Any]]:
"""Scan multiple ports to find where the daemon is actually listening.
Args:
@@ -4053,8 +4053,8 @@ async def _scan_for_daemon_port(
def _show_startup_splash(
no_splash: bool = False,
verbosity_count: int = 0,
- console: Any | None = None,
-) -> tuple[Any | None, Any | None]:
+ console: Optional[Any] = None,
+) -> tuple[Optional[Any], Optional[Any]]:
"""Show splash screen for terminal interface startup.
Args:
@@ -4136,8 +4136,8 @@ def run_splash() -> None:
async def _ensure_daemon_running(
- splash_manager: Any | None = None,
-) -> tuple[bool, Any | None]:
+ splash_manager: Optional[Any] = None,
+) -> tuple[bool, Optional[Any]]:
"""Ensure daemon is running, start if needed.
CRITICAL: This function ONLY uses IPC client health checks (is_daemon_running)
@@ -4146,7 +4146,7 @@ async def _ensure_daemon_running(
connections, not just when the process is running.
Returns:
- Tuple of (success: bool, ipc_client: IPCClient | None)
+ Tuple of (success: bool, ipc_client: Optional[IPCClient])
If daemon is running or successfully started, returns (True, IPCClient)
If daemon start fails, returns (False, None)
"""
@@ -4495,9 +4495,9 @@ async def _ensure_daemon_running(
def run_dashboard( # pragma: no cover
session: Any, # DaemonInterfaceAdapter required
- refresh: float | None = None,
+ refresh: Optional[float] = None,
dev_mode: bool = False, # Enable Textual development mode
- splash_manager: Any | None = None, # Splash manager to end when dashboard is rendered
+ splash_manager: Optional[Any] = None, # Splash manager to end when dashboard is rendered
) -> None:
"""Run the Textual dashboard App for the provided daemon session.
@@ -4579,7 +4579,7 @@ def main() -> (
return 1 # pragma: no cover - Same context
# CRITICAL: Dashboard ONLY works with daemon - no local sessions allowed
- session: DaemonInterfaceAdapter | None = None
+ session: Optional[DaemonInterfaceAdapter] = None
if args.no_daemon:
# User requested --no-daemon but dashboard requires daemon
diff --git a/ccbt/interface/terminal_dashboard_dev.py b/ccbt/interface/terminal_dashboard_dev.py
index 37d2cfa..f468279 100644
--- a/ccbt/interface/terminal_dashboard_dev.py
+++ b/ccbt/interface/terminal_dashboard_dev.py
@@ -11,7 +11,7 @@
import asyncio
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from ccbt.interface.daemon_session_adapter import DaemonInterfaceAdapter
@@ -125,10 +125,10 @@ def get_app() -> TerminalDashboard:
# Use a thread pool executor to run the async function in isolation
# This prevents event loop conflicts with Textual
- result_container: list[tuple[bool, Any | None]] = []
+ result_container: list[tuple[bool, Optional[Any]]] = []
exception_container: list[Exception] = []
- async def _ensure_and_close() -> tuple[bool, Any | None]:
+ async def _ensure_and_close() -> tuple[bool, Optional[Any]]:
"""Ensure daemon is running and close the IPCClient before returning.
This wrapper ensures the IPCClient is closed in the same event loop
@@ -321,7 +321,7 @@ def run_in_thread():
# CRITICAL: Textual's run command may try to call `app()` as a function
# So we need to make `app` a callable that returns the app instance
# We use lazy initialization to avoid creating the app twice
-_app_instance: TerminalDashboard | None = None
+_app_instance: Optional[TerminalDashboard] = None
_daemon_ready: bool = False
def _get_app_instance() -> TerminalDashboard:
diff --git a/ccbt/interface/widgets/button_selector.py b/ccbt/interface/widgets/button_selector.py
index 906b9d1..7a0ffb0 100644
--- a/ccbt/interface/widgets/button_selector.py
+++ b/ccbt/interface/widgets/button_selector.py
@@ -1,6 +1,8 @@
"""Button-based selector widget to replace Tabs for better visibility control."""
-from typing import TYPE_CHECKING, Any
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Optional
from textual.containers import Container
from textual.message import Message
@@ -42,7 +44,7 @@ class ButtonSelector(Container): # type: ignore[misc]
def __init__(
self,
options: list[tuple[str, str]], # [(id, label), ...]
- initial_selection: str | None = None,
+ initial_selection: Optional[str] = None,
*args: Any,
**kwargs: Any,
) -> None:
@@ -55,7 +57,7 @@ def __init__(
super().__init__(*args, **kwargs)
self._options = options
self._buttons: dict[str, Button] = {}
- self._active_id: str | None = initial_selection or (options[0][0] if options else None)
+ self._active_id: Optional[str] = initial_selection or (options[0][0] if options else None)
def compose(self) -> "ComposeResult": # pragma: no cover
"""Compose the button selector."""
@@ -102,7 +104,7 @@ def _set_active(self, option_id: str) -> None: # pragma: no cover
self._active_id = option_id
@property
- def active(self) -> str | None: # pragma: no cover
+ def active(self) -> Optional[str]: # pragma: no cover
"""Get active selection ID."""
return self._active_id
diff --git a/ccbt/interface/widgets/config_wrapper.py b/ccbt/interface/widgets/config_wrapper.py
index 68c3de8..094e04a 100644
--- a/ccbt/interface/widgets/config_wrapper.py
+++ b/ccbt/interface/widgets/config_wrapper.py
@@ -7,7 +7,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
if TYPE_CHECKING:
from ccbt.interface.commands.executor import CommandExecutor
@@ -155,7 +155,7 @@ def __init__(
config_type: str,
data_provider: DataProvider,
command_executor: CommandExecutor,
- info_hash: str | None = None,
+ info_hash: Optional[str] = None,
*args: Any,
**kwargs: Any,
) -> None:
@@ -172,11 +172,11 @@ def __init__(
self._data_provider = data_provider
self._command_executor = command_executor
self._info_hash = info_hash
- self._content_widget: Static | None = None
- self._sections_table: DataTable | None = None
- self._selected_section: str | None = None
+ self._content_widget: Optional[Static] = None
+ self._sections_table: Optional[DataTable] = None
+ self._selected_section: Optional[str] = None
self._editors: dict[str, ConfigValueEditor] = {}
- self._editors_container: Container | None = None
+ self._editors_container: Optional[Container] = None
self._original_values: dict[str, Any] = {}
self._editing_mode = False
self._changed_values: set[str] = set()
diff --git a/ccbt/interface/widgets/core_widgets.py b/ccbt/interface/widgets/core_widgets.py
index ecc4214..7f24ed1 100644
--- a/ccbt/interface/widgets/core_widgets.py
+++ b/ccbt/interface/widgets/core_widgets.py
@@ -4,7 +4,7 @@
import contextlib
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.i18n import _
@@ -148,7 +148,7 @@ def update_from_status(
key=ih,
)
- def get_selected_info_hash(self) -> str | None: # pragma: no cover
+ def get_selected_info_hash(self) -> Optional[str]: # pragma: no cover
"""Get the info hash of the currently selected torrent."""
if hasattr(self, "cursor_row_key"):
with contextlib.suppress(Exception):
@@ -439,7 +439,7 @@ class GlobalTorrentMetricsPanel(Static): # type: ignore[misc]
def update_metrics(
self,
- stats: dict[str, Any] | None,
+ stats: Optional[dict[str, Any]],
swarm_samples: list[dict[str, Any]] | None = None,
) -> None: # pragma: no cover
"""Render aggregated torrent metrics."""
@@ -648,8 +648,8 @@ def __init__(
"""
super().__init__(*args, **kwargs)
self._data_provider = data_provider
- self._graph_selector: Any | None = None # ButtonSelector
- self._active_graph_tab_id: str | None = None
+ self._graph_selector: Optional[Any] = None # ButtonSelector
+ self._active_graph_tab_id: Optional[str] = None
self._registered_widgets: list[Any] = [] # Track registered widgets for cleanup
def compose(self) -> Any: # pragma: no cover
diff --git a/ccbt/interface/widgets/dht_health_widget.py b/ccbt/interface/widgets/dht_health_widget.py
index 81f0e88..dd0ab7b 100644
--- a/ccbt/interface/widgets/dht_health_widget.py
+++ b/ccbt/interface/widgets/dht_health_widget.py
@@ -4,7 +4,7 @@
import asyncio
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from textual.app import ComposeResult
@@ -50,14 +50,14 @@ class DHTHealthWidget(Static): # type: ignore[misc]
def __init__(
self,
- data_provider: Any | None,
+ data_provider: Optional[Any],
refresh_interval: float = 2.5,
**kwargs: Any,
) -> None:
super().__init__(**kwargs)
self._data_provider = data_provider
self._refresh_interval = refresh_interval
- self._update_task: Any | None = None
+ self._update_task: Optional[Any] = None
def compose(self) -> ComposeResult: # pragma: no cover
"""Compose widget layout."""
diff --git a/ccbt/interface/widgets/file_browser.py b/ccbt/interface/widgets/file_browser.py
index d08e349..7ca900c 100644
--- a/ccbt/interface/widgets/file_browser.py
+++ b/ccbt/interface/widgets/file_browser.py
@@ -4,7 +4,7 @@
import logging
from pathlib import Path
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.i18n import _
@@ -124,8 +124,8 @@ def __init__(
self._data_provider = data_provider
self._command_executor = command_executor
self._current_path = Path.home()
- self._file_table: DataTable | None = None
- self._path_input: Input | None = None
+ self._file_table: Optional[DataTable] = None
+ self._path_input: Optional[Input] = None
self._selected_files: list[Path] = []
def compose(self) -> Any: # pragma: no cover
diff --git a/ccbt/interface/widgets/global_kpis_panel.py b/ccbt/interface/widgets/global_kpis_panel.py
index 3ff5500..61245c2 100644
--- a/ccbt/interface/widgets/global_kpis_panel.py
+++ b/ccbt/interface/widgets/global_kpis_panel.py
@@ -4,7 +4,7 @@
import asyncio
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from textual.app import ComposeResult
@@ -43,14 +43,14 @@ class GlobalKPIsPanel(Static): # type: ignore[misc]
def __init__(
self,
- data_provider: Any | None,
+ data_provider: Optional[Any],
refresh_interval: float = 2.0,
**kwargs: Any,
) -> None:
super().__init__(**kwargs)
self._data_provider = data_provider
self._refresh_interval = refresh_interval
- self._update_task: Any | None = None
+ self._update_task: Optional[Any] = None
def compose(self) -> ComposeResult: # pragma: no cover
"""Compose widget layout."""
diff --git a/ccbt/interface/widgets/graph_widget.py b/ccbt/interface/widgets/graph_widget.py
index cc7a70d..a8ddf4e 100644
--- a/ccbt/interface/widgets/graph_widget.py
+++ b/ccbt/interface/widgets/graph_widget.py
@@ -8,7 +8,7 @@
import asyncio
import logging
import math
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
logger = logging.getLogger(__name__)
@@ -101,7 +101,7 @@ class BaseGraphWidget(Container): # type: ignore[misc]
def __init__(
self,
title: str,
- data_provider: DataProvider | None = None,
+ data_provider: Optional[DataProvider] = None,
max_samples: int = 120,
*args: Any,
**kwargs: Any,
@@ -118,7 +118,7 @@ def __init__(
self._data_provider = data_provider
self._max_samples = max_samples
self._data_history: list[float] = []
- self._sparkline: Sparkline | None = None
+ self._sparkline: Optional[Sparkline] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the graph widget."""
@@ -300,7 +300,7 @@ class UploadDownloadGraphWidget(BaseGraphWidget): # type: ignore[misc]
def __init__(
self,
- data_provider: DataProvider | None = None,
+ data_provider: Optional[DataProvider] = None,
*args: Any,
**kwargs: Any,
) -> None:
@@ -309,13 +309,13 @@ def __init__(
self._download_history: list[float] = []
self._upload_history: list[float] = []
self._timestamps: list[float] = [] # Store timestamps for time-based display
- self._download_sparkline: Sparkline | None = None
- self._upload_sparkline: Sparkline | None = None
- self._update_task: Any | None = None
+ self._download_sparkline: Optional[Sparkline] = None
+ self._upload_sparkline: Optional[Sparkline] = None
+ self._update_task: Optional[Any] = None
# Event timeline tracking for annotations
self._event_timeline: list[dict[str, Any]] = [] # List of {timestamp, type, label, info_hash}
self._max_events = 50 # Keep last 50 events
- self._event_annotations_widget: Static | None = None
+ self._event_annotations_widget: Optional[Static] = None
DEFAULT_CSS = """
UploadDownloadGraphWidget {
@@ -704,7 +704,7 @@ def _update_display(self) -> None: # pragma: no cover
# Update event annotations
self._update_event_annotations()
- def _add_event_annotation(self, timestamp: float, event_type: str, label: str, info_hash: str | None = None) -> None:
+ def _add_event_annotation(self, timestamp: float, event_type: str, label: str, info_hash: Optional[str] = None) -> None:
"""Add an event annotation to the timeline.
Args:
@@ -795,17 +795,17 @@ class PieceHealthPictogram(Container): # type: ignore[misc]
def __init__(
self,
info_hash_hex: str,
- data_provider: DataProvider | None = None,
+ data_provider: Optional[DataProvider] = None,
*args: Any,
**kwargs: Any,
) -> None:
super().__init__(*args, **kwargs)
self._info_hash = info_hash_hex
self._data_provider = data_provider
- self._stats: Static | None = None
- self._content: Static | None = None
- self._legend: Static | None = None
- self._update_task: Any | None = None
+ self._stats: Optional[Static] = None
+ self._content: Optional[Static] = None
+ self._legend: Optional[Static] = None
+ self._update_task: Optional[Any] = None
self._row_width = 16
def compose(self) -> Any: # pragma: no cover
@@ -1008,7 +1008,7 @@ class DiskGraphWidget(BaseGraphWidget): # type: ignore[misc]
def __init__(
self,
- data_provider: DataProvider | None = None,
+ data_provider: Optional[DataProvider] = None,
*args: Any,
**kwargs: Any,
) -> None:
@@ -1017,10 +1017,10 @@ def __init__(
self._read_history: list[float] = []
self._write_history: list[float] = []
self._cache_hit_history: list[float] = []
- self._read_sparkline: Sparkline | None = None
- self._write_sparkline: Sparkline | None = None
- self._cache_sparkline: Sparkline | None = None
- self._update_task: Any | None = None
+ self._read_sparkline: Optional[Sparkline] = None
+ self._write_sparkline: Optional[Sparkline] = None
+ self._cache_sparkline: Optional[Sparkline] = None
+ self._update_task: Optional[Any] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the disk graph widget."""
@@ -1246,7 +1246,7 @@ class NetworkGraphWidget(BaseGraphWidget): # type: ignore[misc]
def __init__(
self,
- data_provider: DataProvider | None = None,
+ data_provider: Optional[DataProvider] = None,
*args: Any,
**kwargs: Any,
) -> None:
@@ -1254,9 +1254,9 @@ def __init__(
super().__init__("Network Timing", data_provider, *args, **kwargs)
self._utp_delay_history: list[float] = []
self._overhead_history: list[float] = []
- self._utp_sparkline: Sparkline | None = None
- self._overhead_sparkline: Sparkline | None = None
- self._update_task: Any | None = None
+ self._utp_sparkline: Optional[Sparkline] = None
+ self._overhead_sparkline: Optional[Sparkline] = None
+ self._update_task: Optional[Any] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the network graph widget."""
@@ -1415,14 +1415,14 @@ class DownloadGraphWidget(BaseGraphWidget): # type: ignore[misc]
def __init__(
self,
- data_provider: DataProvider | None = None,
+ data_provider: Optional[DataProvider] = None,
*args: Any,
**kwargs: Any,
) -> None:
"""Initialize download graph widget."""
super().__init__("Download Speed", data_provider, *args, **kwargs)
self._download_history: list[float] = []
- self._update_task: Any | None = None
+ self._update_task: Optional[Any] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the download graph widget."""
@@ -1550,14 +1550,14 @@ class UploadGraphWidget(BaseGraphWidget): # type: ignore[misc]
def __init__(
self,
- data_provider: DataProvider | None = None,
+ data_provider: Optional[DataProvider] = None,
*args: Any,
**kwargs: Any,
) -> None:
"""Initialize upload graph widget."""
super().__init__("Upload Speed", data_provider, *args, **kwargs)
self._upload_history: list[float] = []
- self._update_task: Any | None = None
+ self._update_task: Optional[Any] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the upload graph widget."""
@@ -1739,7 +1739,7 @@ class PerTorrentGraphWidget(Container): # type: ignore[misc]
def __init__(
self,
info_hash_hex: str,
- data_provider: DataProvider | None = None,
+ data_provider: Optional[DataProvider] = None,
*args: Any,
**kwargs: Any,
) -> None:
@@ -1755,11 +1755,11 @@ def __init__(
self._download_history: list[float] = []
self._upload_history: list[float] = []
self._piece_rate_history: list[float] = []
- self._download_sparkline: Sparkline | None = None
- self._upload_sparkline: Sparkline | None = None
- self._piece_rate_sparkline: Sparkline | None = None
- self._peer_table: DataTable | None = None
- self._update_task: Any | None = None
+ self._download_sparkline: Optional[Sparkline] = None
+ self._upload_sparkline: Optional[Sparkline] = None
+ self._piece_rate_sparkline: Optional[Sparkline] = None
+ self._peer_table: Optional[DataTable] = None
+ self._update_task: Optional[Any] = None
self._max_samples = 120
def compose(self) -> Any: # pragma: no cover
@@ -2120,14 +2120,14 @@ class PerformanceGraphWidget(Container): # type: ignore[misc]
def __init__(
self,
- data_provider: DataProvider | None = None,
+ data_provider: Optional[DataProvider] = None,
*args: Any,
**kwargs: Any,
) -> None:
"""Initialize performance graph widget (upload/download only)."""
super().__init__(*args, **kwargs)
self._data_provider = data_provider
- self._upload_download_widget: UploadDownloadGraphWidget | None = None
+ self._upload_download_widget: Optional[UploadDownloadGraphWidget] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the performance graph widget.
@@ -2390,7 +2390,7 @@ class SystemResourcesGraphWidget(Container): # type: ignore[misc]
def __init__(
self,
- data_provider: DataProvider | None = None,
+ data_provider: Optional[DataProvider] = None,
*args: Any,
**kwargs: Any,
) -> None:
@@ -2400,10 +2400,10 @@ def __init__(
self._cpu_history: list[float] = []
self._memory_history: list[float] = []
self._disk_history: list[float] = []
- self._cpu_sparkline: Sparkline | None = None
- self._memory_sparkline: Sparkline | None = None
- self._disk_sparkline: Sparkline | None = None
- self._update_task: Any | None = None
+ self._cpu_sparkline: Optional[Sparkline] = None
+ self._memory_sparkline: Optional[Sparkline] = None
+ self._disk_sparkline: Optional[Sparkline] = None
+ self._update_task: Optional[Any] = None
self._max_samples = 120
def compose(self) -> Any: # pragma: no cover
@@ -2513,8 +2513,8 @@ class SwarmHealthDotPlot(Container): # type: ignore[misc]
def __init__(
self,
- data_provider: DataProvider | None = None,
- info_hash_hex: str | None = None,
+ data_provider: Optional[DataProvider] = None,
+ info_hash_hex: Optional[str] = None,
max_rows: int = 6,
*args: Any,
**kwargs: Any,
@@ -2522,9 +2522,9 @@ def __init__(
super().__init__(*args, **kwargs)
self._data_provider = data_provider
self._info_hash = info_hash_hex
- self._content: Static | None = None
- self._legend: Static | None = None
- self._update_task: Any | None = None
+ self._content: Optional[Static] = None
+ self._legend: Optional[Static] = None
+ self._update_task: Optional[Any] = None
self._max_rows = max_rows
self._dot_count = 12
self._previous_samples: dict[str, dict[str, Any]] = {} # Track previous samples for trends
@@ -2603,7 +2603,7 @@ async def _update_from_provider(self) -> None:
table.add_column("Rates", style="green", ratio=1)
strongest_sample = max(samples, key=lambda s: float(s.get("swarm_availability", 0.0)))
- rarity_percentiles: dict[str, float] | None = None
+ rarity_percentiles: Optional[dict[str, float]] = None
for sample in samples:
info_hash = sample.get("info_hash", "")
@@ -2880,15 +2880,15 @@ class PeerQualitySummaryWidget(Container): # type: ignore[misc]
def __init__(
self,
- data_provider: DataProvider | None = None,
+ data_provider: Optional[DataProvider] = None,
*args: Any,
**kwargs: Any,
) -> None:
super().__init__(*args, **kwargs)
self._data_provider = data_provider
- self._summary: Static | None = None
- self._table: DataTable | None = None
- self._update_task: Any | None = None
+ self._summary: Optional[Static] = None
+ self._table: Optional[DataTable] = None
+ self._update_task: Optional[Any] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the peer quality widget."""
diff --git a/ccbt/interface/widgets/language_selector.py b/ccbt/interface/widgets/language_selector.py
index cfd9ac4..1ea7419 100644
--- a/ccbt/interface/widgets/language_selector.py
+++ b/ccbt/interface/widgets/language_selector.py
@@ -7,7 +7,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from ccbt.interface.commands.executor import CommandExecutor
@@ -125,8 +125,8 @@ def __init__(
super().__init__(*args, **kwargs)
self._data_provider = data_provider
self._command_executor = command_executor
- self._select_widget: Select | None = None
- self._info_widget: Static | None = None
+ self._select_widget: Optional[Select] = None
+ self._info_widget: Optional[Static] = None
self._current_locale = get_locale()
def compose(self) -> Any: # pragma: no cover
diff --git a/ccbt/interface/widgets/monitoring_wrapper.py b/ccbt/interface/widgets/monitoring_wrapper.py
index 4527e9f..990297d 100644
--- a/ccbt/interface/widgets/monitoring_wrapper.py
+++ b/ccbt/interface/widgets/monitoring_wrapper.py
@@ -7,7 +7,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from ccbt.session.session import AsyncSessionManager
@@ -70,8 +70,8 @@ def __init__(
super().__init__(*args, **kwargs)
self._screen_type = screen_type
self._data_provider = data_provider
- self._content_widget: Static | None = None
- self._monitoring_screen: Any | None = None
+ self._content_widget: Optional[Static] = None
+ self._monitoring_screen: Optional[Any] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the monitoring wrapper."""
@@ -166,7 +166,7 @@ async def _refresh_content(self) -> None: # pragma: no cover
if self._content_widget:
self._content_widget.update(f"Error loading {self._screen_type}: {e}")
- async def _get_monitoring_content(self) -> str | None: # pragma: no cover
+ async def _get_monitoring_content(self) -> Optional[str]: # pragma: no cover
"""Get monitoring content based on screen type.
Returns:
diff --git a/ccbt/interface/widgets/peer_quality_distribution_widget.py b/ccbt/interface/widgets/peer_quality_distribution_widget.py
index 0c61379..7279eff 100644
--- a/ccbt/interface/widgets/peer_quality_distribution_widget.py
+++ b/ccbt/interface/widgets/peer_quality_distribution_widget.py
@@ -4,7 +4,7 @@
import asyncio
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from textual.app import ComposeResult
@@ -53,14 +53,14 @@ class PeerQualityDistributionWidget(Static): # type: ignore[misc]
def __init__(
self,
- data_provider: Any | None,
+ data_provider: Optional[Any],
refresh_interval: float = 3.0,
**kwargs: Any,
) -> None:
super().__init__(**kwargs)
self._data_provider = data_provider
self._refresh_interval = refresh_interval
- self._update_task: Any | None = None
+ self._update_task: Optional[Any] = None
def compose(self) -> ComposeResult: # pragma: no cover
"""Compose widget layout."""
diff --git a/ccbt/interface/widgets/piece_availability_bar.py b/ccbt/interface/widgets/piece_availability_bar.py
index d401738..50b9140 100644
--- a/ccbt/interface/widgets/piece_availability_bar.py
+++ b/ccbt/interface/widgets/piece_availability_bar.py
@@ -7,7 +7,7 @@
import contextlib
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from textual.widgets import Static
@@ -95,14 +95,14 @@ def __init__(
super().__init__(*args, **kwargs)
self._availability: list[int] = []
self._max_peers: int = 0
- self._piece_health_data: dict[str, Any] | None = None # Full piece health data from DataProvider
+ self._piece_health_data: Optional[dict[str, Any]] = None # Full piece health data from DataProvider
self._grid_rows: int = 8 # Number of rows in multi-line grid
self._grid_cols: int = 0 # Calculated based on terminal width
def update_availability(
self,
availability: list[int],
- max_peers: int | None = None,
+ max_peers: Optional[int] = None,
) -> None:
"""Update the health bar with piece availability data.
diff --git a/ccbt/interface/widgets/piece_selection_widget.py b/ccbt/interface/widgets/piece_selection_widget.py
index fe9ee10..373616b 100644
--- a/ccbt/interface/widgets/piece_selection_widget.py
+++ b/ccbt/interface/widgets/piece_selection_widget.py
@@ -4,7 +4,7 @@
import asyncio
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from textual.app import ComposeResult
@@ -45,7 +45,7 @@ def __init__(
self,
*,
info_hash: str,
- data_provider: Any | None,
+ data_provider: Optional[Any],
refresh_interval: float = 2.5,
**kwargs: Any,
) -> None:
@@ -53,8 +53,8 @@ def __init__(
self._info_hash = info_hash
self._data_provider = data_provider
self._refresh_interval = refresh_interval
- self._update_task: Any | None = None
- self._adapter: Any | None = None
+ self._update_task: Optional[Any] = None
+ self._adapter: Optional[Any] = None
def compose(self) -> ComposeResult: # pragma: no cover
"""Render placeholder before metrics arrive."""
diff --git a/ccbt/interface/widgets/reusable_table.py b/ccbt/interface/widgets/reusable_table.py
index a1e57ab..6cfa91f 100644
--- a/ccbt/interface/widgets/reusable_table.py
+++ b/ccbt/interface/widgets/reusable_table.py
@@ -5,7 +5,7 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from textual.widgets import DataTable
@@ -79,7 +79,7 @@ def format_percentage(self, value: float, decimals: int = 1) -> str:
"""
return f"{value * 100:.{decimals}f}%"
- def get_selected_key(self) -> str | None:
+ def get_selected_key(self) -> Optional[str]:
"""Get the key of the currently selected row.
Returns:
@@ -93,7 +93,7 @@ def get_selected_key(self) -> str | None:
pass
return None
- def clear_and_populate(self, rows: list[list[Any]], keys: list[str] | None = None) -> None: # pragma: no cover
+ def clear_and_populate(self, rows: list[list[Any]], keys: Optional[list[str]] = None) -> None: # pragma: no cover
"""Clear table and populate with new rows.
Args:
diff --git a/ccbt/interface/widgets/reusable_widgets.py b/ccbt/interface/widgets/reusable_widgets.py
index 25a72a2..cf48898 100644
--- a/ccbt/interface/widgets/reusable_widgets.py
+++ b/ccbt/interface/widgets/reusable_widgets.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import contextlib
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from textual.widgets import DataTable, Sparkline, Static
@@ -109,7 +109,7 @@ def on_mount(self) -> None: # type: ignore[override] # pragma: no cover
# Will be populated by add_sparkline calls
def add_sparkline(
- self, name: str, data: list[float] | None = None
+ self, name: str, data: Optional[list[float]] = None
) -> None: # pragma: no cover
"""Add or update a sparkline.
diff --git a/ccbt/interface/widgets/swarm_timeline_widget.py b/ccbt/interface/widgets/swarm_timeline_widget.py
index 67b3467..54e6294 100644
--- a/ccbt/interface/widgets/swarm_timeline_widget.py
+++ b/ccbt/interface/widgets/swarm_timeline_widget.py
@@ -5,7 +5,7 @@
import asyncio
import logging
import time
-from typing import Any
+from typing import Any, Optional
from rich.console import Group
from rich.panel import Panel
@@ -39,8 +39,8 @@ class SwarmTimelineWidget(Static): # type: ignore[misc]
def __init__(
self,
- data_provider: Any | None,
- info_hash: str | None = None,
+ data_provider: Optional[Any],
+ info_hash: Optional[str] = None,
limit: int = 3,
history_seconds: int = 3600,
refresh_interval: float = 4.0,
@@ -52,7 +52,7 @@ def __init__(
self._limit = max(1, limit)
self._history_seconds = max(60, history_seconds)
self._refresh_interval = refresh_interval
- self._update_task: Any | None = None
+ self._update_task: Optional[Any] = None
def compose(self) -> Any: # pragma: no cover
yield Static(_("Loading swarm timeline..."), id="swarm-timeline-placeholder")
diff --git a/ccbt/interface/widgets/tabbed_interface.py b/ccbt/interface/widgets/tabbed_interface.py
index 21f4922..12b014d 100644
--- a/ccbt/interface/widgets/tabbed_interface.py
+++ b/ccbt/interface/widgets/tabbed_interface.py
@@ -6,7 +6,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.i18n import _
@@ -125,23 +125,23 @@ def __init__(
super().__init__(*args, **kwargs)
self.session = session
# Workflow pane tabs (left side)
- self._workflow_selector: Any | None = None # ButtonSelector
- self._workflow_content: Container | None = None
- self._active_workflow_tab_id: str | None = None
+ self._workflow_selector: Optional[Any] = None # ButtonSelector
+ self._workflow_content: Optional[Container] = None
+ self._active_workflow_tab_id: Optional[str] = None
# Torrent Insight pane selector (right side)
- self._torrent_insight_selector: Any | None = None # ButtonSelector
- self._torrent_insight_content: Container | None = None
- self._active_insight_tab_id: str | None = None
+ self._torrent_insight_selector: Optional[Any] = None # ButtonSelector
+ self._torrent_insight_content: Optional[Container] = None
+ self._active_insight_tab_id: Optional[str] = None
# Shared selection model for cross-pane communication
- self._selected_torrent_hash: str | None = None
+ self._selected_torrent_hash: Optional[str] = None
# Create command executor first (like CLI uses)
from ccbt.interface.commands.executor import CommandExecutor
- self._command_executor: CommandExecutor | None = CommandExecutor(session)
+ self._command_executor: Optional[CommandExecutor] = CommandExecutor(session)
# Create data provider with executor reference
from ccbt.interface.data_provider import create_data_provider
# Pass executor to data provider so it can use executor for commands
executor_for_provider = self._command_executor._executor if self._command_executor and hasattr(self._command_executor, "_executor") else None
- self._data_provider: DataProvider | None = create_data_provider(session, executor_for_provider)
+ self._data_provider: Optional[DataProvider] = create_data_provider(session, executor_for_provider)
def compose(self) -> Any: # pragma: no cover
"""Compose the main tabs container with side-by-side panes.
diff --git a/ccbt/interface/widgets/torrent_controls.py b/ccbt/interface/widgets/torrent_controls.py
index 3b12f44..c9e33eb 100644
--- a/ccbt/interface/widgets/torrent_controls.py
+++ b/ccbt/interface/widgets/torrent_controls.py
@@ -4,7 +4,7 @@
import asyncio
import logging
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any, Callable, Optional
from ccbt.i18n import _
@@ -123,9 +123,9 @@ def __init__(
self._data_provider = data_provider
self._command_executor = command_executor
self._selected_hash_callback = selected_hash_callback
- self._selected_info_hash: str | None = None
- self._torrent_selector: Select | None = None
- self._refresh_task: Any | None = None
+ self._selected_info_hash: Optional[str] = None
+ self._torrent_selector: Optional[Select] = None
+ self._refresh_task: Optional[Any] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the torrent controls."""
diff --git a/ccbt/interface/widgets/torrent_file_explorer.py b/ccbt/interface/widgets/torrent_file_explorer.py
index f64eaeb..5e17b0c 100644
--- a/ccbt/interface/widgets/torrent_file_explorer.py
+++ b/ccbt/interface/widgets/torrent_file_explorer.py
@@ -4,7 +4,7 @@
import logging
from pathlib import Path
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.i18n import _
@@ -123,12 +123,12 @@ def __init__(
self._info_hash = info_hash_hex
self._data_provider = data_provider
self._command_executor = command_executor
- self._file_table: DataTable | None = None
- self._details_table: DataTable | None = None
- self._path_display: Static | None = None
+ self._file_table: Optional[DataTable] = None
+ self._details_table: Optional[DataTable] = None
+ self._path_display: Optional[Static] = None
self._files_data: list[dict[str, Any]] = []
- self._base_path: Path | None = None
- self._selected_file: dict[str, Any] | None = None
+ self._base_path: Optional[Path] = None
+ self._selected_file: Optional[dict[str, Any]] = None
self._expanded_dirs: set[str] = set()
def compose(self) -> Any: # pragma: no cover
diff --git a/ccbt/interface/widgets/torrent_selector.py b/ccbt/interface/widgets/torrent_selector.py
index 42d00bf..7bd4740 100644
--- a/ccbt/interface/widgets/torrent_selector.py
+++ b/ccbt/interface/widgets/torrent_selector.py
@@ -6,7 +6,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from ccbt.interface.data_provider import DataProvider
@@ -76,9 +76,9 @@ def __init__(
"""
super().__init__(*args, **kwargs)
self._data_provider = data_provider
- self._selected_info_hash: str | None = None
+ self._selected_info_hash: Optional[str] = None
self._torrent_options: list[tuple[str, str]] = [] # (display_name, info_hash)
- self._select_widget: Select | None = None
+ self._select_widget: Optional[Select] = None
def compose(self) -> Any: # pragma: no cover
"""Compose the torrent selector."""
@@ -169,7 +169,7 @@ def on_select_changed(self, event: Any) -> None: # pragma: no cover
event_value = event.value
logger.debug("TorrentSelector: Select.Changed event.value = %r (type: %s)", event_value, type(event_value).__name__)
- info_hash: str | None = None
+ info_hash: Optional[str] = None
# Handle different value formats from Textual Select
if isinstance(event_value, tuple) and len(event_value) == 2:
@@ -214,7 +214,7 @@ def on_select_changed(self, event: Any) -> None: # pragma: no cover
logger.warning("TorrentSelector: Could not extract info_hash from event.value = %r", event_value)
- def get_selected_info_hash(self) -> str | None:
+ def get_selected_info_hash(self) -> Optional[str]:
"""Get the currently selected torrent info hash.
Returns:
diff --git a/ccbt/ml/adaptive_limiter.py b/ccbt/ml/adaptive_limiter.py
index d364048..83fd6a6 100644
--- a/ccbt/ml/adaptive_limiter.py
+++ b/ccbt/ml/adaptive_limiter.py
@@ -16,7 +16,7 @@
from collections import defaultdict
from dataclasses import dataclass
from enum import Enum
-from typing import Any
+from typing import Any, Optional
from ccbt.utils.events import Event, EventType, emit_event
@@ -387,16 +387,16 @@ def get_rate_limit(
self,
peer_id: str,
limiter_type: LimiterType,
- ) -> RateLimit | None:
+ ) -> Optional[RateLimit]:
"""Get rate limit for a peer."""
limiter_key = f"{peer_id}_{limiter_type.value}"
return self.rate_limits.get(limiter_key)
- def get_bandwidth_estimate(self, peer_id: str) -> BandwidthEstimate | None:
+ def get_bandwidth_estimate(self, peer_id: str) -> Optional[BandwidthEstimate]:
"""Get bandwidth estimate for a peer."""
return self.bandwidth_estimates.get(peer_id)
- def get_congestion_state(self, peer_id: str) -> CongestionState | None:
+ def get_congestion_state(self, peer_id: str) -> Optional[CongestionState]:
"""Get congestion state for a peer."""
return self.congestion_states.get(peer_id)
diff --git a/ccbt/ml/peer_selector.py b/ccbt/ml/peer_selector.py
index 7721670..6ee23ff 100644
--- a/ccbt/ml/peer_selector.py
+++ b/ccbt/ml/peer_selector.py
@@ -16,7 +16,7 @@
from collections import defaultdict
from dataclasses import dataclass
from enum import Enum
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.utils.events import Event, EventType, emit_event
@@ -266,7 +266,7 @@ async def get_best_peers(
# Return top N peers
return [peer for peer, _ in ranked_peers[:count]]
- def get_peer_features(self, peer_id: str) -> PeerFeatures | None:
+ def get_peer_features(self, peer_id: str) -> Optional[PeerFeatures]:
"""Get features for a specific peer."""
return self.peer_features.get(peer_id)
diff --git a/ccbt/ml/piece_predictor.py b/ccbt/ml/piece_predictor.py
index 966df77..290fe22 100644
--- a/ccbt/ml/piece_predictor.py
+++ b/ccbt/ml/piece_predictor.py
@@ -16,7 +16,7 @@
from collections import defaultdict
from dataclasses import dataclass
from enum import Enum
-from typing import Any
+from typing import Any, Optional
from ccbt.utils.events import Event, EventType, emit_event
@@ -333,7 +333,7 @@ async def analyze_download_patterns(self) -> dict[str, Any]:
return pattern_analysis
- def get_piece_info(self, piece_index: int) -> PieceInfo | None:
+ def get_piece_info(self, piece_index: int) -> Optional[PieceInfo]:
"""Get piece information."""
return self.piece_info.get(piece_index)
@@ -341,7 +341,7 @@ def get_all_piece_info(self) -> dict[int, PieceInfo]:
"""Get all piece information."""
return self.piece_info.copy()
- def get_download_pattern(self, piece_index: int) -> DownloadPattern | None:
+ def get_download_pattern(self, piece_index: int) -> Optional[DownloadPattern]:
"""Get download pattern for a piece."""
return self.download_patterns.get(piece_index)
diff --git a/ccbt/models.py b/ccbt/models.py
index 9ef8909..cb4b0d8 100644
--- a/ccbt/models.py
+++ b/ccbt/models.py
@@ -9,7 +9,7 @@
import time
from enum import Enum
-from typing import Any
+from typing import Any, Optional
from pydantic import BaseModel, Field, field_validator, model_validator
@@ -125,12 +125,12 @@ class PeerInfo(BaseModel):
ip: str = Field(..., description="Peer IP address")
port: int = Field(..., ge=1, le=65535, description="Peer port number")
- peer_id: bytes | None = Field(None, description="Peer ID")
- peer_source: str | None = Field(
+ peer_id: Optional[bytes] = Field(None, description="Peer ID")
+ peer_source: Optional[str] = Field(
default="tracker",
description="Source of peer discovery (tracker/dht/pex/lsd/manual)",
)
- ssl_capable: bool | None = Field(
+ ssl_capable: Optional[bool] = Field(
None,
description="Whether peer supports SSL/TLS (None = unknown, discovered during extension handshake)",
)
@@ -171,11 +171,11 @@ class TrackerResponse(BaseModel):
interval: int = Field(..., ge=0, description="Announce interval in seconds")
peers: list[PeerInfo] = Field(default_factory=list, description="List of peers")
- complete: int | None = Field(None, ge=0, description="Number of seeders")
- incomplete: int | None = Field(None, ge=0, description="Number of leechers")
- download_url: str | None = Field(None, description="Download URL")
- tracker_id: str | None = Field(None, description="Tracker ID")
- warning_message: str | None = Field(None, description="Warning message")
+ complete: Optional[int] = Field(None, ge=0, description="Number of seeders")
+ incomplete: Optional[int] = Field(None, ge=0, description="Number of leechers")
+ download_url: Optional[str] = Field(None, description="Download URL")
+ tracker_id: Optional[str] = Field(None, description="Tracker ID")
+ warning_message: Optional[str] = Field(None, description="Warning message")
class PieceInfo(BaseModel):
@@ -206,19 +206,19 @@ class FileInfo(BaseModel):
name: str = Field(..., description="File name")
length: int = Field(..., ge=0, description="File length in bytes")
- path: list[str] | None = Field(None, description="File path components")
- full_path: str | None = Field(None, description="Full file path")
+ path: Optional[list[str]] = Field(None, description="File path components")
+ full_path: Optional[str] = Field(None, description="Full file path")
# BEP 47: Padding Files and Attributes
- attributes: str | None = Field(
+ attributes: Optional[str] = Field(
None,
description="File attributes string from BEP 47 (e.g., 'p', 'x', 'h', 'l')",
)
- symlink_path: str | None = Field(
+ symlink_path: Optional[str] = Field(
None,
description="Symlink target path (required when attr='l')",
)
- file_sha1: bytes | None = Field(
+ file_sha1: Optional[bytes] = Field(
None,
description="SHA-1 hash of file contents (optional BEP 47 sha1 field, 20 bytes)",
)
@@ -245,7 +245,7 @@ def is_hidden(self) -> bool:
@field_validator("symlink_path")
@classmethod
- def validate_symlink_path(cls, v: str | None, _info: Any) -> str | None:
+ def validate_symlink_path(cls, v: Optional[str], _info: Any) -> Optional[str]:
"""Validate symlink_path is provided when attr='l'."""
# Note: This validator runs before model_validator, so we can't check attributes here
# The model_validator below handles the cross-field validation
@@ -253,7 +253,7 @@ def validate_symlink_path(cls, v: str | None, _info: Any) -> str | None:
@field_validator("file_sha1")
@classmethod
- def validate_file_sha1(cls, v: bytes | None, _info: Any) -> bytes | None:
+ def validate_file_sha1(cls, v: Optional[bytes], _info: Any) -> Optional[bytes]:
"""Validate file_sha1 is 20 bytes (SHA-1 length) if provided."""
if v is not None and len(v) != 20:
msg = f"file_sha1 must be 20 bytes (SHA-1), got {len(v)} bytes"
@@ -276,7 +276,7 @@ class XetChunkInfo(BaseModel):
..., min_length=32, max_length=32, description="BLAKE3-256 hash of chunk"
)
size: int = Field(..., ge=8192, le=131072, description="Chunk size in bytes")
- storage_path: str | None = Field(None, description="Local storage path")
+ storage_path: Optional[str] = Field(None, description="Local storage path")
ref_count: int = Field(default=1, ge=1, description="Reference count")
created_at: float = Field(
default_factory=time.time, description="Creation timestamp"
@@ -344,10 +344,10 @@ class TonicFileInfo(BaseModel):
git_refs: list[str] = Field(
default_factory=list, description="Git commit hashes for version tracking"
)
- source_peers: list[str] | None = Field(
+ source_peers: Optional[list[str]] = Field(
None, description="Designated source peer IDs (for designated mode)"
)
- allowlist_hash: bytes | None = Field(
+ allowlist_hash: Optional[bytes] = Field(
None,
min_length=32,
max_length=32,
@@ -357,11 +357,11 @@ class TonicFileInfo(BaseModel):
default_factory=time.time, description="Creation timestamp"
)
version: int = Field(default=1, description="Tonic file format version")
- announce: str | None = Field(None, description="Primary tracker announce URL")
- announce_list: list[list[str]] | None = Field(
+ announce: Optional[str] = Field(None, description="Primary tracker announce URL")
+ announce_list: Optional[list[list[str]]] = Field(
None, description="List of tracker tiers"
)
- comment: str | None = Field(None, description="Optional comment")
+ comment: Optional[str] = Field(None, description="Optional comment")
xet_metadata: XetTorrentMetadata = Field(
..., description="XET metadata with chunk hashes and file info"
)
@@ -373,17 +373,19 @@ class TonicLinkInfo(BaseModel):
info_hash: bytes = Field(
..., min_length=32, max_length=32, description="32-byte SHA-256 info hash"
)
- display_name: str | None = Field(None, description="Display name")
- trackers: list[str] | None = Field(None, description="List of tracker URLs")
- git_refs: list[str] | None = Field(
+ display_name: Optional[str] = Field(None, description="Display name")
+ trackers: Optional[list[str]] = Field(None, description="List of tracker URLs")
+ git_refs: Optional[list[str]] = Field(
None, description="List of git commit hashes/refs"
)
- sync_mode: str | None = Field(
+ sync_mode: Optional[str] = Field(
None,
description="Synchronization mode (designated/best_effort/broadcast/consensus)",
)
- source_peers: list[str] | None = Field(None, description="List of source peer IDs")
- allowlist_hash: bytes | None = Field(
+ source_peers: Optional[list[str]] = Field(
+ None, description="List of source peer IDs"
+ )
+ allowlist_hash: Optional[bytes] = Field(
None,
min_length=32,
max_length=32,
@@ -397,10 +399,10 @@ class XetSyncStatus(BaseModel):
folder_path: str = Field(..., description="Path to synced folder")
sync_mode: str = Field(..., description="Current synchronization mode")
is_syncing: bool = Field(default=False, description="Whether sync is in progress")
- last_sync_time: float | None = Field(
+ last_sync_time: Optional[float] = Field(
None, description="Timestamp of last successful sync"
)
- current_git_ref: str | None = Field(None, description="Current git commit hash")
+ current_git_ref: Optional[str] = Field(None, description="Current git commit hash")
pending_changes: int = Field(
default=0, description="Number of pending file changes"
)
@@ -411,8 +413,8 @@ class XetSyncStatus(BaseModel):
sync_progress: float = Field(
default=0.0, ge=0.0, le=1.0, description="Sync progress (0.0 to 1.0)"
)
- error: str | None = Field(None, description="Error message if sync failed")
- last_check_time: float | None = Field(
+ error: Optional[str] = Field(None, description="Error message if sync failed")
+ last_check_time: Optional[float] = Field(
None, description="Timestamp of last folder check"
)
@@ -423,11 +425,11 @@ class TorrentInfo(BaseModel):
name: str = Field(..., description="Torrent name")
info_hash: bytes = Field(..., min_length=20, max_length=20, description="Info hash")
announce: str = Field(..., description="Announce URL")
- announce_list: list[list[str]] | None = Field(None, description="Announce list")
- comment: str | None = Field(None, description="Torrent comment")
- created_by: str | None = Field(None, description="Created by")
- creation_date: int | None = Field(None, description="Creation date")
- encoding: str | None = Field(None, description="String encoding")
+ announce_list: Optional[list[list[str]]] = Field(None, description="Announce list")
+ comment: Optional[str] = Field(None, description="Torrent comment")
+ created_by: Optional[str] = Field(None, description="Created by")
+ creation_date: Optional[int] = Field(None, description="Creation date")
+ encoding: Optional[str] = Field(None, description="String encoding")
is_private: bool = Field(
default=False,
description="Whether torrent is marked as private (BEP 27)",
@@ -446,29 +448,29 @@ class TorrentInfo(BaseModel):
meta_version: int = Field(
default=1, description="Protocol version (1=v1, 2=v2, 3=hybrid)"
)
- info_hash_v2: bytes | None = Field(
+ info_hash_v2: Optional[bytes] = Field(
None,
min_length=32,
max_length=32,
description="v2 info hash (SHA-256, 32 bytes)",
)
- info_hash_v1: bytes | None = Field(
+ info_hash_v1: Optional[bytes] = Field(
None,
min_length=20,
max_length=20,
description="v1 info hash (SHA-1, 20 bytes) for hybrid torrents",
)
- file_tree: dict[str, Any] | None = Field(
+ file_tree: Optional[dict[str, Any]] = Field(
None,
description="v2 file tree structure (hierarchical)",
)
- piece_layers: dict[bytes, list[bytes]] | None = Field(
+ piece_layers: Optional[dict[bytes, list[bytes]]] = Field(
None,
description="v2 piece layers (pieces_root -> list of piece hashes)",
)
# Xet protocol metadata
- xet_metadata: XetTorrentMetadata | None = Field(
+ xet_metadata: Optional[XetTorrentMetadata] = Field(
None,
description="Xet protocol metadata for content-defined chunking",
)
@@ -483,7 +485,7 @@ class WebTorrentConfig(BaseModel):
default=False,
description="Enable WebTorrent protocol support",
)
- webtorrent_signaling_url: str | None = Field(
+ webtorrent_signaling_url: Optional[str] = Field(
default=None,
description="WebTorrent signaling server URL (optional, uses built-in server if None)",
)
@@ -661,25 +663,25 @@ class NetworkConfig(BaseModel):
le=65535,
description="Listen port (deprecated: use listen_port_tcp and listen_port_udp)",
)
- listen_port_tcp: int | None = Field(
+ listen_port_tcp: Optional[int] = Field(
default=None,
ge=1024,
le=65535,
description="TCP listen port for incoming peer connections",
)
- listen_port_udp: int | None = Field(
+ listen_port_udp: Optional[int] = Field(
default=None,
ge=1024,
le=65535,
description="UDP listen port for incoming peer connections",
)
- tracker_udp_port: int | None = Field(
+ tracker_udp_port: Optional[int] = Field(
default=None,
ge=1024,
le=65535,
description="UDP port for tracker client communication",
)
- xet_port: int | None = Field(
+ xet_port: Optional[int] = Field(
default=None,
ge=1024,
le=65535,
@@ -695,7 +697,7 @@ class NetworkConfig(BaseModel):
le=65535,
description="XET multicast port",
)
- listen_interface: str | None = Field(
+ listen_interface: Optional[str] = Field(
default="0.0.0.0", # nosec B104 - Default bind address for network services
description="Listen interface",
)
@@ -1501,7 +1503,7 @@ class DiskConfig(BaseModel):
default=True,
description="Dynamically adjust mmap cache size based on available memory",
)
- max_file_size_mb: int | None = Field(
+ max_file_size_mb: Optional[int] = Field(
default=None,
ge=0,
le=1048576, # 1TB max
@@ -1567,7 +1569,7 @@ def validate_max_file_size(cls, v):
le=65536,
description="NVMe queue depth for optimal performance",
)
- download_path: str | None = Field(
+ download_path: Optional[str] = Field(
default=None,
description="Default download path",
)
@@ -1603,11 +1605,11 @@ def validate_max_file_size(cls, v):
default=True,
description="Enable chunk-level deduplication",
)
- xet_cache_db_path: str | None = Field(
+ xet_cache_db_path: Optional[str] = Field(
default=None,
description="Path to Xet deduplication cache database (defaults to download_dir/.xet_cache/chunks.db)",
)
- xet_chunk_store_path: str | None = Field(
+ xet_chunk_store_path: Optional[str] = Field(
default=None,
description="Path to Xet chunk storage directory (defaults to download_dir/.xet_chunks)",
)
@@ -1653,7 +1655,7 @@ def validate_max_file_size(cls, v):
default=CheckpointFormat.BOTH,
description="Checkpoint file format",
)
- checkpoint_dir: str | None = Field(
+ checkpoint_dir: Optional[str] = Field(
None,
description="Checkpoint directory (defaults to download_dir/.ccbt/checkpoints)",
)
@@ -2221,7 +2223,7 @@ class ObservabilityConfig(BaseModel):
"""Observability configuration."""
log_level: LogLevel = Field(default=LogLevel.INFO, description="Log level")
- log_file: str | None = Field(None, description="Log file path")
+ log_file: Optional[str] = Field(None, description="Log file path")
enable_metrics: bool = Field(default=True, description="Enable metrics collection")
metrics_port: int = Field(
default=64125,
@@ -2247,8 +2249,8 @@ class ObservabilityConfig(BaseModel):
le=3600.0,
description="Metrics collection interval in seconds",
)
- trace_file: str | None = Field(default=None, description="Path to write traces")
- alerts_rules_path: str | None = Field(
+ trace_file: Optional[str] = Field(default=None, description="Path to write traces")
+ alerts_rules_path: Optional[str] = Field(
default=".ccbt/alerts.json",
description="Path to alert rules JSON file",
)
@@ -2624,21 +2626,21 @@ class ProxyConfig(BaseModel):
default="http",
description="Proxy type (http/socks4/socks5)",
)
- proxy_host: str | None = Field(
+ proxy_host: Optional[str] = Field(
default=None,
description="Proxy server hostname or IP",
)
- proxy_port: int | None = Field(
+ proxy_port: Optional[int] = Field(
default=None,
ge=0,
le=65535,
description="Proxy server port (0 when disabled, 1-65535 when enabled)",
)
- proxy_username: str | None = Field(
+ proxy_username: Optional[str] = Field(
default=None,
description="Proxy username for authentication",
)
- proxy_password: str | None = Field(
+ proxy_password: Optional[str] = Field(
default=None,
description="Proxy password (encrypted in storage)",
)
@@ -2758,7 +2760,7 @@ class LocalBlacklistSourceConfig(BaseModel):
},
description="Thresholds for automatic blacklisting",
)
- expiration_hours: float | None = Field(
+ expiration_hours: Optional[float] = Field(
default=24.0,
description="Expiration time for auto-blacklisted IPs (hours, None = permanent)",
)
@@ -2794,7 +2796,7 @@ class BlacklistConfig(BaseModel):
default_factory=list,
description="URLs for automatic blacklist updates",
)
- default_expiration_hours: float | None = Field(
+ default_expiration_hours: Optional[float] = Field(
default=None,
description="Default expiration time for auto-blacklisted IPs in hours (None = permanent)",
)
@@ -2874,15 +2876,15 @@ class SSLConfig(BaseModel):
default=True,
description="Verify SSL certificates",
)
- ssl_ca_certificates: str | None = Field(
+ ssl_ca_certificates: Optional[str] = Field(
default=None,
description="Path to CA certificates file or directory",
)
- ssl_client_certificate: str | None = Field(
+ ssl_client_certificate: Optional[str] = Field(
default=None,
description="Path to client certificate file (PEM format)",
)
- ssl_client_key: str | None = Field(
+ ssl_client_key: Optional[str] = Field(
default=None,
description="Path to client private key file (PEM format)",
)
@@ -2979,15 +2981,15 @@ class FileCheckpoint(BaseModel):
size: int = Field(..., ge=0, description="File size in bytes")
exists: bool = Field(default=False, description="Whether file exists on disk")
# BEP 47: File attributes
- attributes: str | None = Field(
+ attributes: Optional[str] = Field(
None,
description="File attributes string (BEP 47, e.g., 'p', 'x', 'h', 'l')",
)
- symlink_path: str | None = Field(
+ symlink_path: Optional[str] = Field(
None,
description="Symlink target path (BEP 47, required when attr='l')",
)
- file_sha1: bytes | None = Field(
+ file_sha1: Optional[bytes] = Field(
None,
description="File SHA-1 hash (BEP 47, 20 bytes if provided)",
)
@@ -3025,7 +3027,7 @@ class TorrentCheckpoint(BaseModel):
default_factory=dict,
description="Piece states by index",
)
- download_stats: DownloadStats | None = Field(
+ download_stats: Optional[DownloadStats] = Field(
default_factory=DownloadStats,
description="Download statistics",
)
@@ -3054,94 +3056,94 @@ def _coerce_download_stats(cls, v):
)
# Optional metadata
- peer_info: dict[str, Any] | None = Field(
+ peer_info: Optional[dict[str, Any]] = Field(
None,
description="Peer availability info",
)
endgame_mode: bool = Field(default=False, description="Whether in endgame mode")
# Torrent source metadata for resume functionality
- torrent_file_path: str | None = Field(
+ torrent_file_path: Optional[str] = Field(
None,
description="Path to original .torrent file",
)
- magnet_uri: str | None = Field(None, description="Original magnet link")
+ magnet_uri: Optional[str] = Field(None, description="Original magnet link")
announce_urls: list[str] = Field(
default_factory=list,
description="Tracker announce URLs",
)
- display_name: str | None = Field(None, description="Torrent display name")
+ display_name: Optional[str] = Field(None, description="Torrent display name")
# Fast resume data (optional)
- resume_data: dict[str, Any] | None = Field(
+ resume_data: Optional[dict[str, Any]] = Field(
None,
description="Fast resume data (serialized FastResumeData)",
)
# File selection state
- file_selections: dict[int, dict[str, Any]] | None = Field(
+ file_selections: Optional[dict[int, dict[str, Any]]] = Field(
None,
description="File selection state: {file_index: {selected: bool, priority: str, bytes_downloaded: int}}",
)
# Per-torrent configuration options
- per_torrent_options: dict[str, Any] | None = Field(
+ per_torrent_options: Optional[dict[str, Any]] = Field(
None,
description="Per-torrent configuration options (piece_selection, streaming_mode, max_peers_per_torrent, etc.)",
)
# Per-torrent rate limits
- rate_limits: dict[str, int] | None = Field(
+ rate_limits: Optional[dict[str, int]] = Field(
None,
description="Per-torrent rate limits: {down_kib: int, up_kib: int}",
)
# Peer lists and state
- connected_peers: list[dict[str, Any]] | None = Field(
+ connected_peers: Optional[list[dict[str, Any]]] = Field(
None,
description="List of connected peers: [{ip, port, peer_id, peer_source, stats}]",
)
- active_peers: list[dict[str, Any]] | None = Field(
+ active_peers: Optional[list[dict[str, Any]]] = Field(
None,
description="List of active peers (subset of connected): [{ip, port, ...}]",
)
- peer_statistics: dict[str, dict[str, Any]] | None = Field(
+ peer_statistics: Optional[dict[str, dict[str, Any]]] = Field(
None,
description="Peer statistics by peer_key: {peer_key: {bytes_downloaded, bytes_uploaded, ...}}",
)
# Tracker lists and state
- tracker_list: list[dict[str, Any]] | None = Field(
+ tracker_list: Optional[list[dict[str, Any]]] = Field(
None,
description="List of trackers: [{url, last_announce, last_success, is_healthy, failure_count}]",
)
- tracker_health: dict[str, dict[str, Any]] | None = Field(
+ tracker_health: Optional[dict[str, dict[str, Any]]] = Field(
None,
description="Tracker health metrics: {url: {last_announce, last_success, failure_count, ...}}",
)
# Security state
- peer_whitelist: list[str] | None = Field(
+ peer_whitelist: Optional[list[str]] = Field(
None,
description="Per-torrent peer whitelist (IP addresses)",
)
- peer_blacklist: list[str] | None = Field(
+ peer_blacklist: Optional[list[str]] = Field(
None,
description="Per-torrent peer blacklist (IP addresses)",
)
# Session state
- session_state: str | None = Field(
+ session_state: Optional[str] = Field(
None,
description="Session state: 'active', 'paused', 'stopped', 'queued', 'seeding'",
)
- session_state_timestamp: float | None = Field(
+ session_state_timestamp: Optional[float] = Field(
None,
description="Timestamp when session state changed",
)
# Event history
- recent_events: list[dict[str, Any]] | None = Field(
+ recent_events: Optional[list[dict[str, Any]]] = Field(
None,
description="Recent events for debugging: [{event_type, timestamp, data}]",
)
@@ -3190,7 +3192,7 @@ class GlobalCheckpoint(BaseModel):
)
# Global limits
- global_rate_limits: dict[str, int] | None = Field(
+ global_rate_limits: Optional[dict[str, int]] = Field(
None,
description="Global rate limits: {down_kib: int, up_kib: int}",
)
@@ -3206,13 +3208,13 @@ class GlobalCheckpoint(BaseModel):
)
# DHT state
- dht_nodes: list[dict[str, Any]] | None = Field(
+ dht_nodes: Optional[list[dict[str, Any]]] = Field(
None,
description="Known DHT nodes: [{ip, port, node_id, last_seen}]",
)
# Global statistics
- global_stats: dict[str, Any] | None = Field(
+ global_stats: Optional[dict[str, Any]] = Field(
None,
description="Global statistics snapshot",
)
@@ -3223,48 +3225,48 @@ class GlobalCheckpoint(BaseModel):
class PerTorrentOptions(BaseModel):
"""Per-torrent configuration options for validation."""
- piece_selection: str | None = Field(
+ piece_selection: Optional[str] = Field(
None,
description="Piece selection strategy: round_robin, rarest_first, sequential",
)
- streaming_mode: bool | None = Field(
+ streaming_mode: Optional[bool] = Field(
None, description="Enable streaming mode for sequential download"
)
- sequential_window_size: int | None = Field(
+ sequential_window_size: Optional[int] = Field(
None,
ge=1,
description="Number of pieces ahead to download in sequential mode",
)
- max_peers_per_torrent: int | None = Field(
+ max_peers_per_torrent: Optional[int] = Field(
None,
ge=0,
description="Maximum peers for this torrent (0 = unlimited)",
)
- enable_tcp: bool | None = Field(None, description="Enable TCP transport")
- enable_utp: bool | None = Field(None, description="Enable uTP transport")
- enable_encryption: bool | None = Field(
+ enable_tcp: Optional[bool] = Field(None, description="Enable TCP transport")
+ enable_utp: Optional[bool] = Field(None, description="Enable uTP transport")
+ enable_encryption: Optional[bool] = Field(
None, description="Enable protocol encryption (BEP 3)"
)
- auto_scrape: bool | None = Field(
+ auto_scrape: Optional[bool] = Field(
None, description="Automatically scrape tracker on torrent add"
)
- enable_nat_mapping: bool | None = Field(
+ enable_nat_mapping: Optional[bool] = Field(
None, description="Enable NAT port mapping for this torrent"
)
- enable_xet: bool | None = Field(
+ enable_xet: Optional[bool] = Field(
None, description="Enable XET folder synchronization for this torrent"
)
- xet_sync_mode: str | None = Field(
+ xet_sync_mode: Optional[str] = Field(
None,
description="XET sync mode for this torrent (designated/best_effort/broadcast/consensus)",
)
- xet_allowlist_path: str | None = Field(
+ xet_allowlist_path: Optional[str] = Field(
None, description="Path to XET allowlist file for this torrent"
)
@field_validator("piece_selection")
@classmethod
- def validate_piece_selection(cls, v: str | None) -> str | None:
+ def validate_piece_selection(cls, v: Optional[str]) -> Optional[str]:
"""Validate piece_selection is a valid strategy."""
if v is None:
return v
@@ -3276,7 +3278,7 @@ def validate_piece_selection(cls, v: str | None) -> str | None:
@field_validator("xet_sync_mode")
@classmethod
- def validate_xet_sync_mode(cls, v: str | None) -> str | None:
+ def validate_xet_sync_mode(cls, v: Optional[str]) -> Optional[str]:
"""Validate xet_sync_mode is a valid mode."""
if v is None:
return v
@@ -3290,38 +3292,42 @@ def validate_xet_sync_mode(cls, v: str | None) -> str | None:
class PerTorrentDefaultsConfig(BaseModel):
"""Default per-torrent configuration options applied to new torrents."""
- piece_selection: str | None = Field(
+ piece_selection: Optional[str] = Field(
None,
description="Default piece selection strategy: round_robin, rarest_first, sequential",
)
- streaming_mode: bool | None = Field(
+ streaming_mode: Optional[bool] = Field(
None, description="Default streaming mode for sequential download"
)
- sequential_window_size: int | None = Field(
+ sequential_window_size: Optional[int] = Field(
None,
ge=1,
description="Default number of pieces ahead to download in sequential mode",
)
- max_peers_per_torrent: int | None = Field(
+ max_peers_per_torrent: Optional[int] = Field(
None,
ge=0,
description="Default maximum peers for torrents (0 = unlimited)",
)
- enable_tcp: bool | None = Field(None, description="Default TCP transport enabled")
- enable_utp: bool | None = Field(None, description="Default uTP transport enabled")
- enable_encryption: bool | None = Field(
+ enable_tcp: Optional[bool] = Field(
+ None, description="Default TCP transport enabled"
+ )
+ enable_utp: Optional[bool] = Field(
+ None, description="Default uTP transport enabled"
+ )
+ enable_encryption: Optional[bool] = Field(
None, description="Default protocol encryption enabled (BEP 3)"
)
- auto_scrape: bool | None = Field(
+ auto_scrape: Optional[bool] = Field(
None, description="Default auto-scrape tracker on torrent add"
)
- enable_nat_mapping: bool | None = Field(
+ enable_nat_mapping: Optional[bool] = Field(
None, description="Default NAT port mapping enabled"
)
@field_validator("piece_selection")
@classmethod
- def validate_piece_selection(cls, v: str | None) -> str | None:
+ def validate_piece_selection(cls, v: Optional[str]) -> Optional[str]:
"""Validate piece_selection is a valid strategy."""
if v is None:
return v
@@ -3371,19 +3377,19 @@ class ScrapeResult(BaseModel):
class DaemonConfig(BaseModel):
"""Daemon configuration."""
- api_key: str | None = Field(
+ api_key: Optional[str] = Field(
default=None,
description="API key for authentication (auto-generated if not set)",
)
- ed25519_public_key: str | None = Field(
+ ed25519_public_key: Optional[str] = Field(
None,
description="Ed25519 public key for cryptographic authentication (hex format)",
)
- ed25519_key_path: str | None = Field(
+ ed25519_key_path: Optional[str] = Field(
None,
description="Path to Ed25519 key storage directory (default: ~/.ccbt/keys)",
)
- tls_certificate_path: str | None = Field(
+ tls_certificate_path: Optional[str] = Field(
None, description="Path to TLS certificate file for HTTPS support"
)
tls_enabled: bool = Field(False, description="Enable TLS/HTTPS for IPC server")
@@ -3403,7 +3409,7 @@ class DaemonConfig(BaseModel):
ge=1.0,
description="Auto-save state interval in seconds",
)
- state_dir: str | None = Field(
+ state_dir: Optional[str] = Field(
None,
description="State directory path (default: ~/.ccbt/daemon)",
)
@@ -3567,7 +3573,7 @@ class XetSyncConfig(BaseModel):
le=10000,
description="Maximum number of queued updates",
)
- allowlist_encryption_key: str | None = Field(
+ allowlist_encryption_key: Optional[str] = Field(
None,
description="Path to allowlist encryption key file",
)
@@ -3636,7 +3642,7 @@ class Config(BaseModel):
default_factory=WebTorrentConfig,
description="WebTorrent protocol configuration",
)
- daemon: DaemonConfig | None = Field(
+ daemon: Optional[DaemonConfig] = Field(
None,
description="Daemon configuration",
)
diff --git a/ccbt/monitoring/__init__.py b/ccbt/monitoring/__init__.py
index 1ab794b..7eb7724 100644
--- a/ccbt/monitoring/__init__.py
+++ b/ccbt/monitoring/__init__.py
@@ -12,6 +12,8 @@
from __future__ import annotations
+from typing import Optional
+
from ccbt.monitoring.alert_manager import AlertManager
from ccbt.monitoring.dashboard import DashboardManager
from ccbt.monitoring.metrics_collector import MetricsCollector
@@ -30,10 +32,10 @@
]
# Global alert manager singleton for CLI/UI integration
-_GLOBAL_ALERT_MANAGER: AlertManager | None = None
+_GLOBAL_ALERT_MANAGER: Optional[AlertManager] = None
# Global metrics collector singleton for CLI/UI integration
-_GLOBAL_METRICS_COLLECTOR: MetricsCollector | None = None
+_GLOBAL_METRICS_COLLECTOR: Optional[MetricsCollector] = None
def get_alert_manager() -> AlertManager:
@@ -61,7 +63,7 @@ def get_metrics_collector() -> MetricsCollector:
return _GLOBAL_METRICS_COLLECTOR
-async def init_metrics() -> MetricsCollector | None:
+async def init_metrics() -> Optional[MetricsCollector]:
"""Initialize and start metrics collection if enabled in configuration.
This function:
@@ -72,7 +74,7 @@ async def init_metrics() -> MetricsCollector | None:
- Handles errors gracefully (logs warnings, doesn't raise)
Returns:
- MetricsCollector | None: MetricsCollector instance if enabled and started,
+ Optional[MetricsCollector]: MetricsCollector instance if enabled and started,
None if metrics are disabled or initialization failed.
Example:
diff --git a/ccbt/monitoring/alert_manager.py b/ccbt/monitoring/alert_manager.py
index dfa665c..a4fd6bb 100644
--- a/ccbt/monitoring/alert_manager.py
+++ b/ccbt/monitoring/alert_manager.py
@@ -21,7 +21,7 @@
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from enum import Enum
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any, Callable, Optional
from ccbt.utils.events import Event, EventType, emit_event
from ccbt.utils.logging_config import get_logger
@@ -66,7 +66,7 @@ class Alert:
description: str
timestamp: float
resolved: bool = False
- resolved_timestamp: float | None = None
+ resolved_timestamp: Optional[float] = None
metadata: dict[str, Any] = field(default_factory=dict)
@@ -247,7 +247,7 @@ async def process_alert(
self,
metric_name: str,
value: Any,
- timestamp: float | None = None,
+ timestamp: Optional[float] = None,
) -> None:
"""Process an alert for a metric."""
if timestamp is None:
@@ -269,7 +269,7 @@ async def process_alert(
async def resolve_alert(
self,
alert_id: str,
- timestamp: float | None = None,
+ timestamp: Optional[float] = None,
) -> bool:
"""Resolve an alert."""
if timestamp is None:
@@ -308,7 +308,7 @@ async def resolve_alert(
async def resolve_alerts_for_metric(
self,
metric_name: str,
- timestamp: float | None = None,
+ timestamp: Optional[float] = None,
) -> int:
"""Resolve all alerts for a specific metric."""
if timestamp is None:
diff --git a/ccbt/monitoring/dashboard.py b/ccbt/monitoring/dashboard.py
index 021f095..74afdec 100644
--- a/ccbt/monitoring/dashboard.py
+++ b/ccbt/monitoring/dashboard.py
@@ -19,7 +19,7 @@
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any, Callable, Optional
from ccbt.i18n import _
from ccbt.utils.events import Event, EventType, emit_event
@@ -167,7 +167,7 @@ def create_dashboard(
name: str,
dashboard_type: DashboardType,
description: str = "",
- widgets: list[Widget] | None = None,
+ widgets: Optional[list[Widget]] = None,
) -> str:
"""Create a new dashboard."""
dashboard_id = f"dashboard_{int(time.time())}"
@@ -297,7 +297,7 @@ def update_widget(
return False
- def get_dashboard(self, dashboard_id: str) -> Dashboard | None:
+ def get_dashboard(self, dashboard_id: str) -> Optional[Dashboard]:
"""Get dashboard by ID."""
return self.dashboards.get(dashboard_id)
@@ -305,7 +305,7 @@ def get_all_dashboards(self) -> dict[str, Dashboard]:
"""Get all dashboards."""
return self.dashboards.copy()
- def get_dashboard_data(self, dashboard_id: str) -> DashboardData | None:
+ def get_dashboard_data(self, dashboard_id: str) -> Optional[DashboardData]:
"""Get dashboard data."""
return self.dashboard_data.get(dashboard_id)
@@ -517,7 +517,7 @@ def _initialize_templates(self) -> None:
)
self.templates[DashboardType.SECURITY] = security_dashboard
- def _widget_to_grafana_panel(self, widget: Widget) -> dict[str, Any] | None:
+ def _widget_to_grafana_panel(self, widget: Widget) -> Optional[dict[str, Any]]:
"""Convert widget to Grafana panel."""
if widget.type == WidgetType.METRIC:
return {
@@ -614,7 +614,7 @@ async def add_torrent_file(
self,
session: AsyncSessionManager,
file_path: str,
- _output_dir: str | None = None,
+ _output_dir: Optional[str] = None,
resume: bool = False,
download_limit: int = 0,
upload_limit: int = 0,
@@ -676,7 +676,7 @@ async def add_torrent_magnet(
self,
session: AsyncSessionManager,
magnet_uri: str,
- _output_dir: str | None = None,
+ _output_dir: Optional[str] = None,
resume: bool = False,
download_limit: int = 0,
upload_limit: int = 0,
diff --git a/ccbt/monitoring/metrics_collector.py b/ccbt/monitoring/metrics_collector.py
index 7f0e4fc..bbb668a 100644
--- a/ccbt/monitoring/metrics_collector.py
+++ b/ccbt/monitoring/metrics_collector.py
@@ -1,7 +1,5 @@
"""Advanced Metrics Collector for ccBitTorrent.
-from __future__ import annotations
-
Provides comprehensive metrics collection including:
- Custom metrics with labels
- Metric aggregation and rollup
@@ -19,7 +17,7 @@
from collections import deque
from dataclasses import dataclass, field
from enum import Enum
-from typing import Any, Callable, TypedDict
+from typing import Any, Callable, Optional, TypedDict, Union
import psutil
@@ -73,7 +71,7 @@ class MetricLabel:
class MetricValue:
"""Metric value with timestamp."""
- value: int | float | str
+ value: Union[int, float, str]
timestamp: float
labels: list[MetricLabel] = field(default_factory=list)
@@ -198,16 +196,16 @@ def __init__(self):
}
# Session reference for accessing DHT, queue, disk I/O, and tracker services
- self._session: Any | None = None
+ self._session: Optional[Any] = None
# Collection interval
self.collection_interval = 5.0 # seconds
- self.collection_task: asyncio.Task | None = None
+ self.collection_task: Optional[asyncio.Task] = None
self.running = False
# HTTP server for Prometheus endpoint (if enabled)
- self._http_server: Any | None = None
- self._http_server_thread: Any | None = None
+ self._http_server: Optional[Any] = None
+ self._http_server_thread: Optional[Any] = None
# Statistics
self.stats = {
@@ -270,7 +268,7 @@ def register_metric(
name: str,
metric_type: MetricType,
description: str,
- labels: list[MetricLabel] | None = None,
+ labels: Optional[list[MetricLabel]] = None,
aggregation: AggregationType = AggregationType.SUM,
retention_seconds: int = 3600,
) -> None:
@@ -287,8 +285,8 @@ def register_metric(
def record_metric(
self,
name: str,
- value: float | str,
- labels: list[MetricLabel] | None = None,
+ value: Union[float, str],
+ labels: Optional[list[MetricLabel]] = None,
) -> None:
"""Record a metric value."""
if name not in self.metrics:
@@ -320,7 +318,7 @@ def record_metric(
am = get_alert_manager()
# Only attempt numeric evaluation for shared rules
- v_any: float | str = value
+ v_any: Union[float, str] = value
if isinstance(value, str):
# simple numeric parse; ignore parse errors
with contextlib.suppress(Exception): # pragma: no cover
@@ -349,7 +347,7 @@ def increment_counter(
self,
name: str,
value: int = 1,
- labels: list[MetricLabel] | None = None,
+ labels: Optional[list[MetricLabel]] = None,
) -> None:
"""Increment a counter metric."""
if name not in self.metrics: # pragma: no cover
@@ -370,7 +368,7 @@ def set_gauge(
self,
name: str,
value: float,
- labels: list[MetricLabel] | None = None,
+ labels: Optional[list[MetricLabel]] = None,
) -> None:
"""Set a gauge metric value."""
if name not in self.metrics:
@@ -382,7 +380,7 @@ def record_histogram(
self,
name: str,
value: float,
- labels: list[MetricLabel] | None = None,
+ labels: Optional[list[MetricLabel]] = None,
) -> None:
"""Record a histogram value."""
if name not in self.metrics:
@@ -409,15 +407,15 @@ def add_alert_rule(
cooldown_seconds=cooldown_seconds,
)
- def get_metric(self, name: str) -> Metric | None:
+ def get_metric(self, name: str) -> Optional[Metric]:
"""Get a metric by name."""
return self.metrics.get(name) # pragma: no cover
def get_metric_value(
self,
name: str,
- aggregation: AggregationType | None = None,
- ) -> int | float | str | None:
+ aggregation: Optional[AggregationType] = None,
+ ) -> Optional[Union[int, float, str]]:
"""Get aggregated metric value."""
if name not in self.metrics: # pragma: no cover
return None
@@ -891,7 +889,9 @@ async def record_connection_success(self, peer_key: str) -> None:
self._connection_successes.get(peer_key, 0) + 1
)
- async def get_connection_success_rate(self, peer_key: str | None = None) -> float:
+ async def get_connection_success_rate(
+ self, peer_key: Optional[str] = None
+ ) -> float:
"""Get connection success rate for a peer or globally.
Args:
@@ -1274,7 +1274,7 @@ async def _collect_custom_metrics(self) -> None:
),
)
- def _check_alert_rules(self, metric_name: str, value: float | str) -> None:
+ def _check_alert_rules(self, metric_name: str, value: Union[float, str]) -> None:
"""Check alert rules for a metric."""
for rule_name, rule in self.alert_rules.items():
if rule.metric_name != metric_name or not rule.enabled:
@@ -1328,7 +1328,7 @@ def _check_alert_rules(self, metric_name: str, value: float | str) -> None:
lambda _t: None
) # Discard task reference # pragma: no cover
- def _evaluate_condition(self, condition: str, value: float | str) -> bool:
+ def _evaluate_condition(self, condition: str, value: Union[float, str]) -> bool:
"""Evaluate alert condition safely."""
try:
# Replace 'value' with actual value
diff --git a/ccbt/monitoring/tracing.py b/ccbt/monitoring/tracing.py
index 3c64be3..8d91a79 100644
--- a/ccbt/monitoring/tracing.py
+++ b/ccbt/monitoring/tracing.py
@@ -21,7 +21,7 @@
from collections import deque
from dataclasses import dataclass, field
from enum import Enum
-from typing import Any
+from typing import Any, Optional
from typing_extensions import Self
@@ -56,12 +56,12 @@ class Span:
trace_id: str
span_id: str
- parent_span_id: str | None
+ parent_span_id: Optional[str]
name: str
kind: SpanKind
start_time: float
- end_time: float | None = None
- duration: float | None = None
+ end_time: Optional[float] = None
+ duration: Optional[float] = None
status: SpanStatus = SpanStatus.OK
attributes: dict[str, Any] = field(default_factory=dict)
events: list[dict[str, Any]] = field(default_factory=list)
@@ -75,10 +75,10 @@ class Trace:
trace_id: str
spans: list[Span] = field(default_factory=list)
- start_time: float | None = None
- end_time: float | None = None
+ start_time: Optional[float] = None
+ end_time: Optional[float] = None
duration: float = 0.0
- root_span: Span | None = None
+ root_span: Optional[Span] = None
class TracingManager:
@@ -89,7 +89,7 @@ def __init__(self):
self.active_spans: dict[str, Span] = {}
self.completed_spans: deque = deque(maxlen=10000)
self.traces: dict[str, Trace] = {}
- self.trace_context: contextvars.ContextVar[dict[str, str] | None] = (
+ self.trace_context: contextvars.ContextVar[Optional[dict[str, str]]] = (
contextvars.ContextVar("trace_context", default=None)
)
@@ -115,8 +115,8 @@ def start_span(
self,
name: str,
kind: SpanKind = SpanKind.INTERNAL,
- parent_span_id: str | None = None,
- attributes: dict[str, Any] | None = None,
+ parent_span_id: Optional[str] = None,
+ attributes: Optional[dict[str, Any]] = None,
) -> str:
"""Start a new span."""
# Generate trace ID if not in context
@@ -168,8 +168,8 @@ def end_span(
self,
span_id: str,
status: SpanStatus = SpanStatus.OK,
- attributes: dict[str, Any] | None = None,
- ) -> Span | None:
+ attributes: Optional[dict[str, Any]] = None,
+ ) -> Optional[Span]:
"""End a span."""
if span_id not in self.active_spans:
return None
@@ -217,7 +217,7 @@ def add_span_event(
self,
span_id: str,
name: str,
- attributes: dict[str, Any] | None = None,
+ attributes: Optional[dict[str, Any]] = None,
) -> None:
"""Add an event to a span."""
if span_id not in self.active_spans:
@@ -239,14 +239,14 @@ def add_span_attribute(self, span_id: str, key: str, value: Any) -> None:
span = self.active_spans[span_id]
span.attributes[key] = value
- def get_active_span(self) -> Span | None:
+ def get_active_span(self) -> Optional[Span]:
"""Get the current active span."""
span_id = self._get_current_span_id()
if span_id and span_id in self.active_spans:
return self.active_spans[span_id]
return None
- def get_trace(self, trace_id: str) -> Trace | None:
+ def get_trace(self, trace_id: str) -> Optional[Trace]:
"""Get a complete trace."""
return self.traces.get(trace_id)
@@ -336,14 +336,14 @@ def _get_or_create_trace_id(self) -> str:
return trace_id
- def _get_current_span_id(self) -> str | None:
+ def _get_current_span_id(self) -> Optional[str]:
"""Get current span ID from context."""
context = self.trace_context.get()
if context is None:
return None
return context.get("span_id")
- def _update_trace_context(self, trace_id: str, span_id: str | None) -> None:
+ def _update_trace_context(self, trace_id: str, span_id: Optional[str]) -> None:
"""Update trace context."""
context = {
"trace_id": trace_id,
@@ -429,14 +429,14 @@ def __init__(
tracing_manager: TracingManager,
name: str,
kind: SpanKind = SpanKind.INTERNAL,
- attributes: dict[str, Any] | None = None,
+ attributes: Optional[dict[str, Any]] = None,
):
"""Initialize trace context."""
self.tracing_manager = tracing_manager
self.name = name
self.kind = kind
self.attributes = attributes
- self.span_id: str | None = None
+ self.span_id: Optional[str] = None
def __enter__(self) -> Self:
"""Enter the span context manager."""
@@ -466,7 +466,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
# End span
self.tracing_manager.end_span(self.span_id, status)
- def add_event(self, name: str, attributes: dict[str, Any] | None = None) -> None:
+ def add_event(self, name: str, attributes: Optional[dict[str, Any]] = None) -> None:
"""Add event to current span."""
if self.span_id:
self.tracing_manager.add_span_event(self.span_id, name, attributes)
@@ -477,7 +477,7 @@ def add_attribute(self, key: str, value: Any) -> None:
self.tracing_manager.add_span_attribute(self.span_id, key, value)
-def trace_function(tracing_manager: TracingManager, name: str | None = None):
+def trace_function(tracing_manager: TracingManager, name: Optional[str] = None):
"""Provide decorator for tracing functions."""
def decorator(func):
@@ -492,7 +492,7 @@ def wrapper(*args, **kwargs):
return decorator
-def trace_async_function(tracing_manager: TracingManager, name: str | None = None):
+def trace_async_function(tracing_manager: TracingManager, name: Optional[str] = None):
"""Provide decorator for tracing async functions."""
def decorator(func):
diff --git a/ccbt/nat/manager.py b/ccbt/nat/manager.py
index 959f611..9a0e68d 100644
--- a/ccbt/nat/manager.py
+++ b/ccbt/nat/manager.py
@@ -5,7 +5,7 @@
import asyncio
import contextlib
import logging
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional, Tuple
from ccbt.nat.exceptions import NATPMPError, UPnPError
from ccbt.nat.natpmp import NATPMPClient
@@ -31,16 +31,16 @@ def __init__(self, config) -> None:
self.config = config
self.logger = logging.getLogger(__name__)
- self.natpmp_client: NATPMPClient | None = None
- self.upnp_client: UPnPClient | None = None
+ self.natpmp_client: Optional[NATPMPClient] = None
+ self.upnp_client: Optional[UPnPClient] = None
# Pass renewal callback to port mapping manager
self.port_mapping_manager = PortMappingManager(
renewal_callback=self._renew_mapping_callback
)
- self.active_protocol: str | None = None # "natpmp" or "upnp"
- self.external_ip: ipaddress.IPv4Address | None = None
- self._discovery_task: asyncio.Task | None = None
+ self.active_protocol: Optional[str] = None # "natpmp" or "upnp"
+ self.external_ip: Optional[ipaddress.IPv4Address] = None
+ self._discovery_task: Optional[asyncio.Task] = None
self._discovery_attempted: bool = False # Track if discovery has been attempted
async def discover(self, force: bool = False) -> bool:
@@ -222,7 +222,7 @@ async def map_port(
internal_port: int,
external_port: int = 0,
protocol: str = "tcp",
- ) -> PortMapping | None:
+ ) -> Optional[PortMapping]:
"""Map a port using the active protocol with retry logic.
Args:
@@ -484,7 +484,7 @@ async def map_port(
)
return None
- async def renew_mapping(self, mapping: PortMapping) -> tuple[bool, int | None]:
+ async def renew_mapping(self, mapping: PortMapping) -> Tuple[bool, Optional[int]]:
"""Renew a port mapping.
Renewal requests are identical to initial mapping requests per RFC 6886.
@@ -495,7 +495,7 @@ async def renew_mapping(self, mapping: PortMapping) -> tuple[bool, int | None]:
mapping: Port mapping to renew
Returns:
- Tuple of (success: bool, new_lifetime: int | None)
+ Tuple of (success: bool, new_lifetime: Optional[int])
new_lifetime is None if renewal failed or if mapping is permanent
"""
@@ -602,7 +602,7 @@ async def renew_mapping(self, mapping: PortMapping) -> tuple[bool, int | None]:
async def _renew_mapping_callback(
self, mapping: PortMapping
- ) -> tuple[bool, int | None]:
+ ) -> Tuple[bool, Optional[int]]:
"""Handle port mapping renewal callback.
This is passed to PortMappingManager to enable renewal.
@@ -611,7 +611,7 @@ async def _renew_mapping_callback(
mapping: Port mapping to renew
Returns:
- Tuple of (success: bool, new_lifetime: int | None)
+ Tuple of (success: bool, new_lifetime: Optional[int])
"""
return await self.renew_mapping(mapping)
@@ -1230,7 +1230,7 @@ async def stop(self) -> None:
self.logger.info("NAT manager stopped")
- async def get_external_ip(self) -> ipaddress.IPv4Address | None:
+ async def get_external_ip(self) -> Optional[ipaddress.IPv4Address]:
"""Get external IP address.
Returns:
@@ -1262,7 +1262,7 @@ async def get_external_ip(self) -> ipaddress.IPv4Address | None:
async def get_external_port(
self, internal_port: int, protocol: str = "tcp"
- ) -> int | None:
+ ) -> Optional[int]:
"""Get external port for a given internal port and protocol.
This method queries the port mapping manager to find the external port
diff --git a/ccbt/nat/natpmp.py b/ccbt/nat/natpmp.py
index 4c8be3c..50ac315 100644
--- a/ccbt/nat/natpmp.py
+++ b/ccbt/nat/natpmp.py
@@ -9,6 +9,7 @@
import struct
from dataclasses import dataclass
from enum import IntEnum
+from typing import Optional
from ccbt.nat.exceptions import NATPMPError
@@ -53,7 +54,7 @@ class NATPMPPortMapping:
# Gateway discovery functions
-async def discover_gateway() -> ipaddress.IPv4Address | None:
+async def discover_gateway() -> Optional[ipaddress.IPv4Address]:
"""Discover the NAT gateway using the default gateway method.
RFC 6886 section 3.3: Gateway is typically the default route gateway.
@@ -70,7 +71,7 @@ async def discover_gateway() -> ipaddress.IPv4Address | None:
return None
-async def get_gateway_ip() -> ipaddress.IPv4Address | None:
+async def get_gateway_ip() -> Optional[ipaddress.IPv4Address]:
"""Get gateway IP using platform-specific methods."""
import platform
@@ -290,7 +291,7 @@ class NATPMPClient:
def __init__(
self,
- gateway_ip: ipaddress.IPv4Address | None = None,
+ gateway_ip: Optional[ipaddress.IPv4Address] = None,
timeout: float = NAT_PMP_REQUEST_TIMEOUT,
):
"""Initialize NAT-PMP client.
@@ -303,8 +304,8 @@ def __init__(
self.gateway_ip = gateway_ip
self.timeout = timeout
self.logger = logging.getLogger(__name__)
- self._socket: socket.socket | None = None
- self._external_ip: ipaddress.IPv4Address | None = None
+ self._socket: Optional[socket.socket] = None
+ self._external_ip: Optional[ipaddress.IPv4Address] = None
self._last_epoch_time: int = 0
async def _ensure_socket(self) -> socket.socket:
diff --git a/ccbt/nat/port_mapping.py b/ccbt/nat/port_mapping.py
index edd3379..f2f9707 100644
--- a/ccbt/nat/port_mapping.py
+++ b/ccbt/nat/port_mapping.py
@@ -7,11 +7,12 @@
import time
from collections.abc import Awaitable, Callable
from dataclasses import dataclass, field
+from typing import Optional, Tuple
logger = logging.getLogger(__name__)
# Type alias for renewal callback (using string for forward reference)
-RenewalCallback = Callable[["PortMapping"], Awaitable[tuple[bool, int | None]]]
+RenewalCallback = Callable[["PortMapping"], Awaitable[Tuple[bool, Optional[int]]]]
@dataclass
@@ -23,19 +24,19 @@ class PortMapping:
protocol: str # "tcp" or "udp"
protocol_source: str # "natpmp" or "upnp"
created_at: float = field(default_factory=time.time)
- expires_at: float | None = None
- renewal_task: asyncio.Task | None = None
+ expires_at: Optional[float] = None
+ renewal_task: Optional[asyncio.Task] = None
class PortMappingManager:
"""Manages active port mappings and renewal."""
- def __init__(self, renewal_callback: RenewalCallback | None = None) -> None:
+ def __init__(self, renewal_callback: Optional[RenewalCallback] = None) -> None:
"""Initialize port mapping manager.
Args:
renewal_callback: Optional async callback for renewing mappings.
- Signature: async (mapping: PortMapping) -> tuple[bool, int | None]
+ Signature: async (mapping: PortMapping) -> tuple[bool, Optional[int]]
Returns (success, new_lifetime)
"""
@@ -54,7 +55,7 @@ async def add_mapping(
external_port: int,
protocol: str,
protocol_source: str,
- lifetime: int | None = None,
+ lifetime: Optional[int] = None,
) -> PortMapping:
"""Add port mapping and schedule renewal.
@@ -166,7 +167,7 @@ async def _renew_mapping(self, mapping: PortMapping, lifetime: int) -> None:
return
success = False
- new_lifetime: int | None = None
+ new_lifetime: Optional[int] = None
for attempt in range(max_retries):
try:
@@ -292,7 +293,7 @@ async def get_all_mappings(self) -> list[PortMapping]:
async def get_mapping(
self, protocol: str, external_port: int
- ) -> PortMapping | None:
+ ) -> Optional[PortMapping]:
"""Get a specific mapping.
Args:
diff --git a/ccbt/nat/upnp.py b/ccbt/nat/upnp.py
index d2a3ce5..8e4c6e6 100644
--- a/ccbt/nat/upnp.py
+++ b/ccbt/nat/upnp.py
@@ -7,6 +7,7 @@
import logging
import socket
import warnings
+from typing import Optional
from urllib.parse import urljoin
try:
@@ -45,7 +46,7 @@
UPNP_IGD_DEVICE_TYPE = "urn:schemas-upnp-org:device:InternetGatewayDevice:1"
-def build_msearch_request(search_target: str | None = None) -> bytes:
+def build_msearch_request(search_target: Optional[str] = None) -> bytes:
"""Build SSDP M-SEARCH request (UPnP Device Architecture 1.1).
Args:
@@ -432,8 +433,8 @@ async def fetch_device_description(location_url: str) -> dict[str, str]:
# Improved error handling with retries for device description fetching
max_retries = 2
- last_error: Exception | None = None
- xml_content: str | None = None
+ last_error: Optional[Exception] = None
+ xml_content: Optional[str] = None
for attempt in range(max_retries):
try:
@@ -752,7 +753,7 @@ async def send_soap_action(
class UPnPClient:
"""Async UPnP IGD client."""
- def __init__(self, device_url: str | None = None):
+ def __init__(self, device_url: Optional[str] = None):
"""Initialize UPnP client.
Args:
@@ -760,7 +761,7 @@ def __init__(self, device_url: str | None = None):
"""
self.device_url = device_url
- self.control_url: str | None = None
+ self.control_url: Optional[str] = None
self.service_type: str = UPNP_IGD_SERVICE_TYPE
self.logger = logging.getLogger(__name__)
diff --git a/ccbt/observability/profiler.py b/ccbt/observability/profiler.py
index dbd4f65..ece641d 100644
--- a/ccbt/observability/profiler.py
+++ b/ccbt/observability/profiler.py
@@ -22,7 +22,7 @@
from collections import defaultdict, deque
from dataclasses import dataclass, field
from enum import Enum
-from typing import Any
+from typing import Any, Optional
from ccbt.utils.events import Event, EventType, emit_event
@@ -133,7 +133,7 @@ def start_profile(
function_name: str,
module_name: str = "",
profile_type: ProfileType = ProfileType.FUNCTION,
- metadata: dict[str, Any] | None = None,
+ metadata: Optional[dict[str, Any]] = None,
) -> str:
"""Start profiling a function."""
if not self.enabled:
@@ -162,7 +162,7 @@ def start_profile(
return profile_id
- def end_profile(self, profile_id: str) -> ProfileEntry | None:
+ def end_profile(self, profile_id: str) -> Optional[ProfileEntry]:
"""End profiling a function."""
if profile_id not in self.active_profiles:
return None
@@ -206,8 +206,8 @@ def end_profile(self, profile_id: str) -> ProfileEntry | None:
def profile_function(
self,
- function_name: str | None = None,
- module_name: str | None = None,
+ function_name: Optional[str] = None,
+ module_name: Optional[str] = None,
profile_type: ProfileType = ProfileType.FUNCTION,
):
"""Provide decorator for profiling functions."""
@@ -231,8 +231,8 @@ def wrapper(*args, **kwargs):
def profile_async_function(
self,
- function_name: str | None = None,
- module_name: str | None = None,
+ function_name: Optional[str] = None,
+ module_name: Optional[str] = None,
profile_type: ProfileType = ProfileType.ASYNC,
):
"""Provide decorator for profiling async functions."""
diff --git a/ccbt/peer/async_peer_connection.py b/ccbt/peer/async_peer_connection.py
index 54c2d28..3475784 100644
--- a/ccbt/peer/async_peer_connection.py
+++ b/ccbt/peer/async_peer_connection.py
@@ -15,7 +15,7 @@
from dataclasses import dataclass, field
from enum import Enum
from heapq import heappop, heappush
-from typing import TYPE_CHECKING, Any, Callable, Iterable
+from typing import TYPE_CHECKING, Any, Callable, Iterable, Optional, Union
if TYPE_CHECKING: # pragma: no cover - type checking only, not executed at runtime
from ccbt.security.encrypted_stream import (
@@ -128,8 +128,8 @@ class AsyncPeerConnection:
peer_info: PeerInfo
torrent_data: dict[str, Any]
- reader: asyncio.StreamReader | EncryptedStreamReader | None = None
- writer: asyncio.StreamWriter | EncryptedStreamWriter | None = None
+ reader: Optional[Union[asyncio.StreamReader, EncryptedStreamReader]] = None
+ writer: Optional[Union[asyncio.StreamWriter, EncryptedStreamWriter]] = None
state: ConnectionState = ConnectionState.DISCONNECTED
peer_state: PeerState = field(default_factory=PeerState)
message_decoder: MessageDecoder = field(default_factory=MessageDecoder)
@@ -141,7 +141,7 @@ class AsyncPeerConnection:
)
request_queue: deque = field(default_factory=deque)
max_pipeline_depth: int = 16
- _priority_queue: list[tuple[float, float, RequestInfo]] | None = (
+ _priority_queue: Optional[list[tuple[float, float, RequestInfo]]] = (
None # (priority, timestamp, request)
)
@@ -152,15 +152,15 @@ class AsyncPeerConnection:
peer_interested: bool = False
# Connection management
- connection_task: asyncio.Task | None = None
- error_message: str | None = None
+ connection_task: Optional[asyncio.Task] = None
+ error_message: Optional[str] = None
# Encryption support
is_encrypted: bool = False
encryption_cipher: Any = None # CipherSuite instance from MSE handshake
# Reserved bytes from handshake (for extension support detection)
- reserved_bytes: bytes | None = None
+ reserved_bytes: Optional[bytes] = None
# Per-peer rate limiting (upload throttling)
per_peer_upload_limit_kib: int = 0 # KiB/s, 0 = unlimited
@@ -172,23 +172,25 @@ class AsyncPeerConnection:
_quality_probation_started: float = 0.0
# Connection pool support
- _pooled_connection: Any | None = None # Pooled connection from connection pool
- _pooled_connection_key: str | None = None # Key for connection pool lookup
+ _pooled_connection: Optional[Any] = None # Pooled connection from connection pool
+ _pooled_connection_key: Optional[str] = None # Key for connection pool lookup
# Connection timing and status
- connection_start_time: float | None = (
+ connection_start_time: Optional[float] = (
None # Timestamp when connection was established
)
is_seeder: bool = False # Whether peer is a seeder (has all pieces)
completion_percent: float = 0.0 # Peer's completion percentage (0.0-1.0)
# Callback functions (set by connection manager)
- on_peer_connected: Callable[[AsyncPeerConnection], None] | None = None
- on_peer_disconnected: Callable[[AsyncPeerConnection], None] | None = None
- on_bitfield_received: (
- Callable[[AsyncPeerConnection, BitfieldMessage], None] | None
- ) = None
- on_piece_received: Callable[[AsyncPeerConnection, PieceMessage], None] | None = None
+ on_peer_connected: Optional[Callable[[AsyncPeerConnection], None]] = None
+ on_peer_disconnected: Optional[Callable[[AsyncPeerConnection], None]] = None
+ on_bitfield_received: Optional[
+ Callable[[AsyncPeerConnection, BitfieldMessage], None]
+ ] = None
+ on_piece_received: Optional[Callable[[AsyncPeerConnection, PieceMessage], None]] = (
+ None
+ )
def __str__(self):
"""Return string representation of the connection."""
@@ -293,7 +295,7 @@ def quality_probation_started(self, value: float) -> None:
self._quality_probation_started = value
@property
- def pooled_connection(self) -> Any | None:
+ def pooled_connection(self) -> Optional[Any]:
"""Get pooled connection if available.
Returns:
@@ -303,7 +305,7 @@ def pooled_connection(self) -> Any | None:
return self._pooled_connection
@pooled_connection.setter
- def pooled_connection(self, value: Any | None) -> None:
+ def pooled_connection(self, value: Optional[Any]) -> None:
"""Set pooled connection.
Args:
@@ -313,7 +315,7 @@ def pooled_connection(self, value: Any | None) -> None:
self._pooled_connection = value
@property
- def pooled_connection_key(self) -> str | None:
+ def pooled_connection_key(self) -> Optional[str]:
"""Get pooled connection key if available.
Returns:
@@ -323,7 +325,7 @@ def pooled_connection_key(self) -> str | None:
return self._pooled_connection_key
@pooled_connection_key.setter
- def pooled_connection_key(self, value: str | None) -> None:
+ def pooled_connection_key(self, value: Optional[str]) -> None:
"""Set pooled connection key.
Args:
@@ -487,9 +489,9 @@ def __init__(
self,
torrent_data: dict[str, Any],
piece_manager: Any,
- peer_id: bytes | None = None,
+ peer_id: Optional[bytes] = None,
key_manager: Any = None, # Ed25519KeyManager
- max_peers_per_torrent: int | None = None,
+ max_peers_per_torrent: Optional[int] = None,
):
"""Initialize async peer connection manager.
@@ -584,7 +586,7 @@ def __init__(
)
# Adaptive timeout calculator (lazy initialization)
- self._timeout_calculator: Any | None = None
+ self._timeout_calculator: Optional[Any] = None
# Failed peer tracking with exponential backoff
# CRITICAL FIX: Track failure count for exponential backoff instead of just timestamp
@@ -613,7 +615,7 @@ def __init__(
str, dict[str, Any]
] = {} # peer_key -> peer_data
self._tracker_retry_lock = asyncio.Lock()
- self._tracker_retry_task: asyncio.Task | None = None
+ self._tracker_retry_task: Optional[asyncio.Task] = None
# CRITICAL FIX: Global connection limiter for Windows to prevent WinError 121 and WinError 10055
# Windows has strict limits on socket buffers and OS-level TCP connection semaphores
@@ -651,14 +653,14 @@ def __init__(
# Choking management
self.upload_slots: list[AsyncPeerConnection] = []
- self.optimistic_unchoke: AsyncPeerConnection | None = None
+ self.optimistic_unchoke: Optional[AsyncPeerConnection] = None
self.optimistic_unchoke_time: float = 0.0
# Background tasks
- self._choking_task: asyncio.Task | None = None
- self._stats_task: asyncio.Task | None = None
- self._reconnection_task: asyncio.Task | None = None
- self._peer_evaluation_task: asyncio.Task | None = None
+ self._choking_task: Optional[asyncio.Task] = None
+ self._stats_task: Optional[asyncio.Task] = None
+ self._reconnection_task: Optional[asyncio.Task] = None
+ self._peer_evaluation_task: Optional[asyncio.Task] = None
# Running state flag for idempotency
self._running: bool = False
@@ -670,19 +672,19 @@ def __init__(
self._piece_selection_debounce_lock = asyncio.Lock()
# Callbacks
- self._on_peer_connected: Callable[[AsyncPeerConnection], None] | None = None
- self._external_peer_disconnected: (
- Callable[[AsyncPeerConnection], None] | None
- ) = None
- self._on_peer_disconnected: Callable[[AsyncPeerConnection], None] | None = (
+ self._on_peer_connected: Optional[Callable[[AsyncPeerConnection], None]] = None
+ self._external_peer_disconnected: Optional[
+ Callable[[AsyncPeerConnection], None]
+ ] = None
+ self._on_peer_disconnected: Optional[Callable[[AsyncPeerConnection], None]] = (
self._peer_disconnected_wrapper
)
- self._on_bitfield_received: (
- Callable[[AsyncPeerConnection, BitfieldMessage], None] | None
- ) = None
- self._on_piece_received: (
- Callable[[AsyncPeerConnection, PieceMessage], None] | None
- ) = None
+ self._on_bitfield_received: Optional[
+ Callable[[AsyncPeerConnection, BitfieldMessage], None]
+ ] = None
+ self._on_piece_received: Optional[
+ Callable[[AsyncPeerConnection, PieceMessage], None]
+ ] = None
# Message handlers
self.message_handlers: dict[
@@ -716,14 +718,14 @@ def __init__(
)
# Security manager and privacy flags (set via public setters)
- self._security_manager: Any | None = None
+ self._security_manager: Optional[Any] = None
self._is_private: bool = False
# Event bus (optional, set externally if needed)
- self._event_bus: Any | None = None # EventBus | None
- self.event_bus: Any | None = None # EventBus | None
+ self._event_bus: Optional[Any] = None # Optional[EventBus]
+ self.event_bus: Optional[Any] = None # Optional[EventBus]
- def set_security_manager(self, security_manager: Any | None) -> None:
+ def set_security_manager(self, security_manager: Optional[Any]) -> None:
"""Set the security manager for peer validation.
Args:
@@ -761,13 +763,13 @@ async def _propagate_callbacks_to_connections(self) -> None:
@property
def on_piece_received(
self,
- ) -> Callable[[AsyncPeerConnection, PieceMessage], None] | None:
+ ) -> Optional[Callable[[AsyncPeerConnection, PieceMessage], None]]:
"""Get the on_piece_received callback."""
return self._on_piece_received
@on_piece_received.setter
def on_piece_received(
- self, value: Callable[[AsyncPeerConnection, PieceMessage], None] | None
+ self, value: Optional[Callable[[AsyncPeerConnection, PieceMessage], None]]
) -> None:
"""Set the on_piece_received callback and propagate to existing connections."""
self.logger.info(
@@ -795,13 +797,13 @@ def on_piece_received(
@property
def on_bitfield_received(
self,
- ) -> Callable[[AsyncPeerConnection, BitfieldMessage], None] | None:
+ ) -> Optional[Callable[[AsyncPeerConnection, BitfieldMessage], None]]:
"""Get the on_bitfield_received callback."""
return self._on_bitfield_received
@on_bitfield_received.setter
def on_bitfield_received(
- self, value: Callable[[AsyncPeerConnection, BitfieldMessage], None] | None
+ self, value: Optional[Callable[[AsyncPeerConnection, BitfieldMessage], None]]
) -> None:
"""Set the on_bitfield_received callback and propagate to existing connections."""
self._on_bitfield_received = value
@@ -814,13 +816,13 @@ def on_bitfield_received(
pass
@property
- def on_peer_connected(self) -> Callable[[AsyncPeerConnection], None] | None:
+ def on_peer_connected(self) -> Optional[Callable[[AsyncPeerConnection], None]]:
"""Get the on_peer_connected callback."""
return self._on_peer_connected
@on_peer_connected.setter
def on_peer_connected(
- self, value: Callable[[AsyncPeerConnection], None] | None
+ self, value: Optional[Callable[[AsyncPeerConnection], None]]
) -> None:
"""Set the on_peer_connected callback and propagate to existing connections."""
self._on_peer_connected = value
@@ -833,13 +835,13 @@ def on_peer_connected(
pass
@property
- def on_peer_disconnected(self) -> Callable[[AsyncPeerConnection], None] | None:
+ def on_peer_disconnected(self) -> Optional[Callable[[AsyncPeerConnection], None]]:
"""Get the on_peer_disconnected callback."""
return self._external_peer_disconnected
@on_peer_disconnected.setter
def on_peer_disconnected(
- self, value: Callable[[AsyncPeerConnection], None] | None
+ self, value: Optional[Callable[[AsyncPeerConnection], None]]
) -> None:
"""Set the on_peer_disconnected callback and propagate to existing connections."""
self._external_peer_disconnected = value
@@ -1005,7 +1007,7 @@ def _get_peer_key(self, peer: Any) -> str:
def _record_probation_peer(
self,
peer_key: str,
- connection: AsyncPeerConnection | None = None,
+ connection: Optional[AsyncPeerConnection] = None,
) -> None:
"""Mark peer as probationary until it proves useful."""
self._ensure_quality_tracking_initialized()
@@ -1020,7 +1022,7 @@ def _mark_peer_quality_verified(
self,
peer_key: str,
reason: str,
- connection: AsyncPeerConnection | None = None,
+ connection: Optional[AsyncPeerConnection] = None,
) -> None:
"""Mark peer as quality-verified and remove from probation."""
self._ensure_quality_tracking_initialized()
@@ -1298,7 +1300,7 @@ def _calculate_adaptive_handshake_timeout(self) -> float:
return self._timeout_calculator.calculate_handshake_timeout()
def _calculate_timeout(
- self, connection: AsyncPeerConnection | None = None
+ self, connection: Optional[AsyncPeerConnection] = None
) -> float:
"""Calculate adaptive timeout based on measured RTT.
@@ -1369,7 +1371,7 @@ async def _calculate_request_priority(
self,
piece_index: int,
piece_manager: Any,
- peer_connection: AsyncPeerConnection | None = None,
+ peer_connection: Optional[AsyncPeerConnection] = None,
) -> tuple[float, float]:
"""Calculate priority score for a request with bandwidth consideration.
@@ -1650,7 +1652,7 @@ def _coalesce_requests(self, requests: list[RequestInfo]) -> list[RequestInfo]:
sorted_requests = sorted(requests, key=lambda r: (r.piece_index, r.begin))
coalesced: list[RequestInfo] = []
- current: RequestInfo | None = None
+ current: Optional[RequestInfo] = None
for req in sorted_requests:
if current is None:
@@ -3031,7 +3033,7 @@ async def connect_to_peers(
)
try:
- pending_enqueue_reason: str | None = None
+ pending_enqueue_reason: Optional[str] = None
for batch_start in range(0, len(all_peers_to_process), batch_size):
# CRITICAL FIX: Check if manager is shutting down before processing batch
if not self._running:
@@ -3969,7 +3971,7 @@ async def _connect_to_peer(self, peer_info: PeerInfo) -> None:
# CRITICAL FIX: Acquire semaphore to limit concurrent connection attempts (BitTorrent spec compliant)
# This prevents OS socket exhaustion on Windows and other platforms
async with self._global_connection_semaphore:
- connection: AsyncPeerConnection | None = None
+ connection: Optional[AsyncPeerConnection] = None
try:
# Check if torrent is private and validate peer source (BEP 27)
is_private = getattr(
@@ -7296,7 +7298,7 @@ async def _handle_extension_message(
num_pieces = math.ceil(metadata_size / 16384)
# Recreate state for late response handling
piece_events: dict[int, asyncio.Event] = {}
- piece_data_dict: dict[int, bytes | None] = {}
+ piece_data_dict: dict[int, Optional[bytes]] = {}
for piece_idx in range(num_pieces):
piece_events[piece_idx] = asyncio.Event()
piece_data_dict[piece_idx] = None
@@ -12243,7 +12245,7 @@ async def _trigger_metadata_exchange(
else:
peer_key = str(connection.peer_info)
piece_events: dict[int, asyncio.Event] = {}
- piece_data_dict: dict[int, bytes | None] = {}
+ piece_data_dict: dict[int, Optional[bytes]] = {}
for piece_idx in range(num_pieces):
piece_events[piece_idx] = asyncio.Event()
@@ -13652,7 +13654,7 @@ async def set_per_peer_rate_limit(
)
return True
- async def get_per_peer_rate_limit(self, peer_key: str) -> int | None:
+ async def get_per_peer_rate_limit(self, peer_key: str) -> Optional[int]:
"""Get per-peer upload rate limit for a specific peer.
Args:
diff --git a/ccbt/peer/connection_pool.py b/ccbt/peer/connection_pool.py
index de89939..7efbc50 100644
--- a/ccbt/peer/connection_pool.py
+++ b/ccbt/peer/connection_pool.py
@@ -11,7 +11,7 @@
import logging
import time
from dataclasses import dataclass, field
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
try:
import psutil
@@ -120,8 +120,8 @@ def __init__(
self.semaphore = asyncio.Semaphore(self.max_connections)
# Background tasks
- self._health_check_task: asyncio.Task | None = None
- self._cleanup_task: asyncio.Task | None = None
+ self._health_check_task: Optional[asyncio.Task] = None
+ self._cleanup_task: Optional[asyncio.Task] = None
# State
self._running = False
@@ -338,7 +338,7 @@ async def __aexit__(
"""Async context manager exit."""
await self.stop()
- async def acquire(self, peer_info: PeerInfo) -> Any | None:
+ async def acquire(self, peer_info: PeerInfo) -> Optional[Any]:
"""Acquire a connection for a peer.
Args:
@@ -562,7 +562,7 @@ def get_pool_stats(self) -> dict[str, Any]:
"warmup_success_rate": warmup_success_rate,
}
- async def _create_connection(self, peer_info: PeerInfo) -> Any | None:
+ async def _create_connection(self, peer_info: PeerInfo) -> Optional[Any]:
"""Create a new connection to a peer.
Args:
@@ -605,7 +605,7 @@ async def _create_connection(self, peer_info: PeerInfo) -> Any | None:
async def _create_peer_connection(
self, peer_info: PeerInfo
- ) -> PooledConnection | None:
+ ) -> Optional[PooledConnection]:
"""Create a peer connection.
Establishes a TCP connection to the peer and returns a PooledConnection
diff --git a/ccbt/peer/peer.py b/ccbt/peer/peer.py
index d630fb3..47c0cac 100644
--- a/ccbt/peer/peer.py
+++ b/ccbt/peer/peer.py
@@ -13,7 +13,7 @@
import socket
import struct
from collections import deque
-from typing import Any
+from typing import Any, Optional, Union
from ccbt.config.config import get_config
from ccbt.models import MessageType
@@ -32,7 +32,9 @@ def __init__(self) -> None:
self.am_interested: bool = False # We are interested in the peer
self.peer_choking: bool = True # Peer is choking us
self.peer_interested: bool = False # Peer is interested in us
- self.bitfield: bytes | None = None # Peer's bitfield (which pieces they have)
+ self.bitfield: Optional[bytes] = (
+ None # Peer's bitfield (which pieces they have)
+ )
self.pieces_we_have: set[int] = set() # Pieces we have downloaded
def __str__(self) -> str:
@@ -78,9 +80,9 @@ def __init__(
self,
info_hash: bytes,
peer_id: bytes,
- reserved_bytes: bytes | None = None,
- ed25519_public_key: bytes | None = None,
- ed25519_signature: bytes | None = None,
+ reserved_bytes: Optional[bytes] = None,
+ ed25519_public_key: Optional[bytes] = None,
+ ed25519_signature: Optional[bytes] = None,
) -> None:
"""Initialize handshake.
@@ -113,8 +115,8 @@ def __init__(
self.reserved_bytes: bytes = (
reserved_bytes if reserved_bytes is not None else self.RESERVED_BYTES
)
- self.ed25519_public_key: bytes | None = ed25519_public_key
- self.ed25519_signature: bytes | None = ed25519_signature
+ self.ed25519_public_key: Optional[bytes] = ed25519_public_key
+ self.ed25519_signature: Optional[bytes] = ed25519_signature
def encode(self) -> bytes:
"""Encode handshake to bytes.
@@ -751,7 +753,7 @@ def __init__(self, max_buffer_size: int = 1024 * 1024): # 1MB buffer
# Async message queue
self.message_queue = asyncio.Queue(maxsize=1000)
self.buffer = bytearray()
- self.buffer_view: memoryview | None = None
+ self.buffer_view: Optional[memoryview] = None
# Object pools for message reuse
self.message_pools = {
@@ -772,7 +774,7 @@ def __init__(self, max_buffer_size: int = 1024 * 1024): # 1MB buffer
self.logger = logging.getLogger(__name__)
- async def feed_data(self, data: bytes | memoryview) -> None:
+ async def feed_data(self, data: Union[bytes, memoryview]) -> None:
"""Feed data to the decoder asynchronously.
Args:
@@ -788,7 +790,7 @@ async def feed_data(self, data: bytes | memoryview) -> None:
# Process complete messages from buffer
await self._process_buffer()
- async def get_message(self) -> PeerMessage | None:
+ async def get_message(self) -> Optional[PeerMessage]:
"""Get the next message from the queue.
Returns:
@@ -1016,7 +1018,7 @@ def __init__(self, max_buffer_size: int = 1024 * 1024): # 1MB buffer
# Simple buffer for partial messages
self.buffer = bytearray()
- self.buffer_view: memoryview | None = None
+ self.buffer_view: Optional[memoryview] = None
# Object pools for message reuse
self.message_pools = {
@@ -1037,7 +1039,7 @@ def __init__(self, max_buffer_size: int = 1024 * 1024): # 1MB buffer
self.logger = logging.getLogger(__name__)
- def add_data(self, data: bytes | memoryview) -> list[PeerMessage]:
+ def add_data(self, data: Union[bytes, memoryview]) -> list[PeerMessage]:
"""Add data to the buffer and return any complete messages.
Args:
@@ -1095,7 +1097,7 @@ def add_data(self, data: bytes | memoryview) -> list[PeerMessage]:
return messages
- def _decode_next_message(self) -> PeerMessage | None:
+ def _decode_next_message(self) -> Optional[PeerMessage]:
"""Decode the next message from the buffer using memoryview."""
if self.buffer_size < 4:
return None # Need at least 4 bytes for length
@@ -1593,7 +1595,7 @@ def get_stats(self) -> dict[str, Any]:
# Global socket optimizer instance (lazy initialization)
-_socket_optimizer: SocketOptimizer | None = None
+_socket_optimizer: Optional[SocketOptimizer] = None
def _get_socket_optimizer() -> SocketOptimizer:
diff --git a/ccbt/peer/peer_connection.py b/ccbt/peer/peer_connection.py
index 2b72bd2..3c36d58 100644
--- a/ccbt/peer/peer_connection.py
+++ b/ccbt/peer/peer_connection.py
@@ -9,7 +9,7 @@
import time
from dataclasses import dataclass, field
from enum import Enum
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional, Union
if TYPE_CHECKING: # pragma: no cover - type checking only, not executed at runtime
import asyncio
@@ -51,14 +51,14 @@ class PeerConnection:
peer_info: PeerInfo
torrent_data: dict[str, Any]
- reader: asyncio.StreamReader | EncryptedStreamReader | None = None
- writer: asyncio.StreamWriter | EncryptedStreamWriter | None = None
+ reader: Optional[Union[asyncio.StreamReader, EncryptedStreamReader]] = None
+ writer: Optional[Union[asyncio.StreamWriter, EncryptedStreamWriter]] = None
state: ConnectionState = ConnectionState.DISCONNECTED
peer_state: PeerState = field(default_factory=PeerState)
message_decoder: MessageDecoder = field(default_factory=MessageDecoder)
last_activity: float = field(default_factory=time.time)
- connection_task: asyncio.Task | None = None
- error_message: str | None = None
+ connection_task: Optional[asyncio.Task] = None
+ error_message: Optional[str] = None
# Encryption support
is_encrypted: bool = False
diff --git a/ccbt/peer/ssl_peer.py b/ccbt/peer/ssl_peer.py
index d77865e..bc4b8b4 100644
--- a/ccbt/peer/ssl_peer.py
+++ b/ccbt/peer/ssl_peer.py
@@ -11,6 +11,7 @@
import ssl
import time
from dataclasses import dataclass
+from typing import Optional
from ccbt.config.config import get_config
from ccbt.extensions.manager import get_extension_manager
@@ -243,7 +244,7 @@ async def _send_ssl_extension_message(
writer: asyncio.StreamWriter,
peer_id: str,
timeout: float = 5.0, # noqa: ARG002 - Required by interface signature
- ) -> tuple[int, bool] | None:
+ ) -> Optional[tuple[int, bool]]:
"""Send SSL extension message and wait for response.
Args:
@@ -334,7 +335,7 @@ async def negotiate_ssl_after_handshake(
peer_id: str,
peer_ip: str,
peer_port: int,
- ) -> tuple[asyncio.StreamReader, asyncio.StreamWriter] | None:
+ ) -> Optional[tuple[asyncio.StreamReader, asyncio.StreamWriter]]:
"""Negotiate SSL after BitTorrent handshake.
This method attempts to upgrade the connection to SSL after the
diff --git a/ccbt/peer/tcp_server.py b/ccbt/peer/tcp_server.py
index 3c032b8..b2db475 100644
--- a/ccbt/peer/tcp_server.py
+++ b/ccbt/peer/tcp_server.py
@@ -9,7 +9,7 @@
import asyncio
import logging
import socket
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.config.config import get_config
from ccbt.utils.exceptions import HandshakeError
@@ -23,7 +23,9 @@
class IncomingPeerServer:
"""TCP server for accepting incoming BitTorrent peer connections."""
- def __init__(self, session_manager: AsyncSessionManager, config: Any | None = None):
+ def __init__(
+ self, session_manager: AsyncSessionManager, config: Optional[Any] = None
+ ):
"""Initialize incoming peer server.
Args:
@@ -33,7 +35,7 @@ def __init__(self, session_manager: AsyncSessionManager, config: Any | None = No
"""
self.session_manager = session_manager
self.config = config or get_config()
- self.server: asyncio.Server | None = None
+ self.server: Optional[asyncio.Server] = None
self._running = False
self.logger = logging.getLogger(__name__)
@@ -226,7 +228,7 @@ def is_serving(self) -> bool:
return self._running and self.server is not None and self.server.is_serving()
@property
- def port(self) -> int | None:
+ def port(self) -> Optional[int]:
"""Get the port the server is bound to.
Returns:
diff --git a/ccbt/peer/utp_peer.py b/ccbt/peer/utp_peer.py
index b8969ad..2dbc799 100644
--- a/ccbt/peer/utp_peer.py
+++ b/ccbt/peer/utp_peer.py
@@ -25,7 +25,7 @@
)
if TYPE_CHECKING: # pragma: no cover
- from typing import Any, Callable
+ from typing import Any, Callable, Optional
logger = logging.getLogger(__name__)
@@ -152,13 +152,13 @@ class UTPPeerConnection(AsyncPeerConnection):
"""
# uTP-specific fields
- utp_connection: UTPConnection | None = None
+ utp_connection: Optional[UTPConnection] = None
# Callbacks for compatibility with AsyncPeerConnection interface
- on_peer_connected: Callable[[AsyncPeerConnection], None] | None = None
- on_peer_disconnected: Callable[[AsyncPeerConnection], None] | None = None
- on_bitfield_received: Callable[[AsyncPeerConnection, Any], None] | None = None
- on_piece_received: Callable[[AsyncPeerConnection, Any], None] | None = None
+ on_peer_connected: Optional[Callable[[AsyncPeerConnection], None]] = None
+ on_peer_disconnected: Optional[Callable[[AsyncPeerConnection], None]] = None
+ on_bitfield_received: Optional[Callable[[AsyncPeerConnection, Any], None]] = None
+ on_piece_received: Optional[Callable[[AsyncPeerConnection, Any], None]] = None
def __post_init__(self) -> None:
"""Initialize uTP peer connection."""
diff --git a/ccbt/peer/webrtc_peer.py b/ccbt/peer/webrtc_peer.py
index 3579a7f..4693bbb 100644
--- a/ccbt/peer/webrtc_peer.py
+++ b/ccbt/peer/webrtc_peer.py
@@ -11,7 +11,7 @@
import logging
import time
from dataclasses import dataclass, field
-from typing import Any, Callable
+from typing import Any, Callable, Optional
from ccbt.peer.async_peer_connection import (
AsyncPeerConnection,
@@ -31,15 +31,15 @@ class WebRTCPeerConnection(AsyncPeerConnection):
enabling seamless integration with the existing peer connection manager.
"""
- webtorrent_protocol: Any | None = None # WebTorrentProtocol
+ webtorrent_protocol: Optional[Any] = None # WebTorrentProtocol
_message_queue: asyncio.Queue[bytes] = field(default_factory=asyncio.Queue)
- _receive_task: asyncio.Task | None = None
+ _receive_task: Optional[asyncio.Task] = None
# Callbacks for compatibility with AsyncPeerConnection interface
- on_peer_connected: Callable[[AsyncPeerConnection], None] | None = None
- on_peer_disconnected: Callable[[AsyncPeerConnection], None] | None = None
- on_bitfield_received: Callable[[AsyncPeerConnection, Any], None] | None = None
- on_piece_received: Callable[[AsyncPeerConnection, Any], None] | None = None
+ on_peer_connected: Optional[Callable[[AsyncPeerConnection], None]] = None
+ on_peer_disconnected: Optional[Callable[[AsyncPeerConnection], None]] = None
+ on_bitfield_received: Optional[Callable[[AsyncPeerConnection, Any], None]] = None
+ on_piece_received: Optional[Callable[[AsyncPeerConnection, Any], None]] = None
def __post_init__(self) -> None:
"""Initialize WebRTC peer connection."""
@@ -169,7 +169,7 @@ async def send_message(self, message: bytes) -> None:
self.stats.bytes_uploaded += len(message)
self.stats.last_activity = time.time()
- async def receive_message(self) -> bytes | None:
+ async def receive_message(self) -> Optional[bytes]:
"""Receive message from WebRTC data channel.
Returns:
@@ -276,8 +276,12 @@ def has_timed_out(self, timeout: float = 60.0) -> bool:
# Note: WebRTC connections don't use traditional readers/writers
# The data channel replaces the stream reader/writer pattern
# Store as private attributes to satisfy dataclass field requirements
- _reader: asyncio.StreamReader | None = field(default=None, init=False, repr=False)
- _writer: asyncio.StreamWriter | None = field(default=None, init=False, repr=False)
+ _reader: Optional[asyncio.StreamReader] = field(
+ default=None, init=False, repr=False
+ )
+ _writer: Optional[asyncio.StreamWriter] = field(
+ default=None, init=False, repr=False
+ )
@property
def reader(self) -> None: # type: ignore[override]
diff --git a/ccbt/piece/async_metadata_exchange.py b/ccbt/piece/async_metadata_exchange.py
index d500e29..c10bd9b 100644
--- a/ccbt/piece/async_metadata_exchange.py
+++ b/ccbt/piece/async_metadata_exchange.py
@@ -35,7 +35,7 @@
import time
from dataclasses import dataclass, field
from enum import Enum
-from typing import Any, Callable
+from typing import Any, Callable, Optional
from ccbt.config.config import get_config
from ccbt.core.bencode import BencodeDecoder, BencodeEncoder
@@ -61,13 +61,13 @@ class PeerMetadataSession:
"""Metadata exchange session with a single peer."""
peer_info: tuple[str, int] # (ip, port)
- reader: asyncio.StreamReader | None = None
- writer: asyncio.StreamWriter | None = None
+ reader: Optional[asyncio.StreamReader] = None
+ writer: Optional[asyncio.StreamWriter] = None
state: MetadataState = MetadataState.CONNECTING
# Extended protocol
- ut_metadata_id: int | None = None
- metadata_size: int | None = None
+ ut_metadata_id: Optional[int] = None
+ metadata_size: Optional[int] = None
# Reliability tracking
reliability_score: float = 1.0
@@ -93,7 +93,7 @@ class MetadataPiece:
"""Represents a metadata piece."""
index: int
- data: bytes | None = None
+ data: Optional[bytes] = None
received_count: int = 0
sources: set[tuple[str, int]] = field(default_factory=set)
@@ -101,7 +101,7 @@ class MetadataPiece:
class AsyncMetadataExchange:
"""High-performance async metadata exchange manager."""
- def __init__(self, info_hash: bytes, peer_id: bytes | None = None):
+ def __init__(self, info_hash: bytes, peer_id: Optional[bytes] = None):
"""Initialize async metadata exchange.
Args:
@@ -119,21 +119,21 @@ def __init__(self, info_hash: bytes, peer_id: bytes | None = None):
# Session management
self.sessions: dict[tuple[str, int], PeerMetadataSession] = {}
self.metadata_pieces: dict[int, MetadataPiece] = {}
- self.metadata_size: int | None = None
+ self.metadata_size: Optional[int] = None
self.num_pieces: int = 0
# Completion tracking
self.completed = False
- self.metadata_data: bytes | None = None
- self.metadata_dict: dict[bytes, Any] | None = None
+ self.metadata_data: Optional[bytes] = None
+ self.metadata_dict: Optional[dict[bytes, Any]] = None
# Background tasks
- self._cleanup_task: asyncio.Task | None = None
+ self._cleanup_task: Optional[asyncio.Task] = None
# Callbacks
- self.on_progress: Callable | None = None
- self.on_complete: Callable | None = None
- self.on_error: Callable | None = None
+ self.on_progress: Optional[Callable] = None
+ self.on_complete: Optional[Callable] = None
+ self.on_error: Optional[Callable] = None
self.logger = logging.getLogger(__name__)
@@ -188,7 +188,7 @@ async def fetch_metadata(
peers: list[dict[str, Any]],
max_peers: int = 10,
timeout: float = 30.0,
- ) -> dict[bytes, Any] | None:
+ ) -> Optional[dict[bytes, Any]]:
"""Fetch metadata from multiple peers in parallel.
Args:
@@ -973,8 +973,8 @@ async def fetch_metadata_from_peers(
info_hash: bytes,
peers: list[dict[str, Any]],
timeout: float = 30.0,
- peer_id: bytes | None = None,
-) -> dict[bytes, Any] | None:
+ peer_id: Optional[bytes] = None,
+) -> Optional[dict[bytes, Any]]:
"""High-performance parallel metadata fetch.
Args:
@@ -1129,7 +1129,7 @@ def __init__(self, max_size: int = 100):
self.cache: dict[bytes, dict[str, Any]] = {}
self.access_times: dict[bytes, float] = {}
- def get(self, info_hash: bytes) -> dict[str, Any] | None:
+ def get(self, info_hash: bytes) -> Optional[dict[str, Any]]:
"""Get cached metadata."""
if info_hash in self.cache:
self.access_times[info_hash] = time.time()
@@ -1262,7 +1262,7 @@ async def _fetch_metadata_from_peer(
peer_info: tuple[str, int],
_info_hash: bytes,
timeout: float = 30.0,
-) -> dict[str, Any] | None: # pragma: no cover - Internal helper stub for testing
+) -> Optional[dict[str, Any]]: # pragma: no cover - Internal helper stub for testing
"""Fetch metadata from a single peer."""
try:
_reader, _writer = await _connect_to_peer(
@@ -1281,7 +1281,7 @@ async def fetch_metadata_from_peers_async(
peers: list[dict[str, Any]],
info_hash: bytes,
timeout: int = 30,
-) -> dict[str, Any] | None:
+) -> Optional[dict[str, Any]]:
"""Fetch metadata from peers asynchronously.
Args:
diff --git a/ccbt/piece/async_piece_manager.py b/ccbt/piece/async_piece_manager.py
index 951cab8..91ab028 100644
--- a/ccbt/piece/async_piece_manager.py
+++ b/ccbt/piece/async_piece_manager.py
@@ -14,7 +14,7 @@
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass, field
from enum import Enum
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any, Callable, Optional
from ccbt.config.config import get_config
from ccbt.models import (
@@ -54,7 +54,7 @@ class PieceBlock:
requested_from: set[str] = field(
default_factory=set,
) # Peer keys that have this block
- received_from: str | None = None # Peer key that actually sent this block
+ received_from: Optional[str] = None # Peer key that actually sent this block
def is_complete(self) -> bool:
"""Check if block is complete."""
@@ -89,7 +89,7 @@ class PieceData:
last_activity_time: float = 0.0 # Timestamp of last block received
last_request_time: float = 0.0 # Timestamp when piece was last requested
request_timeout: float = 120.0 # Timeout for piece requests (seconds)
- primary_peer: str | None = None # Peer key that provided most blocks
+ primary_peer: Optional[str] = None # Peer key that provided most blocks
peer_block_counts: dict[str, int] = field(
default_factory=dict
) # peer_key -> number of blocks received
@@ -306,7 +306,7 @@ class AsyncPieceManager:
def __init__(
self,
torrent_data: dict[str, Any],
- file_selection_manager: Any | None = None,
+ file_selection_manager: Optional[Any] = None,
):
"""Initialize async piece manager.
@@ -494,21 +494,23 @@ def __init__(
self.download_start_time = time.time()
self.bytes_downloaded = 0
self._current_sequential_piece: int = 0 # Track current sequential position
- self._peer_manager: Any | None = None # Store peer manager for piece requests
+ self._peer_manager: Optional[Any] = (
+ None # Store peer manager for piece requests
+ )
# Callbacks
- self.on_piece_completed: Callable[[int], None] | None = None
- self.on_piece_verified: Callable[[int], None] | None = None
- self.on_download_complete: Callable[[], None] | None = None
- self.on_file_assembled: Callable[[int], None] | None = None
- self.on_checkpoint_save: Callable[[], None] | None = None
+ self.on_piece_completed: Optional[Callable[[int], None]] = None
+ self.on_piece_verified: Optional[Callable[[int], None]] = None
+ self.on_download_complete: Optional[Callable[[], None]] = None
+ self.on_file_assembled: Optional[Callable[[int], None]] = None
+ self.on_checkpoint_save: Optional[Callable[[], None]] = None
# File assembler (set by download manager)
- self.file_assembler: Any | None = None
+ self.file_assembler: Optional[Any] = None
# Background tasks
- self._hash_worker_task: asyncio.Task | None = None
- self._piece_selector_task: asyncio.Task | None = None
+ self._hash_worker_task: Optional[asyncio.Task] = None
+ self._piece_selector_task: Optional[asyncio.Task] = None
self._background_tasks: set[asyncio.Task] = set()
self.logger = logging.getLogger(__name__)
@@ -3009,7 +3011,7 @@ async def handle_piece_block(
piece_index: int,
begin: int,
data: bytes,
- peer_key: str | None = None,
+ peer_key: Optional[str] = None,
) -> None:
"""Handle a received piece block.
@@ -4015,7 +4017,7 @@ async def _verify_hybrid_piece(
self.logger.exception("Error in hybrid piece verification")
return False
- def _get_v2_piece_hash(self, piece_index: int) -> bytes | None:
+ def _get_v2_piece_hash(self, piece_index: int) -> Optional[bytes]:
"""Get SHA-256 hash for a piece from v2 piece layers.
For hybrid torrents, piece layers are organized by file (pieces_root).
@@ -5163,7 +5165,7 @@ async def _select_pieces(self) -> None:
len(self.peer_availability),
)
- async def _select_rarest_piece(self) -> int | None:
+ async def _select_rarest_piece(self) -> Optional[int]:
"""Select a single piece using rarest-first algorithm."""
async with self.lock:
missing_pieces = [
@@ -8017,7 +8019,7 @@ async def stop_download(self) -> None:
# LOGGING OPTIMIZATION: Keep as INFO - important lifecycle event
self.logger.info("Stopped piece download")
- def get_piece_data(self, piece_index: int) -> bytes | None:
+ def get_piece_data(self, piece_index: int) -> Optional[bytes]:
"""Get complete piece data if available."""
if (
piece_index >= len(self.pieces)
@@ -8030,7 +8032,7 @@ def get_piece_data(self, piece_index: int) -> bytes | None:
return None
- def get_block(self, piece_index: int, begin: int, length: int) -> bytes | None:
+ def get_block(self, piece_index: int, begin: int, length: int) -> Optional[bytes]:
"""Get a block of data from a piece."""
if (
piece_index >= len(self.pieces)
diff --git a/ccbt/piece/file_selection.py b/ccbt/piece/file_selection.py
index 2d9de25..7d77e43 100644
--- a/ccbt/piece/file_selection.py
+++ b/ccbt/piece/file_selection.py
@@ -10,7 +10,7 @@
import logging
from dataclasses import dataclass
from enum import IntEnum
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING: # pragma: no cover - type checking only, not executed at runtime
from ccbt.models import TorrentInfo
@@ -439,7 +439,7 @@ async def update_file_progress(
if file_index in self.file_states:
self.file_states[file_index].bytes_downloaded = bytes_downloaded
- def get_file_state(self, file_index: int) -> FileSelectionState | None:
+ def get_file_state(self, file_index: int) -> Optional[FileSelectionState]:
"""Get selection state for a file.
Args:
diff --git a/ccbt/piece/hash_v2.py b/ccbt/piece/hash_v2.py
index 6591146..bb6758e 100644
--- a/ccbt/piece/hash_v2.py
+++ b/ccbt/piece/hash_v2.py
@@ -16,7 +16,7 @@
import hashlib
import logging
from enum import Enum
-from typing import TYPE_CHECKING, Any, BinaryIO
+from typing import TYPE_CHECKING, Any, BinaryIO, Optional, Union
if TYPE_CHECKING: # pragma: no cover - type checking only, not executed at runtime
from io import BytesIO
@@ -57,7 +57,7 @@ def hash_piece_v2(data: bytes) -> bytes:
def hash_piece_v2_streaming(
- data_source: BinaryIO | bytes | BytesIO,
+ data_source: Union[BinaryIO, bytes, BytesIO],
chunk_size: int = 65536,
) -> bytes:
"""Calculate SHA-256 hash of piece data using streaming for large pieces.
@@ -164,7 +164,7 @@ def verify_piece_v2(data: bytes, expected_hash: bytes) -> bool:
def verify_piece_v2_streaming(
- data_source: BinaryIO | bytes | BytesIO,
+ data_source: Union[BinaryIO, bytes, BytesIO],
expected_hash: bytes,
chunk_size: int = 65536,
) -> bool:
@@ -548,7 +548,7 @@ def hash_function(self):
def verify_piece(
data: bytes,
expected_hash: bytes,
- algorithm: HashAlgorithm | None = None,
+ algorithm: Optional[HashAlgorithm] = None,
) -> bool:
"""Verify piece data against expected hash using specified algorithm.
@@ -625,7 +625,7 @@ def verify_piece(
def verify_piece_streaming(
- data_source: BinaryIO | bytes | BytesIO,
+ data_source: Union[BinaryIO, bytes, BytesIO],
expected_hash: bytes,
algorithm: HashAlgorithm = HashAlgorithm.SHA256,
chunk_size: int = 65536,
diff --git a/ccbt/piece/metadata_exchange.py b/ccbt/piece/metadata_exchange.py
index 2b9b87c..f990a32 100644
--- a/ccbt/piece/metadata_exchange.py
+++ b/ccbt/piece/metadata_exchange.py
@@ -13,7 +13,7 @@
import math
import socket
import struct
-from typing import Any
+from typing import Any, Optional
from ccbt.core.bencode import BencodeDecoder, BencodeEncoder
@@ -61,8 +61,8 @@ def fetch_metadata_from_peers(
info_hash: bytes,
peers: list[dict[str, Any]],
timeout: float = 5.0,
- peer_id: bytes | None = None,
-) -> dict[bytes, Any] | None:
+ peer_id: Optional[bytes] = None,
+) -> Optional[dict[bytes, Any]]:
"""Fetch torrent metadata from a list of peers."""
if peer_id is None:
peer_id = b"-CC0101-" + b"x" * 12
diff --git a/ccbt/piece/piece_manager.py b/ccbt/piece/piece_manager.py
index d753380..b3d5a5f 100644
--- a/ccbt/piece/piece_manager.py
+++ b/ccbt/piece/piece_manager.py
@@ -6,7 +6,7 @@
import threading
from dataclasses import dataclass, field
from enum import Enum
-from typing import Any, Callable
+from typing import Any, Callable, Optional
class PieceState(Enum):
@@ -55,7 +55,7 @@ class PieceData:
blocks: list[PieceBlock] = field(default_factory=list)
state: PieceState = PieceState.MISSING
hash_verified: bool = False
- data_buffer: bytearray | None = None
+ data_buffer: Optional[bytearray] = None
def __post_init__(self):
"""Initialize blocks after creation."""
@@ -153,10 +153,10 @@ def __init__(self, torrent_data: dict[str, Any]):
self.lock = threading.Lock()
# Callbacks
- self.on_piece_completed: Callable[[int], None] | None = None
- self.on_piece_verified: Callable[[int], None] | None = None
- self.on_file_assembled: Callable[[int], None] | None = None
- self.on_download_complete: Callable[[], None] | None = None
+ self.on_piece_completed: Optional[Callable[[int], None]] = None
+ self.on_piece_verified: Optional[Callable[[int], None]] = None
+ self.on_file_assembled: Optional[Callable[[int], None]] = None
+ self.on_download_complete: Optional[Callable[[], None]] = None
# File assembler
self.file_assembler = None
@@ -173,7 +173,7 @@ def get_missing_pieces(self) -> list[int]:
if piece.state == PieceState.MISSING
]
- def get_random_missing_piece(self) -> int | None:
+ def get_random_missing_piece(self) -> Optional[int]:
"""Get a random missing piece index."""
missing = self.get_missing_pieces()
if not missing:
@@ -291,7 +291,7 @@ def _check_download_complete(self) -> None:
if self.on_download_complete:
self.on_download_complete()
- def get_piece_data(self, piece_index: int) -> bytes | None:
+ def get_piece_data(self, piece_index: int) -> Optional[bytes]:
"""Get data for a verified piece."""
if piece_index >= self.num_pieces:
return None
diff --git a/ccbt/plugins/base.py b/ccbt/plugins/base.py
index eaeffc9..d3ea035 100644
--- a/ccbt/plugins/base.py
+++ b/ccbt/plugins/base.py
@@ -14,7 +14,7 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from enum import Enum
-from typing import Any, Callable
+from typing import Any, Callable, Optional
from ccbt.utils.exceptions import CCBTError
from ccbt.utils.logging_config import get_logger
@@ -48,7 +48,7 @@ class PluginInfo:
dependencies: list[str] = field(default_factory=list)
hooks: list[str] = field(default_factory=list)
state: PluginState = PluginState.UNLOADED
- error: str | None = None
+ error: Optional[str] = None
class Plugin(ABC):
@@ -67,7 +67,7 @@ def __init__(self, name: str, version: str = "1.0.0", description: str = ""):
self.version = version
self.description = description
self.state = PluginState.UNLOADED
- self.error: str | None = None
+ self.error: Optional[str] = None
self.logger = get_logger(f"plugin.{name}")
self._hooks: dict[str, list[Callable]] = {}
self._dependencies: list[str] = []
@@ -154,7 +154,7 @@ def __init__(self) -> None:
async def load_plugin(
self,
plugin_class: type[Plugin],
- config: dict[str, Any] | None = None,
+ config: Optional[dict[str, Any]] = None,
) -> str:
"""Load a plugin.
@@ -331,11 +331,11 @@ async def emit_hook(self, hook_name: str, *args, **kwargs) -> list[Any]:
self.logger.exception("Global hook '%s' failed", hook_name)
return results
- def get_plugin(self, plugin_name: str) -> Plugin | None:
+ def get_plugin(self, plugin_name: str) -> Optional[Plugin]:
"""Get a plugin by name."""
return self.plugins.get(plugin_name)
- def get_plugin_info(self, plugin_name: str) -> PluginInfo | None:
+ def get_plugin_info(self, plugin_name: str) -> Optional[PluginInfo]:
"""Get plugin information."""
return self.plugin_info.get(plugin_name)
@@ -353,7 +353,7 @@ async def load_plugin_from_module(
self,
module_path: str,
plugin_class_name: str = "Plugin",
- config: dict[str, Any] | None = None,
+ config: Optional[dict[str, Any]] = None,
) -> str:
"""Load a plugin from a module.
@@ -404,7 +404,7 @@ async def shutdown(self) -> None:
# Global plugin manager instance
-_plugin_manager: PluginManager | None = None
+_plugin_manager: Optional[PluginManager] = None
def get_plugin_manager() -> PluginManager:
diff --git a/ccbt/plugins/logging_plugin.py b/ccbt/plugins/logging_plugin.py
index 9a642bd..5f7cc05 100644
--- a/ccbt/plugins/logging_plugin.py
+++ b/ccbt/plugins/logging_plugin.py
@@ -9,6 +9,7 @@
import json
from pathlib import Path
+from typing import Optional
from ccbt.plugins.base import Plugin
from ccbt.utils.events import Event, EventHandler, EventType
@@ -18,7 +19,7 @@
class EventLoggingHandler(EventHandler):
"""Handler for logging events."""
- def __init__(self, log_file: str | None = None):
+ def __init__(self, log_file: Optional[str] = None):
"""Initialize event logging handler."""
super().__init__("event_logging_handler")
self.log_file = log_file
@@ -58,7 +59,7 @@ class LoggingPlugin(Plugin):
def __init__(
self,
name: str = "logging_plugin",
- log_file: str | None = None,
+ log_file: Optional[str] = None,
log_level: str = "INFO",
):
"""Initialize logging plugin."""
@@ -69,7 +70,7 @@ def __init__(
)
self.log_file = log_file
self.log_level = log_level
- self.handler: EventLoggingHandler | None = None
+ self.handler: Optional[EventLoggingHandler] = None
async def initialize(self) -> None:
"""Initialize the logging plugin."""
diff --git a/ccbt/plugins/metrics_plugin.py b/ccbt/plugins/metrics_plugin.py
index 0dddb2a..2f9cc9b 100644
--- a/ccbt/plugins/metrics_plugin.py
+++ b/ccbt/plugins/metrics_plugin.py
@@ -9,7 +9,7 @@
from collections import deque
from dataclasses import dataclass, field
-from typing import Any
+from typing import Any, Optional
from ccbt.plugins.base import Plugin
from ccbt.utils.events import Event, EventHandler, EventType
@@ -140,8 +140,8 @@ def _update_aggregate(self, metric: Metric) -> None:
def get_metrics(
self,
- name: str | None = None,
- tags: dict[str, str] | None = None,
+ name: Optional[str] = None,
+ tags: Optional[dict[str, str]] = None,
limit: int = 100,
) -> list[Metric]:
"""Get metrics with optional filtering."""
@@ -157,7 +157,7 @@ def get_metrics(
return metrics[-limit:] if limit > 0 else metrics
- def get_aggregates(self, name: str | None = None) -> list[MetricAggregate]:
+ def get_aggregates(self, name: Optional[str] = None) -> list[MetricAggregate]:
"""Get metric aggregates."""
aggregates = list(self.aggregates.values())
@@ -178,7 +178,7 @@ def __init__(self, name: str = "metrics_plugin", max_metrics: int = 10000):
description="Performance metrics collection plugin",
)
self.max_metrics = max_metrics
- self.collector: MetricsCollector | None = None
+ self.collector: Optional[MetricsCollector] = None
async def initialize(self) -> None:
"""Initialize the metrics plugin."""
@@ -231,8 +231,8 @@ async def cleanup(self) -> None:
def get_metrics(
self,
- name: str | None = None,
- tags: dict[str, str] | None = None,
+ name: Optional[str] = None,
+ tags: Optional[dict[str, str]] = None,
limit: int = 100,
) -> list[Metric]:
"""Get collected metrics."""
@@ -240,7 +240,7 @@ def get_metrics(
return self.collector.get_metrics(name, tags, limit)
return []
- def get_aggregates(self, name: str | None = None) -> list[MetricAggregate]:
+ def get_aggregates(self, name: Optional[str] = None) -> list[MetricAggregate]:
"""Get metric aggregates."""
if self.collector:
return self.collector.get_aggregates(name)
diff --git a/ccbt/protocols/__init__.py b/ccbt/protocols/__init__.py
index 79b4b4d..f27a746 100644
--- a/ccbt/protocols/__init__.py
+++ b/ccbt/protocols/__init__.py
@@ -18,9 +18,11 @@
from ccbt.protocols.bittorrent import BitTorrentProtocol
try:
+ from typing import Optional
+
from ccbt.protocols.ipfs import IPFSProtocol as _IPFSProtocol
- IPFSProtocol: type[Protocol] | None = _IPFSProtocol # type: ignore[assignment]
+ IPFSProtocol: Optional[type[Protocol]] = _IPFSProtocol # type: ignore[assignment]
except ImportError:
IPFSProtocol = None # type: ignore[assignment] # IPFS support optional
diff --git a/ccbt/protocols/base.py b/ccbt/protocols/base.py
index 2dc6836..2c6783f 100644
--- a/ccbt/protocols/base.py
+++ b/ccbt/protocols/base.py
@@ -14,7 +14,7 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from enum import Enum
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.utils.events import Event, EventType, emit_event
@@ -106,7 +106,7 @@ async def send_message(self, peer_id: str, message: bytes) -> bool:
"""Send message to peer."""
@abstractmethod
- async def receive_message(self, peer_id: str) -> bytes | None:
+ async def receive_message(self, peer_id: str) -> Optional[bytes]:
"""Receive message from peer."""
@abstractmethod
@@ -129,7 +129,7 @@ def get_peers(self) -> dict[str, PeerInfo]:
"""Get connected peers."""
return self.peers.copy()
- def get_peer(self, peer_id: str) -> PeerInfo | None:
+ def get_peer(self, peer_id: str) -> Optional[PeerInfo]:
"""Get specific peer."""
return self.peers.get(peer_id)
@@ -343,7 +343,7 @@ async def unregister_protocol(self, protocol_type: ProtocolType) -> None:
),
)
- def get_protocol(self, protocol_type: ProtocolType) -> Protocol | None:
+ def get_protocol(self, protocol_type: ProtocolType) -> Optional[Protocol]:
"""Get protocol by type."""
return self.protocols.get(protocol_type)
@@ -460,7 +460,7 @@ def get_protocol_statistics(self) -> dict[str, Any]:
return stats
async def connect_peers_batch(
- self, peers: list[PeerInfo], preferred_protocol: ProtocolType | None = None
+ self, peers: list[PeerInfo], preferred_protocol: Optional[ProtocolType] = None
) -> dict[ProtocolType, list[PeerInfo]]:
"""Connect to multiple peers using the best available protocols.
@@ -522,7 +522,7 @@ async def _connect_peers_for_protocol(
return connected_peers
def _group_peers_by_protocol(
- self, peers: list[PeerInfo], preferred_protocol: ProtocolType | None
+ self, peers: list[PeerInfo], preferred_protocol: Optional[ProtocolType]
) -> dict[ProtocolType, list[PeerInfo]]:
"""Group peers by their preferred protocol."""
groups: dict[ProtocolType, list[PeerInfo]] = {}
@@ -542,8 +542,8 @@ def _group_peers_by_protocol(
def _select_best_protocol_for_peer(
self,
_peer: PeerInfo,
- preferred_protocol: ProtocolType | None,
- ) -> ProtocolType | None:
+ preferred_protocol: Optional[ProtocolType],
+ ) -> Optional[ProtocolType]:
"""Select the best protocol for a peer."""
# Use preferred protocol if available and healthy
if preferred_protocol and self._is_protocol_available(preferred_protocol):
@@ -757,7 +757,7 @@ def health_check_all_sync(self) -> dict[ProtocolType, bool]:
# Global protocol manager instance
-_protocol_manager: ProtocolManager | None = None
+_protocol_manager: Optional[ProtocolManager] = None
def get_protocol_manager() -> ProtocolManager:
diff --git a/ccbt/protocols/bittorrent.py b/ccbt/protocols/bittorrent.py
index 41806b1..924606d 100644
--- a/ccbt/protocols/bittorrent.py
+++ b/ccbt/protocols/bittorrent.py
@@ -9,7 +9,7 @@
import logging
import time
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.protocols.base import (
Protocol,
@@ -262,7 +262,7 @@ async def send_message(self, peer_id: str, message: bytes) -> bool:
self.update_stats(errors=1)
return False
- async def receive_message(self, peer_id: str) -> bytes | None:
+ async def receive_message(self, peer_id: str) -> Optional[bytes]:
"""Receive message from BitTorrent peer."""
try:
# Use peer manager if available
diff --git a/ccbt/protocols/bittorrent_v2.py b/ccbt/protocols/bittorrent_v2.py
index 6b9b934..5732e21 100644
--- a/ccbt/protocols/bittorrent_v2.py
+++ b/ccbt/protocols/bittorrent_v2.py
@@ -12,7 +12,7 @@
import logging
import struct
from enum import Enum
-from typing import Any
+from typing import Any, Optional
from ccbt.core.bencode import BencodeDecoder, BencodeEncoder
from ccbt.extensions.protocol import ExtensionMessageType, ExtensionProtocol
@@ -196,8 +196,8 @@ def parse_v2_handshake(data: bytes) -> dict[str, Any]:
version = detect_protocol_version(data)
# Parse info hashes and peer_id based on version
- info_hash_v2: bytes | None = None
- info_hash_v1: bytes | None = None
+ info_hash_v2: Optional[bytes] = None
+ info_hash_v1: Optional[bytes] = None
peer_id: bytes
hash_start = reserved_end
@@ -425,7 +425,7 @@ async def send_hybrid_handshake(
def negotiate_protocol_version(
handshake: bytes,
supported_versions: list[ProtocolVersion],
-) -> ProtocolVersion | None:
+) -> Optional[ProtocolVersion]:
"""Negotiate highest common protocol version with peer.
Compares peer's supported version (from handshake) with our supported versions
@@ -512,8 +512,8 @@ def negotiate_protocol_version(
async def handle_v2_handshake(
reader: asyncio.StreamReader,
writer: asyncio.StreamWriter, # noqa: ARG001 - Reserved for future use
- our_info_hash_v2: bytes | None = None,
- our_info_hash_v1: bytes | None = None,
+ our_info_hash_v2: Optional[bytes] = None,
+ our_info_hash_v1: Optional[bytes] = None,
timeout: float = 30.0,
) -> tuple[ProtocolVersion, bytes, dict[str, Any]]:
"""Handle incoming v2 handshake from peer.
@@ -640,7 +640,7 @@ async def _send_extension_message(
async def _receive_extension_message(
connection: Any,
timeout: float = 10.0,
-) -> tuple[int, bytes] | None:
+) -> Optional[tuple[int, bytes]]:
"""Receive an extension message via BEP 10 extension protocol.
Args:
diff --git a/ccbt/protocols/hybrid.py b/ccbt/protocols/hybrid.py
index bcc890e..cfd28c0 100644
--- a/ccbt/protocols/hybrid.py
+++ b/ccbt/protocols/hybrid.py
@@ -11,7 +11,7 @@
import contextlib
import time
from dataclasses import dataclass, field
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.protocols import (
WebTorrentProtocol, # Import from protocols __init__ which handles the module/package conflict
@@ -60,7 +60,9 @@ class HybridProtocol(Protocol):
"""Hybrid protocol combining multiple protocols."""
def __init__(
- self, strategy: HybridStrategy | None = None, session_manager: Any | None = None
+ self,
+ strategy: Optional[HybridStrategy] = None,
+ session_manager: Optional[Any] = None,
):
"""Initialize hybrid protocol.
@@ -333,7 +335,7 @@ async def send_message(self, peer_id: str, message: bytes) -> bool:
else:
return success
- async def receive_message(self, peer_id: str) -> bytes | None:
+ async def receive_message(self, peer_id: str) -> Optional[bytes]:
"""Receive message from peer using the best available protocol."""
# Find which protocol has this peer
best_protocol = self._find_protocol_for_peer(peer_id)
@@ -444,7 +446,7 @@ async def scrape_torrent(self, torrent_info: TorrentInfo) -> dict[str, int]:
return combined_stats
- def _select_best_protocol(self, _peer_info: PeerInfo) -> Protocol | None:
+ def _select_best_protocol(self, _peer_info: PeerInfo) -> Optional[Protocol]:
"""Select the best protocol for a peer."""
# Calculate scores for each protocol
protocol_scores = {}
@@ -467,7 +469,7 @@ def _select_best_protocol(self, _peer_info: PeerInfo) -> Protocol | None:
return None
- def _find_protocol_for_peer(self, peer_id: str) -> Protocol | None:
+ def _find_protocol_for_peer(self, peer_id: str) -> Optional[Protocol]:
"""Find which protocol has a specific peer."""
for protocol in self.sub_protocols.values():
if protocol.is_connected(peer_id):
diff --git a/ccbt/protocols/ipfs.py b/ccbt/protocols/ipfs.py
index 5d32f41..4c92391 100644
--- a/ccbt/protocols/ipfs.py
+++ b/ccbt/protocols/ipfs.py
@@ -14,7 +14,7 @@
import logging
import time
from dataclasses import dataclass
-from typing import Any, Callable, TypeVar
+from typing import Any, Callable, Optional, TypeVar
import ipfshttpclient
import multiaddr
@@ -70,7 +70,7 @@ class IPFSContent:
class IPFSProtocol(Protocol):
"""IPFS protocol implementation."""
- def __init__(self, session_manager: Any | None = None):
+ def __init__(self, session_manager: Optional[Any] = None):
"""Initialize IPFS protocol.
Args:
@@ -84,7 +84,7 @@ def __init__(self, session_manager: Any | None = None):
self.session_manager = session_manager
# Configuration will be set by session manager
- self.config: Any | None = None
+ self.config: Optional[Any] = None
# IPFS-specific capabilities
self.capabilities = ProtocolCapabilities(
@@ -116,7 +116,7 @@ def __init__(self, session_manager: Any | None = None):
]
# IPFS client and connection state
- self._ipfs_client: ipfshttpclient.Client | None = None
+ self._ipfs_client: Optional[ipfshttpclient.Client] = None
self._ipfs_connected: bool = False
self._connection_retries: int = 0
self._last_connection_attempt: float = 0.0
@@ -483,8 +483,8 @@ async def send_message(
self,
peer_id: str,
message: bytes,
- want_list: list[str] | None = None,
- blocks: dict[str, bytes] | None = None,
+ want_list: Optional[list[str]] = None,
+ blocks: Optional[dict[str, bytes]] = None,
) -> bool:
"""Send message to IPFS peer.
@@ -556,7 +556,7 @@ async def send_message(
async def receive_message(
self, peer_id: str, parse_bitswap: bool = True
- ) -> bytes | None:
+ ) -> Optional[bytes]:
"""Receive message from IPFS peer.
Args:
@@ -618,8 +618,8 @@ async def receive_message(
def _format_bitswap_message(
self,
message: bytes,
- want_list: list[str] | None = None,
- blocks: dict[str, bytes] | None = None,
+ want_list: Optional[list[str]] = None,
+ blocks: Optional[dict[str, bytes]] = None,
) -> bytes:
"""Format message according to Bitswap protocol.
@@ -1256,7 +1256,7 @@ def _cache_discovery_result(
def _get_cached_discovery_result(
self, cid: str, ttl: int = 300
- ) -> list[str] | None:
+ ) -> Optional[list[str]]:
"""Get cached discovery result if valid.
Args:
@@ -1461,7 +1461,7 @@ async def add_content(self, data: bytes) -> str:
)
return ""
- async def get_content(self, cid: str) -> bytes | None:
+ async def get_content(self, cid: str) -> Optional[bytes]:
"""Get content from IPFS by CID.
First tries to retrieve from IPFS daemon, then falls back to peer-based retrieval.
@@ -1619,7 +1619,9 @@ async def _request_blocks_from_peers(
timeout_per_block = 30 # seconds
# Request blocks from peers in parallel
- async def request_from_peer(peer_id: str, cid: str) -> tuple[str, bytes | None]:
+ async def request_from_peer(
+ peer_id: str, cid: str
+ ) -> tuple[str, Optional[bytes]]:
"""Request a single block from a peer."""
for attempt in range(max_retries):
try:
@@ -1699,7 +1701,7 @@ async def request_from_peer(peer_id: str, cid: str) -> tuple[str, bytes | None]:
return blocks
async def _reconstruct_content_from_blocks(
- self, blocks: dict[str, bytes], dag_structure: dict[str, Any] | None = None
+ self, blocks: dict[str, bytes], dag_structure: Optional[dict[str, Any]] = None
) -> bytes:
"""Reconstruct content from IPFS blocks following DAG structure.
@@ -1959,7 +1961,7 @@ def get_ipfs_content(self) -> dict[str, IPFSContent]:
"""Get IPFS content."""
return self.ipfs_content.copy()
- def get_content_stats(self, cid: str) -> dict[str, Any] | None:
+ def get_content_stats(self, cid: str) -> Optional[dict[str, Any]]:
"""Get content statistics."""
if cid not in self.ipfs_content:
return None
diff --git a/ccbt/protocols/webtorrent.py b/ccbt/protocols/webtorrent.py
index 1fa8b88..291f3d7 100644
--- a/ccbt/protocols/webtorrent.py
+++ b/ccbt/protocols/webtorrent.py
@@ -15,7 +15,7 @@
import logging
import time
from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
import aiohttp
from aiohttp import web
@@ -40,7 +40,7 @@ class WebRTCConnection:
"""WebRTC connection information."""
peer_id: str
- data_channel: Any | None = None # RTCDataChannel
+ data_channel: Optional[Any] = None # RTCDataChannel
connection_state: str = "new"
ice_connection_state: str = "new"
last_activity: float = 0.0
@@ -51,7 +51,7 @@ class WebRTCConnection:
class WebTorrentProtocol(Protocol):
"""WebTorrent protocol implementation."""
- def __init__(self, session_manager: Any | None = None):
+ def __init__(self, session_manager: Optional[Any] = None):
"""Initialize WebTorrent protocol.
Args:
@@ -90,24 +90,24 @@ def __init__(self, session_manager: Any | None = None):
self._pending_messages: dict[str, list[bytes]] = {}
# Background task for retrying pending messages
- self._retry_task: asyncio.Task | None = None
+ self._retry_task: Optional[asyncio.Task] = None
# CRITICAL FIX: WebSocket server is now managed at daemon startup
# Use shared server from session manager instead of creating new one
# This prevents port conflicts and socket recreation issues
- self.websocket_server: Application | None = None
+ self.websocket_server: Optional[Application] = None
self.websocket_connections: set[WebSocketResponse] = set()
self.websocket_connections_by_peer: dict[str, WebSocketResponse] = {}
# CRITICAL FIX: WebRTC connection manager is now initialized at daemon startup
# Use shared manager from session manager instead of creating new one
# This ensures proper resource management and prevents duplicate managers
- self.webrtc_manager: Any | None = None
+ self.webrtc_manager: Optional[Any] = None
# Tracker URLs for WebTorrent
self.tracker_urls: list[str] = []
- def _get_webrtc_manager(self) -> Any | None:
+ def _get_webrtc_manager(self) -> Optional[Any]:
"""Get WebRTC manager from session manager.
CRITICAL FIX: WebRTC manager should be initialized at daemon startup.
@@ -309,7 +309,7 @@ async def _websocket_handler(self, request: web.Request) -> web.WebSocketRespons
await ws.prepare(request)
self.websocket_connections.add(ws)
- peer_id: str | None = None
+ peer_id: Optional[str] = None
try:
async for msg in ws:
@@ -740,7 +740,7 @@ async def connect_peer(self, peer_info: PeerInfo) -> bool:
# Create ICE candidate callback to send via WebSocket
async def ice_candidate_callback(
- peer_id: str, candidate: dict[str, Any] | None
+ peer_id: str, candidate: Optional[dict[str, Any]]
):
"""Send ICE candidate via WebSocket."""
if candidate is None:
@@ -1133,7 +1133,7 @@ async def _process_received_data(self, peer_id: str, data: bytes) -> None:
# Update buffer even if no messages extracted
self._message_buffer[peer_id] = buffer
- async def receive_message(self, peer_id: str) -> bytes | None:
+ async def receive_message(self, peer_id: str) -> Optional[bytes]:
"""Receive message from WebTorrent peer.
Args:
@@ -1337,7 +1337,7 @@ def get_webrtc_connections(self) -> dict[str, WebRTCConnection]:
"""Get WebRTC connections."""
return self.webrtc_connections.copy()
- def get_connection_stats(self, peer_id: str) -> dict[str, Any] | None:
+ def get_connection_stats(self, peer_id: str) -> Optional[dict[str, Any]]:
"""Get connection statistics for a peer.
Args:
diff --git a/ccbt/protocols/webtorrent/webrtc_manager.py b/ccbt/protocols/webtorrent/webrtc_manager.py
index af112b1..c425ac9 100644
--- a/ccbt/protocols/webtorrent/webrtc_manager.py
+++ b/ccbt/protocols/webtorrent/webrtc_manager.py
@@ -7,7 +7,7 @@
import logging
import time
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING: # pragma: no cover - type checking only, not executed at runtime
# TYPE_CHECKING block is only evaluated by static type checkers, not at runtime.
@@ -61,8 +61,8 @@ class WebRTCConnectionManager:
def __init__(
self,
- stun_servers: list[str] | None = None,
- turn_servers: list[str] | None = None,
+ stun_servers: Optional[list[str]] = None,
+ turn_servers: Optional[list[str]] = None,
max_connections: int = 100,
):
"""Initialize WebRTC connection manager.
@@ -132,7 +132,7 @@ def _build_ice_servers(self) -> list[Any]: # type: ignore[type-arg]
async def create_peer_connection(
self,
peer_id: str,
- ice_candidate_callback: Any | None = None,
+ ice_candidate_callback: Optional[Any] = None,
) -> Any: # RTCPeerConnection, but type checker needs help
"""Create a new RTCPeerConnection instance.
@@ -181,7 +181,9 @@ async def on_ice_connection_state_change(): # pragma: no cover - See above
# Set up ICE candidate handler
@pc.on("icecandidate")
- async def on_ice_candidate(candidate: Any | None): # RTCIceCandidate | None
+ async def on_ice_candidate(
+ candidate: Optional[Any],
+ ): # Optional[RTCIceCandidate]
if candidate is None:
# End of candidates
if ice_candidate_callback:
@@ -397,7 +399,7 @@ def _handle_data_channel_message(
message_size = len(message) if isinstance(message, bytes) else "N/A"
logger.debug("Received message from peer %s, size: %s", peer_id, message_size)
- def get_connection_stats(self, peer_id: str) -> dict[str, Any] | None:
+ def get_connection_stats(self, peer_id: str) -> Optional[dict[str, Any]]:
"""Get connection statistics for a peer.
Args:
diff --git a/ccbt/protocols/xet.py b/ccbt/protocols/xet.py
index b3c5712..f399572 100644
--- a/ccbt/protocols/xet.py
+++ b/ccbt/protocols/xet.py
@@ -11,7 +11,7 @@
import asyncio
import logging
import time
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.discovery.xet_cas import P2PCASClient
from ccbt.protocols.base import (
@@ -82,7 +82,7 @@ def __init__(
self.bloom_filter = bloom_filter
# P2P CAS client
- self.cas_client: P2PCASClient | None = None
+ self.cas_client: Optional[P2PCASClient] = None
# Logger
self.logger = logging.getLogger(__name__)
@@ -282,7 +282,7 @@ async def send_message(self, _peer_id: str, message: bytes) -> bool:
self.update_stats(errors=1)
return False
- async def receive_message(self, _peer_id: str) -> bytes | None:
+ async def receive_message(self, _peer_id: str) -> Optional[bytes]:
"""Receive message from peer.
Note: Xet uses BitTorrent protocol extension for chunk messages,
diff --git a/ccbt/proxy/auth.py b/ccbt/proxy/auth.py
index 6f32d3b..536b099 100644
--- a/ccbt/proxy/auth.py
+++ b/ccbt/proxy/auth.py
@@ -8,6 +8,7 @@
import base64
import logging
from pathlib import Path
+from typing import Optional
try:
from cryptography.fernet import Fernet
@@ -91,7 +92,7 @@ class CredentialStore:
Uses Fernet symmetric encryption to store credentials encrypted.
"""
- def __init__(self, config_dir: Path | None = None):
+ def __init__(self, config_dir: Optional[Path] = None):
"""Initialize credential store.
Args:
@@ -213,7 +214,7 @@ def decrypt_credentials(self, encrypted: str) -> tuple[str, str]:
class ProxyAuth:
"""Handles proxy authentication challenges and credential management."""
- def __init__(self, credential_store: CredentialStore | None = None):
+ def __init__(self, credential_store: Optional[CredentialStore] = None):
"""Initialize proxy authentication handler.
Args:
@@ -227,9 +228,9 @@ def __init__(self, credential_store: CredentialStore | None = None):
async def handle_challenge(
self,
challenge_header: str,
- username: str | None = None,
- password: str | None = None,
- ) -> str | None:
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ ) -> Optional[str]:
"""Handle Proxy-Authenticate challenge.
Args:
diff --git a/ccbt/proxy/client.py b/ccbt/proxy/client.py
index ab81101..1221d71 100644
--- a/ccbt/proxy/client.py
+++ b/ccbt/proxy/client.py
@@ -8,7 +8,7 @@
import asyncio
import logging
from dataclasses import dataclass
-from typing import Any
+from typing import Any, Optional
import aiohttp
from aiohttp import ClientSession, ClientTimeout
@@ -91,8 +91,8 @@ def _build_proxy_url(
self,
proxy_host: str,
proxy_port: int,
- proxy_username: str | None = None,
- proxy_password: str | None = None,
+ proxy_username: Optional[str] = None,
+ proxy_password: Optional[str] = None,
) -> str:
"""Build proxy URL for aiohttp.
@@ -115,9 +115,9 @@ def create_proxy_connector(
proxy_host: str,
proxy_port: int,
proxy_type: str = "http",
- proxy_username: str | None = None,
- proxy_password: str | None = None,
- timeout: ClientTimeout | None = None,
+ proxy_username: Optional[str] = None,
+ proxy_password: Optional[str] = None,
+ timeout: Optional[ClientTimeout] = None,
) -> aiohttp.BaseConnector:
"""Create aiohttp ProxyConnector for proxy connections.
@@ -187,10 +187,10 @@ def create_proxy_session(
proxy_host: str,
proxy_port: int,
proxy_type: str = "http",
- proxy_username: str | None = None,
- proxy_password: str | None = None,
- timeout: ClientTimeout | None = None,
- headers: dict[str, str] | None = None,
+ proxy_username: Optional[str] = None,
+ proxy_password: Optional[str] = None,
+ timeout: Optional[ClientTimeout] = None,
+ headers: Optional[dict[str, str]] = None,
) -> ClientSession:
"""Create aiohttp ClientSession configured for proxy.
@@ -235,8 +235,8 @@ async def get_proxy_session(
proxy_host: str,
proxy_port: int,
proxy_type: str = "http",
- proxy_username: str | None = None,
- proxy_password: str | None = None,
+ proxy_username: Optional[str] = None,
+ proxy_password: Optional[str] = None,
) -> ClientSession:
"""Get or create connection pool for proxy.
@@ -277,8 +277,8 @@ async def test_connection(
proxy_host: str,
proxy_port: int,
proxy_type: str = "http",
- proxy_username: str | None = None,
- proxy_password: str | None = None,
+ proxy_username: Optional[str] = None,
+ proxy_password: Optional[str] = None,
test_url: str = "http://httpbin.org/get",
) -> bool:
"""Test proxy connection.
@@ -391,7 +391,7 @@ async def connect_via_chain(
target_host: str,
target_port: int,
proxy_chain: list[dict[str, Any]],
- timeout: float | None = None,
+ timeout: Optional[float] = None,
) -> tuple[asyncio.StreamReader, asyncio.StreamWriter]:
"""Connect to target through a chain of proxies using HTTP CONNECT.
@@ -433,8 +433,8 @@ async def connect_via_chain(
# Connect through chain
# For now, only HTTP proxies support chaining via CONNECT
# SOCKS proxies would need special handling
- reader: asyncio.StreamReader | None = None
- writer: asyncio.StreamWriter | None = None
+ reader: Optional[asyncio.StreamReader] = None
+ writer: Optional[asyncio.StreamWriter] = None
for i, proxy in enumerate(proxy_chain):
proxy_host = proxy["host"]
@@ -511,8 +511,8 @@ async def _connect_to_proxy(
self,
proxy_host: str,
proxy_port: int,
- _username: str | None,
- _password: str | None,
+ _username: Optional[str],
+ _password: Optional[str],
timeout: float,
) -> tuple[asyncio.StreamReader, asyncio.StreamWriter]:
"""Establish TCP connection to proxy server.
@@ -548,8 +548,8 @@ async def _send_connect_request(
writer: asyncio.StreamWriter,
target_host: str,
target_port: int,
- username: str | None,
- password: str | None,
+ username: Optional[str],
+ password: Optional[str],
) -> None:
"""Send HTTP CONNECT request through proxy.
@@ -576,7 +576,7 @@ async def _send_connect_request(
async def _read_connect_response(
self, reader: asyncio.StreamReader
- ) -> asyncio.StreamReader | None:
+ ) -> Optional[asyncio.StreamReader]:
"""Read and parse HTTP CONNECT response.
Args:
diff --git a/ccbt/queue/manager.py b/ccbt/queue/manager.py
index 30cfe4b..87394ae 100644
--- a/ccbt/queue/manager.py
+++ b/ccbt/queue/manager.py
@@ -7,7 +7,7 @@
import time
from collections import OrderedDict
from dataclasses import dataclass, field
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.models import QueueConfig, QueueEntry, TorrentPriority
@@ -35,7 +35,7 @@ class TorrentQueueManager:
def __init__(
self,
session_manager: AsyncSessionManager,
- config: QueueConfig | None = None,
+ config: Optional[QueueConfig] = None,
):
"""Initialize queue manager.
@@ -59,8 +59,8 @@ def __init__(
self._lock = asyncio.Lock()
# Background tasks
- self._monitor_task: asyncio.Task | None = None
- self._bandwidth_task: asyncio.Task | None = None
+ self._monitor_task: Optional[asyncio.Task] = None
+ self._bandwidth_task: Optional[asyncio.Task] = None
# Statistics
self.stats = QueueStatistics()
@@ -107,7 +107,7 @@ async def stop(self) -> None:
async def add_torrent(
self,
info_hash: bytes,
- priority: TorrentPriority | None = None,
+ priority: Optional[TorrentPriority] = None,
auto_start: bool = True,
resume: bool = False,
) -> QueueEntry:
@@ -518,7 +518,9 @@ async def get_queue_status(self) -> dict[str, Any]:
"entries": entries,
}
- async def get_torrent_queue_state(self, info_hash: bytes) -> dict[str, Any] | None:
+ async def get_torrent_queue_state(
+ self, info_hash: bytes
+ ) -> Optional[dict[str, Any]]:
"""Get queue state for a specific torrent.
Args:
@@ -696,7 +698,7 @@ async def _try_start_torrent(
async def _try_start_next_torrent(self) -> None:
"""Try to start the next queued torrent."""
- info_hash: bytes | None = None
+ info_hash: Optional[bytes] = None
async with self._lock:
# Find first queued torrent (already sorted by priority)
for info_hash_key, entry in self.queue.items():
diff --git a/ccbt/security/anomaly_detector.py b/ccbt/security/anomaly_detector.py
index 877ddd0..d1ac920 100644
--- a/ccbt/security/anomaly_detector.py
+++ b/ccbt/security/anomaly_detector.py
@@ -17,7 +17,7 @@
from collections import defaultdict, deque
from dataclasses import dataclass, field
from enum import Enum
-from typing import Any, TypedDict
+from typing import Any, Optional, TypedDict
from ccbt.utils.events import Event, EventType, emit_event
@@ -614,7 +614,7 @@ def get_anomaly_statistics(self) -> dict[str, Any]:
/ max(1, self.stats["total_anomalies"]),
}
- def get_behavioral_pattern(self, peer_id: str) -> BehavioralPattern | None:
+ def get_behavioral_pattern(self, peer_id: str) -> Optional[BehavioralPattern]:
"""Get behavioral pattern for a peer."""
return self.behavioral_patterns.get(peer_id)
@@ -622,7 +622,7 @@ def get_statistical_baseline(
self,
peer_id: str,
metric_name: str,
- ) -> dict[str, float] | None:
+ ) -> Optional[dict[str, float]]:
"""Get statistical baseline for a peer metric."""
return self.statistical_baselines.get(peer_id, {}).get(metric_name)
diff --git a/ccbt/security/blacklist_updater.py b/ccbt/security/blacklist_updater.py
index 666f7fa..1b778c5 100644
--- a/ccbt/security/blacklist_updater.py
+++ b/ccbt/security/blacklist_updater.py
@@ -13,7 +13,7 @@
import json
import logging
from io import StringIO
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
import aiohttp
@@ -30,8 +30,8 @@ def __init__(
self,
security_manager: SecurityManager,
update_interval: float = 3600.0,
- sources: list[str] | None = None,
- local_source_config: Any | None = None,
+ sources: Optional[list[str]] = None,
+ local_source_config: Optional[Any] = None,
):
"""Initialize blacklist updater.
@@ -45,8 +45,8 @@ def __init__(
self.security_manager = security_manager
self.update_interval = update_interval
self.sources = sources or []
- self._update_task: asyncio.Task | None = None
- self._local_source: Any | None = None
+ self._update_task: Optional[asyncio.Task] = None
+ self._local_source: Optional[Any] = None
self._local_source_config = local_source_config
async def update_from_source(self, source_url: str) -> int:
diff --git a/ccbt/security/ciphers/aes.py b/ccbt/security/ciphers/aes.py
index 1ecbb85..a442540 100644
--- a/ccbt/security/ciphers/aes.py
+++ b/ccbt/security/ciphers/aes.py
@@ -9,6 +9,7 @@
from __future__ import annotations
import secrets
+from typing import Optional
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
@@ -19,7 +20,7 @@
class AESCipher(CipherSuite):
"""AES cipher implementation using CFB mode."""
- def __init__(self, key: bytes, iv: bytes | None = None):
+ def __init__(self, key: bytes, iv: Optional[bytes] = None):
"""Initialize AES cipher.
Args:
diff --git a/ccbt/security/ciphers/chacha20.py b/ccbt/security/ciphers/chacha20.py
index 78c8626..3694c47 100644
--- a/ccbt/security/ciphers/chacha20.py
+++ b/ccbt/security/ciphers/chacha20.py
@@ -10,6 +10,7 @@
from __future__ import annotations
import secrets
+from typing import Optional
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms
@@ -20,7 +21,7 @@
class ChaCha20Cipher(CipherSuite):
"""ChaCha20 stream cipher implementation."""
- def __init__(self, key: bytes, nonce: bytes | None = None):
+ def __init__(self, key: bytes, nonce: Optional[bytes] = None):
"""Initialize ChaCha20 cipher.
Args:
diff --git a/ccbt/security/dh_exchange.py b/ccbt/security/dh_exchange.py
index 2acc97a..ad6eb48 100644
--- a/ccbt/security/dh_exchange.py
+++ b/ccbt/security/dh_exchange.py
@@ -9,7 +9,7 @@
from __future__ import annotations
import hashlib
-from typing import NamedTuple
+from typing import NamedTuple, Optional
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import dh
@@ -109,7 +109,7 @@ def derive_encryption_key(
self,
shared_secret: bytes,
info_hash: bytes,
- pad: bytes | None = None,
+ pad: Optional[bytes] = None,
) -> bytes:
"""Derive encryption key from shared secret.
diff --git a/ccbt/security/ed25519_handshake.py b/ccbt/security/ed25519_handshake.py
index f25641b..fa2ab36 100644
--- a/ccbt/security/ed25519_handshake.py
+++ b/ccbt/security/ed25519_handshake.py
@@ -8,7 +8,7 @@
from __future__ import annotations
import time
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.utils.logging_config import get_logger
@@ -81,7 +81,7 @@ def verify_peer_handshake(
peer_id: bytes,
peer_public_key: bytes,
peer_signature: bytes,
- timestamp: int | None = None,
+ timestamp: Optional[int] = None,
) -> bool:
"""Verify peer's handshake signature.
@@ -149,7 +149,7 @@ def create_handshake_extension(
def parse_handshake_extension(
self, extension_data: dict[str, Any]
- ) -> tuple[bytes, bytes, int] | None:
+ ) -> Optional[tuple[bytes, bytes, int]]:
"""Parse handshake extension data.
Args:
diff --git a/ccbt/security/encryption.py b/ccbt/security/encryption.py
index bf9f14f..713416d 100644
--- a/ccbt/security/encryption.py
+++ b/ccbt/security/encryption.py
@@ -17,7 +17,7 @@
import time
from dataclasses import dataclass, field
from enum import Enum
-from typing import Any
+from typing import Any, Optional
from ccbt.utils.events import Event, EventType, emit_event
@@ -125,7 +125,7 @@ class EncryptionSession:
last_activity: float = 0.0
# MSE handshake state (for integration with MSEHandshake)
mse_handshake: Any = None # Will store MSEHandshake instance if needed
- info_hash: bytes | None = None # Torrent info hash for key derivation
+ info_hash: Optional[bytes] = None # Torrent info hash for key derivation
class EncryptionManager:
@@ -133,7 +133,7 @@ class EncryptionManager:
def __init__(
self,
- config: EncryptionConfig | None = None,
+ config: Optional[EncryptionConfig] = None,
security_config: Any = None,
):
"""Initialize encryption manager.
@@ -403,7 +403,7 @@ def is_peer_encrypted(self, peer_id: str) -> bool:
session = self.encryption_sessions[peer_id]
return session.handshake_complete
- def get_encryption_type(self, peer_id: str) -> EncryptionType | None:
+ def get_encryption_type(self, peer_id: str) -> Optional[EncryptionType]:
"""Get encryption type for a peer."""
if peer_id not in self.encryption_sessions:
return None
@@ -423,7 +423,7 @@ def get_encryption_statistics(self) -> dict[str, Any]:
/ max(1, self.stats["bytes_encrypted"] + self.stats["bytes_decrypted"]),
}
- def get_peer_encryption_info(self, peer_id: str) -> dict[str, Any] | None:
+ def get_peer_encryption_info(self, peer_id: str) -> Optional[dict[str, Any]]:
"""Get encryption information for a peer."""
if peer_id not in self.encryption_sessions:
return None
@@ -485,7 +485,7 @@ async def _create_encryption_session(
)
def _select_encryption_type(
- self, peer_capabilities: list[EncryptionType] | None = None
+ self, peer_capabilities: Optional[list[EncryptionType]] = None
) -> EncryptionType:
"""Select encryption type based on configuration and peer capabilities.
diff --git a/ccbt/security/ip_filter.py b/ccbt/security/ip_filter.py
index 1353055..e407f34 100644
--- a/ccbt/security/ip_filter.py
+++ b/ccbt/security/ip_filter.py
@@ -23,7 +23,7 @@
from dataclasses import dataclass
from enum import Enum
from pathlib import Path
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional, Union
import aiofiles
import aiohttp
@@ -46,7 +46,7 @@ class FilterMode(Enum):
class IPFilterRule:
"""IP filter rule definition."""
- network: IPv4Network | IPv6Network
+ network: Union[IPv4Network, IPv6Network]
mode: FilterMode
priority: int = 0 # Higher priority wins (allow > block on tie)
source: str = "manual" # Source of rule (file path, URL, or "manual")
@@ -92,8 +92,8 @@ def __init__(self, enabled: bool = False, mode: FilterMode = FilterMode.BLOCK):
self.mode: FilterMode = mode
# Auto-update task
- self._update_task: asyncio.Task | None = None
- self._last_update: float | None = None
+ self._update_task: Optional[asyncio.Task] = None
+ self._last_update: Optional[float] = None
logger.debug("IPFilter initialized: enabled=%s, mode=%s", enabled, mode.value)
@@ -137,7 +137,7 @@ def is_blocked(self, ip: str) -> bool:
return True
def _is_ip_in_ranges(
- self, ip: ipaddress.IPv4Address | ipaddress.IPv6Address
+ self, ip: Union[ipaddress.IPv4Address, ipaddress.IPv6Address]
) -> bool:
"""Check if IP address is in any filter range.
@@ -190,7 +190,7 @@ def _is_ipv6_in_ranges(self, ip: ipaddress.IPv6Address) -> bool:
def add_rule(
self,
ip_range: str,
- mode: FilterMode | None = None,
+ mode: Optional[FilterMode] = None,
priority: int = 0,
source: str = "manual",
) -> bool:
@@ -306,7 +306,7 @@ def get_rules(self) -> list[IPFilterRule]:
"""
return self.rules.copy()
- def get_filter_statistics(self) -> dict[str, int | float | None]:
+ def get_filter_statistics(self) -> dict[str, Optional[int | float]]:
"""Get filter statistics.
Returns:
@@ -403,8 +403,8 @@ def _parse_ip_range(self, ip_range: str) -> tuple[IPv4Network | IPv6Network, boo
async def load_from_file(
self,
file_path: str,
- mode: FilterMode | None = None,
- source: str | None = None,
+ mode: Optional[FilterMode] = None,
+ source: Optional[str] = None,
) -> tuple[int, int]:
"""Load filter rules from a file.
@@ -491,7 +491,7 @@ async def _read_compressed_file(self, file_path: Path):
async def _parse_and_add_line(
self,
line: str,
- mode: FilterMode | None,
+ mode: Optional[FilterMode],
source: str,
) -> bool:
"""Parse a single line and add rule if valid."""
@@ -522,10 +522,10 @@ async def _parse_and_add_line(
async def load_from_url(
self,
url: str,
- cache_dir: str | Path | None = None,
- mode: FilterMode | None = None,
+ cache_dir: Optional[str | Path] = None,
+ mode: Optional[FilterMode] = None,
update_interval: float = 86400.0,
- ) -> tuple[bool, int, str | None]:
+ ) -> tuple[bool, int, Optional[str]]:
"""Load filter rules from a URL.
Args:
@@ -535,7 +535,7 @@ async def load_from_url(
update_interval: Minimum seconds between updates (default 24h)
Returns:
- Tuple of (success: bool, rules_loaded: int, error_message: str | None)
+ Tuple of (success: bool, rules_loaded: int, error_message: Optional[str])
"""
source = f"url:{url}"
@@ -642,7 +642,7 @@ async def load_from_url(
async def update_filter_lists(
self,
urls: list[str],
- cache_dir: str | Path,
+ cache_dir: Union[str, Path],
update_interval: float = 86400.0,
) -> dict[str, tuple[bool, int]]:
"""Update filter lists from URLs.
@@ -674,7 +674,7 @@ async def update_filter_lists(
async def start_auto_update(
self,
urls: list[str],
- cache_dir: str | Path,
+ cache_dir: Union[str, Path],
update_interval: float = 86400.0,
) -> None:
"""Start background task to auto-update filter lists.
diff --git a/ccbt/security/key_manager.py b/ccbt/security/key_manager.py
index a2a9c0f..06772fc 100644
--- a/ccbt/security/key_manager.py
+++ b/ccbt/security/key_manager.py
@@ -9,7 +9,7 @@
from __future__ import annotations
from pathlib import Path
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional
try:
from cryptography.fernet import Fernet
@@ -53,7 +53,7 @@ class Ed25519KeyManager:
authentication. Private keys are encrypted using Fernet before storage.
"""
- def __init__(self, key_dir: Path | str | None = None):
+ def __init__(self, key_dir: Optional[Path | str] = None):
"""Initialize key manager.
Args:
@@ -87,8 +87,8 @@ def __init__(self, key_dir: Path | str | None = None):
self.cipher = self._get_or_create_encryption_key()
# Key pair (loaded on demand)
- self._private_key: Ed25519PrivateKey | None = None
- self._public_key: Ed25519PublicKey | None = None
+ self._private_key: Optional[Ed25519PrivateKey] = None
+ self._public_key: Optional[Ed25519PublicKey] = None
def _get_or_create_encryption_key(self) -> Fernet:
"""Get or create encryption key for private key storage.
@@ -161,8 +161,8 @@ def generate_keypair(self) -> tuple[Ed25519PrivateKey, Ed25519PublicKey]:
def save_keypair(
self,
- private_key: Ed25519PrivateKey | None = None,
- public_key: Ed25519PublicKey | None = None,
+ private_key: Optional[Ed25519PrivateKey] = None,
+ public_key: Optional[Ed25519PublicKey] = None,
) -> None:
"""Save key pair to secure storage.
diff --git a/ccbt/security/local_blacklist_source.py b/ccbt/security/local_blacklist_source.py
index faefed4..0467405 100644
--- a/ccbt/security/local_blacklist_source.py
+++ b/ccbt/security/local_blacklist_source.py
@@ -13,7 +13,7 @@
import time
from collections import deque
from dataclasses import dataclass, field
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING: # pragma: no cover - type checking only, not executed at runtime
from ccbt.security.security_manager import SecurityManager
@@ -59,8 +59,8 @@ def __init__(
security_manager: SecurityManager,
evaluation_interval: float = 300.0, # 5 minutes
metric_window: float = 3600.0, # 1 hour
- thresholds: dict[str, Any] | None = None,
- expiration_hours: float | None = 24.0,
+ thresholds: Optional[dict[str, Any]] = None,
+ expiration_hours: Optional[float] = 24.0,
min_observations: int = 3,
):
"""Initialize local blacklist source.
@@ -96,7 +96,7 @@ def __init__(
self.metric_entries: deque[PeerMetricEntry] = deque(maxlen=100000)
# Background task
- self._evaluation_task: asyncio.Task | None = None
+ self._evaluation_task: Optional[asyncio.Task] = None
self._running = False
async def start_evaluation(self) -> None:
@@ -150,7 +150,7 @@ async def record_metric(
ip: str,
metric_type: str,
value: float,
- metadata: dict[str, Any] | None = None,
+ metadata: Optional[dict[str, Any]] = None,
) -> None:
"""Record a metric for an IP.
diff --git a/ccbt/security/messaging.py b/ccbt/security/messaging.py
index b66e013..22bf486 100644
--- a/ccbt/security/messaging.py
+++ b/ccbt/security/messaging.py
@@ -12,7 +12,7 @@
import secrets
import time
from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.utils.logging_config import get_logger
@@ -329,7 +329,7 @@ def encrypt_message(
raise SecureMessageError(msg) from e
def decrypt_message(
- self, secure_message: SecureMessage, sender_public_key: bytes | None = None
+ self, secure_message: SecureMessage, sender_public_key: Optional[bytes] = None
) -> bytes:
"""Decrypt and verify a message.
diff --git a/ccbt/security/mse_handshake.py b/ccbt/security/mse_handshake.py
index b0cdff4..3b07324 100644
--- a/ccbt/security/mse_handshake.py
+++ b/ccbt/security/mse_handshake.py
@@ -11,7 +11,7 @@
import asyncio
import struct
from enum import IntEnum
-from typing import TYPE_CHECKING, NamedTuple
+from typing import TYPE_CHECKING, NamedTuple, Optional
from ccbt.security.ciphers.aes import AESCipher
from ccbt.security.ciphers.chacha20 import ChaCha20Cipher
@@ -42,8 +42,8 @@ class MSEHandshakeResult(NamedTuple):
"""Result of MSE handshake."""
success: bool
- cipher: CipherSuite | None
- error: str | None = None
+ cipher: Optional[CipherSuite]
+ error: Optional[str] = None
class MSEHandshake:
@@ -58,7 +58,7 @@ def __init__(
self,
dh_key_size: int = 768,
prefer_rc4: bool = True,
- allowed_ciphers: list[CipherType] | None = None,
+ allowed_ciphers: Optional[list[CipherType]] = None,
):
"""Initialize MSE handshake handler.
@@ -332,7 +332,7 @@ def _encode_message(self, msg_type: MSEHandshakeType, payload: bytes) -> bytes:
length = len(payload) + 1 # +1 for message type byte
return struct.pack("!IB", length, int(msg_type)) + payload
- def _decode_message(self, data: bytes) -> tuple[MSEHandshakeType, bytes] | None:
+ def _decode_message(self, data: bytes) -> Optional[tuple[MSEHandshakeType, bytes]]:
"""Decode MSE handshake message.
Args:
@@ -354,7 +354,7 @@ def _decode_message(self, data: bytes) -> tuple[MSEHandshakeType, bytes] | None:
return (msg_type, payload)
- async def _read_message(self, reader: asyncio.StreamReader) -> bytes | None:
+ async def _read_message(self, reader: asyncio.StreamReader) -> Optional[bytes]:
"""Read a complete MSE handshake message from stream.
Args:
diff --git a/ccbt/security/peer_validator.py b/ccbt/security/peer_validator.py
index e2a4444..222f4e7 100644
--- a/ccbt/security/peer_validator.py
+++ b/ccbt/security/peer_validator.py
@@ -14,7 +14,7 @@
import time
from dataclasses import dataclass
from enum import Enum
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING: # pragma: no cover - type checking only, not executed at runtime
from ccbt.models import PeerInfo
@@ -226,7 +226,7 @@ async def assess_peer_quality(
return quality_score, assessment_details
- def get_validation_metrics(self, peer_id: str) -> ValidationMetrics | None:
+ def get_validation_metrics(self, peer_id: str) -> Optional[ValidationMetrics]:
"""Get validation metrics for a peer."""
return self.validation_metrics.get(peer_id)
diff --git a/ccbt/security/security_manager.py b/ccbt/security/security_manager.py
index 53971dd..093c882 100644
--- a/ccbt/security/security_manager.py
+++ b/ccbt/security/security_manager.py
@@ -20,7 +20,7 @@
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
import aiofiles
@@ -110,7 +110,7 @@ class BlacklistEntry:
ip: str
reason: str
added_at: float
- expires_at: float | None = None # None = permanent
+ expires_at: Optional[float] = None # None = permanent
source: str = "manual" # "manual", "auto", "reputation", "violation"
def is_expired(self) -> bool:
@@ -141,12 +141,12 @@ def __init__(self):
self.peer_reputations: dict[str, PeerReputation] = {}
self.blacklist_entries: dict[str, BlacklistEntry] = {}
self.ip_whitelist: set[str] = set()
- self.ip_filter: IPFilter | None = None
+ self.ip_filter: Optional[IPFilter] = None
self.security_events: deque = deque(maxlen=10000)
- self.blacklist_file: Path | None = None
- self.blacklist_updater: Any | None = None
- self._cleanup_task: asyncio.Task | None = None
- self._default_expiration_hours: float | None = None
+ self.blacklist_file: Optional[Path] = None
+ self.blacklist_updater: Optional[Any] = None
+ self._cleanup_task: Optional[asyncio.Task] = None
+ self._default_expiration_hours: Optional[float] = None
# Rate limiting
self.connection_rates: dict[str, deque] = defaultdict(lambda: deque())
@@ -285,7 +285,7 @@ async def report_violation(
ip: str,
violation: ThreatType,
description: str,
- metadata: dict[str, Any] | None = None,
+ metadata: Optional[dict[str, Any]] = None,
) -> None:
"""Report a security violation."""
reputation = self._get_peer_reputation(peer_id, ip)
@@ -327,7 +327,7 @@ def add_to_blacklist(
self,
ip: str,
reason: str = "",
- expires_in: float | None = None,
+ expires_in: Optional[float] = None,
source: str = "manual",
) -> None:
"""Add IP to blacklist.
@@ -481,7 +481,7 @@ def ip_blacklist(self) -> set[str]:
if entry.expires_at is None or entry.expires_at > current_time
}
- async def save_blacklist(self, blacklist_file: Path | None = None) -> None:
+ async def save_blacklist(self, blacklist_file: Optional[Path] = None) -> None:
"""Save blacklist to persistent storage.
Args:
@@ -543,7 +543,7 @@ async def save_blacklist(self, blacklist_file: Path | None = None) -> None:
with contextlib.suppress(Exception):
temp_file.unlink()
- async def load_blacklist(self, blacklist_file: Path | None = None) -> None:
+ async def load_blacklist(self, blacklist_file: Optional[Path] = None) -> None:
"""Load blacklist from persistent storage.
Args:
@@ -613,7 +613,7 @@ async def load_blacklist(self, blacklist_file: Path | None = None) -> None:
except Exception as e:
logger.warning("Failed to load blacklist from %s: %s", blacklist_file, e)
- def get_peer_reputation(self, peer_id: str, _ip: str) -> PeerReputation | None:
+ def get_peer_reputation(self, peer_id: str, _ip: str) -> Optional[PeerReputation]:
"""Get peer reputation."""
return self.peer_reputations.get(peer_id)
@@ -708,7 +708,7 @@ async def _log_security_event(
ip: str,
severity: SecurityLevel,
description: str,
- metadata: dict[str, Any] | None = None,
+ metadata: Optional[dict[str, Any]] = None,
) -> None:
"""Log a security event."""
event = SecurityEvent(
diff --git a/ccbt/security/ssl_context.py b/ccbt/security/ssl_context.py
index 7243b0c..c80d382 100644
--- a/ccbt/security/ssl_context.py
+++ b/ccbt/security/ssl_context.py
@@ -10,7 +10,7 @@
import logging
import ssl
from pathlib import Path
-from typing import Any
+from typing import Any, Optional, Union
from ccbt.config.config import get_config
@@ -226,7 +226,7 @@ def _get_protocol_version(self, version_str: str) -> ssl.TLSVersion:
return version_map[version_str]
- def _load_ca_certificates(self, path: str | Path) -> tuple[list[str], int]:
+ def _load_ca_certificates(self, path: Union[str, Path]) -> tuple[list[str], int]:
"""Load CA certificates from file or directory.
Args:
@@ -263,8 +263,8 @@ def _load_ca_certificates(self, path: str | Path) -> tuple[list[str], int]:
return cert_paths, len(cert_paths)
def _validate_certificate_paths(
- self, cert_path: str, key_path: str | None = None
- ) -> tuple[Path, Path | None]:
+ self, cert_path: str, key_path: Optional[str] = None
+ ) -> tuple[Path, Optional[Path]]:
"""Validate certificate file paths.
Args:
@@ -335,7 +335,7 @@ def validate_tracker_certificate(self, cert: dict[str, Any], hostname: str) -> b
)
return False
- def _extract_common_name(self, cert: dict[str, Any]) -> str | None:
+ def _extract_common_name(self, cert: dict[str, Any]) -> Optional[str]:
"""Extract common name from certificate.
Args:
@@ -374,7 +374,7 @@ def _extract_sans(self, cert: dict[str, Any]) -> list[str]:
sans.append(value)
return sans
- def _match_hostname(self, hostname: str, pattern: str | None) -> bool:
+ def _match_hostname(self, hostname: str, pattern: Optional[str]) -> bool:
"""Match hostname against certificate pattern.
Supports wildcard certificates (e.g., *.example.com).
@@ -425,7 +425,7 @@ def pin_certificate(self, hostname: str, fingerprint: str) -> None:
self.pinned_certs[hostname.lower()] = fingerprint
self.logger.info("Pinned certificate for %s: %s", hostname, fingerprint)
- def verify_pin(self, hostname: str, cert: bytes | dict[str, Any]) -> bool:
+ def verify_pin(self, hostname: str, cert: Union[bytes, dict[str, Any]]) -> bool:
"""Verify certificate matches pinned fingerprint.
Args:
diff --git a/ccbt/security/tls_certificates.py b/ccbt/security/tls_certificates.py
index 2a504d1..1ced61d 100644
--- a/ccbt/security/tls_certificates.py
+++ b/ccbt/security/tls_certificates.py
@@ -11,7 +11,7 @@
import ipaddress
from datetime import datetime, timedelta, timezone
from pathlib import Path
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional
from ccbt.utils.logging_config import get_logger
@@ -54,7 +54,7 @@ class TLSCertificateError(Exception):
class TLSCertificateManager:
"""Manages Ed25519-based TLS certificates."""
- def __init__(self, cert_dir: Path | str | None = None):
+ def __init__(self, cert_dir: Optional[Path | str] = None):
"""Initialize certificate manager.
Args:
@@ -203,7 +203,7 @@ def save_certificate(
def load_certificate(
self,
- ) -> tuple[x509.Certificate, Ed25519PrivateKey] | None:
+ ) -> Optional[tuple[x509.Certificate, Ed25519PrivateKey]]:
"""Load certificate and private key from files.
Returns:
diff --git a/ccbt/security/xet_allowlist.py b/ccbt/security/xet_allowlist.py
index 252208c..5c0f354 100644
--- a/ccbt/security/xet_allowlist.py
+++ b/ccbt/security/xet_allowlist.py
@@ -10,7 +10,7 @@
import json
import logging
from pathlib import Path
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional, Union
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
@@ -38,9 +38,9 @@ class XetAllowlist:
def __init__(
self,
- allowlist_path: str | Path,
- encryption_key: bytes | None = None,
- key_manager: Ed25519KeyManager | None = None,
+ allowlist_path: Union[str, Path],
+ encryption_key: Optional[bytes] = None,
+ key_manager: Optional[Ed25519KeyManager] = None,
) -> None:
"""Initialize allowlist manager.
@@ -153,9 +153,9 @@ async def save(self) -> None:
def add_peer(
self,
peer_id: str,
- public_key: bytes | None = None,
- metadata: dict[str, Any] | None = None,
- alias: str | None = None,
+ public_key: Optional[bytes] = None,
+ metadata: Optional[dict[str, Any]] = None,
+ alias: Optional[str] = None,
) -> None:
"""Add peer to allowlist.
@@ -226,7 +226,7 @@ def set_alias(self, peer_id: str, alias: str) -> bool:
self.logger.info("Set alias '%s' for peer %s", alias, peer_id)
return True
- def get_alias(self, peer_id: str) -> str | None:
+ def get_alias(self, peer_id: str) -> Optional[str]:
"""Get alias for a peer.
Args:
@@ -376,7 +376,7 @@ def get_peers(self) -> list[str]:
return list(self._allowlist.keys())
- def get_peer_info(self, peer_id: str) -> dict[str, Any] | None:
+ def get_peer_info(self, peer_id: str) -> Optional[dict[str, Any]]:
"""Get information about a peer.
Args:
diff --git a/ccbt/services/base.py b/ccbt/services/base.py
index 8ca602a..632a976 100644
--- a/ccbt/services/base.py
+++ b/ccbt/services/base.py
@@ -13,7 +13,7 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from enum import Enum
-from typing import Any, Callable
+from typing import Any, Callable, Optional
from ccbt.utils.exceptions import CCBTError
from ccbt.utils.logging_config import get_logger
@@ -334,11 +334,11 @@ async def stop_service(self, service_name: str) -> None:
msg = f"Failed to stop service '{service_name}': {e}"
raise ServiceError(msg) from e
- def get_service(self, service_name: str) -> Service | None:
+ def get_service(self, service_name: str) -> Optional[Service]:
"""Get a service by name."""
return self.services.get(service_name)
- def get_service_info(self, service_name: str) -> ServiceInfo | None:
+ def get_service_info(self, service_name: str) -> Optional[ServiceInfo]:
"""Get service information."""
return self.service_info.get(service_name)
@@ -375,7 +375,7 @@ async def shutdown(self) -> None:
# Global service manager instance
-_service_manager: ServiceManager | None = None
+_service_manager: Optional[ServiceManager] = None
def get_service_manager() -> ServiceManager:
diff --git a/ccbt/services/peer_service.py b/ccbt/services/peer_service.py
index 264631a..552a599 100644
--- a/ccbt/services/peer_service.py
+++ b/ccbt/services/peer_service.py
@@ -11,7 +11,7 @@
import asyncio
import time
from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.services.base import HealthCheck, Service
from ccbt.utils.logging_config import LoggingContext
@@ -58,7 +58,7 @@ def __init__(self, max_peers: int = 200, connection_timeout: float = 30.0):
self.total_pieces_uploaded = 0
# Background task reference
- self._monitor_task: asyncio.Task[None] | None = None
+ self._monitor_task: Optional[asyncio.Task[None]] = None
async def start(self) -> None:
"""Start the peer service."""
@@ -248,7 +248,7 @@ async def disconnect_peer(self, peer_id: str) -> None:
except Exception:
self.logger.exception("Error disconnecting peer %s", peer_id)
- async def get_peer(self, peer_id: str) -> PeerConnection | None:
+ async def get_peer(self, peer_id: str) -> Optional[PeerConnection]:
"""Get peer connection by ID."""
return self.peers.get(peer_id)
diff --git a/ccbt/services/storage_service.py b/ccbt/services/storage_service.py
index 316ae55..d5d8317 100644
--- a/ccbt/services/storage_service.py
+++ b/ccbt/services/storage_service.py
@@ -12,7 +12,7 @@
import time
from dataclasses import dataclass
from pathlib import Path
-from typing import Any
+from typing import Any, Optional
from ccbt.config.config import get_config
from ccbt.services.base import HealthCheck, Service
@@ -30,7 +30,7 @@ class StorageOperation:
timestamp: float
duration: float
success: bool
- data: bytes | None = None # Actual data bytes for write operations
+ data: Optional[bytes] = None # Actual data bytes for write operations
@dataclass
@@ -88,7 +88,7 @@ def __init__(self, max_concurrent_operations: int = 10, cache_size_mb: int = 256
)
# Disk I/O manager for chunked writes
- self.disk_io: DiskIOManager | None = None
+ self.disk_io: Optional[DiskIOManager] = None
# Flag to mark queue as closed
self._queue_closed = False
@@ -501,7 +501,7 @@ async def write_file(self, file_path: str, data: bytes) -> bool:
return True
- async def read_file(self, file_path: str, size: int) -> bytes | None:
+ async def read_file(self, file_path: str, size: int) -> Optional[bytes]:
"""Read data from a file.
Args:
@@ -569,7 +569,7 @@ async def delete_file(self, file_path: str) -> bool:
return True
- async def get_file_info(self, file_path: str) -> FileInfo | None:
+ async def get_file_info(self, file_path: str) -> Optional[FileInfo]:
"""Get file information."""
return self.files.get(file_path)
diff --git a/ccbt/services/tracker_service.py b/ccbt/services/tracker_service.py
index bc1b91e..7a58ce7 100644
--- a/ccbt/services/tracker_service.py
+++ b/ccbt/services/tracker_service.py
@@ -11,7 +11,7 @@
import asyncio
import time
from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.services.base import HealthCheck, Service
from ccbt.utils.logging_config import LoggingContext
@@ -410,6 +410,6 @@ async def get_healthy_trackers(self) -> list[str]:
"""Get list of healthy trackers."""
return [url for url, conn in self.trackers.items() if conn.is_healthy]
- async def get_tracker_info(self, url: str) -> TrackerConnection | None:
+ async def get_tracker_info(self, url: str) -> Optional[TrackerConnection]:
"""Get tracker connection info."""
return self.trackers.get(url)
diff --git a/ccbt/session/adapters.py b/ccbt/session/adapters.py
index ad9a5ef..6c011be 100644
--- a/ccbt/session/adapters.py
+++ b/ccbt/session/adapters.py
@@ -6,7 +6,7 @@
from __future__ import annotations
-from typing import Any, Callable
+from typing import Any, Callable, Optional
from ccbt.session.types import DHTClientProtocol, TrackerClientProtocol
@@ -21,7 +21,7 @@ def __init__(self, dht_client: Any) -> None:
def add_peer_callback(
self,
callback: Callable[[list[tuple[str, int]]], None],
- info_hash: bytes | None = None,
+ info_hash: Optional[bytes] = None,
) -> None:
"""Add a callback for peer discovery events.
@@ -85,7 +85,7 @@ async def announce(
port: int,
uploaded: int = 0,
downloaded: int = 0,
- left: int | None = None,
+ left: Optional[int] = None,
event: str = "started",
) -> Any:
"""Announce to the tracker.
diff --git a/ccbt/session/announce.py b/ccbt/session/announce.py
index 052eda5..df64fc3 100644
--- a/ccbt/session/announce.py
+++ b/ccbt/session/announce.py
@@ -7,7 +7,7 @@
from __future__ import annotations
import asyncio
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Union
from ccbt.session.models import SessionContext
@@ -209,7 +209,7 @@ async def announce_initial(self) -> list[TrackerResponse]:
)
return []
- def _prepare_torrent_dict(self, td: dict[str, Any] | Any) -> dict[str, Any]:
+ def _prepare_torrent_dict(self, td: Union[dict[str, Any], Any]) -> dict[str, Any]:
"""Normalize torrent_data to a dict that tracker client expects."""
if isinstance(td, dict):
result = dict(td)
diff --git a/ccbt/session/checkpoint_operations.py b/ccbt/session/checkpoint_operations.py
index 085a4c0..de90c8a 100644
--- a/ccbt/session/checkpoint_operations.py
+++ b/ccbt/session/checkpoint_operations.py
@@ -3,7 +3,7 @@
from __future__ import annotations
from pathlib import Path
-from typing import Any
+from typing import Any, Optional
from ccbt.models import PieceState, TorrentCheckpoint
from ccbt.storage.checkpoint import CheckpointManager
@@ -31,7 +31,7 @@ async def resume_from_checkpoint(
self,
info_hash: bytes,
checkpoint: TorrentCheckpoint,
- torrent_path: str | None = None,
+ torrent_path: Optional[str] = None,
) -> str:
"""Resume download from checkpoint.
@@ -144,7 +144,7 @@ async def list_resumable(self) -> list[TorrentCheckpoint]:
return resumable
- async def find_by_name(self, name: str) -> TorrentCheckpoint | None:
+ async def find_by_name(self, name: str) -> Optional[TorrentCheckpoint]:
"""Find checkpoint by torrent name."""
checkpoint_manager = CheckpointManager(self.config.disk)
checkpoints = await checkpoint_manager.list_checkpoints()
@@ -166,7 +166,7 @@ async def find_by_name(self, name: str) -> TorrentCheckpoint | None:
return None
- async def get_info(self, info_hash: bytes) -> dict[str, Any] | None:
+ async def get_info(self, info_hash: bytes) -> Optional[dict[str, Any]]:
"""Get checkpoint summary information."""
checkpoint_manager = CheckpointManager(self.config.disk)
checkpoint = await checkpoint_manager.load_checkpoint(info_hash)
diff --git a/ccbt/session/checkpointing.py b/ccbt/session/checkpointing.py
index cf1f2f4..a51da23 100644
--- a/ccbt/session/checkpointing.py
+++ b/ccbt/session/checkpointing.py
@@ -5,7 +5,7 @@
import asyncio
import contextlib
import time
-from typing import TYPE_CHECKING, Any, cast
+from typing import TYPE_CHECKING, Any, Optional, cast
from ccbt.session.fast_resume import FastResumeLoader
from ccbt.session.tasks import TaskSupervisor
@@ -22,16 +22,16 @@ class CheckpointController:
def __init__(
self,
ctx: SessionContext,
- tasks: TaskSupervisor | None = None,
- checkpoint_manager: CheckpointManager | None = None,
+ tasks: Optional[TaskSupervisor] = None,
+ checkpoint_manager: Optional[CheckpointManager] = None,
) -> None:
"""Initialize the checkpoint controller with session context and optional dependencies."""
self._ctx = ctx
self._tasks = tasks or TaskSupervisor()
# Prefer provided manager, else from context
self._manager: CheckpointManager = checkpoint_manager or ctx.checkpoint_manager # type: ignore[assignment]
- self._queue: asyncio.Queue[bool] | None = None
- self._batch_task: asyncio.Task[None] | None = None
+ self._queue: Optional[asyncio.Queue[bool]] = None
+ self._batch_task: Optional[asyncio.Task[None]] = None
self._batch_interval: float = 0.0
self._batch_pieces: int = 0
# Initialize fast resume loader if enabled
@@ -679,6 +679,9 @@ async def resume_from_checkpoint(
# Restore security state if available
await self._restore_security_state(checkpoint, session)
+ # Restore rate limits if available
+ await self._restore_rate_limits(checkpoint, session)
+
# Restore session state if available
await self._restore_session_state(checkpoint, session)
@@ -1106,6 +1109,44 @@ async def _restore_security_state(
if self._ctx.logger:
self._ctx.logger.debug("Failed to restore security state: %s", e)
+ async def _restore_rate_limits(
+ self, checkpoint: TorrentCheckpoint, session: Any
+ ) -> None:
+ """Restore rate limits from checkpoint."""
+ try:
+ if not checkpoint.rate_limits:
+ return
+
+ # Get session manager
+ session_manager = getattr(session, "session_manager", None)
+ if not session_manager:
+ return
+
+ # Get info hash
+ info_hash = getattr(self._ctx.info, "info_hash", None)
+ if not info_hash:
+ return
+
+ # Convert info hash to hex string for set_rate_limits
+ info_hash_hex = info_hash.hex()
+
+ # Restore rate limits via session manager
+ if hasattr(session_manager, "set_rate_limits"):
+ down_kib = checkpoint.rate_limits.get("down_kib", 0)
+ up_kib = checkpoint.rate_limits.get("up_kib", 0)
+ await session_manager.set_rate_limits(
+ info_hash_hex, down_kib, up_kib
+ )
+ if self._ctx.logger:
+ self._ctx.logger.debug(
+ "Restored rate limits: down=%d KiB/s, up=%d KiB/s",
+ down_kib,
+ up_kib,
+ )
+ except Exception as e:
+ if self._ctx.logger:
+ self._ctx.logger.debug("Failed to restore rate limits: %s", e)
+
async def _restore_session_state(
self, checkpoint: TorrentCheckpoint, session: Any
) -> None:
diff --git a/ccbt/session/discovery.py b/ccbt/session/discovery.py
index 98baf5a..fcd87eb 100644
--- a/ccbt/session/discovery.py
+++ b/ccbt/session/discovery.py
@@ -7,7 +7,7 @@
from __future__ import annotations
import asyncio
-from typing import TYPE_CHECKING, Awaitable, Callable
+from typing import TYPE_CHECKING, Awaitable, Callable, Optional
from ccbt.session.tasks import TaskSupervisor
@@ -20,7 +20,7 @@ class DiscoveryController:
"""Controller to orchestrate DHT/tracker/PEX peer discovery with dedup and scheduling."""
def __init__(
- self, ctx: SessionContext, tasks: TaskSupervisor | None = None
+ self, ctx: SessionContext, tasks: Optional[TaskSupervisor] = None
) -> None:
"""Initialize the discovery controller with session context and optional task supervisor."""
self._ctx = ctx
diff --git a/ccbt/session/download_manager.py b/ccbt/session/download_manager.py
index d0ce5f3..a1f7425 100644
--- a/ccbt/session/download_manager.py
+++ b/ccbt/session/download_manager.py
@@ -11,7 +11,7 @@
import time
import typing
from collections import deque
-from typing import Any, Callable
+from typing import Any, Callable, Optional, Union
from ccbt.config.config import get_config
from ccbt.core.magnet import (
@@ -29,10 +29,10 @@ class AsyncDownloadManager:
def __init__(
self,
- torrent_data: dict[str, Any] | Any,
+ torrent_data: Union[dict[str, Any], Any],
output_dir: str = ".",
- peer_id: bytes | None = None,
- security_manager: Any | None = None,
+ peer_id: Optional[bytes] = None,
+ security_manager: Optional[Any] = None,
):
"""Initialize async download manager."""
# Normalize torrent_data to dict shape expected by piece manager
@@ -102,11 +102,11 @@ def __init__(
self.piece_manager = None
else:
self._init_error = None
- self.peer_manager: Any | None = None
+ self.peer_manager: Optional[Any] = None
# State
self.download_complete = False
- self.start_time: float | None = None
+ self.start_time: Optional[float] = None
self._background_tasks: set[asyncio.Task] = set()
self._piece_verified_background_tasks: set[asyncio.Task[None]] = set()
@@ -123,10 +123,10 @@ def __init__(
self._upload_rate: float = 0.0
# Callbacks
- self.on_peer_connected: Callable | None = None
- self.on_peer_disconnected: Callable | None = None
- self.on_piece_completed: Callable | None = None
- self.on_download_complete: Callable | None = None
+ self.on_peer_connected: Optional[Callable] = None
+ self.on_peer_disconnected: Optional[Callable] = None
+ self.on_piece_completed: Optional[Callable] = None
+ self.on_download_complete: Optional[Callable] = None
self.logger = logging.getLogger(__name__)
@@ -162,7 +162,7 @@ async def stop(self) -> None:
self.logger.info("Async download manager stopped")
async def start_download(
- self, peers: list[dict[str, Any]], max_peers_per_torrent: int | None = None
+ self, peers: list[dict[str, Any]], max_peers_per_torrent: Optional[int] = None
) -> None:
"""Start the download process.
@@ -663,7 +663,7 @@ async def _announce_to_trackers(
async def download_torrent(
torrent_path: str, output_dir: str = "."
-) -> AsyncDownloadManager | None:
+) -> Optional[AsyncDownloadManager]:
"""Download a single torrent file (compat helper for tests)."""
import contextlib
@@ -711,7 +711,7 @@ async def monitor_progress():
async def download_magnet(
magnet_uri: str, output_dir: str = "."
-) -> AsyncDownloadManager | None:
+) -> Optional[AsyncDownloadManager]:
"""Download from a magnet link (compat helper for tests)."""
download_manager = None
tracker_clients = []
diff --git a/ccbt/session/download_startup.py b/ccbt/session/download_startup.py
index 17f5452..a5791d0 100644
--- a/ccbt/session/download_startup.py
+++ b/ccbt/session/download_startup.py
@@ -3,3 +3,9 @@
This module handles the initialization and startup sequence for torrent downloads,
including metadata retrieval, piece manager setup, and initial peer connections.
"""
+
+
+
+
+
+
diff --git a/ccbt/session/factories.py b/ccbt/session/factories.py
index 089ee10..a9bb63a 100644
--- a/ccbt/session/factories.py
+++ b/ccbt/session/factories.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import Any
+from typing import Any, Optional
from ccbt import session as _session_mod
@@ -21,7 +21,7 @@ def __init__(self, manager: Any) -> None:
self._di = manager._di
self.logger = manager.logger
- def create_security_manager(self) -> Any | None:
+ def create_security_manager(self) -> Optional[Any]:
"""Create security manager with DI fallback.
Returns:
@@ -42,7 +42,7 @@ def create_security_manager(self) -> Any | None:
except Exception:
return None
- def create_dht_client(self, bind_ip: str, bind_port: int) -> Any | None:
+ def create_dht_client(self, bind_ip: str, bind_port: int) -> Optional[Any]:
"""Create DHT client with DI fallback.
Args:
@@ -76,7 +76,7 @@ def create_dht_client(self, bind_ip: str, bind_port: int) -> Any | None:
self.logger.exception("Failed to create DHT client")
return None
- def create_nat_manager(self) -> Any | None:
+ def create_nat_manager(self) -> Optional[Any]:
"""Create NAT manager with DI fallback.
Returns:
@@ -97,7 +97,7 @@ def create_nat_manager(self) -> Any | None:
except Exception:
return None
- def create_tcp_server(self) -> Any | None:
+ def create_tcp_server(self) -> Optional[Any]:
"""Create TCP server with DI fallback.
Returns:
diff --git a/ccbt/session/fast_resume.py b/ccbt/session/fast_resume.py
index 92f4c2c..9dc63b6 100644
--- a/ccbt/session/fast_resume.py
+++ b/ccbt/session/fast_resume.py
@@ -7,7 +7,7 @@
from __future__ import annotations
import random
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional, Union
if TYPE_CHECKING: # pragma: no cover - type checking only, not executed at runtime
from ccbt.storage.checkpoint import TorrentCheckpoint
@@ -35,7 +35,7 @@ def __init__(self, config: Any) -> None:
def validate_resume_data(
self,
resume_data: FastResumeData,
- torrent_info: TorrentInfoModel | dict[str, Any],
+ torrent_info: Union[TorrentInfoModel, dict[str, Any]],
) -> tuple[bool, list[str]]:
"""Validate resume data against torrent metadata.
@@ -141,8 +141,8 @@ def migrate_resume_data(
async def verify_integrity(
self,
resume_data: FastResumeData,
- torrent_info: TorrentInfoModel | dict[str, Any],
- file_assembler: Any | None,
+ torrent_info: Union[TorrentInfoModel, dict[str, Any]],
+ file_assembler: Optional[Any],
num_pieces_to_verify: int = 10,
) -> dict[str, Any]:
"""Verify integrity of critical pieces.
@@ -239,9 +239,9 @@ async def verify_integrity(
async def handle_corrupted_resume(
self,
- _resume_data: FastResumeData | None,
+ _resume_data: Optional[FastResumeData],
error: Exception,
- checkpoint: TorrentCheckpoint | None,
+ checkpoint: Optional[TorrentCheckpoint],
) -> dict[str, Any]:
"""Handle corrupted resume data gracefully.
diff --git a/ccbt/session/lifecycle.py b/ccbt/session/lifecycle.py
index 93a18c3..04f36c7 100644
--- a/ccbt/session/lifecycle.py
+++ b/ccbt/session/lifecycle.py
@@ -6,7 +6,7 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.session.tasks import TaskSupervisor
@@ -18,7 +18,7 @@ class LifecycleController:
"""Owns high-level start/pause/resume/stop sequencing for a torrent session."""
def __init__(
- self, ctx: SessionContext, tasks: TaskSupervisor | None = None
+ self, ctx: SessionContext, tasks: Optional[TaskSupervisor] = None
) -> None:
"""Initialize the lifecycle controller with session context and optional task supervisor."""
self._ctx = ctx
diff --git a/ccbt/session/manager_startup.py b/ccbt/session/manager_startup.py
index 8f3695d..d8ba2a5 100644
--- a/ccbt/session/manager_startup.py
+++ b/ccbt/session/manager_startup.py
@@ -3,3 +3,9 @@
This module handles the startup sequence for the session manager, including
component initialization, service startup, and background task coordination.
"""
+
+
+
+
+
+
diff --git a/ccbt/session/metrics_status.py b/ccbt/session/metrics_status.py
index 86fc90b..e309cc4 100644
--- a/ccbt/session/metrics_status.py
+++ b/ccbt/session/metrics_status.py
@@ -5,7 +5,7 @@
import asyncio
import contextlib
import time
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.session.tasks import TaskSupervisor
@@ -17,7 +17,7 @@ class MetricsAndStatus:
"""Status aggregation and metrics emission helper for session/manager."""
def __init__(
- self, ctx: SessionContext, tasks: TaskSupervisor | None = None
+ self, ctx: SessionContext, tasks: Optional[TaskSupervisor] = None
) -> None:
"""Initialize the metrics and status helper with session context and optional task supervisor."""
self._ctx = ctx
diff --git a/ccbt/session/models.py b/ccbt/session/models.py
index 103bdc8..13d13ec 100644
--- a/ccbt/session/models.py
+++ b/ccbt/session/models.py
@@ -8,7 +8,7 @@
from dataclasses import dataclass
from enum import Enum
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from pathlib import Path
@@ -35,14 +35,14 @@ class SessionContext:
output_dir: Path
# Optional references populated during lifecycle
- info: Any | None = None # TorrentSessionInfo
- session_manager: Any | None = None
- logger: Any | None = None
-
- piece_manager: Any | None = None
- peer_manager: Any | None = None
- tracker: Any | None = None
- dht_client: Any | None = None
- checkpoint_manager: Any | None = None
- download_manager: Any | None = None
- file_selection_manager: Any | None = None
+ info: Optional[Any] = None # TorrentSessionInfo
+ session_manager: Optional[Any] = None
+ logger: Optional[Any] = None
+
+ piece_manager: Optional[Any] = None
+ peer_manager: Optional[Any] = None
+ tracker: Optional[Any] = None
+ dht_client: Optional[Any] = None
+ checkpoint_manager: Optional[Any] = None
+ download_manager: Optional[Any] = None
+ file_selection_manager: Optional[Any] = None
diff --git a/ccbt/session/peer_events.py b/ccbt/session/peer_events.py
index a751d75..243b80a 100644
--- a/ccbt/session/peer_events.py
+++ b/ccbt/session/peer_events.py
@@ -6,7 +6,7 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING, Callable, Optional
if TYPE_CHECKING:
from ccbt.session.models import SessionContext
@@ -24,10 +24,10 @@ def bind_peer_manager(
self,
peer_manager: PeerManagerProtocol,
*,
- on_peer_connected: Callable[..., None] | None = None,
- on_peer_disconnected: Callable[..., None] | None = None,
- on_piece_received: Callable[..., None] | None = None,
- on_bitfield_received: Callable[..., None] | None = None,
+ on_peer_connected: Optional[Callable[..., None]] = None,
+ on_peer_disconnected: Optional[Callable[..., None]] = None,
+ on_piece_received: Optional[Callable[..., None]] = None,
+ on_bitfield_received: Optional[Callable[..., None]] = None,
) -> None:
"""Bind peer manager and event callbacks.
@@ -53,9 +53,9 @@ def bind_piece_manager(
self,
piece_manager: PieceManagerProtocol,
*,
- on_piece_completed: Callable[[int], None] | None = None,
- on_piece_verified: Callable[[int], None] | None = None,
- on_download_complete: Callable[[], None] | None = None,
+ on_piece_completed: Optional[Callable[[int], None]] = None,
+ on_piece_verified: Optional[Callable[[int], None]] = None,
+ on_download_complete: Optional[Callable[[], None]] = None,
) -> None:
"""Bind piece manager and event callbacks.
diff --git a/ccbt/session/peers.py b/ccbt/session/peers.py
index 73f9d91..6cc176c 100644
--- a/ccbt/session/peers.py
+++ b/ccbt/session/peers.py
@@ -8,7 +8,7 @@
import asyncio
import time
-from typing import TYPE_CHECKING, Any, Callable, cast
+from typing import TYPE_CHECKING, Any, Callable, Optional, cast
from ccbt.session.peer_events import PeerEventsBinder
@@ -27,12 +27,12 @@ async def init_and_bind(
*,
is_private: bool,
session_ctx: SessionContext,
- on_peer_connected: Callable[..., None] | None = None,
- on_peer_disconnected: Callable[..., None] | None = None,
- on_piece_received: Callable[..., None] | None = None,
- on_bitfield_received: Callable[..., None] | None = None,
- logger: Any | None = None,
- max_peers_per_torrent: int | None = None,
+ on_peer_connected: Optional[Callable[..., None]] = None,
+ on_peer_disconnected: Optional[Callable[..., None]] = None,
+ on_piece_received: Optional[Callable[..., None]] = None,
+ on_bitfield_received: Optional[Callable[..., None]] = None,
+ logger: Optional[Any] = None,
+ max_peers_per_torrent: Optional[int] = None,
) -> Any:
"""Ensure a running peer manager exists and is bound to callbacks.
@@ -109,9 +109,9 @@ def bind_piece_manager(
session_ctx: SessionContext,
piece_manager: Any,
*,
- on_piece_verified: Callable[[int], None] | None = None,
- on_download_complete: Callable[[], None] | None = None,
- on_piece_completed: Callable[[int], None] | None = None,
+ on_piece_verified: Optional[Callable[[int], None]] = None,
+ on_download_complete: Optional[Callable[[], None]] = None,
+ on_piece_completed: Optional[Callable[[int], None]] = None,
) -> None:
"""Bind piece manager events using a PeerEventsBinder.
@@ -655,7 +655,7 @@ async def connect_peers_to_download(self, peer_list: list[dict[str, Any]]) -> No
# CRITICAL FIX: Increased max_wait_attempts and wait_interval for better reliability
max_wait_attempts = 20 # Increased from 10 to allow more time for initialization (10 seconds total)
wait_interval = 0.5
- peer_manager: AsyncPeerConnectionManager | None = None # type: ignore[assignment]
+ peer_manager: Optional[AsyncPeerConnectionManager] = None # type: ignore[assignment]
peer_manager_source = "unknown"
for attempt in range(max_wait_attempts):
diff --git a/ccbt/session/scrape.py b/ccbt/session/scrape.py
index 5b5be09..f0800f4 100644
--- a/ccbt/session/scrape.py
+++ b/ccbt/session/scrape.py
@@ -4,7 +4,7 @@
import asyncio
import time
-from typing import Any
+from typing import Any, Optional
from ccbt.models import ScrapeResult
@@ -62,7 +62,7 @@ async def force_scrape(self, info_hash_hex: str) -> bool:
if isinstance(torrent_data, dict):
# Normalize announce_list to list[list[str]] format (BEP 12)
raw_announce_list = torrent_data.get("announce_list")
- normalized_announce_list: list[list[str]] | None = None
+ normalized_announce_list: Optional[list[list[str]]] = None
if raw_announce_list and isinstance(raw_announce_list, list):
normalized_announce_list = []
for item in raw_announce_list:
@@ -145,7 +145,7 @@ async def force_scrape(self, info_hash_hex: str) -> bool:
self.logger.exception("Error during force_scrape for %s", info_hash_hex)
return False
- async def get_cached_result(self, info_hash_hex: str) -> Any | None:
+ async def get_cached_result(self, info_hash_hex: str) -> Optional[Any]:
"""Get cached scrape result for a torrent.
Args:
diff --git a/ccbt/session/session.py b/ccbt/session/session.py
index ccfd756..d7bb68b 100644
--- a/ccbt/session/session.py
+++ b/ccbt/session/session.py
@@ -13,7 +13,7 @@
from collections import deque
from dataclasses import dataclass
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Callable, Coroutine, cast
+from typing import TYPE_CHECKING, Any, Callable, Coroutine, Optional, Union, cast
if TYPE_CHECKING:
from ccbt.discovery.dht import AsyncDHTClient
@@ -67,8 +67,10 @@ class TorrentSessionInfo:
output_dir: str
added_time: float
status: str = "starting" # starting, downloading, seeding, stopped, error
- priority: str | None = None # Queue priority (TorrentPriority enum value as string)
- queue_position: int | None = (
+ priority: Optional[str] = (
+ None # Queue priority (TorrentPriority enum value as string)
+ )
+ queue_position: Optional[int] = (
None # Position in queue (0 = highest priority position)
)
@@ -78,9 +80,9 @@ class AsyncTorrentSession:
def __init__(
self,
- torrent_data: dict[str, Any] | TorrentInfoModel,
- output_dir: str | Path = ".",
- session_manager: AsyncSessionManager | None = None,
+ torrent_data: Union[dict[str, Any], TorrentInfoModel],
+ output_dir: Union[str, Path] = ".",
+ session_manager: Optional[AsyncSessionManager] = None,
) -> None:
"""Initialize TorrentSession with torrent data and output directory."""
self.config = get_config()
@@ -100,7 +102,7 @@ def __init__(
# Set the piece manager on the download manager for compatibility
self.download_manager.piece_manager = self.piece_manager
- self.file_selection_manager: FileSelectionManager | None = None
+ self.file_selection_manager: Optional[FileSelectionManager] = None
self.ensure_file_selection_manager()
# CRITICAL FIX: Pass session_manager to AsyncTrackerClient
@@ -114,16 +116,16 @@ def __init__(
# CRITICAL FIX: Register immediate connection callback for tracker responses
# This connects peers IMMEDIATELY when tracker responses arrive, before announce loop
# Note: Callback will be registered in start() after components are initialized
- self.pex_manager: PEXManager | None = None
+ self.pex_manager: Optional[PEXManager] = None
self.checkpoint_manager = CheckpointManager(self.config.disk)
# Initialize checkpoint controller (will be fully initialized after ctx is created)
- self.checkpoint_controller: CheckpointController | None = None
+ self.checkpoint_controller: Optional[CheckpointController] = None
# CRITICAL FIX: Timestamp to track when tracker peers are being connected
# This prevents DHT from starting until tracker connections complete
# Use timestamp instead of boolean to handle multiple concurrent callbacks
- self._tracker_peers_connecting_until: float | None = None # type: ignore[attr-defined]
+ self._tracker_peers_connecting_until: Optional[float] = None # type: ignore[attr-defined]
# Task tracking for piece verification and download completion
# These are sets to track asyncio tasks and prevent garbage collection
@@ -186,15 +188,15 @@ def __init__(
)
# Source tracking for checkpoint metadata
- self.torrent_file_path: str | None = None
- self.magnet_uri: str | None = None
+ self.torrent_file_path: Optional[str] = None
+ self.magnet_uri: Optional[str] = None
# Background tasks
self._task_supervisor = TaskSupervisor()
- self._announce_task: asyncio.Task[None] | None = None
- self._status_task: asyncio.Task[None] | None = None
- self._checkpoint_task: asyncio.Task[None] | None = None
- self._seeding_stats_task: asyncio.Task[None] | None = None
+ self._announce_task: Optional[asyncio.Task[None]] = None
+ self._status_task: Optional[asyncio.Task[None]] = None
+ self._checkpoint_task: Optional[asyncio.Task[None]] = None
+ self._seeding_stats_task: Optional[asyncio.Task[None]] = None
self._stop_event = asyncio.Event()
self._stopped = False # Flag for incoming peer queue processor
@@ -212,16 +214,16 @@ def __init__(
]
] = asyncio.Queue()
self._incoming_peer_handler = IncomingPeerHandler(self)
- self._incoming_queue_task: asyncio.Task[None] | None = None
+ self._incoming_queue_task: Optional[asyncio.Task[None]] = None
# Checkpoint state
self.checkpoint_loaded = False
self.resume_from_checkpoint = False
# Callbacks
- self.on_status_update: Callable[[dict[str, Any]], None] | None = None
- self.on_complete: Callable[[], None] | None = None
- self.on_error: Callable[[Exception], None] | None = None
+ self.on_status_update: Optional[Callable[[dict[str, Any]], None]] = None
+ self.on_complete: Optional[Callable[[], None]] = None
+ self.on_error: Optional[Callable[[Exception], None]] = None
# Cached status for synchronous property access
# Updated periodically by _status_loop
@@ -355,7 +357,7 @@ def ensure_file_selection_manager(self) -> bool:
def _attach_file_selection_manager(
self,
- torrent_info: TorrentInfoModel | None,
+ torrent_info: Optional[TorrentInfoModel],
) -> bool:
"""Attach a file selection manager if torrent metadata is available."""
if not torrent_info or not getattr(torrent_info, "files", None):
@@ -426,8 +428,8 @@ def _attach_file_selection_manager(
def _get_torrent_info(
self,
- torrent_data: dict[str, Any] | TorrentInfoModel,
- ) -> TorrentInfoModel | None:
+ torrent_data: Union[dict[str, Any], TorrentInfoModel],
+ ) -> Optional[TorrentInfoModel]:
"""Get TorrentInfo from torrent data.
Args:
@@ -478,7 +480,7 @@ async def _apply_magnet_file_selection_if_needed(self) -> None:
def _normalize_torrent_data(
self,
- td: dict[str, Any] | TorrentInfoModel,
+ td: Union[dict[str, Any], TorrentInfoModel],
) -> dict[str, Any]:
"""Convert TorrentInfoModel or legacy dict into a normalized dict expected by piece manager.
@@ -2863,7 +2865,7 @@ def tracker_connection_status(self, value: str) -> None:
self._tracker_connection_status = value
@property
- def last_tracker_error(self) -> str | None:
+ def last_tracker_error(self) -> Optional[str]:
"""Get last tracker error.
Returns:
@@ -2873,7 +2875,7 @@ def last_tracker_error(self) -> str | None:
return getattr(self, "_last_tracker_error", None)
@last_tracker_error.setter
- def last_tracker_error(self, value: str | None) -> None:
+ def last_tracker_error(self, value: Optional[str]) -> None:
"""Set last tracker error.
Args:
@@ -2942,7 +2944,7 @@ def collect_trackers(self, td: dict[str, Any]) -> list[str]:
return self._collect_trackers(td)
@property
- def dht_setup(self) -> Any | None:
+ def dht_setup(self) -> Optional[Any]:
"""Get DHT setup instance.
Returns:
@@ -3229,7 +3231,7 @@ def remove_dht_peer_task(self, task: asyncio.Task) -> None:
self._dht_peer_tasks.discard(task)
@property
- def discovery_controller(self) -> Any | None:
+ def discovery_controller(self) -> Optional[Any]:
"""Get discovery controller instance.
Returns:
@@ -3239,7 +3241,7 @@ def discovery_controller(self) -> Any | None:
return getattr(self, "_discovery_controller", None)
@discovery_controller.setter
- def discovery_controller(self, value: Any | None) -> None:
+ def discovery_controller(self, value: Optional[Any]) -> None:
"""Set discovery controller instance.
Args:
@@ -3281,7 +3283,7 @@ def remove_metadata_task(self, task: asyncio.Task) -> None:
self._metadata_tasks.discard(task)
@property
- def dht_discovery_task(self) -> asyncio.Task | None:
+ def dht_discovery_task(self) -> Optional[asyncio.Task]:
"""Get DHT discovery task.
Returns:
@@ -3291,7 +3293,7 @@ def dht_discovery_task(self) -> asyncio.Task | None:
return getattr(self, "_dht_discovery_task", None)
@dht_discovery_task.setter
- def dht_discovery_task(self, value: asyncio.Task | None) -> None:
+ def dht_discovery_task(self, value: Optional[asyncio.Task]) -> None:
"""Set DHT discovery task.
Args:
@@ -3321,7 +3323,7 @@ def stopped(self, value: bool) -> None:
self._stopped = value
@property
- def last_query_metrics(self) -> dict[str, Any] | None:
+ def last_query_metrics(self) -> Optional[dict[str, Any]]:
"""Get last query metrics.
Returns:
@@ -3331,7 +3333,7 @@ def last_query_metrics(self) -> dict[str, Any] | None:
return getattr(self, "_last_query_metrics", None)
@last_query_metrics.setter
- def last_query_metrics(self, value: dict[str, Any] | None) -> None:
+ def last_query_metrics(self, value: Optional[dict[str, Any]]) -> None:
"""Set last query metrics.
Args:
@@ -3341,7 +3343,7 @@ def last_query_metrics(self, value: dict[str, Any] | None) -> None:
self._last_query_metrics = value
@property
- def background_start_task(self) -> asyncio.Task | None:
+ def background_start_task(self) -> Optional[asyncio.Task]:
"""Get background start task.
Returns:
@@ -3351,7 +3353,7 @@ def background_start_task(self) -> asyncio.Task | None:
return getattr(self, "_background_start_task", None)
@background_start_task.setter
- def background_start_task(self, value: asyncio.Task | None) -> None:
+ def background_start_task(self, value: Optional[asyncio.Task]) -> None:
"""Set background start task.
Args:
@@ -3395,18 +3397,18 @@ def __init__(self, output_dir: str = "."):
self.lock = asyncio.Lock()
# Global components
- self.dht_client: AsyncDHTClient | None = None
- self.metrics: Metrics | None = None # Initialized in start() if enabled
- self.peer_service: PeerService | None = PeerService(
+ self.dht_client: Optional[AsyncDHTClient] = None
+ self.metrics: Optional[Metrics] = None # Initialized in start() if enabled
+ self.peer_service: Optional[PeerService] = PeerService(
max_peers=self.config.network.max_global_peers,
connection_timeout=self.config.network.connection_timeout,
)
# Background tasks
self._task_supervisor = TaskSupervisor()
- self._cleanup_task: asyncio.Task | None = None
- self._metrics_task: asyncio.Task | None = None
- self._metrics_restart_task: asyncio.Task | None = None
+ self._cleanup_task: Optional[asyncio.Task] = None
+ self._metrics_task: Optional[asyncio.Task] = None
+ self._metrics_restart_task: Optional[asyncio.Task] = None
self._metrics_sample_interval = 1.0
self._metrics_emit_interval = 10.0
self._last_metrics_emit = 0.0
@@ -3417,16 +3419,15 @@ def __init__(self, output_dir: str = "."):
self._metrics_heartbeat_interval = 5
# Callbacks
- self.on_torrent_added: Callable[[bytes, str], None] | None = None
- self.on_torrent_removed: Callable[[bytes], None] | None = None
- self.on_torrent_complete: (
+ self.on_torrent_added: Optional[Callable[[bytes, str], None]] = None
+ self.on_torrent_removed: Optional[Callable[[bytes], None]] = None
+ self.on_torrent_complete: Optional[
Callable[[bytes, str], None]
| Callable[[bytes, str], Coroutine[Any, Any, None]]
- | None
- ) = None
+ ] = None
# XET folder callbacks
- self.on_xet_folder_added: Callable[[str, str], None] | None = None
- self.on_xet_folder_removed: Callable[[str], None] | None = None
+ self.on_xet_folder_added: Optional[Callable[[str, str], None]] = None
+ self.on_xet_folder_removed: Optional[Callable[[str], None]] = None
self.logger = logging.getLogger(__name__)
@@ -3453,61 +3454,61 @@ def __init__(self, output_dir: str = "."):
)
# Optional dependency injection container
- self._di: DIContainer | None = None
+ self._di: Optional[DIContainer] = None
# Components initialized by startup functions
- self.security_manager: Any | None = None
- self.nat_manager: Any | None = None
- self.tcp_server: Any | None = None
+ self.security_manager: Optional[Any] = None
+ self.nat_manager: Optional[Any] = None
+ self.tcp_server: Optional[Any] = None
# CRITICAL FIX: Store reference to initialized UDP tracker client
# This ensures all torrent sessions use the same initialized socket
# The UDP tracker client is a singleton, but we store the reference
# to ensure it's accessible and to prevent any lazy initialization
- self.udp_tracker_client: Any | None = None
+ self.udp_tracker_client: Optional[Any] = None
# Queue manager for priority-based torrent scheduling
- self.queue_manager: Any | None = None
+ self.queue_manager: Optional[Any] = None
# CRITICAL FIX: Store executor initialized at daemon startup
# This ensures executor uses the session manager's initialized components
# and prevents duplicate executor creation
- self.executor: Any | None = None
+ self.executor: Optional[Any] = None
# CRITICAL FIX: Store protocol manager initialized at daemon startup
# Singleton pattern removed - protocol manager is now managed via session manager
# This ensures proper lifecycle management and prevents conflicts
- self.protocol_manager: Any | None = None
+ self.protocol_manager: Optional[Any] = None
# CRITICAL FIX: Store WebTorrent WebSocket server initialized at daemon startup
# WebSocket server socket must be initialized once and never recreated
# This prevents port conflicts and socket recreation issues
- self.webtorrent_websocket_server: Any | None = None
+ self.webtorrent_websocket_server: Optional[Any] = None
# CRITICAL FIX: Store WebRTC connection manager initialized at daemon startup
# WebRTC manager should be shared across all WebTorrent protocol instances
# This ensures proper resource management and prevents duplicate managers
- self.webrtc_manager: Any | None = None
+ self.webrtc_manager: Optional[Any] = None
# CRITICAL FIX: Store uTP socket manager initialized at daemon startup
# Singleton pattern removed - uTP socket manager is now managed via session manager
# This ensures proper socket lifecycle management and prevents socket recreation
- self.utp_socket_manager: Any | None = None
+ self.utp_socket_manager: Optional[Any] = None
# CRITICAL FIX: Store extension manager initialized at daemon startup
# Singleton pattern removed - extension manager is now managed via session manager
# This ensures proper lifecycle management and prevents conflicts
- self.extension_manager: Any | None = None
+ self.extension_manager: Optional[Any] = None
# CRITICAL FIX: Store disk I/O manager initialized at daemon startup
# Singleton pattern removed - disk I/O manager is now managed via session manager
# This ensures proper lifecycle management and prevents conflicts
- self.disk_io_manager: Any | None = None
+ self.disk_io_manager: Optional[Any] = None
# Private torrents set (used by DHT client factory)
self.private_torrents: set[bytes] = set()
# XET folder synchronization components
- self._xet_sync_manager: Any | None = None
- self._xet_realtime_sync: Any | None = None
+ self._xet_sync_manager: Optional[Any] = None
+ self._xet_realtime_sync: Optional[Any] = None
# XET folder sessions (keyed by info_hash or folder_path)
self.xet_folders: dict[str, Any] = {} # folder_path or info_hash -> XetFolder
self._xet_folders_lock = asyncio.Lock()
@@ -3526,33 +3527,33 @@ def __init__(self, output_dir: str = "."):
self.scrape_cache_lock = asyncio.Lock()
# Periodic scrape task (started in start() if auto-scrape enabled)
- self.scrape_task: asyncio.Task | None = None
+ self.scrape_task: Optional[asyncio.Task] = None
# Initialize torrent addition handler
self.torrent_addition_handler = TorrentAdditionHandler(self)
- def _make_security_manager(self) -> Any | None:
+ def _make_security_manager(self) -> Optional[Any]:
"""Create security manager using ComponentFactory."""
from ccbt.session.factories import ComponentFactory
factory = ComponentFactory(self)
return factory.create_security_manager()
- def _make_dht_client(self, bind_ip: str, bind_port: int) -> Any | None:
+ def _make_dht_client(self, bind_ip: str, bind_port: int) -> Optional[Any]:
"""Create DHT client using ComponentFactory."""
from ccbt.session.factories import ComponentFactory
factory = ComponentFactory(self)
return factory.create_dht_client(bind_ip=bind_ip, bind_port=bind_port)
- def _make_nat_manager(self) -> Any | None:
+ def _make_nat_manager(self) -> Optional[Any]:
"""Create NAT manager using ComponentFactory."""
from ccbt.session.factories import ComponentFactory
factory = ComponentFactory(self)
return factory.create_nat_manager()
- def _make_tcp_server(self) -> Any | None:
+ def _make_tcp_server(self) -> Optional[Any]:
"""Create TCP server using ComponentFactory."""
from ccbt.session.factories import ComponentFactory
@@ -4038,8 +4039,8 @@ async def start_web_interface(
async def add_torrent(
self,
- torrent_path: str | dict[str, Any],
- output_dir: str | None = None,
+ torrent_path: Union[str, dict[str, Any]],
+ output_dir: Optional[str] = None,
resume: bool = False,
) -> str:
"""Add a torrent file or torrent data dictionary.
@@ -4090,6 +4091,10 @@ async def add_torrent(
session = AsyncTorrentSession(torrent_data, session_output_dir, self)
self.torrents[info_hash] = session
+ # Add to private_torrents set if torrent is private (BEP 27)
+ if session.is_private:
+ self.private_torrents.add(info_hash)
+
# Get torrent name for callback
if isinstance(torrent_data, dict):
torrent_name = torrent_data.get("name", "Unknown")
@@ -4121,7 +4126,7 @@ async def add_torrent(
async def add_magnet(
self,
magnet_uri: str,
- output_dir: str | None = None,
+ output_dir: Optional[str] = None,
resume: bool = False,
) -> str:
"""Add a magnet link.
@@ -4208,7 +4213,7 @@ async def force_scrape(self, info_hash_hex: str) -> bool:
"""
return await self.scrape_manager.force_scrape(info_hash_hex)
- async def get_scrape_result(self, info_hash_hex: str) -> Any | None:
+ async def get_scrape_result(self, info_hash_hex: str) -> Optional[Any]:
"""Get cached scrape result for a torrent.
Args:
@@ -4251,7 +4256,7 @@ async def _auto_scrape_torrent(self, info_hash_hex: str) -> None:
except Exception:
self.logger.debug("Auto-scrape failed for %s", info_hash_hex, exc_info=True)
- def parse_magnet_link(self, magnet_uri: str) -> dict[str, Any] | None:
+ def parse_magnet_link(self, magnet_uri: str) -> Optional[dict[str, Any]]:
"""Parse magnet link and return torrent data.
Args:
@@ -4317,7 +4322,7 @@ async def set_rate_limits(
return True
- def get_per_torrent_limits(self, info_hash: bytes) -> dict[str, int] | None:
+ def get_per_torrent_limits(self, info_hash: bytes) -> Optional[dict[str, int]]:
"""Get per-torrent rate limits (public API).
Args:
@@ -4501,7 +4506,7 @@ async def force_announce(self, info_hash_hex: str) -> bool:
return False
- async def export_session_state(self, path: Path | str) -> None:
+ async def export_session_state(self, path: Union[Path, str]) -> None:
"""Export session state to JSON file.
Args:
@@ -4561,7 +4566,7 @@ async def export_session_state(self, path: Path | str) -> None:
self.logger.info("Session state exported to %s", path)
- async def import_session_state(self, path: Path | str) -> dict[str, Any]:
+ async def import_session_state(self, path: Union[Path, str]) -> dict[str, Any]:
"""Import session state from JSON file.
Args:
@@ -4612,7 +4617,7 @@ def peers(self) -> list[Any]:
return all_peers
@property
- def dht(self) -> Any | None:
+ def dht(self) -> Optional[Any]:
"""Get DHT client for status display compatibility.
Returns:
@@ -4891,7 +4896,7 @@ def remove_webtorrent_protocol(self, protocol: Any) -> None:
with contextlib.suppress(ValueError):
self._webtorrent_protocols.remove(protocol)
- def get_session_metrics(self) -> Metrics | None:
+ def get_session_metrics(self) -> Optional[Metrics]:
"""Get session metrics collector.
Returns:
@@ -4985,7 +4990,7 @@ async def get_status(self) -> dict[str, Any]:
}
return status_dict
- async def get_torrent_status(self, info_hash_hex: str) -> dict[str, Any] | None:
+ async def get_torrent_status(self, info_hash_hex: str) -> Optional[dict[str, Any]]:
"""Get status for a specific torrent.
Args:
@@ -5105,7 +5110,7 @@ async def refresh_pex(self, info_hash_hex: str) -> bool:
return False
async def checkpoint_backup_torrent(
- self, info_hash_hex: str, destination: Path | str
+ self, info_hash_hex: str, destination: Union[Path, str]
) -> bool:
"""Backup checkpoint for a torrent.
diff --git a/ccbt/session/tasks.py b/ccbt/session/tasks.py
index 44e0c85..00cc931 100644
--- a/ccbt/session/tasks.py
+++ b/ccbt/session/tasks.py
@@ -8,7 +8,7 @@
import asyncio
import contextlib
-from typing import Any, Awaitable
+from typing import Any, Awaitable, Optional
class TaskSupervisor:
@@ -19,7 +19,7 @@ def __init__(self) -> None:
self._tasks: set[asyncio.Task[Any]] = set()
def create_task(
- self, coro: Awaitable[Any], *, name: str | None = None
+ self, coro: Awaitable[Any], *, name: Optional[str] = None
) -> asyncio.Task[Any]:
"""Create and track a new async task.
diff --git a/ccbt/session/torrent_utils.py b/ccbt/session/torrent_utils.py
index 5df5981..7e0e9e8 100644
--- a/ccbt/session/torrent_utils.py
+++ b/ccbt/session/torrent_utils.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional, Union
from ccbt.core.magnet import build_minimal_torrent_data, parse_magnet
from ccbt.core.torrent import TorrentParser
@@ -13,9 +13,9 @@
def get_torrent_info(
- torrent_data: dict[str, Any] | TorrentInfoModel,
- logger: Any | None = None,
-) -> TorrentInfoModel | None:
+ torrent_data: Union[dict[str, Any], TorrentInfoModel],
+ logger: Optional[Any] = None,
+) -> Optional[TorrentInfoModel]:
"""Convert torrent_data to TorrentInfo if possible.
Args:
@@ -109,7 +109,7 @@ def get_torrent_info(
def extract_is_private(
- torrent_data: dict[str, Any] | TorrentInfoModel,
+ torrent_data: Union[dict[str, Any], TorrentInfoModel],
) -> bool:
"""Extract is_private flag from torrent data (BEP 27).
@@ -139,8 +139,8 @@ def extract_is_private(
def normalize_torrent_data(
- td: dict[str, Any] | TorrentInfoModel,
- logger: Any | None = None,
+ td: Union[dict[str, Any], TorrentInfoModel],
+ logger: Optional[Any] = None,
) -> dict[str, Any]:
"""Convert TorrentInfoModel or legacy dict into a normalized dict expected by piece manager.
@@ -278,8 +278,8 @@ def normalize_torrent_data(
def load_torrent(
- torrent_path: str | Path, logger: Any | None = None
-) -> dict[str, Any] | None:
+ torrent_path: Union[str, Path], logger: Optional[Any] = None
+) -> Optional[dict[str, Any]]:
"""Load torrent file and return parsed data.
Args:
@@ -316,8 +316,8 @@ def load_torrent(
def parse_magnet_link(
- magnet_uri: str, logger: Any | None = None
-) -> dict[str, Any] | None:
+ magnet_uri: str, logger: Optional[Any] = None
+) -> Optional[dict[str, Any]]:
"""Parse magnet link and return torrent data.
Args:
diff --git a/ccbt/session/types.py b/ccbt/session/types.py
index 01c6e8c..db2a7eb 100644
--- a/ccbt/session/types.py
+++ b/ccbt/session/types.py
@@ -6,7 +6,7 @@
from __future__ import annotations
-from typing import Any, Callable, Protocol, runtime_checkable
+from typing import Any, Callable, Optional, Protocol, runtime_checkable
@runtime_checkable
@@ -16,7 +16,7 @@ class DHTClientProtocol(Protocol):
def add_peer_callback( # noqa: D102
self,
callback: Callable[[list[tuple[str, int]]], None],
- info_hash: bytes | None = None,
+ info_hash: Optional[bytes] = None,
) -> None: ...
async def get_peers( # noqa: D102
@@ -43,7 +43,7 @@ async def announce( # pragma: no cover - protocol definition only # noqa: D102
port: int,
uploaded: int = 0,
downloaded: int = 0,
- left: int | None = None,
+ left: Optional[int] = None,
event: str = "started",
) -> Any: ...
diff --git a/ccbt/session/xet_conflict.py b/ccbt/session/xet_conflict.py
index 0b296c1..c677591 100644
--- a/ccbt/session/xet_conflict.py
+++ b/ccbt/session/xet_conflict.py
@@ -8,7 +8,7 @@
import logging
from enum import Enum
-from typing import Any
+from typing import Any, Optional
logger = logging.getLogger(__name__)
@@ -50,7 +50,7 @@ def detect_conflict(
_file_path: str,
_peer_id: str,
timestamp: float,
- existing_timestamp: float | None = None,
+ existing_timestamp: Optional[float] = None,
) -> bool:
"""Detect if there's a conflict.
@@ -76,7 +76,7 @@ def resolve_conflict(
file_path: str,
our_version: dict[str, Any],
their_version: dict[str, Any],
- base_version: dict[str, Any] | None = None,
+ base_version: Optional[dict[str, Any]] = None,
) -> dict[str, Any]:
"""Resolve conflict between versions.
@@ -201,7 +201,7 @@ def _three_way_merge(
self,
our_version: dict[str, Any],
their_version: dict[str, Any],
- base_version: dict[str, Any] | None,
+ base_version: Optional[dict[str, Any]],
) -> dict[str, Any]:
"""Three-way merge strategy.
@@ -252,7 +252,7 @@ def merge_files(
_file_path: str,
our_content: bytes,
their_content: bytes,
- base_content: bytes | None = None,
+ base_content: Optional[bytes] = None,
) -> bytes:
"""Merge file contents using selected strategy.
diff --git a/ccbt/session/xet_realtime_sync.py b/ccbt/session/xet_realtime_sync.py
index 51eab8a..0285347 100644
--- a/ccbt/session/xet_realtime_sync.py
+++ b/ccbt/session/xet_realtime_sync.py
@@ -9,7 +9,7 @@
import asyncio
import logging
import time
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
from ccbt.utils.events import Event, EventType, emit_event
@@ -26,7 +26,7 @@ def __init__(
self,
folder: XetFolder,
check_interval: float = 5.0,
- session_manager: Any | None = None, # AsyncSessionManager
+ session_manager: Optional[Any] = None, # AsyncSessionManager
) -> None:
"""Initialize real-time sync.
@@ -40,10 +40,10 @@ def __init__(
self.check_interval = check_interval
self.session_manager = session_manager
- self._sync_task: asyncio.Task | None = None
+ self._sync_task: Optional[asyncio.Task] = None
self._is_running = False
self._last_chunk_hashes: dict[str, bytes] = {} # file_path -> chunk_hash
- self._last_git_ref: str | None = None
+ self._last_git_ref: Optional[str] = None
self.logger = logging.getLogger(__name__)
diff --git a/ccbt/session/xet_sync_manager.py b/ccbt/session/xet_sync_manager.py
index 5689f83..15f5161 100644
--- a/ccbt/session/xet_sync_manager.py
+++ b/ccbt/session/xet_sync_manager.py
@@ -18,7 +18,7 @@
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
-from typing import Any
+from typing import Any, Optional
from ccbt.models import PeerInfo, XetSyncStatus
@@ -40,10 +40,10 @@ class UpdateEntry:
file_path: str
chunk_hash: bytes
- git_ref: str | None
+ git_ref: Optional[str]
timestamp: float
priority: int = 0 # Higher priority = processed first
- source_peer: str | None = None
+ source_peer: Optional[str] = None
retry_count: int = 0
max_retries: int = 3
@@ -54,8 +54,8 @@ class PeerSyncState:
peer_id: str
peer_info: PeerInfo
- last_sync_time: float | None = None
- current_git_ref: str | None = None
+ last_sync_time: Optional[float] = None
+ current_git_ref: Optional[str] = None
chunk_hashes: set[bytes] = field(default_factory=set)
is_source: bool = False # For designated mode
sync_progress: float = 0.0
@@ -67,10 +67,10 @@ class XetSyncManager:
def __init__(
self,
- session_manager: Any | None = None,
- folder_path: str | None = None,
+ session_manager: Optional[Any] = None,
+ folder_path: Optional[str] = None,
sync_mode: str = "best_effort",
- source_peers: list[str] | None = None,
+ source_peers: Optional[list[str]] = None,
consensus_threshold: float = 0.5,
max_queue_size: int = 100,
check_interval: float = 5.0,
@@ -96,13 +96,13 @@ def __init__(
self.check_interval = check_interval
# Consensus components
- self.raft_node: Any | None = None # RaftNode
- self.byzantine_consensus: Any | None = None # ByzantineConsensus
- self.conflict_resolver: Any | None = None # ConflictResolver
+ self.raft_node: Optional[Any] = None # RaftNode
+ self.byzantine_consensus: Optional[Any] = None # ByzantineConsensus
+ self.conflict_resolver: Optional[Any] = None # ConflictResolver
# Source peer election
self.source_election_interval = 300.0 # 5 minutes
- self._source_election_task: asyncio.Task | None = None
+ self._source_election_task: Optional[asyncio.Task] = None
# Update queue
self.update_queue: deque[UpdateEntry] = deque(maxlen=max_queue_size)
@@ -117,7 +117,7 @@ def __init__(
] = {} # chunk_hash -> {peer_id: vote}
# State persistence paths
- self._state_dir: Path | None = None
+ self._state_dir: Optional[Path] = None
if folder_path:
self._state_dir = Path(folder_path) / ".xet"
self._state_dir.mkdir(parents=True, exist_ok=True)
@@ -134,8 +134,8 @@ def __init__(
}
# Allowlist and git tracking
- self.allowlist_hash: bytes | None = None
- self.current_git_ref: str | None = None
+ self.allowlist_hash: Optional[bytes] = None
+ self.current_git_ref: Optional[str] = None
self._running = False
self.logger = logging.getLogger(__name__)
@@ -188,7 +188,7 @@ async def stop(self) -> None:
await self.clear_queue()
self.logger.info("XET sync manager stopped")
- def get_allowlist_hash(self) -> bytes | None:
+ def get_allowlist_hash(self) -> Optional[bytes]:
"""Get allowlist hash.
Returns:
@@ -197,7 +197,7 @@ def get_allowlist_hash(self) -> bytes | None:
"""
return self.allowlist_hash
- def set_allowlist_hash(self, allowlist_hash: bytes | None) -> None:
+ def set_allowlist_hash(self, allowlist_hash: Optional[bytes]) -> None:
"""Set allowlist hash.
Args:
@@ -215,7 +215,7 @@ def get_sync_mode(self) -> str:
"""
return self.sync_mode.value
- def get_current_git_ref(self) -> str | None:
+ def get_current_git_ref(self) -> Optional[str]:
"""Get current git reference.
Returns:
@@ -224,7 +224,7 @@ def get_current_git_ref(self) -> str | None:
"""
return self.current_git_ref
- def set_current_git_ref(self, git_ref: str | None) -> None:
+ def set_current_git_ref(self, git_ref: Optional[str]) -> None:
"""Set current git reference.
Args:
@@ -278,9 +278,9 @@ async def queue_update(
self,
file_path: str,
chunk_hash: bytes,
- git_ref: str | None = None,
+ git_ref: Optional[str] = None,
priority: int = 0,
- source_peer: str | None = None,
+ source_peer: Optional[str] = None,
) -> bool:
"""Queue an update for synchronization.
@@ -540,7 +540,7 @@ async def _process_broadcast_updates(self, update_handler: Any) -> int:
to_remove: list[UpdateEntry] = []
# Group updates by source peer
- updates_by_source: dict[str | None, list[UpdateEntry]] = {}
+ updates_by_source: dict[Optional[str], list[UpdateEntry]] = {}
for entry in self.update_queue:
source = entry.source_peer
if source not in updates_by_source:
@@ -645,7 +645,7 @@ async def _process_consensus_updates(self, update_handler: Any) -> int:
return processed
- async def _elect_source_peer(self) -> str | None:
+ async def _elect_source_peer(self) -> Optional[str]:
"""Elect source peer based on criteria.
Criteria: uptime, bandwidth, chunk availability
@@ -858,7 +858,7 @@ async def _apply_update_entry(
async def _send_raft_vote_request(
self, peer_id: str, _request: dict[str, Any]
- ) -> dict[str, Any] | None:
+ ) -> Optional[dict[str, Any]]:
"""Send Raft vote request to peer (simplified - would use network in production).
Args:
@@ -876,7 +876,7 @@ async def _send_raft_vote_request(
async def _send_raft_append_entries(
self, peer_id: str, _request: dict[str, Any]
- ) -> dict[str, Any] | None:
+ ) -> Optional[dict[str, Any]]:
"""Send Raft append entries to peer (simplified - would use network in production).
Args:
diff --git a/ccbt/storage/buffers.py b/ccbt/storage/buffers.py
index 747ac96..7d41a80 100644
--- a/ccbt/storage/buffers.py
+++ b/ccbt/storage/buffers.py
@@ -11,7 +11,7 @@
import threading
from collections import deque
from dataclasses import dataclass
-from typing import Any, Callable
+from typing import Any, Callable, Optional, Union
from ccbt.utils.logging_config import get_logger
@@ -125,7 +125,7 @@ def read(self, size: int) -> bytes:
self.used -= to_read
return bytes(result)
- def peek_views(self, size: int | None = None) -> list[memoryview]:
+ def peek_views(self, size: Optional[int] = None) -> list[memoryview]:
"""Return up to two memoryviews representing current readable data without consuming it.
Args:
@@ -230,7 +230,7 @@ def __init__(
self,
size: int,
count: int,
- factory: Callable[[], Any] | None = None,
+ factory: Optional[Callable[[], Any]] = None,
) -> None:
"""Initialize memory pool.
@@ -322,7 +322,7 @@ def __init__(self, size: int) -> None:
self.lock = threading.Lock()
self.logger = get_logger(__name__)
- def write(self, data: bytes | memoryview) -> int:
+ def write(self, data: Union[bytes, memoryview]) -> int:
"""Write data to buffer with zero-copy when possible.
Args:
@@ -430,7 +430,7 @@ def create_memory_pool(
self,
size: int,
count: int,
- factory: Callable[[], Any] | None = None,
+ factory: Optional[Callable[[], Any]] = None,
) -> MemoryPool:
"""Create a new memory pool."""
with self.lock:
@@ -457,7 +457,7 @@ def get_stats(self) -> dict[str, Any]:
# Global buffer manager instance
-_buffer_manager: BufferManager | None = None
+_buffer_manager: Optional[BufferManager] = None
def get_buffer_manager() -> BufferManager:
diff --git a/ccbt/storage/checkpoint.py b/ccbt/storage/checkpoint.py
index e27c6c7..bb5487b 100644
--- a/ccbt/storage/checkpoint.py
+++ b/ccbt/storage/checkpoint.py
@@ -18,7 +18,7 @@
import time
from dataclasses import dataclass
from pathlib import Path
-from typing import Any
+from typing import Any, Optional
try:
import zstandard as zstd # type: ignore[unresolved-import]
@@ -81,7 +81,7 @@ class CheckpointManager:
MAGIC_BYTES = b"CCBT"
VERSION = 1
- def __init__(self, config: DiskConfig | None = None):
+ def __init__(self, config: Optional[DiskConfig] = None):
"""Initialize checkpoint manager.
Args:
@@ -102,8 +102,8 @@ def __init__(self, config: DiskConfig | None = None):
self.checkpoint_dir.mkdir(parents=True, exist_ok=True)
# Track last checkpoint state for incremental saves and deduplication
- self._last_checkpoint_hash: bytes | None = None
- self._last_checkpoint: TorrentCheckpoint | None = None
+ self._last_checkpoint_hash: Optional[bytes] = None
+ self._last_checkpoint: Optional[TorrentCheckpoint] = None
self.logger.info(
"Checkpoint manager initialized with directory: %s",
@@ -163,7 +163,7 @@ def _calculate_checkpoint_hash(self, checkpoint: TorrentCheckpoint) -> bytes:
async def save_checkpoint(
self,
checkpoint: TorrentCheckpoint,
- checkpoint_format: CheckpointFormat | None = None,
+ checkpoint_format: Optional[CheckpointFormat] = None,
) -> Path:
"""Save checkpoint to disk.
@@ -540,8 +540,8 @@ def _sync_compressed():
async def load_checkpoint(
self,
info_hash: bytes,
- checkpoint_format: CheckpointFormat | None = None,
- ) -> TorrentCheckpoint | None:
+ checkpoint_format: Optional[CheckpointFormat] = None,
+ ) -> Optional[TorrentCheckpoint]:
"""Load checkpoint from disk.
Args:
@@ -584,7 +584,7 @@ async def load_checkpoint(
async def _load_json_checkpoint(
self,
info_hash: bytes,
- ) -> TorrentCheckpoint | None:
+ ) -> Optional[TorrentCheckpoint]:
"""Load checkpoint from JSON checkpoint_format."""
path = self._get_checkpoint_path(info_hash, CheckpointFormat.JSON)
@@ -664,7 +664,7 @@ def _read_json():
async def _load_binary_checkpoint(
self,
info_hash: bytes,
- ) -> TorrentCheckpoint | None:
+ ) -> Optional[TorrentCheckpoint]:
"""Load checkpoint from binary checkpoint_format."""
if not HAS_MSGPACK:
msg = "msgpack is required for binary checkpoint checkpoint_format"
@@ -969,7 +969,7 @@ async def restore_checkpoint(
self,
backup_file: Path,
*,
- info_hash: bytes | None = None,
+ info_hash: Optional[bytes] = None,
) -> TorrentCheckpoint:
"""Restore a checkpoint from a backup file. Returns the restored checkpoint model."""
data = backup_file.read_bytes()
@@ -1142,7 +1142,7 @@ async def convert_checkpoint_format(
class GlobalCheckpointManager:
"""Manages global session manager checkpoints."""
- def __init__(self, config: DiskConfig | None = None):
+ def __init__(self, config: Optional[DiskConfig] = None):
"""Initialize global checkpoint manager.
Args:
@@ -1231,7 +1231,7 @@ def _write_json():
msg = f"Failed to save global checkpoint: {e}"
raise CheckpointError(msg) from e
- async def load_global_checkpoint(self) -> GlobalCheckpoint | None:
+ async def load_global_checkpoint(self) -> Optional[GlobalCheckpoint]:
"""Load global checkpoint from disk.
Returns:
@@ -1348,8 +1348,8 @@ async def save_incremental_checkpoint(
async def load_incremental_checkpoint(
self,
info_hash: bytes,
- base_checkpoint: TorrentCheckpoint | None = None,
- ) -> TorrentCheckpoint | None:
+ base_checkpoint: Optional[TorrentCheckpoint] = None,
+ ) -> Optional[TorrentCheckpoint]:
"""Load incremental checkpoint and merge with base.
Args:
diff --git a/ccbt/storage/disk_io.py b/ccbt/storage/disk_io.py
index f965c8a..3eb5542 100644
--- a/ccbt/storage/disk_io.py
+++ b/ccbt/storage/disk_io.py
@@ -18,7 +18,7 @@
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass, field
from pathlib import Path
-from typing import Any
+from typing import Any, Optional, Union
# Platform-specific imports
if (
@@ -172,7 +172,7 @@ def __init__(
max_workers=max_workers,
thread_name_prefix="disk-io",
)
- self._worker_adjustment_task: asyncio.Task[None] | None = None
+ self._worker_adjustment_task: Optional[asyncio.Task[None]] = None
# Lock to prevent concurrent executor recreation
self._executor_recreation_lock = threading.Lock()
# Tracking for worker adjustments
@@ -189,7 +189,7 @@ def __init__(
self._write_queue_heap: list[WriteRequest] = []
self._write_queue_lock = asyncio.Lock()
self._write_queue_condition = asyncio.Condition(self._write_queue_lock)
- self.write_queue: asyncio.Queue[WriteRequest] | None = (
+ self.write_queue: Optional[asyncio.Queue[WriteRequest]] = (
None # Will be handled by priority queue methods
)
else: # pragma: no cover - Non-priority queue mode not tested, priority queue is default
@@ -246,7 +246,7 @@ def __init__(
)
# io_uring wrapper (lazy initialization)
- self._io_uring_wrapper: Any | None = None
+ self._io_uring_wrapper: Optional[Any] = None
# Read pattern tracking for adaptive read-ahead
self._read_patterns: dict[Path, ReadPattern] = {}
@@ -257,18 +257,18 @@ def __init__(
self._read_buffer_pool_lock = threading.Lock()
# Background tasks
- self._write_batcher_task: asyncio.Task[None] | None = None
- self._cache_cleaner_task: asyncio.Task[None] | None = None
- self._cache_adaptive_task: asyncio.Task[None] | None = None
- self._worker_adjustment_task: asyncio.Task[None] | None = None
+ self._write_batcher_task: Optional[asyncio.Task[None]] = None
+ self._cache_cleaner_task: Optional[asyncio.Task[None]] = None
+ self._cache_adaptive_task: Optional[asyncio.Task[None]] = None
+ self._worker_adjustment_task: Optional[asyncio.Task[None]] = None
# Flag to track if manager is running (for cancellation checks)
self._running = False
# Xet deduplication (lazy initialization)
- self._xet_deduplication: Any | None = None
- self._xet_file_deduplication: Any | None = None
- self._xet_data_aggregator: Any | None = None
- self._xet_defrag_prevention: Any | None = None
+ self._xet_deduplication: Optional[Any] = None
+ self._xet_file_deduplication: Optional[Any] = None
+ self._xet_data_aggregator: Optional[Any] = None
+ self._xet_defrag_prevention: Optional[Any] = None
# Statistics
self.stats = {
@@ -305,7 +305,7 @@ def _get_thread_staging_buffer(self, min_size: int) -> bytearray:
min_size,
int(getattr(self.config.disk, "write_buffer_kib", 256)) * 1024,
)
- buf: bytearray | None = getattr(self._thread_local, "staging_buffer", None)
+ buf: Optional[bytearray] = getattr(self._thread_local, "staging_buffer", None)
if buf is None or len(buf) < default_size:
buf = bytearray(default_size)
self._thread_local.staging_buffer = buf
@@ -1195,7 +1195,7 @@ async def read_block(self, file_path: Path, offset: int, length: int) -> bytes:
async def read_block_mmap(
self,
- file_path: str | Path,
+ file_path: Union[str, Path],
offset: int,
length: int,
) -> bytes:
@@ -1293,7 +1293,7 @@ def _read_block_sync(self, file_path: Path, offset: int, length: int) -> bytes:
msg = f"Failed to read from {file_path}: {e}"
raise DiskIOError(msg) from e
- async def _get_write_request(self) -> WriteRequest | None:
+ async def _get_write_request(self) -> Optional[WriteRequest]:
"""Get next write request from queue (priority or regular).
Returns:
@@ -1752,8 +1752,8 @@ def _write_combined_sync_regular(
)
buffer = self._get_thread_staging_buffer(staging_threshold)
buf_pos = 0
- run_start: int | None = None
- prev_end: int | None = None
+ run_start: Optional[int] = None
+ prev_end: Optional[int] = None
def flush_run() -> None:
nonlocal run_start, buf_pos
@@ -1927,7 +1927,7 @@ async def _cache_cleaner(self) -> None:
# This allows cancellation to work and prevents CPU spinning
await asyncio.sleep(1.0)
- def _get_mmap_entry(self, file_path: Path) -> MmapCache | None:
+ def _get_mmap_entry(self, file_path: Path) -> Optional[MmapCache]:
"""Get or create a memory-mapped file entry."""
if file_path in self.mmap_cache: # Cache hit - return existing entry
cache_entry = self.mmap_cache[file_path]
@@ -1974,7 +1974,7 @@ def _get_mmap_entry(self, file_path: Path) -> MmapCache | None:
return cache_entry
async def warmup_cache(
- self, file_paths: list[Path], priority_order: list[int] | None = None
+ self, file_paths: list[Path], priority_order: Optional[list[int]] = None
) -> None:
"""Warmup cache by pre-loading frequently accessed files.
@@ -2383,7 +2383,7 @@ async def write_xet_chunk(
error_msg = f"Failed to write Xet chunk: {e}"
raise DiskIOError(error_msg) from e
- async def read_xet_chunk(self, chunk_hash: bytes) -> bytes | None:
+ async def read_xet_chunk(self, chunk_hash: bytes) -> Optional[bytes]:
"""Read chunk by hash from Xet storage.
Args:
@@ -2426,7 +2426,7 @@ async def read_xet_chunk(self, chunk_hash: bytes) -> bytes | None:
)
return None
- async def read_file_by_chunks(self, file_path: Path) -> bytes | None:
+ async def read_file_by_chunks(self, file_path: Path) -> Optional[bytes]:
"""Read file by reconstructing it from chunks.
If the file has XET chunk metadata, reconstructs the file
@@ -2587,7 +2587,7 @@ async def _store_new_chunk(
self,
chunk_hash: bytes,
chunk_data: bytes,
- dedup: Any | None = None,
+ dedup: Optional[Any] = None,
) -> bool:
"""Store a new chunk with metadata.
diff --git a/ccbt/storage/disk_io_init.py b/ccbt/storage/disk_io_init.py
index 37ccd60..822deea 100644
--- a/ccbt/storage/disk_io_init.py
+++ b/ccbt/storage/disk_io_init.py
@@ -12,7 +12,7 @@
from __future__ import annotations
import logging
-from typing import Any
+from typing import Any, Optional
from ccbt.config.config import get_config
from ccbt.storage.disk_io import DiskIOManager
@@ -20,7 +20,7 @@
# Singleton pattern removed - DiskIOManager is now managed via AsyncSessionManager.disk_io_manager
# This ensures proper lifecycle management and prevents conflicts between multiple session managers
# Deprecated singleton kept for backward compatibility
-_GLOBAL_DISK_IO_MANAGER: DiskIOManager | None = (
+_GLOBAL_DISK_IO_MANAGER: Optional[DiskIOManager] = (
None # Deprecated - use session_manager.disk_io_manager
)
@@ -74,7 +74,7 @@ def get_disk_io_manager() -> DiskIOManager:
return _GLOBAL_DISK_IO_MANAGER
-async def init_disk_io(manager: Any | None = None) -> DiskIOManager | None:
+async def init_disk_io(manager: Optional[Any] = None) -> Optional[DiskIOManager]:
"""Initialize and start disk I/O manager.
CRITICAL FIX: Singleton pattern removed. This function now accepts an optional
@@ -91,7 +91,7 @@ async def init_disk_io(manager: Any | None = None) -> DiskIOManager | None:
- Returns None on failure instead of raising exceptions
Returns:
- DiskIOManager | None: DiskIOManager instance if successfully started,
+ Optional[DiskIOManager]: DiskIOManager instance if successfully started,
None if initialization failed.
Note:
diff --git a/ccbt/storage/file_assembler.py b/ccbt/storage/file_assembler.py
index 06f9999..c5a10a8 100644
--- a/ccbt/storage/file_assembler.py
+++ b/ccbt/storage/file_assembler.py
@@ -5,7 +5,7 @@
import asyncio
import logging
import os
-from typing import Any, Sized
+from typing import Any, Optional, Sized, Union
from ccbt.config.config import get_config
from ccbt.core.torrent_attributes import apply_file_attributes, verify_file_sha1
@@ -41,9 +41,9 @@ class AsyncDownloadManager:
def __init__(
self,
- torrent_data: dict[str, Any] | TorrentInfo | None = None,
+ torrent_data: Optional[Union[dict[str, Any], TorrentInfo]] = None,
output_dir: str = ".",
- config: Any | None = None,
+ config: Optional[Any] = None,
):
"""Initialize async download manager.
@@ -70,7 +70,7 @@ def __init__(
async def start_download(
self,
- torrent_data: dict[str, Any] | TorrentInfo,
+ torrent_data: Union[dict[str, Any], TorrentInfo],
output_dir: str = ".",
) -> AsyncFileAssembler:
"""Start a new download for the given torrent.
@@ -105,7 +105,7 @@ async def start_download(
async def stop_download(
self,
- torrent_data: dict[str, Any] | TorrentInfo,
+ torrent_data: Union[dict[str, Any], TorrentInfo],
) -> None:
"""Stop a download and clean up resources.
@@ -129,8 +129,8 @@ async def stop_download(
def get_assembler(
self,
- torrent_data: dict[str, Any] | TorrentInfo,
- ) -> AsyncFileAssembler | None:
+ torrent_data: Union[dict[str, Any], TorrentInfo],
+ ) -> Optional[AsyncFileAssembler]:
"""Get the assembler for a torrent.
Args:
@@ -246,9 +246,9 @@ class AsyncFileAssembler:
def __init__(
self,
- torrent_data: dict[str, Any] | TorrentInfo,
+ torrent_data: Union[dict[str, Any], TorrentInfo],
output_dir: str = ".",
- disk_io_manager: DiskIOManager | None = None,
+ disk_io_manager: Optional[DiskIOManager] = None,
):
"""Initialize async file assembler.
@@ -458,7 +458,9 @@ def _build_file_segments(self) -> list[FileSegment]:
return segments
- def update_from_metadata(self, torrent_data: dict[str, Any] | TorrentInfo) -> None:
+ def update_from_metadata(
+ self, torrent_data: Union[dict[str, Any], TorrentInfo]
+ ) -> None:
"""Update file assembler with newly fetched metadata.
This method is called when metadata is fetched for a magnet link.
@@ -582,8 +584,8 @@ def update_from_metadata(self, torrent_data: dict[str, Any] | TorrentInfo) -> No
async def write_piece_to_file(
self,
piece_index: int,
- piece_data: bytes | memoryview,
- use_xet_chunking: bool | None = None,
+ piece_data: Union[bytes, memoryview],
+ use_xet_chunking: Optional[bool] = None,
) -> None:
"""Write a verified piece to its corresponding file(s) asynchronously.
@@ -657,7 +659,7 @@ async def write_piece_to_file(
async def _write_segment_to_file_async(
self,
segment: FileSegment,
- piece_data: bytes | memoryview,
+ piece_data: Union[bytes, memoryview],
) -> None:
"""Write a segment of piece data to a file asynchronously.
@@ -713,7 +715,7 @@ async def _write_segment_to_file_async(
async def _store_xet_chunks(
self,
piece_index: int,
- piece_data: bytes | memoryview,
+ piece_data: Union[bytes, memoryview],
piece_segments: list[FileSegment],
) -> None:
"""Store Xet chunks for a piece with deduplication.
@@ -1047,7 +1049,7 @@ async def read_block(
piece_index: int,
begin: int,
length: int,
- ) -> bytes | None:
+ ) -> Optional[bytes]:
"""Read a block of data for a given piece directly from files asynchronously.
Args:
@@ -1130,7 +1132,7 @@ async def read_block(
if self.config.disk.read_parallel_segments and len(segments_to_read) > 1:
from pathlib import Path
- async def read_segment(seg_info: tuple) -> tuple[int, bytes] | None:
+ async def read_segment(seg_info: tuple) -> Optional[tuple[int, bytes]]:
seg, file_offset, read_len, overlap_start, _overlap_end = seg_info
try:
chunk = await self.disk_io.read_block(
diff --git a/ccbt/storage/folder_watcher.py b/ccbt/storage/folder_watcher.py
index 5706183..7e00e86 100644
--- a/ccbt/storage/folder_watcher.py
+++ b/ccbt/storage/folder_watcher.py
@@ -10,7 +10,7 @@
import logging
import time
from pathlib import Path
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING, Callable, Optional, Union
from ccbt.utils.events import Event, EventType, emit_event
@@ -84,7 +84,7 @@ class FolderWatcher:
def __init__(
self,
- folder_path: str | Path,
+ folder_path: Union[str, Path],
check_interval: float = 5.0,
use_watchdog: bool = True,
) -> None:
@@ -100,8 +100,8 @@ def __init__(
self.check_interval = check_interval
self.use_watchdog = use_watchdog and WATCHDOG_AVAILABLE
- self.observer: Observer | None = None # type: ignore[type-arg]
- self.polling_task: asyncio.Task | None = None
+ self.observer: Optional[Observer] = None # type: ignore[type-arg]
+ self.polling_task: Optional[asyncio.Task] = None
self.is_watching = False
self.last_check_time = time.time()
self.last_file_states: dict[str, float] = {} # file_path -> mtime
diff --git a/ccbt/storage/git_versioning.py b/ccbt/storage/git_versioning.py
index 2195e74..e43057f 100644
--- a/ccbt/storage/git_versioning.py
+++ b/ccbt/storage/git_versioning.py
@@ -10,7 +10,7 @@
import logging
import subprocess
from pathlib import Path
-from typing import Any
+from typing import Any, Optional, Union
logger = logging.getLogger(__name__)
@@ -24,7 +24,7 @@ class GitVersioning:
def __init__(
self,
- folder_path: str | Path,
+ folder_path: Union[str, Path],
auto_commit: bool = False,
) -> None:
"""Initialize git versioning.
@@ -48,7 +48,7 @@ def is_git_repo(self) -> bool:
git_dir = self.folder_path / ".git"
return git_dir.exists() and git_dir.is_dir()
- async def get_current_commit(self) -> str | None:
+ async def get_current_commit(self) -> Optional[str]:
"""Get current git commit hash.
Returns:
@@ -94,7 +94,7 @@ async def get_commit_refs(self, max_refs: int = 10) -> list[str]:
return []
- async def get_changed_files(self, since_ref: str | None = None) -> list[str]:
+ async def get_changed_files(self, since_ref: Optional[str] = None) -> list[str]:
"""Get list of changed files since a git ref.
Args:
@@ -124,7 +124,7 @@ async def get_changed_files(self, since_ref: str | None = None) -> list[str]:
return []
- async def get_diff(self, since_ref: str | None = None) -> str | None:
+ async def get_diff(self, since_ref: Optional[str] = None) -> Optional[str]:
"""Get git diff since a ref.
Args:
@@ -171,8 +171,8 @@ async def has_changes(self) -> bool:
return False
async def create_commit(
- self, message: str | None = None, files: list[str] | None = None
- ) -> str | None:
+ self, message: Optional[str] = None, files: Optional[list[str]] = None
+ ) -> Optional[str]:
"""Create a git commit.
Args:
@@ -211,7 +211,7 @@ async def create_commit(
return None
- async def auto_commit_if_changes(self) -> str | None:
+ async def auto_commit_if_changes(self) -> Optional[str]:
"""Automatically commit changes if auto_commit is enabled and changes exist.
Returns:
@@ -226,7 +226,7 @@ async def auto_commit_if_changes(self) -> str | None:
return None
- async def get_file_hash(self, file_path: str) -> str | None:
+ async def get_file_hash(self, file_path: str) -> Optional[str]:
"""Get git hash (blob SHA-1) for a file.
Args:
@@ -248,7 +248,7 @@ async def get_file_hash(self, file_path: str) -> str | None:
return None
- async def get_file_at_ref(self, file_path: str, ref: str) -> bytes | None:
+ async def get_file_at_ref(self, file_path: str, ref: str) -> Optional[bytes]:
"""Get file contents at a specific git ref.
Args:
@@ -283,7 +283,7 @@ async def get_file_at_ref(self, file_path: str, ref: str) -> bytes | None:
async def _run_git_command(
self, args: list[str], capture_output: bool = True
- ) -> str | None:
+ ) -> Optional[str]:
"""Run a git command and return output.
Args:
diff --git a/ccbt/storage/io_uring_wrapper.py b/ccbt/storage/io_uring_wrapper.py
index 689990c..5197322 100644
--- a/ccbt/storage/io_uring_wrapper.py
+++ b/ccbt/storage/io_uring_wrapper.py
@@ -9,7 +9,7 @@
import asyncio
import logging
import sys
-from typing import Any
+from typing import Any, Union
logger = logging.getLogger(__name__)
@@ -81,7 +81,7 @@ def __init__(self) -> None:
else:
logger.debug("io_uring not available, will use fallback I/O")
- async def read(self, file_path: str | Any, offset: int, length: int) -> bytes:
+ async def read(self, file_path: Union[str, Any], offset: int, length: int) -> bytes:
"""Read data using io_uring if available, otherwise fallback.
Args:
@@ -108,7 +108,7 @@ async def read(self, file_path: str | Any, offset: int, length: int) -> bytes:
logger.debug("io_uring read failed, using fallback: %s", e)
return await self._read_fallback(file_path, offset, length)
- async def write(self, file_path: str | Any, offset: int, data: bytes) -> int:
+ async def write(self, file_path: Union[str, Any], offset: int, data: bytes) -> int:
"""Write data using io_uring if available, otherwise fallback.
Args:
@@ -136,7 +136,7 @@ async def write(self, file_path: str | Any, offset: int, data: bytes) -> int:
return await self._write_fallback(file_path, offset, data)
async def _read_aiofiles(
- self, file_path: str | Any, offset: int, length: int
+ self, file_path: Union[str, Any], offset: int, length: int
) -> bytes:
"""Read using aiofiles."""
import aiofiles # type: ignore[import-untyped]
@@ -149,7 +149,7 @@ async def _read_aiofiles(
return data
async def _write_aiofiles(
- self, file_path: str | Any, offset: int, data: bytes
+ self, file_path: Union[str, Any], offset: int, data: bytes
) -> int:
"""Write using aiofiles."""
import aiofiles # type: ignore[import-untyped]
@@ -162,7 +162,7 @@ async def _write_aiofiles(
return len(data)
async def _read_fallback(
- self, file_path: str | Any, offset: int, length: int
+ self, file_path: Union[str, Any], offset: int, length: int
) -> bytes:
"""Fallback read using regular async I/O."""
loop = asyncio.get_event_loop()
@@ -176,7 +176,7 @@ def _read_sync() -> bytes:
return await loop.run_in_executor(None, _read_sync)
async def _write_fallback(
- self, file_path: str | Any, offset: int, data: bytes
+ self, file_path: Union[str, Any], offset: int, data: bytes
) -> int:
"""Fallback write using regular async I/O."""
loop = asyncio.get_event_loop()
diff --git a/ccbt/storage/resume_data.py b/ccbt/storage/resume_data.py
index d04b301..75e4988 100644
--- a/ccbt/storage/resume_data.py
+++ b/ccbt/storage/resume_data.py
@@ -9,7 +9,7 @@
import gzip
import time
-from typing import Any
+from typing import Any, Optional
from pydantic import BaseModel, Field
@@ -61,11 +61,11 @@ class FastResumeData(BaseModel):
)
# Queue state
- queue_position: int | None = Field(
+ queue_position: Optional[int] = Field(
default=None,
description="Torrent position in queue",
)
- queue_priority: str | None = Field(
+ queue_priority: Optional[str] = Field(
default=None,
description="Torrent queue priority",
)
@@ -241,7 +241,7 @@ def set_queue_state(self, position: int, priority: str) -> None:
self.queue_priority = priority
self.updated_at = time.time()
- def get_queue_state(self) -> tuple[int | None, str | None]:
+ def get_queue_state(self) -> tuple[Optional[int], Optional[str]]:
"""Retrieve queue state.
Returns:
diff --git a/ccbt/storage/xet_data_aggregator.py b/ccbt/storage/xet_data_aggregator.py
index a95f7b4..507376b 100644
--- a/ccbt/storage/xet_data_aggregator.py
+++ b/ccbt/storage/xet_data_aggregator.py
@@ -9,7 +9,7 @@
import asyncio
import logging
from pathlib import Path
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from ccbt.storage.xet_deduplication import XetDeduplication
@@ -77,8 +77,8 @@ async def aggregate_chunks(self, chunk_hashes: list[bytes]) -> bytes:
async def batch_store_chunks(
self,
chunks: list[tuple[bytes, bytes]],
- file_path: str | None = None,
- file_offsets: list[int] | None = None,
+ file_path: Optional[str] = None,
+ file_offsets: Optional[list[int]] = None,
) -> list[Path]:
"""Store multiple chunks in a batch operation.
@@ -173,7 +173,7 @@ async def batch_read_chunks(self, chunk_hashes: list[bytes]) -> dict[bytes, byte
return results
- async def _read_chunk_async(self, chunk_hash: bytes) -> bytes | None:
+ async def _read_chunk_async(self, chunk_hash: bytes) -> Optional[bytes]:
"""Read a single chunk asynchronously.
Args:
@@ -196,7 +196,7 @@ async def _read_chunk_async(self, chunk_hash: bytes) -> bytes | None:
return None
async def optimize_storage_layout(
- self, _chunk_hashes: list[bytes] | None = None
+ self, _chunk_hashes: Optional[list[bytes]] = None
) -> dict[str, Any]:
"""Optimize storage layout for chunks.
diff --git a/ccbt/storage/xet_deduplication.py b/ccbt/storage/xet_deduplication.py
index b488142..1037f04 100644
--- a/ccbt/storage/xet_deduplication.py
+++ b/ccbt/storage/xet_deduplication.py
@@ -12,7 +12,7 @@
import sqlite3
import time
from pathlib import Path
-from typing import Any
+from typing import Any, Optional, Union
from ccbt.models import PeerInfo, XetFileMetadata
@@ -35,8 +35,8 @@ class XetDeduplication:
def __init__(
self,
- cache_db_path: Path | str,
- dht_client: Any | None = None, # type: ignore[assignment]
+ cache_db_path: Union[Path, str],
+ dht_client: Optional[Any] = None, # type: ignore[assignment]
):
"""Initialize deduplication with local cache.
@@ -210,7 +210,7 @@ def _init_database(self) -> sqlite3.Connection:
return db
- async def check_chunk_exists(self, chunk_hash: bytes) -> Path | None:
+ async def check_chunk_exists(self, chunk_hash: bytes) -> Optional[Path]:
"""Check if chunk exists locally.
Queries the database for the chunk hash and updates the
@@ -242,8 +242,8 @@ async def store_chunk(
self,
chunk_hash: bytes,
chunk_data: bytes,
- file_path: str | None = None,
- file_offset: int | None = None,
+ file_path: Optional[str] = None,
+ file_offset: Optional[int] = None,
) -> Path:
"""Store chunk with deduplication.
@@ -456,7 +456,7 @@ async def get_file_chunks(self, file_path: str) -> list[tuple[bytes, int, int]]:
async def reconstruct_file_from_chunks(
self,
file_path: str,
- output_path: Path | None = None,
+ output_path: Optional[Path] = None,
) -> Path:
"""Reconstruct a file from its stored chunks.
@@ -597,7 +597,7 @@ async def store_file_metadata(self, metadata: XetFileMetadata) -> None:
except Exception as e:
self.logger.warning("Failed to store file metadata: %s", e, exc_info=True)
- async def get_file_metadata(self, file_path: str) -> XetFileMetadata | None:
+ async def get_file_metadata(self, file_path: str) -> Optional[XetFileMetadata]:
"""Get file metadata from persistent storage.
Retrieves and deserializes XetFileMetadata from the database.
@@ -635,7 +635,7 @@ async def get_file_metadata(self, file_path: str) -> XetFileMetadata | None:
self.logger.warning("Failed to get file metadata: %s", e, exc_info=True)
return None
- async def query_dht_for_chunk(self, chunk_hash: bytes) -> PeerInfo | None:
+ async def query_dht_for_chunk(self, chunk_hash: bytes) -> Optional[PeerInfo]:
"""Query DHT for peers that have this chunk.
Uses existing DHT infrastructure to find peers that have
@@ -744,7 +744,7 @@ async def query_dht_for_chunk(self, chunk_hash: bytes) -> PeerInfo | None:
) # pragma: no cover - Same context
return None # pragma: no cover - Same context
- def _extract_peer_from_dht_value(self, value: Any) -> PeerInfo | None: # type: ignore[return]
+ def _extract_peer_from_dht_value(self, value: Any) -> Optional[PeerInfo]: # type: ignore[return]
"""Extract PeerInfo from DHT stored value (BEP 44).
The value can be in various formats:
@@ -847,7 +847,7 @@ def _extract_peer_from_dht_value(self, value: Any) -> PeerInfo | None: # type:
return None
- def get_chunk_info(self, chunk_hash: bytes) -> dict | None:
+ def get_chunk_info(self, chunk_hash: bytes) -> Optional[dict]:
"""Get information about a stored chunk.
Args:
diff --git a/ccbt/storage/xet_defrag_prevention.py b/ccbt/storage/xet_defrag_prevention.py
index 529f147..d22e435 100644
--- a/ccbt/storage/xet_defrag_prevention.py
+++ b/ccbt/storage/xet_defrag_prevention.py
@@ -7,7 +7,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from ccbt.storage.xet_deduplication import XetDeduplication
@@ -175,7 +175,7 @@ async def prevent_fragmentation(self) -> dict[str, Any]:
}
async def optimize_chunk_layout(
- self, chunk_hashes: list[bytes] | None = None
+ self, chunk_hashes: Optional[list[bytes]] = None
) -> dict[str, Any]:
"""Optimize layout for specific chunks.
diff --git a/ccbt/storage/xet_file_deduplication.py b/ccbt/storage/xet_file_deduplication.py
index 7c5d582..51ae4a3 100644
--- a/ccbt/storage/xet_file_deduplication.py
+++ b/ccbt/storage/xet_file_deduplication.py
@@ -7,7 +7,7 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from pathlib import Path
@@ -53,7 +53,7 @@ async def deduplicate_file(self, file_path: Path) -> dict[str, Any]:
Returns:
Dictionary with deduplication statistics:
- duplicate_found: bool
- - duplicate_path: str | None
+ - duplicate_path: Optional[str]
- file_hash: bytes
- chunks_skipped: int
- storage_saved: int (bytes)
@@ -112,7 +112,7 @@ async def deduplicate_file(self, file_path: Path) -> dict[str, Any]:
async def _find_file_by_hash(
self, file_hash: bytes, exclude_path: str
- ) -> str | None:
+ ) -> Optional[str]:
"""Find a file with the given hash, excluding the specified path.
Args:
@@ -215,7 +215,7 @@ async def get_file_deduplication_stats(self) -> dict[str, Any]:
}
async def find_duplicate_files(
- self, file_hash: bytes | None = None
+ self, file_hash: Optional[bytes] = None
) -> list[list[str]]:
"""Find groups of duplicate files.
diff --git a/ccbt/storage/xet_folder_manager.py b/ccbt/storage/xet_folder_manager.py
index af4882f..3f3ec4c 100644
--- a/ccbt/storage/xet_folder_manager.py
+++ b/ccbt/storage/xet_folder_manager.py
@@ -9,7 +9,7 @@
import asyncio
import logging
from pathlib import Path
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional, Union
from ccbt.session.xet_sync_manager import XetSyncManager
@@ -26,9 +26,9 @@ class XetFolder:
def __init__(
self,
- folder_path: str | Path,
+ folder_path: Union[str, Path],
sync_mode: str = "best_effort",
- source_peers: list[str] | None = None,
+ source_peers: Optional[list[str]] = None,
check_interval: float = 5.0,
enable_git: bool = True,
) -> None:
@@ -60,7 +60,7 @@ def __init__(
check_interval=check_interval,
)
- self.git_versioning: GitVersioning | None = None
+ self.git_versioning: Optional[GitVersioning] = None
if enable_git:
self.git_versioning = GitVersioning(folder_path=self.folder_path)
@@ -146,7 +146,7 @@ async def remove_peer(self, peer_id: str) -> None:
self.logger.info("Removed peer %s from folder sync", peer_id)
def set_sync_mode(
- self, sync_mode: str, source_peers: list[str] | None = None
+ self, sync_mode: str, source_peers: Optional[list[str]] = None
) -> None:
"""Set synchronization mode for folder.
diff --git a/ccbt/storage/xet_hashing.py b/ccbt/storage/xet_hashing.py
index d09dd66..ba5c186 100644
--- a/ccbt/storage/xet_hashing.py
+++ b/ccbt/storage/xet_hashing.py
@@ -12,7 +12,7 @@
import hashlib
import logging
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional
if TYPE_CHECKING:
from collections.abc import Callable
@@ -179,7 +179,7 @@ def verify_chunk_hash(chunk_data: bytes, expected_hash: bytes) -> bool:
@staticmethod
def hash_file_incremental(
file_path: str,
- chunk_callback: Callable[[bytes], None] | None = None,
+ chunk_callback: Optional[Callable[[bytes], None]] = None,
) -> bytes:
"""Compute file hash incrementally by reading and hashing chunks.
diff --git a/ccbt/storage/xet_shard.py b/ccbt/storage/xet_shard.py
index 5958d5e..0335a45 100644
--- a/ccbt/storage/xet_shard.py
+++ b/ccbt/storage/xet_shard.py
@@ -10,8 +10,7 @@
import hmac
import logging
import struct
-
-# No Optional needed - using X | None syntax
+from typing import Optional
logger = logging.getLogger(__name__)
@@ -107,7 +106,7 @@ def add_xorb_hash(self, xorb_hash: bytes) -> None:
if xorb_hash not in self.xorbs:
self.xorbs.append(xorb_hash)
- def serialize(self, hmac_key: bytes | None = None) -> bytes:
+ def serialize(self, hmac_key: Optional[bytes] = None) -> bytes:
"""Serialize shard to binary format with optional HMAC.
Format:
@@ -229,7 +228,7 @@ def _serialize_cas_info(self) -> bytes:
return data
- def _serialize_footer(self, hmac_key: bytes | None, data: bytes) -> bytes:
+ def _serialize_footer(self, hmac_key: Optional[bytes], data: bytes) -> bytes:
"""Serialize footer with HMAC.
Args:
@@ -245,7 +244,7 @@ def _serialize_footer(self, hmac_key: bytes | None, data: bytes) -> bytes:
return b""
@staticmethod
- def deserialize(data: bytes, hmac_key: bytes | None = None) -> XetShard:
+ def deserialize(data: bytes, hmac_key: Optional[bytes] = None) -> XetShard:
"""Deserialize shard from binary format.
Args:
@@ -392,7 +391,7 @@ def get_file_count(self) -> int:
"""
return len(self.files)
- def get_file_by_path(self, file_path: str) -> dict | None:
+ def get_file_by_path(self, file_path: str) -> Optional[dict]:
"""Get file information by path.
Args:
diff --git a/ccbt/storage/xet_xorb.py b/ccbt/storage/xet_xorb.py
index ee816e6..35086b9 100644
--- a/ccbt/storage/xet_xorb.py
+++ b/ccbt/storage/xet_xorb.py
@@ -18,6 +18,7 @@
import logging
import struct
+from typing import Optional
try:
import lz4.frame
@@ -358,7 +359,7 @@ def deserialize(data: bytes) -> Xorb:
return xorb
- def get_chunk_by_hash(self, chunk_hash: bytes) -> bytes | None:
+ def get_chunk_by_hash(self, chunk_hash: bytes) -> Optional[bytes]:
"""Get chunk data by hash.
Args:
diff --git a/ccbt/transport/utp.py b/ccbt/transport/utp.py
index 0916b8f..b6e44fc 100644
--- a/ccbt/transport/utp.py
+++ b/ccbt/transport/utp.py
@@ -20,7 +20,7 @@
import time
from dataclasses import dataclass, field
from enum import Enum
-from typing import Callable
+from typing import Callable, Optional, Tuple
from ccbt.config.config import get_config
@@ -230,7 +230,7 @@ def unpack(data: bytes) -> UTPPacket:
# Connection state tracking tuple: (packet, send_time, retry_count)
-_PacketInfo = tuple[UTPPacket, float, int]
+_PacketInfo = Tuple[UTPPacket, float, int]
class UTPConnection:
@@ -256,7 +256,7 @@ class UTPConnection:
def __init__(
self,
remote_addr: tuple[str, int],
- connection_id: int | None = None,
+ connection_id: Optional[int] = None,
_send_window_size: int = 65535,
recv_window_size: int = 65535,
):
@@ -332,7 +332,7 @@ def __init__(
# Delayed ACK support
self.pending_acks: list[UTPPacket] = [] # Queue of packets waiting for ACK
- self.ack_timer: asyncio.Task | None = None # Delayed ACK timer task
+ self.ack_timer: Optional[asyncio.Task] = None # Delayed ACK timer task
self.ack_delay: float = (
self.config.network.utp.ack_interval
if hasattr(self.config, "network")
@@ -340,20 +340,22 @@ def __init__(
and hasattr(self.config.network.utp, "ack_interval")
else 0.04
) # ACK delay in seconds (default 40ms)
- self.last_ack_packet: UTPPacket | None = None # Last packet that triggered ACK
+ self.last_ack_packet: Optional[UTPPacket] = (
+ None # Last packet that triggered ACK
+ )
self.ack_packet_count: int = 0 # Count of packets received since last ACK
# Transport (UDP socket) - set via set_transport()
- self.transport: asyncio.DatagramTransport | None = None
+ self.transport: Optional[asyncio.DatagramTransport] = None
# Background tasks
- self._retransmission_task: asyncio.Task | None = None
- self._send_task: asyncio.Task | None = None
- self._receive_task: asyncio.Task | None = None
+ self._retransmission_task: Optional[asyncio.Task] = None
+ self._send_task: Optional[asyncio.Task] = None
+ self._receive_task: Optional[asyncio.Task] = None
# Connection timeout
self.connection_timeout: float = 30.0
- self._connection_timeout_task: asyncio.Task | None = None
+ self._connection_timeout_task: Optional[asyncio.Task] = None
# Congestion control
self.target_send_rate: float = 1500.0 # bytes/second
@@ -368,7 +370,7 @@ def __init__(
self.packets_retransmitted: int = 0
# Connection callbacks
- self.on_connected: Callable[[], None] | None = None
+ self.on_connected: Optional[Callable[[], None]] = None
# Extension support
from ccbt.transport.utp_extensions import UTPExtensionType
@@ -522,7 +524,7 @@ def _send_packet(self, packet: UTPPacket) -> None:
len(packet_bytes),
)
- async def connect(self, timeout: float | None = None) -> None:
+ async def connect(self, timeout: Optional[float] = None) -> None:
"""Establish uTP connection (initiate connection).
Args:
@@ -1126,7 +1128,7 @@ def _process_out_of_order_packets(self) -> None:
) % 0x10000
def _send_ack(
- self, packet: UTPPacket | None = None, immediate: bool = False
+ self, packet: Optional[UTPPacket] = None, immediate: bool = False
) -> None:
"""Send acknowledgment (ST_STATE) packet.
diff --git a/ccbt/transport/utp_socket.py b/ccbt/transport/utp_socket.py
index f28f42d..c9f3193 100644
--- a/ccbt/transport/utp_socket.py
+++ b/ccbt/transport/utp_socket.py
@@ -10,7 +10,7 @@
import logging
import random
import struct
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING, Callable, Optional
from ccbt.config.config import get_config
@@ -70,7 +70,7 @@ class UTPSocketManager:
# Singleton pattern removed - UTPSocketManager is now managed via AsyncSessionManager.utp_socket_manager
# This ensures proper lifecycle management and prevents socket recreation issues
- _instance: UTPSocketManager | None = (
+ _instance: Optional[UTPSocketManager] = (
None # Deprecated - use session_manager.utp_socket_manager
)
_lock = asyncio.Lock() # Deprecated - kept for backward compatibility
@@ -85,8 +85,8 @@ def __init__(self):
self.logger = logging.getLogger(__name__)
# UDP socket
- self.transport: asyncio.DatagramTransport | None = None
- self.protocol: UTPProtocol | None = None
+ self.transport: Optional[asyncio.DatagramTransport] = None
+ self.protocol: Optional[UTPProtocol] = None
self._socket_ready = asyncio.Event()
# Active connections: (ip, port, connection_id) -> UTPConnection
@@ -99,9 +99,9 @@ def __init__(self):
self.active_connection_ids: set[int] = set()
# Callback for incoming connections
- self.on_incoming_connection: (
- Callable[[UTPConnection, tuple[str, int]], None] | None
- ) = None
+ self.on_incoming_connection: Optional[
+ Callable[[UTPConnection, tuple[str, int]], None]
+ ] = None
# Statistics
self.total_packets_received: int = 0
diff --git a/ccbt/utils/console_utils.py b/ccbt/utils/console_utils.py
index d86f74a..4afe402 100644
--- a/ccbt/utils/console_utils.py
+++ b/ccbt/utils/console_utils.py
@@ -9,7 +9,7 @@
import contextlib
import logging
import sys
-from typing import Any, Iterator
+from typing import Any, Iterator, Optional
from rich.console import Console
from rich.status import Status
@@ -57,7 +57,7 @@ def create_console() -> Console:
@contextlib.contextmanager
def spinner(
message: str,
- console: Console | None = None,
+ console: Optional[Console] = None,
spinner_style: str = "dots",
) -> Iterator[Status]:
"""Context manager for showing a spinner during async operations.
@@ -92,7 +92,7 @@ def spinner(
def print_success(
message: str,
- console: Console | None = None,
+ console: Optional[Console] = None,
**kwargs: Any,
) -> None:
"""Print a success message with Rich formatting and i18n.
@@ -112,7 +112,7 @@ def print_success(
def print_error(
message: str,
- console: Console | None = None,
+ console: Optional[Console] = None,
**kwargs: Any,
) -> None:
"""Print an error message with Rich formatting and i18n.
@@ -132,7 +132,7 @@ def print_error(
def print_warning(
message: str,
- console: Console | None = None,
+ console: Optional[Console] = None,
**kwargs: Any,
) -> None:
"""Print a warning message with Rich formatting and i18n.
@@ -152,7 +152,7 @@ def print_warning(
def print_info(
message: str,
- console: Console | None = None,
+ console: Optional[Console] = None,
**kwargs: Any,
) -> None:
"""Print an info message with Rich formatting and i18n.
@@ -171,13 +171,13 @@ def print_info(
def print_table(
- title: str | None = None,
- console: Console | None = None,
+ title: Optional[str] = None,
+ console: Optional[Console] = None,
show_header: bool = True,
show_footer: bool = False,
border_style: str = "blue",
header_style: str = "bold cyan",
- row_styles: list[str] | None = None,
+ row_styles: Optional[list[str]] = None,
**kwargs: Any,
) -> Any:
"""Create and print a Rich table with i18n support and enhanced styling.
@@ -222,8 +222,8 @@ def print_table(
def print_panel(
content: str,
- title: str | None = None,
- console: Console | None = None,
+ title: Optional[str] = None,
+ console: Optional[Console] = None,
border_style: str = "blue",
title_align: str = "left",
expand: bool = False,
@@ -263,7 +263,7 @@ def print_panel(
def print_markdown(
content: str,
- console: Console | None = None,
+ console: Optional[Console] = None,
code_theme: str = "monokai",
**kwargs: Any,
) -> None:
@@ -293,8 +293,8 @@ def print_markdown(
@contextlib.contextmanager
def live_display(
- renderable: Any | None = None,
- console: Console | None = None,
+ renderable: Optional[Any] = None,
+ console: Optional[Console] = None,
refresh_per_second: float = 4.0,
vertical_overflow: str = "visible",
) -> Iterator[Any]:
@@ -335,8 +335,8 @@ def live_display(
def create_progress(
- console: Console | None = None,
- _description: str | None = None,
+ console: Optional[Console] = None,
+ _description: Optional[str] = None,
) -> Progress:
"""Create a Rich Progress bar with i18n support.
@@ -370,8 +370,8 @@ def create_progress(
def log_user_output(
message: str,
- verbosity_manager: Any | None = None,
- logger: logging.Logger | None = None,
+ verbosity_manager: Optional[Any] = None,
+ logger: Optional[logging.Logger] = None,
level: int = logging.INFO,
*args: Any,
**kwargs: Any,
@@ -413,8 +413,8 @@ def log_user_output(
def log_operation(
operation: str,
status: str = "started",
- verbosity_manager: Any | None = None,
- logger: logging.Logger | None = None,
+ verbosity_manager: Optional[Any] = None,
+ logger: Optional[logging.Logger] = None,
**kwargs: Any,
) -> None:
"""Log an operation status message.
@@ -460,9 +460,9 @@ def log_operation(
def log_result(
operation: str,
success: bool,
- details: str | None = None,
- verbosity_manager: Any | None = None,
- logger: logging.Logger | None = None,
+ details: Optional[str] = None,
+ verbosity_manager: Optional[Any] = None,
+ logger: Optional[logging.Logger] = None,
**kwargs: Any,
) -> None:
"""Log a command result.
diff --git a/ccbt/utils/di.py b/ccbt/utils/di.py
index ff1a78a..3c9b992 100644
--- a/ccbt/utils/di.py
+++ b/ccbt/utils/di.py
@@ -7,7 +7,7 @@
from __future__ import annotations
from dataclasses import dataclass
-from typing import Any, Callable, Protocol
+from typing import Any, Callable, Optional, Protocol
from ccbt.config.config import Config, get_config
@@ -25,32 +25,32 @@ class DIContainer:
"""
# Core providers
- config_provider: Callable[[], Config] | None = None
- logger_factory: _Factory | None = None
- metrics_factory: _Factory | None = None
+ config_provider: Optional[Callable[[], Config]] = None
+ logger_factory: Optional[_Factory] = None
+ metrics_factory: Optional[_Factory] = None
# Networking / discovery
- tracker_client_factory: _Factory | None = None
- udp_tracker_client_provider: _Factory | None = None
- dht_client_factory: _Factory | None = None
- nat_manager_factory: _Factory | None = None
- tcp_server_factory: _Factory | None = None
+ tracker_client_factory: Optional[_Factory] = None
+ udp_tracker_client_provider: Optional[_Factory] = None
+ dht_client_factory: Optional[_Factory] = None
+ nat_manager_factory: Optional[_Factory] = None
+ tcp_server_factory: Optional[_Factory] = None
# Security / protocol / peers
- security_manager_factory: _Factory | None = None
- protocol_manager_factory: _Factory | None = None
- peer_service_factory: _Factory | None = None
- peer_connection_manager_factory: _Factory | None = None
- piece_manager_factory: _Factory | None = None
- metadata_exchange_factory: _Factory | None = None
+ security_manager_factory: Optional[_Factory] = None
+ protocol_manager_factory: Optional[_Factory] = None
+ peer_service_factory: Optional[_Factory] = None
+ peer_connection_manager_factory: Optional[_Factory] = None
+ piece_manager_factory: Optional[_Factory] = None
+ metadata_exchange_factory: Optional[_Factory] = None
# Infra
- task_scheduler: _Factory | None = None
- time_provider: _Factory | None = None
- backoff_policy: _Factory | None = None
+ task_scheduler: Optional[_Factory] = None
+ time_provider: Optional[_Factory] = None
+ backoff_policy: Optional[_Factory] = None
-def default_container(config: Config | None = None) -> DIContainer:
+def default_container(config: Optional[Config] = None) -> DIContainer:
"""Build a container with minimal sensible defaults."""
cfg = config or get_config()
diff --git a/ccbt/utils/events.py b/ccbt/utils/events.py
index f89a140..7d9bafd 100644
--- a/ccbt/utils/events.py
+++ b/ccbt/utils/events.py
@@ -16,7 +16,7 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from enum import Enum
-from typing import Any
+from typing import Any, Optional
from ccbt.utils.exceptions import CCBTError
from ccbt.utils.logging_config import get_logger
@@ -239,9 +239,9 @@ class Event:
timestamp: float = field(default_factory=time.time)
event_id: str = field(default_factory=lambda: str(uuid.uuid4()))
priority: EventPriority = EventPriority.NORMAL
- source: str | None = None
+ source: Optional[str] = None
data: dict[str, Any] = field(default_factory=dict)
- correlation_id: str | None = None
+ correlation_id: Optional[str] = None
def to_dict(self) -> dict[str, Any]:
"""Convert event to dictionary."""
@@ -285,7 +285,7 @@ class PeerConnectedEvent(Event):
peer_ip: str = ""
peer_port: int = 0
- peer_id: str | None = None
+ peer_id: Optional[str] = None
def __post_init__(self):
"""Initialize event type and data."""
@@ -305,7 +305,7 @@ class PeerDisconnectedEvent(Event):
peer_ip: str = ""
peer_port: int = 0
- reason: str | None = None
+ reason: Optional[str] = None
def __post_init__(self):
"""Initialize event type and data."""
@@ -324,7 +324,7 @@ class PeerCountLowEvent(Event):
"""Event emitted when peer count is low, triggering discovery."""
active_peers: int = 0
- info_hash: bytes | None = None
+ info_hash: Optional[bytes] = None
total_peers: int = 0
def __post_init__(self):
@@ -350,7 +350,7 @@ class PieceDownloadedEvent(Event):
piece_index: int = 0
piece_size: int = 0
download_time: float = 0.0
- peer_ip: str | None = None
+ peer_ip: Optional[str] = None
def __post_init__(self):
"""Initialize event type and data."""
@@ -436,7 +436,7 @@ def __init__(
batch_timeout: float = 0.05,
emit_timeout: float = 0.01,
queue_full_threshold: float = 0.9,
- throttle_intervals: dict[str, float] | None = None,
+ throttle_intervals: Optional[dict[str, float]] = None,
):
"""Initialize event bus.
@@ -457,8 +457,8 @@ def __init__(
self.max_replay_events = 1000
self.running = False
self.logger = get_logger(__name__)
- self._loop: asyncio.AbstractEventLoop | None = None
- self._task: asyncio.Task | None = None
+ self._loop: Optional[asyncio.AbstractEventLoop] = None
+ self._task: Optional[asyncio.Task] = None
# Batch processing configuration
self.batch_size = batch_size
@@ -796,7 +796,7 @@ async def _handle_with_handler(self, event: Event, handler: EventHandler) -> Non
def get_replay_events(
self,
- event_type: str | None = None,
+ event_type: Optional[str] = None,
limit: int = 100,
) -> list[Event]:
"""Get events from replay buffer.
@@ -830,7 +830,7 @@ def get_stats(self) -> dict[str, Any]:
# Global event bus instance
-_event_bus: EventBus | None = None
+_event_bus: Optional[EventBus] = None
def get_event_bus() -> EventBus:
@@ -866,7 +866,9 @@ def get_event_bus() -> EventBus:
return _event_bus
-def get_recent_events(limit: int = 100, event_type: str | None = None) -> list[Event]:
+def get_recent_events(
+ limit: int = 100, event_type: Optional[str] = None
+) -> list[Event]:
"""Get recent events from the global event bus.
Args:
@@ -890,7 +892,7 @@ async def emit_event(event: Event) -> None:
async def emit_peer_connected(
peer_ip: str,
peer_port: int,
- peer_id: str | None = None,
+ peer_id: Optional[str] = None,
) -> None:
"""Emit peer connected event."""
event = PeerConnectedEvent(
@@ -905,7 +907,7 @@ async def emit_peer_connected(
async def emit_peer_disconnected(
peer_ip: str,
peer_port: int,
- reason: str | None = None,
+ reason: Optional[str] = None,
) -> None:
"""Emit peer disconnected event."""
event = PeerDisconnectedEvent(
@@ -921,7 +923,7 @@ async def emit_piece_downloaded(
piece_index: int,
piece_size: int,
download_time: float,
- peer_ip: str | None = None,
+ peer_ip: Optional[str] = None,
) -> None:
"""Emit piece downloaded event."""
event = PieceDownloadedEvent(
@@ -955,7 +957,7 @@ async def emit_performance_metric(
metric_name: str,
metric_value: float,
metric_unit: str,
- tags: dict[str, str] | None = None,
+ tags: Optional[dict[str, str]] = None,
) -> None:
"""Emit performance metric event."""
event = PerformanceMetricEvent(
diff --git a/ccbt/utils/exceptions.py b/ccbt/utils/exceptions.py
index 3e053e7..f5d0c75 100644
--- a/ccbt/utils/exceptions.py
+++ b/ccbt/utils/exceptions.py
@@ -8,13 +8,13 @@
from __future__ import annotations
-from typing import Any
+from typing import Any, Optional
class CCBTError(Exception):
"""Base exception for all ccBitTorrent errors."""
- def __init__(self, message: str, details: dict[str, Any] | None = None):
+ def __init__(self, message: str, details: Optional[dict[str, Any]] = None):
"""Initialize CCBT error."""
super().__init__(message)
self.message = message
diff --git a/ccbt/utils/logging_config.py b/ccbt/utils/logging_config.py
index 4c2356a..3b0e0ae 100644
--- a/ccbt/utils/logging_config.py
+++ b/ccbt/utils/logging_config.py
@@ -17,7 +17,7 @@
import uuid
from contextvars import ContextVar
from pathlib import Path
-from typing import TYPE_CHECKING, Any, ClassVar, cast
+from typing import TYPE_CHECKING, Any, ClassVar, Optional, cast
from ccbt.utils.exceptions import CCBTError
from ccbt.utils.rich_logging import (
@@ -31,8 +31,8 @@
# Context variable for correlation ID
# Help type checker understand the ContextVar generic with a None default
-correlation_id: ContextVar[str | None] = cast(
- "ContextVar[str | None]",
+correlation_id: ContextVar[Optional[str]] = cast(
+ "ContextVar[Optional[str]]",
ContextVar("correlation_id", default=None),
)
@@ -152,7 +152,7 @@ def format(self, record: logging.LogRecord) -> str:
return f"Logging error: {record.levelname} {record.name}"
-def _generate_timestamped_log_filename(base_path: str | None) -> str:
+def _generate_timestamped_log_filename(base_path: Optional[str]) -> str:
"""Generate a unique timestamped log file name.
Args:
@@ -426,7 +426,7 @@ def get_logger(name: str) -> logging.Logger:
return logging.getLogger(f"ccbt.{name}")
-def set_correlation_id(corr_id: str | None = None) -> str:
+def set_correlation_id(corr_id: Optional[str] = None) -> str:
"""Set correlation ID for the current context."""
if corr_id is None:
corr_id = str(uuid.uuid4())
@@ -434,7 +434,7 @@ def set_correlation_id(corr_id: str | None = None) -> str:
return corr_id
-def get_correlation_id() -> str | None:
+def get_correlation_id() -> Optional[str]:
"""Get the current correlation ID."""
return correlation_id.get()
@@ -445,9 +445,9 @@ class LoggingContext:
def __init__(
self,
operation: str,
- log_level: int | None = None,
+ log_level: Optional[int] = None,
slow_threshold: float = 1.0,
- verbosity_manager: Any | None = None,
+ verbosity_manager: Optional[Any] = None,
**kwargs,
):
"""Initialize operation context manager.
@@ -582,7 +582,7 @@ def log_exception(logger: logging.Logger, exc: Exception, context: str = "") ->
def log_with_verbosity(
logger: logging.Logger,
- verbosity_manager: Any | None,
+ verbosity_manager: Optional[Any],
level: int,
message: str,
*args: Any,
@@ -611,7 +611,7 @@ def log_with_verbosity(
def log_info_verbose(
logger: logging.Logger,
- verbosity_manager: Any | None,
+ verbosity_manager: Optional[Any],
message: str,
*args: Any,
**kwargs: Any,
@@ -640,7 +640,7 @@ def log_info_verbose(
def log_info_normal(
logger: logging.Logger,
- verbosity_manager: Any | None,
+ verbosity_manager: Optional[Any],
message: str,
*args: Any,
**kwargs: Any,
diff --git a/ccbt/utils/metadata_utils.py b/ccbt/utils/metadata_utils.py
index 7133a40..344b054 100644
--- a/ccbt/utils/metadata_utils.py
+++ b/ccbt/utils/metadata_utils.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import hashlib
-from typing import Any
+from typing import Any, Optional
from ccbt.core.bencode import BencodeEncoder
@@ -16,7 +16,7 @@ def calculate_info_hash(info_dict: dict[bytes, Any]) -> bytes:
def validate_info_dict(
- info_dict: dict[bytes, Any], expected_info_hash: bytes | None
+ info_dict: dict[bytes, Any], expected_info_hash: Optional[bytes]
) -> bool:
"""Validate info dict matches expected v1 info hash if provided."""
if expected_info_hash is None:
diff --git a/ccbt/utils/metrics.py b/ccbt/utils/metrics.py
index 28d5bf7..6789cb1 100644
--- a/ccbt/utils/metrics.py
+++ b/ccbt/utils/metrics.py
@@ -16,13 +16,13 @@
from collections import deque
from dataclasses import dataclass, field
from enum import Enum
-from typing import Any, Callable
+from typing import Any, Callable, Optional
# Define at module level so they always exist for patching/mocking
-CollectorRegistry: type | None = None # type: ignore[assignment, misc]
-Counter: type | None = None # type: ignore[assignment, misc]
-Gauge: type | None = None # type: ignore[assignment, misc]
-start_http_server: Callable | None = None # type: ignore[assignment, misc]
+CollectorRegistry: Optional[type] = None # type: ignore[assignment, misc]
+Counter: Optional[type] = None # type: ignore[assignment, misc]
+Gauge: Optional[type] = None # type: ignore[assignment, misc]
+start_http_server: Optional[Callable] = None # type: ignore[assignment, misc]
try:
from prometheus_client import (
@@ -203,11 +203,11 @@ def __init__(self):
self._setup_prometheus_metrics()
# Background tasks
- self._metrics_task: asyncio.Task | None = None
- self._cleanup_task: asyncio.Task | None = None
+ self._metrics_task: Optional[asyncio.Task] = None
+ self._cleanup_task: Optional[asyncio.Task] = None
# Callbacks
- self.on_metrics_update: Callable[[dict[str, Any]], None] | None = None
+ self.on_metrics_update: Optional[Callable[[dict[str, Any]], None]] = None
self.logger = logging.getLogger(__name__)
@@ -700,11 +700,11 @@ def get_metrics_summary(self) -> dict[str, Any]:
"peers": len(self.peer_metrics),
}
- def get_torrent_metrics(self, torrent_id: str) -> TorrentMetrics | None:
+ def get_torrent_metrics(self, torrent_id: str) -> Optional[TorrentMetrics]:
"""Get metrics for a specific torrent."""
return self.torrent_metrics.get(torrent_id)
- def get_peer_metrics(self, peer_key: str) -> PeerMetrics | None:
+ def get_peer_metrics(self, peer_key: str) -> Optional[PeerMetrics]:
"""Get metrics for a specific peer."""
return self.peer_metrics.get(peer_key)
diff --git a/ccbt/utils/network_optimizer.py b/ccbt/utils/network_optimizer.py
index 979797c..9d1653e 100644
--- a/ccbt/utils/network_optimizer.py
+++ b/ccbt/utils/network_optimizer.py
@@ -16,7 +16,7 @@
from collections import deque
from dataclasses import dataclass
from enum import Enum
-from typing import Any
+from typing import Any, Optional
from ccbt.utils.exceptions import NetworkError
from ccbt.utils.logging_config import get_logger
@@ -120,9 +120,9 @@ def __init__(self) -> None:
def _calculate_optimal_buffer_size(
self,
- bandwidth_bps: float | None = None,
- rtt_ms: float | None = None,
- connection_stats: ConnectionStats | None = None,
+ bandwidth_bps: Optional[float] = None,
+ rtt_ms: Optional[float] = None,
+ connection_stats: Optional[ConnectionStats] = None,
) -> int:
"""Calculate optimal buffer size using BDP (Bandwidth-Delay Product).
@@ -232,7 +232,7 @@ def optimize_socket(
self,
sock: socket.socket,
socket_type: SocketType,
- connection_stats: ConnectionStats | None = None,
+ connection_stats: Optional[ConnectionStats] = None,
) -> None:
"""Optimize socket settings for the given type.
@@ -366,6 +366,9 @@ def create_optimized_socket(
class ConnectionPool:
"""Connection pool for efficient connection management."""
+ # Track all active instances for debugging and forced cleanup
+ _active_instances: set = set()
+
def __init__(
self,
max_connections: int = 100,
@@ -407,13 +410,15 @@ def __init__(
daemon=True,
)
self._cleanup_task.start()
+ # Track this instance for debugging and forced cleanup
+ ConnectionPool._active_instances.add(self)
def get_connection(
self,
host: str,
port: int,
socket_type: SocketType = SocketType.PEER_CONNECTION,
- ) -> socket.socket | None:
+ ) -> Optional[socket.socket]:
"""Get a connection from the pool.
Args:
@@ -489,7 +494,7 @@ def _create_connection(
host: str,
port: int,
socket_type: SocketType,
- ) -> socket.socket | None:
+ ) -> Optional[socket.socket]:
"""Create a new connection."""
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
@@ -528,8 +533,12 @@ def _cleanup_connections(self) -> None:
# Full coverage requires running thread for 60+ seconds which is impractical in unit tests
# Logic is tested via direct method calls in test suite
try:
- # Wait up to 60 seconds, but check shutdown event
- if self._shutdown_event.wait(timeout=60):
+ # CRITICAL FIX: Check shutdown event before waiting to allow immediate exit
+ if self._shutdown_event.is_set():
+ break
+ # Wait up to 5 seconds (reduced from 60s to prevent thread accumulation)
+ # Threads check shutdown event 12x more frequently, reducing accumulation
+ if self._shutdown_event.wait(timeout=5):
# Shutdown event was set, exit loop
break
@@ -560,18 +569,23 @@ def stop(self) -> None:
# CRITICAL FIX: Always set shutdown event, even if thread is not alive
# This ensures the event is set for any waiting threads
self._shutdown_event.set()
+ # Remove from active instances tracking
+ ConnectionPool._active_instances.discard(self)
# CRITICAL FIX: Add defensive check for None _cleanup_task
if self._cleanup_task is None:
return
if self._cleanup_task.is_alive():
- # Wait for thread to finish with timeout
- self._cleanup_task.join(timeout=5.0)
- # If thread is still alive after timeout, log warning
+ # Wait for thread to finish with timeout (reduced from 5.0s to 2.0s for faster cleanup)
+ self._cleanup_task.join(timeout=2.0)
+ # If thread is still alive after timeout, force cleanup to prevent accumulation
if self._cleanup_task.is_alive():
self.logger.warning(
"Cleanup thread did not stop within timeout, "
- "it will continue as daemon thread"
+ "forcing cleanup to prevent thread accumulation"
)
+ # Force cleanup: clear reference to allow thread to be garbage collected
+ # Thread is daemon so it will be terminated when main process exits
+ self._cleanup_task = None
def update_bytes_transferred(
self, sock: socket.socket, bytes_sent: int, bytes_received: int
@@ -635,7 +649,7 @@ def update_rtt(self, _sock: socket.socket, rtt_ms: float) -> None:
alpha = 0.125 # RFC 6298 default
self.stats.rtt_ms = alpha * rtt_ms + (1 - alpha) * self.stats.rtt_ms
- def get_connection_stats(self, sock: socket.socket) -> ConnectionStats | None:
+ def get_connection_stats(self, sock: socket.socket) -> Optional[ConnectionStats]:
"""Get statistics for a specific connection.
Args:
@@ -714,7 +728,7 @@ def optimize_socket(
self,
sock: socket.socket,
socket_type: SocketType,
- connection_stats: ConnectionStats | None = None,
+ connection_stats: Optional[ConnectionStats] = None,
) -> None:
"""Optimize socket settings for the given type.
@@ -743,7 +757,7 @@ def get_connection(
host: str,
port: int,
socket_type: SocketType = SocketType.PEER_CONNECTION,
- ) -> socket.socket | None:
+ ) -> Optional[socket.socket]:
"""Get an optimized connection."""
return self.connection_pool.get_connection(host, port, socket_type)
@@ -766,7 +780,7 @@ def get_stats(self) -> dict[str, Any]:
# Global network optimizer instance
-_network_optimizer: NetworkOptimizer | None = None
+_network_optimizer: Optional[NetworkOptimizer] = None
def get_network_optimizer() -> NetworkOptimizer:
@@ -783,3 +797,18 @@ def reset_network_optimizer() -> None:
if _network_optimizer is not None:
_network_optimizer.stop()
_network_optimizer = None
+
+
+def force_cleanup_all_connection_pools() -> None:
+ """Force cleanup all ConnectionPool instances (emergency use for test teardown).
+
+ This function should be used in test fixtures to ensure all ConnectionPool
+ instances are properly stopped, preventing thread leaks and test timeouts.
+ """
+ for pool in list(ConnectionPool._active_instances):
+ try:
+ pool.stop()
+ except Exception:
+ # Best effort cleanup - ignore errors to ensure all pools are attempted
+ pass
+ ConnectionPool._active_instances.clear()
diff --git a/ccbt/utils/port_checker.py b/ccbt/utils/port_checker.py
index c4d5f72..51b56e0 100644
--- a/ccbt/utils/port_checker.py
+++ b/ccbt/utils/port_checker.py
@@ -5,11 +5,12 @@
import contextlib
import socket
import sys
+from typing import Optional
def is_port_available(
host: str, port: int, protocol: str = "tcp"
-) -> tuple[bool, str | None]:
+) -> tuple[bool, Optional[str]]:
"""Check if a port is available for binding.
Args:
@@ -140,7 +141,7 @@ def get_port_conflict_resolution(port: int, _protocol: str = "tcp") -> str:
def get_permission_error_resolution(
- port: int, protocol: str = "tcp", config_key: str | None = None
+ port: int, protocol: str = "tcp", config_key: Optional[str] = None
) -> str:
"""Get resolution steps for permission denied errors.
diff --git a/ccbt/utils/resilience.py b/ccbt/utils/resilience.py
index b5069f2..d9af13f 100644
--- a/ccbt/utils/resilience.py
+++ b/ccbt/utils/resilience.py
@@ -10,7 +10,7 @@
import functools
import logging
import time
-from typing import Any, Awaitable, Callable, TypeVar, Union, cast
+from typing import Any, Awaitable, Callable, Optional, TypeVar, Union, cast
T = TypeVar("T")
AsyncFunc = Callable[..., Awaitable[T]]
@@ -194,7 +194,9 @@ def __init__(
self,
failure_threshold: int = 5,
recovery_timeout: float = 60.0,
- expected_exception: type[Exception] | tuple[type[Exception], ...] = Exception,
+ expected_exception: Union[
+ type[Exception], tuple[type[Exception], ...]
+ ] = Exception,
):
"""Initialize circuit breaker.
@@ -457,7 +459,7 @@ async def process_batches(
self,
items: list[Any],
operation: Callable[[list[Any]], Any],
- error_handler: Callable[[Exception, list[Any]], None] | None = None,
+ error_handler: Optional[Callable[[Exception, list[Any]], None]] = None,
) -> list[Any]:
"""Process items in batches.
diff --git a/ccbt/utils/rich_logging.py b/ccbt/utils/rich_logging.py
index f5dd638..effa3de 100644
--- a/ccbt/utils/rich_logging.py
+++ b/ccbt/utils/rich_logging.py
@@ -7,7 +7,7 @@
import logging
import re
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
if TYPE_CHECKING:
from rich.console import Console
@@ -64,9 +64,9 @@ class CorrelationRichHandler(RichHandler): # type: ignore[misc]
def __init__(
self,
*args: Any,
- console: Console | None = None,
+ console: Optional[Console] = None,
show_icons: bool = False, # noqa: ARG002 # Deprecated, reserved for future use
- _show_icons: bool | None = None, # Deprecated, use show_icons
+ _show_icons: Optional[bool] = None, # Deprecated, use show_icons
show_colors: bool = True,
**kwargs: Any,
) -> None:
@@ -322,7 +322,7 @@ def format(self, record: logging.LogRecord) -> str:
def create_rich_handler(
- console: Console | None = None,
+ console: Optional[Console] = None,
level: int = logging.INFO,
show_path: bool = False,
rich_tracebacks: bool = True,
diff --git a/ccbt/utils/rtt_measurement.py b/ccbt/utils/rtt_measurement.py
index 79cdfae..1829859 100644
--- a/ccbt/utils/rtt_measurement.py
+++ b/ccbt/utils/rtt_measurement.py
@@ -8,7 +8,7 @@
import time
from collections import deque
-from typing import Any
+from typing import Any, Optional
logger = None
@@ -65,7 +65,7 @@ def __init__(
self.total_samples = 0
self.retransmission_count = 0
- def record_send(self, sequence: int, timestamp: float | None = None) -> None:
+ def record_send(self, sequence: int, timestamp: Optional[float] = None) -> None:
"""Record packet send time for RTT measurement.
Args:
@@ -79,8 +79,8 @@ def record_send(self, sequence: int, timestamp: float | None = None) -> None:
self.pending_measurements[sequence] = timestamp
def record_receive(
- self, sequence: int, timestamp: float | None = None
- ) -> float | None:
+ self, sequence: int, timestamp: Optional[float] = None
+ ) -> Optional[float]:
"""Record packet receive time and calculate RTT.
Args:
diff --git a/ccbt/utils/tasks.py b/ccbt/utils/tasks.py
index ff65403..a363512 100644
--- a/ccbt/utils/tasks.py
+++ b/ccbt/utils/tasks.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import asyncio
-from typing import Any, Coroutine
+from typing import Any, Coroutine, Optional
class BackgroundTaskGroup:
@@ -20,7 +20,7 @@ def create(self, coro: Coroutine[Any, Any, Any]) -> asyncio.Task[Any]:
task.add_done_callback(self._tasks.discard)
return task
- async def cancel_and_wait(self, timeout: float | None = None) -> None:
+ async def cancel_and_wait(self, timeout: Optional[float] = None) -> None:
"""Cancel all tracked tasks and wait for completion (with optional timeout)."""
if not self._tasks:
return
diff --git a/ccbt/utils/timeout_adapter.py b/ccbt/utils/timeout_adapter.py
index acb390f..8a529aa 100644
--- a/ccbt/utils/timeout_adapter.py
+++ b/ccbt/utils/timeout_adapter.py
@@ -8,7 +8,7 @@
from __future__ import annotations
import logging
-from typing import Any
+from typing import Any, Optional
logger = logging.getLogger(__name__)
@@ -19,7 +19,7 @@ class AdaptiveTimeoutCalculator:
def __init__(
self,
config: Any,
- peer_manager: Any | None = None,
+ peer_manager: Optional[Any] = None,
) -> None:
"""Initialize adaptive timeout calculator.
diff --git a/ccbt/utils/version.py b/ccbt/utils/version.py
index 203235b..f405e0e 100644
--- a/ccbt/utils/version.py
+++ b/ccbt/utils/version.py
@@ -11,7 +11,7 @@
import importlib.metadata
import re
-from typing import Final
+from typing import Final, Optional
# Client names
NETWORK_CLIENT_NAME: Final[str] = "btonic"
@@ -69,7 +69,7 @@ def parse_version(version: str) -> tuple[int, int, int]:
return (major, minor, patch)
-def get_peer_id_prefix(version: str | None = None) -> bytes:
+def get_peer_id_prefix(version: Optional[str] = None) -> bytes:
"""Generate peer_id prefix from version.
Pattern: -BT{major:02d}{minor:02d}-
@@ -125,7 +125,7 @@ def get_ui_client_name() -> str:
return UI_CLIENT_NAME
-def get_user_agent(version: str | None = None) -> str:
+def get_user_agent(version: Optional[str] = None) -> str:
"""Format user-agent string for HTTP requests.
Format: "btonic/{version}"
@@ -143,7 +143,7 @@ def get_user_agent(version: str | None = None) -> str:
return f"{NETWORK_CLIENT_NAME}/{version}"
-def get_full_peer_id(version: str | None = None) -> bytes:
+def get_full_peer_id(version: Optional[str] = None) -> bytes:
"""Generate a complete 20-byte peer_id.
Format: {prefix}{random_bytes}
diff --git a/.readthedocs.yaml b/dev/.readthedocs.yaml
similarity index 100%
rename from .readthedocs.yaml
rename to dev/.readthedocs.yaml
diff --git a/dev/COMPATIBILITY_LINTING.md b/dev/COMPATIBILITY_LINTING.md
new file mode 100644
index 0000000..2babd09
--- /dev/null
+++ b/dev/COMPATIBILITY_LINTING.md
@@ -0,0 +1,241 @@
+# Python 3.8/3.9 Compatibility Linting
+
+This document describes the compatibility linting rules integrated into the ccBitTorrent project to ensure Python 3.8 and 3.9 compatibility.
+
+## Overview
+
+The compatibility linter enforces design patterns from [`compatibility_tests/COMPREHENSIVE_RESOLUTION_PLAN.md`](../compatibility_tests/COMPREHENSIVE_RESOLUTION_PLAN.md) to prevent Python 3.10+ syntax from being introduced into the codebase.
+
+## Linting Tools
+
+### 1. Custom Compatibility Linter
+
+**Location**: [`dev/compatibility_linter.py`](compatibility_linter.py)
+
+A custom Python script that checks for:
+- **Union type syntax (`|`)**: Detects `type | None` and `type1 | type2` patterns
+- **Built-in generic types**: Detects `tuple[...]`, `list[...]`, `dict[...]`, `set[...]` without `from __future__ import annotations`
+
+**Usage**:
+```bash
+# Check all files in ccbt/
+uv run python dev/compatibility_linter.py ccbt/
+
+# Check specific files
+uv run python dev/compatibility_linter.py ccbt/session/session.py
+
+# JSON output
+uv run python dev/compatibility_linter.py ccbt/ --format json
+```
+
+**Integration**: Automatically runs as part of pre-commit hooks (see [`dev/pre-commit-config.yaml`](pre-commit-config.yaml))
+
+### 2. Ruff Configuration
+
+**Location**: [`dev/ruff.toml`](ruff.toml)
+
+Ruff is configured with:
+- **Target Python version**: `py38` (ensures compatibility checks)
+- **Ignored rules**: `UP045` and `UP007` (which suggest using `|` syntax) are intentionally ignored to enforce compatibility
+
+## Design Patterns Enforced
+
+### Pattern 1: Union Type Syntax
+
+**Invalid (Python 3.10+ only)**:
+```python
+def func(param: str | None = None) -> dict | None:
+ pass
+
+var: int | float = 1.0
+```
+
+**Valid (Python 3.8/3.9 compatible)**:
+```python
+from typing import Optional, Union
+
+def func(param: Optional[str] = None) -> Optional[dict]:
+ pass
+
+var: Union[int, float] = 1.0
+```
+
+**Detection**: The compatibility linter detects union syntax (`|`) in:
+- Function parameters: `param: type | None`
+- Return types: `-> type | None`
+- Variable annotations: `var: type | None`
+- Type aliases: `TypeAlias = type | None`
+
+### Pattern 2: Built-in Generic Types
+
+**❌ Invalid (Python 3.8 without `__future__`)**:
+```python
+_PacketInfo = tuple[UTPPacket, float, int]
+items: list[str] = []
+mapping: dict[str, int] = {}
+```
+
+**Valid Option 1 (Recommended)**:
+```python
+from __future__ import annotations
+
+_PacketInfo = tuple[UTPPacket, float, int]
+items: list[str] = []
+mapping: dict[str, int] = {}
+```
+
+**Valid Option 2 (Alternative)**:
+```python
+from typing import Tuple, List, Dict
+
+_PacketInfo = Tuple[UTPPacket, float, int]
+items: List[str] = []
+mapping: Dict[str, int] = {}
+```
+
+**Detection**: The compatibility linter detects built-in generic types (`tuple[...]`, `list[...]`, `dict[...]`, `set[...]`) and checks if `from __future__ import annotations` is present in the first 20 lines of the file.
+
+## Issue Types
+
+The compatibility linter reports issues with the following types:
+
+1. **`union-syntax-param`**: Union syntax in function parameter
+2. **`union-syntax-return`**: Union syntax in return type
+3. **`union-syntax-var`**: Union syntax in variable annotation
+4. **`union-syntax-alias`**: Union syntax in type alias
+5. **`builtin-generic-tuple`**: `tuple[...]` without `__future__` import
+6. **`builtin-generic-list`**: `list[...]` without `__future__` import
+7. **`builtin-generic-dict`**: `dict[...]` without `__future__` import
+8. **`builtin-generic-set`**: `set[...]` without `__future__` import
+
+## Integration
+
+### Pre-commit Hooks
+
+The compatibility linter runs automatically before commits via pre-commit hooks:
+
+```yaml
+- id: compatibility-linter
+ name: compatibility-linter
+ entry: uv run python dev/compatibility_linter.py ccbt/
+ language: system
+ types: [python]
+ files: ^ccbt/.*\.py$
+```
+
+### CI/CD Pipeline
+
+The compatibility linter should be integrated into CI/CD pipelines to catch issues before merging. Add to `.github/workflows/ci.yml`:
+
+```yaml
+- name: Check Python 3.8/3.9 compatibility
+ run: |
+ uv run python dev/compatibility_linter.py ccbt/
+```
+
+## Fixing Issues
+
+### Automatic Fixes
+
+Some issues can be fixed automatically:
+
+1. **Union syntax**: Replace `type | None` with `Optional[type]`
+2. **Complex unions**: Replace `A | B | C` with `Union[A, B, C]`
+3. **Built-in generics**: Add `from __future__ import annotations` at the top of the file
+
+### Manual Fixes
+
+Complex cases may require manual review:
+- Nested types: `dict[str, int | None]` → `dict[str, Optional[int]]`
+- Type aliases with unions
+- Context-specific type annotations
+
+## Examples
+
+### Example 1: Function with Union Type
+
+**Before**:
+```python
+def get_value(key: str) -> str | None:
+ return cache.get(key)
+```
+
+**After**:
+```python
+from typing import Optional
+
+def get_value(key: str) -> Optional[str]:
+ return cache.get(key)
+```
+
+### Example 2: Built-in Generic Type
+
+**Before**:
+```python
+def process_items(items: list[str]) -> dict[str, int]:
+ return {item: len(item) for item in items}
+```
+
+**After**:
+```python
+from __future__ import annotations
+
+def process_items(items: list[str]) -> dict[str, int]:
+ return {item: len(item) for item in items}
+```
+
+### Example 3: Complex Union
+
+**Before**:
+```python
+def parse_value(value: str | int | float) -> str | None:
+ try:
+ return str(value)
+ except Exception:
+ return None
+```
+
+**After**:
+```python
+from typing import Optional, Union
+
+def parse_value(value: Union[str, int, float]) -> Optional[str]:
+ try:
+ return str(value)
+ except Exception:
+ return None
+```
+
+## Related Documentation
+
+- [`compatibility_tests/COMPREHENSIVE_RESOLUTION_PLAN.md`](../compatibility_tests/COMPREHENSIVE_RESOLUTION_PLAN.md) - Full compatibility resolution plan
+- [`dev/ruff.toml`](ruff.toml) - Ruff linting configuration
+- [`dev/pre-commit-config.yaml`](pre-commit-config.yaml) - Pre-commit hook configuration
+
+## Troubleshooting
+
+### False Positives
+
+The linter may report false positives for:
+- Bitwise OR operations (e.g., `flags | MASK`)
+- String literals containing `|`
+- Comments containing type annotations
+
+These are filtered out automatically, but if you encounter issues, please report them.
+
+### Performance
+
+The linter processes files sequentially. For large codebases, consider:
+- Running on specific directories: `uv run python dev/compatibility_linter.py ccbt/session/`
+- Using JSON output for programmatic processing
+- Excluding test files if not needed
+
+## Contributing
+
+When adding new compatibility checks:
+
+1. Add the pattern to `dev/compatibility_linter.py`
+2. Update this documentation
+3. Test with existing codebase
+4. Add to pre-commit hooks if appropriate
+
diff --git a/dev/build_docs_patched_clean.py b/dev/build_docs_patched_clean.py
index afe40ab..b9670ab 100644
--- a/dev/build_docs_patched_clean.py
+++ b/dev/build_docs_patched_clean.py
@@ -140,26 +140,127 @@ def patched_reconfigure_files(self, files, mkdocs_config):
import sys
import os
import logging
- from mkdocs.__main__ import cli
+ from pathlib import Path
- # Patch mkdocs logger to filter out autorefs warnings about multiple primary URLs
- # These are expected with i18n plugin when same objects are documented in multiple languages
- class AutorefsWarningFilter(logging.Filter):
- """Filter out autorefs warnings about multiple primary URLs (expected with i18n)."""
+ # Patch mkdocs logger BEFORE importing mkdocs to catch all warnings
+ # This must be done before any mkdocs imports
+ class WarningFilter(logging.Filter):
+ """Filter out expected warnings that are acceptable in strict mode."""
def filter(self, record):
- # Filter out warnings about multiple primary URLs from mkdocs-autorefs
- if 'Multiple primary URLs found' in record.getMessage():
+ msg = record.getMessage()
+ # Filter autorefs warnings about multiple primary URLs (expected with i18n)
+ if 'Multiple primary URLs found' in msg:
+ return False
+ # Filter coverage warnings about missing directory (acceptable if tests didn't run)
+ if 'No such HTML report directory' in msg or ('mkdocs_coverage' in msg and 'htmlcov' in msg):
return False
return True
- # Apply filter to mkdocs logger
- mkdocs_logger = logging.getLogger('mkdocs')
- autorefs_filter = AutorefsWarningFilter()
- mkdocs_logger.addFilter(autorefs_filter)
+ # Apply filter to root logger to catch all warnings
+ root_logger = logging.getLogger()
+ warning_filter = WarningFilter()
+ root_logger.addFilter(warning_filter)
+
+ # Also apply to mkdocs loggers specifically
+ for logger_name in ['mkdocs', 'mkdocs.plugins', 'mkdocs_autorefs', 'mkdocs_coverage']:
+ logger = logging.getLogger(logger_name)
+ logger.addFilter(warning_filter)
+
+ # Note: Plugins use mkdocs' log system, so we patch mkdocs.utils.log instead
+ # This is done after mkdocs import below
+
+ # Import mkdocs and patch its log system
+ from mkdocs import utils
+
+ # Patch mkdocs' log.warning to filter expected warnings
+ if hasattr(utils, 'log'):
+ original_mkdocs_warning = utils.log.warning
+
+ def patched_mkdocs_warning(message, *args, **kwargs):
+ """Patch mkdocs warning to suppress expected warnings in strict mode."""
+ msg_str = str(message) % args if args else str(message)
+ # Suppress autorefs warnings about multiple primary URLs
+ if 'Multiple primary URLs found' in msg_str:
+ return
+ # Suppress coverage warnings about missing directory
+ if 'No such HTML report directory' in msg_str or ('mkdocs_coverage' in msg_str and 'htmlcov' in msg_str):
+ return
+ # Call original warning for all other messages
+ original_mkdocs_warning(message, *args, **kwargs)
+
+ utils.log.warning = patched_mkdocs_warning
+
+ # Now import mkdocs CLI - this will load plugins which may use log.warning
+ from mkdocs.__main__ import cli
+
+ # After plugins are loaded, patch their internal log objects
+ # mkdocs-autorefs uses _log.warning() from its internal plugin module
+ try:
+ import mkdocs_autorefs._internal.plugin as autorefs_plugin
+ if hasattr(autorefs_plugin, '_log') and hasattr(autorefs_plugin._log, 'warning'):
+ original_autorefs_log_warning = autorefs_plugin._log.warning
+
+ def patched_autorefs_log_warning(msg, *args, **kwargs):
+ """Patch autorefs _log.warning to suppress multiple primary URLs warnings."""
+ msg_str = str(msg) % args if args else str(msg)
+ if 'Multiple primary URLs found' not in msg_str:
+ original_autorefs_log_warning(msg, *args, **kwargs)
+
+ autorefs_plugin._log.warning = patched_autorefs_log_warning
+ except (ImportError, AttributeError):
+ pass
+
+ # Also ensure plugin loggers have the filter
+ if 'mkdocs_filter' in locals():
+ try:
+ autorefs_logger = logging.getLogger('mkdocs_autorefs')
+ # Check if filter is already added
+ has_filter = any('MkDocsWarningFilter' in str(type(f)) for f in autorefs_logger.filters)
+ if not has_filter:
+ autorefs_logger.addFilter(mkdocs_filter)
+ except (NameError, AttributeError):
+ pass
+
+ # Hook into mkdocs build process to ensure coverage directory exists after site cleanup
+ # Patch mkdocs' clean_directory to recreate coverage dir after cleanup
+ try:
+ original_clean_directory = utils.clean_directory
+
+ def patched_clean_directory(directory):
+ """Clean directory but recreate coverage subdirectory."""
+ result = original_clean_directory(directory)
+ # Recreate coverage directory after cleanup if cleaning site directory
+ if 'site' in str(directory) or str(directory).endswith('site'):
+ coverage_dir = Path('site/reports/htmlcov')
+ coverage_dir.mkdir(parents=True, exist_ok=True)
+ coverage_index = coverage_dir / 'index.html'
+ if not coverage_index.exists():
+ coverage_index.write_text('Coverage Report
Coverage report not available. Run tests to generate coverage data.
')
+ return result
+
+ utils.clean_directory = patched_clean_directory
+ except (ImportError, AttributeError):
+ pass
- # Also filter mkdocs_autorefs logger if it exists
- autorefs_logger = logging.getLogger('mkdocs_autorefs')
- autorefs_logger.addFilter(autorefs_filter)
+ # Also patch the coverage plugin's on_config method to ensure directory exists
+ try:
+ import mkdocs_coverage
+ original_on_config = mkdocs_coverage.MkDocsCoveragePlugin.on_config
+
+ def patched_coverage_on_config(self, config, **kwargs):
+ """Ensure coverage directory exists before plugin checks for it."""
+ # Ensure directory exists
+ coverage_dir = Path('site/reports/htmlcov')
+ coverage_dir.mkdir(parents=True, exist_ok=True)
+ coverage_index = coverage_dir / 'index.html'
+ if not coverage_index.exists():
+ coverage_index.write_text('Coverage Report
Coverage report not available. Run tests to generate coverage data.
')
+ # Call original method
+ return original_on_config(self, config, **kwargs)
+
+ mkdocs_coverage.MkDocsCoveragePlugin.on_config = patched_coverage_on_config
+ except (ImportError, AttributeError):
+ pass
# Use --strict only if explicitly requested via environment variable
# Otherwise, respect strict: false in mkdocs.yml
diff --git a/dev/compatibility_linter.py b/dev/compatibility_linter.py
new file mode 100644
index 0000000..5dc4e23
--- /dev/null
+++ b/dev/compatibility_linter.py
@@ -0,0 +1,855 @@
+#!/usr/bin/env python3
+"""
+Compatibility Linter for Python 3.8/3.9 Compatibility.
+
+This script checks for Python 3.8/3.9 compatibility issues:
+1. Union type syntax (`|`) - should use `Optional` or `Union` instead
+2. Built-in generic types without `__future__` import - requires `from __future__ import annotations` for Python 3.8
+3. `tuple[...]` usage - should use `Tuple[...]` from typing for Python 3.8 compatibility
+4. `tuple[...]` in type aliases - even with `__future__` import, type aliases are evaluated at runtime in Python 3.8
+5. `Tuple[...]` usage without proper import from typing - must import `Tuple` from typing
+6. Other compatibility patterns
+
+Based on patterns from compatibility_tests/COMPREHENSIVE_RESOLUTION_PLAN.md and
+compatibility_tests/PYTHON38_RESOLUTION_PLAN.md
+"""
+
+from __future__ import annotations
+
+import ast
+import re
+import sys
+from pathlib import Path
+from typing import NamedTuple, Optional
+
+
+class CompatibilityIssue(NamedTuple):
+ """Represents a compatibility issue found in code."""
+
+ file_path: Path
+ line_number: int
+ issue_type: str
+ message: str
+ code: str
+
+
+class CompatibilityLinter:
+ """Linter for Python 3.8/3.9 compatibility issues."""
+
+ def __init__(self, root_dir: Path) -> None:
+ """Initialize the linter with root directory."""
+ self.root_dir = root_dir
+ self.issues: list[CompatibilityIssue] = []
+
+ def check_file(self, file_path: Path) -> list[CompatibilityIssue]:
+ """Check a single file for compatibility issues."""
+ file_issues: list[CompatibilityIssue] = []
+
+ try:
+ content = file_path.read_text(encoding="utf-8")
+ lines = content.splitlines()
+
+ # Check for __future__ import
+ has_future_annotations = self._has_future_annotations(content)
+
+ # Check for typing imports
+ has_tuple_import = self._has_tuple_import(content)
+
+ # Check each line
+ for line_num, line in enumerate(lines, start=1):
+ # Check for union syntax (|) in type annotations
+ union_issues = self._check_union_syntax(
+ file_path, line_num, line, content
+ )
+ file_issues.extend(union_issues)
+
+ # Check for built-in generics without __future__ import
+ if not has_future_annotations:
+ generic_issues = self._check_builtin_generics(
+ file_path, line_num, line
+ )
+ file_issues.extend(generic_issues)
+
+ # Check for tuple[...] usage (should use Tuple[...] for Python 3.8 compatibility)
+ # Skip if file has __future__ import annotations (tuple[...] is compatible then)
+ if not has_future_annotations:
+ tuple_issues = self._check_tuple_usage(
+ file_path, line_num, line
+ )
+ file_issues.extend(tuple_issues)
+
+ # Check for tuple[...] in type aliases (even with __future__ import)
+ # Type aliases are evaluated at runtime in Python 3.8, so they need Tuple from typing
+ tuple_alias_issues = self._check_tuple_type_alias(
+ file_path, line_num, line
+ )
+ file_issues.extend(tuple_alias_issues)
+
+ # Check for Tuple[...] usage without proper import
+ if not has_tuple_import:
+ tuple_import_issues = self._check_tuple_import(
+ file_path, line_num, line
+ )
+ file_issues.extend(tuple_import_issues)
+
+ except Exception as e:
+ # Skip files that can't be read (binary, etc.)
+ if "encoding" not in str(e).lower():
+ print(f"Warning: Could not check {file_path}: {e}", file=sys.stderr)
+
+ # Deduplicate issues: same line, same issue type, same code
+ # This prevents reporting the same issue multiple times
+ seen: set[tuple[int, str, str]] = set()
+ deduplicated: list[CompatibilityIssue] = []
+ for issue in file_issues:
+ key = (issue.line_number, issue.issue_type, issue.code)
+ if key not in seen:
+ seen.add(key)
+ deduplicated.append(issue)
+
+ return deduplicated
+
+ def _has_future_annotations(self, content: str) -> bool:
+ """
+ Check if file has `from __future__ import annotations`.
+
+ The __future__ import must be at the top of the file (before any other imports
+ or code, except for module docstrings and comments). We check the first 50 lines
+ to allow for longer module docstrings and comments before the import.
+
+ This method is more robust and handles various edge cases:
+ - Multi-line docstrings
+ - Comments before the import
+ - Different quote styles
+ - Case-insensitive matching
+ """
+ # Check first 50 lines for __future__ import
+ # This allows for longer module docstrings and comments before the import
+ lines = content.splitlines()[:50]
+ in_docstring = False
+ docstring_quote = None
+
+ for line in lines:
+ stripped = line.strip()
+
+ # Handle docstrings (single or triple quotes)
+ if not in_docstring:
+ # Check for opening docstring
+ if stripped.startswith('"""') or stripped.startswith("'''"):
+ docstring_quote = stripped[:3]
+ in_docstring = True
+ # Check if it's a closing docstring on the same line
+ if stripped.count(docstring_quote) >= 2:
+ in_docstring = False
+ docstring_quote = None
+ continue
+ else:
+ # Inside docstring - check for closing
+ if docstring_quote in line:
+ in_docstring = False
+ docstring_quote = None
+ continue
+
+ # Skip empty lines and comments (but not docstrings)
+ if not stripped or stripped.startswith("#"):
+ continue
+
+ # Check for __future__ import (must be before other imports)
+ # Match: from __future__ import annotations
+ # Also match: from __future__ import annotations, other_stuff
+ if re.search(r"from\s+__future__\s+import\s+.*\bannotations\b", line, re.IGNORECASE):
+ return True
+
+ # If we hit a non-__future__ import or executable code, stop checking
+ # (__future__ imports must come before everything else)
+ if stripped.startswith("import ") or (stripped.startswith("from ") and "__future__" not in stripped.lower()):
+ # But allow shebang lines
+ if not stripped.startswith("#!"):
+ break
+
+ # Also do a full-file search as fallback (in case future import is later)
+ # This handles edge cases where the import might be after some comments
+ if re.search(r"from\s+__future__\s+import\s+.*\bannotations\b", content, re.IGNORECASE | re.MULTILINE):
+ return True
+
+ return False
+
+ def _has_tuple_import(self, content: str) -> bool:
+ """
+ Check if file imports `Tuple` from typing.
+
+ Checks for imports like:
+ - `from typing import Tuple`
+ - `from typing import TYPE_CHECKING, Optional, Tuple`
+ - `from typing import Tuple as T` (also valid)
+ """
+ # Check for Tuple import from typing
+ # Pattern matches: from typing import Tuple, from typing import ..., Tuple, ...
+ patterns = [
+ r"from\s+typing\s+import\s+.*\bTuple\b", # from typing import Tuple or from typing import ..., Tuple
+ r"from\s+typing\s+import\s+.*\bTuple\s+as\s+\w+", # from typing import Tuple as T
+ ]
+
+ for pattern in patterns:
+ if re.search(pattern, content, re.IGNORECASE):
+ return True
+
+ return False
+
+ def _check_union_syntax(
+ self, file_path: Path, line_num: int, line: str, full_content: str
+ ) -> list[CompatibilityIssue]:
+ """Check for union syntax (`|`) in type annotations."""
+ issues: list[CompatibilityIssue] = []
+
+ # Skip if line is a comment or string
+ stripped = line.strip()
+ if stripped.startswith("#") or stripped.startswith('"""') or stripped.startswith("'''"):
+ return issues
+
+ # Check if union syntax is in a comment (after #)
+ # Split line at # and only check the part before the comment
+ if "#" in line:
+ code_part = line.split("#")[0]
+ # If the code part doesn't contain |, skip (it's only in the comment)
+ if "|" not in code_part:
+ return issues
+ else:
+ code_part = line
+
+ # Skip if it's clearly a bitwise OR operation (not a type annotation)
+ # Check if there are numbers or expressions that suggest bitwise operations
+ if re.search(r'\d+\s*\|\s*\d+', code_part): # Number | Number
+ return issues
+
+ # More comprehensive pattern to match union syntax in type annotations
+ # This pattern matches: type | None, type | OtherType, type | list[str] | None, etc.
+ # It captures the full union expression, not just the first part
+ union_pattern = r"([a-zA-Z_][a-zA-Z0-9_.]*(?:\[[^\]]*\])?)\s*\|\s*([a-zA-Z_][a-zA-Z0-9_.]*(?:\[[^\]]*\])?|None)"
+
+ # Check for union syntax in different contexts
+ # Function parameters: `param: type | None` or `param: type | OtherType`
+ param_match = re.search(r":\s*" + union_pattern, code_part)
+ if param_match:
+ # Check if it's in a function parameter context (not just any colon)
+ before_colon = code_part[:param_match.start()]
+ # Skip if it's in a dict literal or slice
+ if not re.search(r'[\[\{]\s*$', before_colon.rstrip()):
+ # Check if we're inside a string literal
+ start_pos = param_match.start()
+ before_match = code_part[:start_pos]
+ single_quotes_before = before_match.count("'") - before_match.count("\\'")
+ double_quotes_before = before_match.count('"') - before_match.count('\\"')
+
+ if not ((single_quotes_before % 2 == 1) or (double_quotes_before % 2 == 1)):
+ issues.append(
+ CompatibilityIssue(
+ file_path=file_path,
+ line_number=line_num,
+ issue_type="union-syntax-param",
+ message="Union type syntax (`|`) in function parameter. Use `Optional[type]` or `Union[type1, type2]` for Python 3.8/3.9 compatibility",
+ code=line.strip(),
+ )
+ )
+
+ # Return types: `-> type | None` or `-> type | OtherType`
+ return_match = re.search(r"->\s*" + union_pattern, code_part)
+ if return_match:
+ start_pos = return_match.start()
+ before_match = code_part[:start_pos]
+ single_quotes_before = before_match.count("'") - before_match.count("\\'")
+ double_quotes_before = before_match.count('"') - before_match.count('\\"')
+
+ if not ((single_quotes_before % 2 == 1) or (double_quotes_before % 2 == 1)):
+ issues.append(
+ CompatibilityIssue(
+ file_path=file_path,
+ line_number=line_num,
+ issue_type="union-syntax-return",
+ message="Union type syntax (`|`) in return type. Use `Optional[type]` or `Union[type1, type2]` for Python 3.8/3.9 compatibility",
+ code=line.strip(),
+ )
+ )
+
+ # Variable annotations: `var: type | None` (but not function parameters)
+ # Only match if it's not already matched as a parameter
+ if not param_match:
+ var_match = re.search(r"^\s*[a-zA-Z_][a-zA-Z0-9_]*\s*:\s*" + union_pattern, code_part)
+ if var_match:
+ start_pos = var_match.start()
+ before_match = code_part[:start_pos]
+ single_quotes_before = before_match.count("'") - before_match.count("\\'")
+ double_quotes_before = before_match.count('"') - before_match.count('\\"')
+
+ if not ((single_quotes_before % 2 == 1) or (double_quotes_before % 2 == 1)):
+ issues.append(
+ CompatibilityIssue(
+ file_path=file_path,
+ line_number=line_num,
+ issue_type="union-syntax-var",
+ message="Union type syntax (`|`) in variable annotation. Use `Optional[type]` or `Union[type1, type2]` for Python 3.8/3.9 compatibility",
+ code=line.strip(),
+ )
+ )
+
+ # Type aliases: `TypeAlias = type | None` (but not variable assignments)
+ # Only match if it's not already matched as a variable annotation
+ if not param_match and not var_match:
+ alias_match = re.search(r"=\s*" + union_pattern, code_part)
+ if alias_match:
+ # Check if it's a type alias (usually uppercase or has TypeAlias)
+ before_equals = code_part[:alias_match.start()].rstrip()
+ if re.search(r'[A-Z][a-zA-Z0-9_]*\s*$', before_equals) or 'TypeAlias' in before_equals:
+ start_pos = alias_match.start()
+ before_match = code_part[:start_pos]
+ single_quotes_before = before_match.count("'") - before_match.count("\\'")
+ double_quotes_before = before_match.count('"') - before_match.count('\\"')
+
+ if not ((single_quotes_before % 2 == 1) or (double_quotes_before % 2 == 1)):
+ issues.append(
+ CompatibilityIssue(
+ file_path=file_path,
+ line_number=line_num,
+ issue_type="union-syntax-alias",
+ message="Union type syntax (`|`) in type alias. Use `Optional[type]` or `Union[type1, type2]` for Python 3.8/3.9 compatibility",
+ code=line.strip(),
+ )
+ )
+
+ # Check for multi-union types (e.g., `str | list[str] | None`)
+ # This is a more complex pattern that might span the union
+ multi_union_pattern = r"([a-zA-Z_][a-zA-Z0-9_.]*(?:\[[^\]]*\])?)\s*\|\s*([a-zA-Z_][a-zA-Z0-9_.]*(?:\[[^\]]*\])?)\s*\|\s*(None|[a-zA-Z_][a-zA-Z0-9_.]*(?:\[[^\]]*\])?)"
+
+ # Check in parameter context
+ if not param_match:
+ multi_param = re.search(r":\s*" + multi_union_pattern, code_part)
+ if multi_param:
+ start_pos = multi_param.start()
+ before_match = code_part[:start_pos]
+ single_quotes_before = before_match.count("'") - before_match.count("\\'")
+ double_quotes_before = before_match.count('"') - before_match.count('\\"')
+
+ if not ((single_quotes_before % 2 == 1) or (double_quotes_before % 2 == 1)):
+ issues.append(
+ CompatibilityIssue(
+ file_path=file_path,
+ line_number=line_num,
+ issue_type="union-syntax-param",
+ message="Union type syntax (`|`) in function parameter. Use `Union[type1, type2, type3]` for Python 3.8/3.9 compatibility",
+ code=line.strip(),
+ )
+ )
+
+ # Check in return context
+ if not return_match:
+ multi_return = re.search(r"->\s*" + multi_union_pattern, code_part)
+ if multi_return:
+ start_pos = multi_return.start()
+ before_match = code_part[:start_pos]
+ single_quotes_before = before_match.count("'") - before_match.count("\\'")
+ double_quotes_before = before_match.count('"') - before_match.count('\\"')
+
+ if not ((single_quotes_before % 2 == 1) or (double_quotes_before % 2 == 1)):
+ issues.append(
+ CompatibilityIssue(
+ file_path=file_path,
+ line_number=line_num,
+ issue_type="union-syntax-return",
+ message="Union type syntax (`|`) in return type. Use `Union[type1, type2, type3]` for Python 3.8/3.9 compatibility",
+ code=line.strip(),
+ )
+ )
+
+ return issues
+
+ def _check_builtin_generics(
+ self, file_path: Path, line_num: int, line: str
+ ) -> list[CompatibilityIssue]:
+ """
+ Check for built-in generic types without __future__ import.
+
+ Python 3.8 requires `from __future__ import annotations` to use built-in
+ generic syntax like `tuple[...]`, `list[...]`, `dict[...]`, `set[...]`.
+ Python 3.9+ supports these natively, but for 3.8 compatibility, we
+ must either use the __future__ import or use typing.Tuple, typing.List, etc.
+
+ This check only runs if the file doesn't have the __future__ import.
+ """
+ issues: list[CompatibilityIssue] = []
+
+ # Pattern to match built-in generic types: tuple[...], list[...], dict[...], set[...]
+ # Using word boundary (\b) to avoid false positives like "tuple_list" or "list_dict"
+ patterns = [
+ (
+ r"\btuple\s*\[",
+ "builtin-generic-tuple",
+ "Built-in generic `tuple[...]` requires `from __future__ import annotations` for Python 3.8 compatibility. Add the import at the top of the file, or use `typing.Tuple` instead.",
+ ),
+ (
+ r"\blist\s*\[",
+ "builtin-generic-list",
+ "Built-in generic `list[...]` requires `from __future__ import annotations` for Python 3.8 compatibility. Add the import at the top of the file, or use `typing.List` instead.",
+ ),
+ (
+ r"\bdict\s*\[",
+ "builtin-generic-dict",
+ "Built-in generic `dict[...]` requires `from __future__ import annotations` for Python 3.8 compatibility. Add the import at the top of the file, or use `typing.Dict` instead.",
+ ),
+ (
+ r"\bset\s*\[",
+ "builtin-generic-set",
+ "Built-in generic `set[...]` requires `from __future__ import annotations` for Python 3.8 compatibility. Add the import at the top of the file, or use `typing.Set` instead.",
+ ),
+ ]
+
+ # Skip if line is a comment or string
+ stripped = line.strip()
+ if stripped.startswith("#") or stripped.startswith('"""') or stripped.startswith("'''"):
+ return issues
+
+ # Skip if the pattern is inside a string literal
+ # Check for quotes around the pattern
+ for pattern, issue_type, message in patterns:
+ matches = list(re.finditer(pattern, line))
+ for match in matches:
+ start_pos = match.start()
+ end_pos = match.end()
+
+ # Check if we're inside a string literal
+ # Simple heuristic: count quotes before the match
+ before_match = line[:start_pos]
+ single_quotes_before = before_match.count("'") - before_match.count("\\'")
+ double_quotes_before = before_match.count('"') - before_match.count('\\"')
+
+ # If odd number of quotes, we're inside a string
+ if (single_quotes_before % 2 == 1) or (double_quotes_before % 2 == 1):
+ continue # Skip - it's inside a string literal
+
+ # Also check for common string contexts like cast("...", ...)
+ if re.search(r'(cast|typing\.cast)\s*\(', line[:start_pos]):
+ # Check if the match is within the string argument
+ # Look for the opening quote before the match
+ quote_match = re.search(r'["\']', line[max(0, start_pos-50):start_pos][::-1])
+ if quote_match:
+ continue # Likely in a string argument
+
+ issues.append(
+ CompatibilityIssue(
+ file_path=file_path,
+ line_number=line_num,
+ issue_type=issue_type,
+ message=message,
+ code=line.strip(),
+ )
+ )
+
+ return issues
+
+ def _check_tuple_usage(
+ self, file_path: Path, line_num: int, line: str
+ ) -> list[CompatibilityIssue]:
+ """
+ Check for tuple[...] usage in type annotations.
+
+ NOTE: This method is only called when the file does NOT have
+ `from __future__ import annotations`. If the file has the future import,
+ `tuple[...]` is compatible with Python 3.8/3.9 and this check is skipped.
+
+ For Python 3.8 compatibility without the future import, we should use
+ `Tuple[...]` from typing instead of `tuple[...]`.
+ """
+ issues: list[CompatibilityIssue] = []
+
+ # Pattern to match tuple[...] in type annotations
+ # Matches: tuple[type, ...], tuple[type1, type2], tuple[...]
+ # Using word boundary (\b) to avoid false positives
+ pattern = r"\btuple\s*\["
+
+ # Skip if line is a comment or string
+ stripped = line.strip()
+ if stripped.startswith("#") or stripped.startswith('"""') or stripped.startswith("'''"):
+ return issues
+
+ # Skip if it's clearly not a type annotation (e.g., variable assignment, function call)
+ # We want to catch: -> tuple[...], param: tuple[...], var: tuple[...]
+ # But skip: my_tuple = tuple([...]), tuple([...])
+
+ # Check if we're in a type annotation context
+ # Look for common type annotation patterns: ->, :, or in type alias context
+ is_type_annotation = (
+ "->" in line or # Return type
+ re.search(r":\s*tuple\s*\[", line) or # Parameter or variable annotation
+ re.search(r"=\s*tuple\s*\[", line) # Type alias (may be false positive, but check anyway)
+ )
+
+ if not is_type_annotation:
+ # Could still be a type annotation in a complex context, so check for tuple[...]
+ # but be more careful
+ if not re.search(r"tuple\s*\[[^\]]+\]", line):
+ return issues # No tuple[...] found, skip
+
+ matches = list(re.finditer(pattern, line))
+ for match in matches:
+ start_pos = match.start()
+
+ # Check if we're inside a string literal
+ before_match = line[:start_pos]
+ single_quotes_before = before_match.count("'") - before_match.count("\\'")
+ double_quotes_before = before_match.count('"') - before_match.count('\\"')
+
+ # If odd number of quotes, we're inside a string
+ if (single_quotes_before % 2 == 1) or (double_quotes_before % 2 == 1):
+ continue # Skip - it's inside a string literal
+
+ # Additional check: skip if it's a function call like tuple([...])
+ # Look for tuple( after the match (not tuple[...])
+ after_match = line[start_pos:]
+ if re.match(r"tuple\s*\(", after_match):
+ continue # Skip - it's a function call, not a type annotation
+
+ # Check if it's in a type annotation context
+ # Extract the tuple[...] part to verify it's a type annotation
+ tuple_match = re.search(r"tuple\s*\[[^\]]*\]", line[start_pos:])
+ if not tuple_match:
+ continue # No complete tuple[...] found
+
+ # Verify it's in a type annotation context
+ # Check if there's a colon or arrow before it (within reasonable distance)
+ context_before = line[max(0, start_pos - 50):start_pos]
+ if not (":" in context_before or "->" in context_before):
+ # Might still be a type alias or other context, but be lenient
+ # Only flag if it's clearly a type annotation pattern
+ if not re.search(r"(->|:\s*|=\s*)", context_before):
+ continue # Not clearly a type annotation
+
+ issues.append(
+ CompatibilityIssue(
+ file_path=file_path,
+ line_number=line_num,
+ issue_type="tuple-usage",
+ message="Built-in generic `tuple[...]` should be replaced with `Tuple[...]` from typing for Python 3.8 compatibility. Import `Tuple` from typing and use `Tuple[...]` instead.",
+ code=line.strip(),
+ )
+ )
+
+ return issues
+
+ def _check_tuple_type_alias(
+ self, file_path: Path, line_num: int, line: str
+ ) -> list[CompatibilityIssue]:
+ """
+ Check for tuple[...] usage in type aliases.
+
+ IMPORTANT: Even with `from __future__ import annotations`, type aliases
+ are still evaluated at runtime in Python 3.8. This means `tuple[...]`
+ in type aliases will fail with `TypeError: 'type' object is not subscriptable`.
+
+ Type aliases must use `Tuple[...]` from typing for Python 3.8 compatibility,
+ even when the file has `from __future__ import annotations`.
+
+ Examples of type aliases that need fixing:
+ - `_PacketInfo = tuple[UTPPacket, float, int]` # ❌ Fails in Python 3.8
+ - `RenewalCallback = Callable[..., Awaitable[tuple[bool, int]]]` # ❌ Fails in Python 3.8
+
+ Should be:
+ - `_PacketInfo = Tuple[UTPPacket, float, int]` # ✅ Works
+ - `RenewalCallback = Callable[..., Awaitable[Tuple[bool, int]]]` # ✅ Works
+ """
+ issues: list[CompatibilityIssue] = []
+
+ # Pattern to match tuple[...] in type aliases
+ # Matches: tuple[type, ...], tuple[type1, type2], tuple[...]
+ # Using word boundary (\b) to avoid false positives
+ pattern = r"\btuple\s*\["
+
+ # Skip if line is a comment or string
+ stripped = line.strip()
+ if stripped.startswith("#") or stripped.startswith('"""') or stripped.startswith("'''"):
+ return issues
+
+ # Check if this looks like a type alias
+ # Type aliases typically:
+ # 1. Have uppercase variable names (convention)
+ # 2. Use = assignment
+ # 3. Are at module level (no indentation or minimal indentation)
+ # 4. May be nested inside generic types like Callable[...], Awaitable[...]
+
+ # Pattern 1: Direct type alias: `_PacketInfo = tuple[...]`
+ # Matches: Uppercase identifier = tuple[...]
+ direct_alias_pattern = r"^[A-Z_][a-zA-Z0-9_]*\s*=\s*tuple\s*\["
+
+ # Pattern 2: Nested in generic: `Callable[..., Awaitable[tuple[...]]]`
+ # Matches: tuple[...] inside generic type parameters
+ nested_pattern = r"[,\[\s]tuple\s*\["
+
+ is_type_alias = False
+ match_start = None
+
+ # Check for direct type alias
+ direct_match = re.search(direct_alias_pattern, stripped)
+ if direct_match:
+ is_type_alias = True
+ match_start = direct_match.start() + len(direct_match.group(0)) - len("tuple[")
+
+ # Check for nested tuple in generic types (common in type aliases)
+ if not is_type_alias:
+ nested_match = re.search(nested_pattern, line)
+ if nested_match:
+ # Check if it's in a type alias context (has = before it, uppercase identifier)
+ before_match = line[:nested_match.start()]
+ # Look for type alias pattern: identifier = ... before the tuple
+ if re.search(r"[A-Z_][a-zA-Z0-9_]*\s*=\s*", before_match):
+ is_type_alias = True
+ match_start = nested_match.start() + 1 # +1 to skip the comma/bracket/space
+
+ if not is_type_alias:
+ return issues # Not a type alias, skip
+
+ # Find all tuple[...] matches
+ matches = list(re.finditer(pattern, line))
+ for match in matches:
+ start_pos = match.start()
+
+ # Check if we're inside a string literal
+ before_match = line[:start_pos]
+ single_quotes_before = before_match.count("'") - before_match.count("\\'")
+ double_quotes_before = before_match.count('"') - before_match.count('\\"')
+
+ # If odd number of quotes, we're inside a string
+ if (single_quotes_before % 2 == 1) or (double_quotes_before % 2 == 1):
+ continue # Skip - it's inside a string literal
+
+ # Additional check: skip if it's a function call like tuple([...])
+ # Look for tuple( after the match (not tuple[...])
+ after_match = line[start_pos:]
+ if re.match(r"tuple\s*\(", after_match):
+ continue # Skip - it's a function call, not a type annotation
+
+ # Verify it's a complete tuple[...] expression
+ tuple_match = re.search(r"tuple\s*\[[^\]]*\]", line[start_pos:])
+ if not tuple_match:
+ continue # No complete tuple[...] found
+
+ issues.append(
+ CompatibilityIssue(
+ file_path=file_path,
+ line_number=line_num,
+ issue_type="tuple-type-alias",
+ message="Type alias uses `tuple[...]` which fails at runtime in Python 3.8. Even with `from __future__ import annotations`, type aliases are evaluated at runtime. Use `Tuple[...]` from typing instead and import `Tuple` from typing.",
+ code=line.strip(),
+ )
+ )
+
+ return issues
+
+ def _check_tuple_import(
+ self, file_path: Path, line_num: int, line: str
+ ) -> list[CompatibilityIssue]:
+ """
+ Check for Tuple[...] usage without proper import from typing.
+
+ For Python 3.8 compatibility, when using `Tuple[...]` in type annotations,
+ it must be imported from typing. This check flags `Tuple[...]` usage when
+ `Tuple` is not imported from typing.
+
+ This check only runs if `Tuple` is not imported, to avoid false positives.
+ """
+ issues: list[CompatibilityIssue] = []
+
+ # Pattern to match Tuple[...] in type annotations
+ # Matches: Tuple[type, ...], Tuple[type1, type2], Tuple[...]
+ # Using word boundary (\b) to ensure we match Tuple, not MyTuple
+ pattern = r"\bTuple\s*\["
+
+ # Skip if line is a comment or string
+ stripped = line.strip()
+ if stripped.startswith("#") or stripped.startswith('"""') or stripped.startswith("'''"):
+ return issues
+
+ # Skip if it's clearly not a type annotation (e.g., variable assignment, function call)
+ # We want to catch: -> Tuple[...], param: Tuple[...], var: Tuple[...]
+ # But skip: my_tuple = Tuple([...]), Tuple([...])
+
+ # Check if we're in a type annotation context
+ # Look for common type annotation patterns: ->, :, or in type alias context
+ is_type_annotation = (
+ "->" in line or # Return type
+ re.search(r":\s*Tuple\s*\[", line) or # Parameter or variable annotation
+ re.search(r"=\s*Tuple\s*\[", line) # Type alias (may be false positive, but check anyway)
+ )
+
+ if not is_type_annotation:
+ # Could still be a type annotation in a complex context, so check for Tuple[...]
+ # but be more careful
+ if not re.search(r"Tuple\s*\[[^\]]+\]", line):
+ return issues # No Tuple[...] found, skip
+
+ matches = list(re.finditer(pattern, line))
+ for match in matches:
+ start_pos = match.start()
+
+ # Check if we're inside a string literal
+ before_match = line[:start_pos]
+ single_quotes_before = before_match.count("'") - before_match.count("\\'")
+ double_quotes_before = before_match.count('"') - before_match.count('\\"')
+
+ # If odd number of quotes, we're inside a string
+ if (single_quotes_before % 2 == 1) or (double_quotes_before % 2 == 1):
+ continue # Skip - it's inside a string literal
+
+ # Additional check: skip if it's a function call like Tuple([...])
+ # Look for Tuple( after the match (not Tuple[...])
+ after_match = line[start_pos:]
+ if re.match(r"Tuple\s*\(", after_match):
+ continue # Skip - it's a function call, not a type annotation
+
+ # Check if it's in a type annotation context
+ # Extract the Tuple[...] part to verify it's a type annotation
+ tuple_match = re.search(r"Tuple\s*\[[^\]]*\]", line[start_pos:])
+ if not tuple_match:
+ continue # No complete Tuple[...] found
+
+ # Verify it's in a type annotation context
+ # Check if there's a colon or arrow before it (within reasonable distance)
+ context_before = line[max(0, start_pos - 50):start_pos]
+ if not (":" in context_before or "->" in context_before):
+ # Might still be a type alias or other context, but be lenient
+ # Only flag if it's clearly a type annotation pattern
+ if not re.search(r"(->|:\s*|=\s*)", context_before):
+ continue # Not clearly a type annotation
+
+ issues.append(
+ CompatibilityIssue(
+ file_path=file_path,
+ line_number=line_num,
+ issue_type="tuple-missing-import",
+ message="`Tuple[...]` is used but `Tuple` is not imported from typing. Add `from typing import Tuple` (or include `Tuple` in existing typing import) for Python 3.8 compatibility.",
+ code=line.strip(),
+ )
+ )
+
+ return issues
+
+ def lint_directory(self, directory: Path, exclude_patterns: Optional[list[str]] = None) -> list[CompatibilityIssue]:
+ """Lint all Python files in a directory."""
+ if exclude_patterns is None:
+ exclude_patterns = [
+ ".git",
+ ".venv",
+ "__pycache__",
+ ".pytest_cache",
+ ".ruff_cache",
+ "node_modules",
+ "build",
+ "dist",
+ "htmlcov",
+ "site",
+ ]
+
+ all_issues: list[CompatibilityIssue] = []
+
+ for py_file in directory.rglob("*.py"):
+ # Skip excluded paths
+ if any(exclude in str(py_file) for exclude in exclude_patterns):
+ continue
+
+ file_issues = self.check_file(py_file)
+ all_issues.extend(file_issues)
+
+ return all_issues
+
+ def format_output(self, issues: list[CompatibilityIssue]) -> str:
+ """Format issues for output."""
+ if not issues:
+ return "No compatibility issues found!"
+
+ output_lines = [f"Found {len(issues)} compatibility issue(s):\n"]
+
+ # Group by file
+ by_file: dict[Path, list[CompatibilityIssue]] = {}
+ for issue in issues:
+ if issue.file_path not in by_file:
+ by_file[issue.file_path] = []
+ by_file[issue.file_path].append(issue)
+
+ for file_path, file_issues in sorted(by_file.items()):
+ output_lines.append(f"\n{file_path}:")
+ for issue in sorted(file_issues, key=lambda x: x.line_number):
+ output_lines.append(
+ f" Line {issue.line_number}: [{issue.issue_type}] {issue.message}"
+ )
+ output_lines.append(f" {issue.code}")
+
+ return "\n".join(output_lines)
+
+
+def main() -> int:
+ """Main entry point."""
+ import argparse
+
+ parser = argparse.ArgumentParser(
+ description="Check Python 3.8/3.9 compatibility issues"
+ )
+ parser.add_argument(
+ "paths",
+ nargs="*",
+ type=Path,
+ default=[Path("ccbt")],
+ help="Paths to check (default: ccbt/)",
+ )
+ parser.add_argument(
+ "--exclude",
+ action="append",
+ default=[],
+ help="Patterns to exclude (can be specified multiple times)",
+ )
+ parser.add_argument(
+ "--format",
+ choices=["text", "json"],
+ default="text",
+ help="Output format (default: text)",
+ )
+
+ args = parser.parse_args()
+
+ linter = CompatibilityLinter(Path.cwd())
+ all_issues: list[CompatibilityIssue] = []
+
+ for path in args.paths:
+ if path.is_file():
+ issues = linter.check_file(path)
+ all_issues.extend(issues)
+ elif path.is_dir():
+ issues = linter.lint_directory(path, exclude_patterns=args.exclude)
+ all_issues.extend(issues)
+ else:
+ print(f"Error: {path} does not exist", file=sys.stderr)
+ return 1
+
+ if args.format == "json":
+ import json
+
+ output = json.dumps(
+ [
+ {
+ "file": str(issue.file_path),
+ "line": issue.line_number,
+ "type": issue.issue_type,
+ "message": issue.message,
+ "code": issue.code,
+ }
+ for issue in all_issues
+ ],
+ indent=2,
+ )
+ print(output)
+ else:
+ output = linter.format_output(all_issues)
+ print(output)
+
+ return 0 if not all_issues else 1
+
+
+if __name__ == "__main__":
+ sys.exit(main())
+
diff --git a/dev/docs_build_logs/20251231_102307/summary.txt b/dev/docs_build_logs/20251231_102307/summary.txt
deleted file mode 100644
index 4ea34fd..0000000
--- a/dev/docs_build_logs/20251231_102307/summary.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-Documentation Build Summary - 2025-12-31 10:23:08
-================================================================================
-
-Exit Status: FAILURE
-Return Code: 1
-
-Total Warnings: 0
-Total Errors: 1
-
-Log Directory: dev\docs_build_logs\20251231_102307
-Full Output: full_output.log
-Warnings: warnings.log
-Errors: errors.log
diff --git a/dev/docs_build_logs/20251231_102728/summary.txt b/dev/docs_build_logs/20251231_102728/summary.txt
deleted file mode 100644
index 922401c..0000000
--- a/dev/docs_build_logs/20251231_102728/summary.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-Documentation Build Summary - 2025-12-31 10:31:46
-================================================================================
-
-Exit Status: FAILURE
-Return Code: 1
-
-Total Warnings: 58
-Total Errors: 2
-
-Log Directory: dev\docs_build_logs\20251231_102728
-Full Output: full_output.log
-Warnings: warnings.log
-Errors: errors.log
diff --git a/dev/docs_build_logs/20251231_104836/summary.txt b/dev/docs_build_logs/20251231_104836/summary.txt
deleted file mode 100644
index 0939aa1..0000000
--- a/dev/docs_build_logs/20251231_104836/summary.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-Documentation Build Summary - 2025-12-31 10:48:37
-================================================================================
-
-Exit Status: FAILURE
-Return Code: 1
-
-Total Warnings: 1
-Total Errors: 1
-
-Log Directory: dev\docs_build_logs\20251231_104836
-Full Output: full_output.log
-Warnings: warnings.log
-Errors: errors.log
diff --git a/dev/docs_build_logs/20251231_105402/summary.txt b/dev/docs_build_logs/20251231_105402/summary.txt
deleted file mode 100644
index 7596404..0000000
--- a/dev/docs_build_logs/20251231_105402/summary.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-Documentation Build Summary - 2025-12-31 11:00:08
-================================================================================
-
-Exit Status: SUCCESS
-Return Code: 0
-
-Total Warnings: 60
-Total Errors: 0
-
-Log Directory: dev\docs_build_logs\20251231_105402
-Full Output: full_output.log
-Warnings: warnings.log
-Errors: errors.log
diff --git a/dev/pre-commit-config.yaml b/dev/pre-commit-config.yaml
index ccb7783..cbd6327 100644
--- a/dev/pre-commit-config.yaml
+++ b/dev/pre-commit-config.yaml
@@ -17,6 +17,14 @@ repos:
files: ^ccbt/.*\.py$
exclude: ^(tests/|benchmarks/|.*/__pycache__/|.*\.pyc$|.*\.pyo$|dev/|dist/|docs/|htmlcov/|site/|\.benchmarks/|\.ccbt/|\.cursor/|\.github/|\.hypothesis/|\.pre-commit-cache/|\.pre-commit-home/|\.pytest_cache/|\.ruff_cache/|\.venv/)
pass_filenames: false
+ - id: compatibility-linter
+ name: compatibility-linter
+ entry: uv run python dev/compatibility_linter.py ccbt/
+ language: system
+ types: [python]
+ files: ^ccbt/.*\.py$
+ exclude: ^(tests/|benchmarks/|.*/__pycache__/|.*\.pyc$|.*\.pyo$|dev/|dist/|docs/|htmlcov/|site/|\.benchmarks/|\.ccbt/|\.cursor/|\.github/|\.hypothesis/|\.pre-commit-cache/|\.pre-commit-home/|\.pytest_cache/|\.ruff_cache/|\.venv/)
+ pass_filenames: false
- id: ty
name: ty
entry: uv run ty check --config-file=dev/ty.toml --output-format=concise
@@ -55,44 +63,47 @@ repos:
pass_filenames: false
stages: [pre-push]
require_serial: true
+ # Benchmark hooks - can be skipped by setting SKIP_BENCHMARKS=1 environment variable
+ # Usage: SKIP_BENCHMARKS=1 git commit
+ # Or: export SKIP_BENCHMARKS=1 (to skip for all commits in current shell)
- id: bench-smoke-hash
name: bench-smoke-hash
- entry: uv run python tests/performance/bench_hash_verify.py --quick --record-mode=pre-commit --config-file docs/examples/example-config-performance.toml
+ entry: uv run python dev/scripts/run_benchmark_if_enabled.py uv run python tests/performance/bench_hash_verify.py --quick --record-mode=pre-commit --config-file docs/examples/example-config-performance.toml
language: system
pass_filenames: false
always_run: true
stages: [pre-commit]
- id: bench-smoke-disk
name: bench-smoke-disk
- entry: uv run python tests/performance/bench_disk_io.py --quick --sizes 256KiB 1MiB --record-mode=pre-commit --config-file docs/examples/example-config-performance.toml
+ entry: uv run python dev/scripts/run_benchmark_if_enabled.py uv run python tests/performance/bench_disk_io.py --quick --sizes 256KiB 1MiB --record-mode=pre-commit --config-file docs/examples/example-config-performance.toml
language: system
pass_filenames: false
always_run: true
stages: [pre-commit]
- id: bench-smoke-piece
name: bench-smoke-piece
- entry: uv run python tests/performance/bench_piece_assembly.py --quick --record-mode=pre-commit --config-file docs/examples/example-config-performance.toml
+ entry: uv run python dev/scripts/run_benchmark_if_enabled.py uv run python tests/performance/bench_piece_assembly.py --quick --record-mode=pre-commit --config-file docs/examples/example-config-performance.toml
language: system
pass_filenames: false
always_run: true
stages: [pre-commit]
- id: bench-smoke-loopback
name: bench-smoke-loopback
- entry: uv run python tests/performance/bench_loopback_throughput.py --quick --record-mode=pre-commit --config-file docs/examples/example-config-performance.toml
+ entry: uv run python dev/scripts/run_benchmark_if_enabled.py uv run python tests/performance/bench_loopback_throughput.py --quick --record-mode=pre-commit --config-file docs/examples/example-config-performance.toml
language: system
pass_filenames: false
always_run: true
stages: [pre-commit]
- id: bench-smoke-encryption
name: bench-smoke-encryption
- entry: uv run python tests/performance/bench_encryption.py --quick --record-mode=pre-commit --config-file docs/examples/example-config-performance.toml
+ entry: uv run python dev/scripts/run_benchmark_if_enabled.py uv run python tests/performance/bench_encryption.py --quick --record-mode=pre-commit --config-file docs/examples/example-config-performance.toml
language: system
pass_filenames: false
always_run: true
stages: [pre-commit]
- id: bench-smoke-all
name: bench-smoke-all
- entry: uv run python tests/scripts/run_benchmarks_selective.py
+ entry: uv run python dev/scripts/run_benchmark_if_enabled.py uv run python tests/scripts/run_benchmarks_selective.py
language: system
types: [python]
files: ^ccbt/.*\.py$
diff --git a/dev/ruff.toml b/dev/ruff.toml
index 50ea9b6..044ec0e 100644
--- a/dev/ruff.toml
+++ b/dev/ruff.toml
@@ -94,6 +94,12 @@ select = [
"RUF", # ruff-specific rules
]
+# Ignore incompatible pydocstyle rules
+ignore = [
+ "D203", # incorrect-blank-line-before-class - incompatible with D211
+ "D213", # multi-line-summary-second-line - incompatible with D212
+]
+
# Allow fix for all enabled rules
fixable = ["ALL"]
unfixable = []
@@ -124,6 +130,9 @@ dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
"TRY301", # abstract-raise - raise statements are clear as-is
"PERF203", # try-except-in-loop - necessary for async error handling
"TRY300", # try-else - async patterns don't always benefit from else blocks
+ "UP045", # Use X | None - intentionally using Optional[X] for Python 3.8/3.9 compatibility
+ "UP007", # Use X | Y - intentionally using Union[X, Y] for Python 3.8/3.9 compatibility
+ "UP006", # Use tuple instead of Tuple - intentionally using Tuple for Python 3.8 compatibility
]
"ccbt/session/session.py" = [
"SLF001", # Private member access used for integration points
@@ -177,5 +186,14 @@ dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
"TRY400", # logging.exception - logging.error is acceptable for UI code
]
+# Python 3.8/3.9 Compatibility Rules
+# These rules enforce compatibility patterns from compatibility_tests/COMPREHENSIVE_RESOLUTION_PLAN.md
+[lint.pydocstyle]
+convention = "google"
+
+# Note: Custom compatibility checks are implemented in dev/compatibility_linter.py
+# and integrated into pre-commit hooks. Ruff's pygrep-hooks (PGH) rules are used
+# where possible, but complex pattern matching requires the custom linter.
+
diff --git a/dev/run_precommit_lints.py b/dev/run_precommit_lints.py
index 1c1269a..11b9d10 100644
--- a/dev/run_precommit_lints.py
+++ b/dev/run_precommit_lints.py
@@ -77,6 +77,17 @@ def main():
"Ty type checking"
)
+ # 4. Compatibility linter (Python 3.8/3.9 compatibility)
+ compatibility_output = output_dir / f"compatibility_linter_{timestamp}.txt"
+ compatibility_cmd = [
+ "uv", "run", "python", "dev/compatibility_linter.py", "ccbt/"
+ ]
+ results["compatibility_linter"] = run_command(
+ compatibility_cmd,
+ compatibility_output,
+ "Compatibility linter (Python 3.8/3.9)"
+ )
+
# Summary
print("\n" + "="*60)
print("SUMMARY")
@@ -90,6 +101,7 @@ def main():
print(f" - Ruff check: {ruff_check_output.name}")
print(f" - Ruff format: {ruff_format_output.name}")
print(f" - Ty check: {ty_output.name}")
+ print(f" - Compatibility linter: {compatibility_output.name}")
# Return non-zero if any check failed
return 0 if all(code == 0 for code in results.values()) else 1
diff --git a/docs/overrides/README.md b/docs/overrides/README.md
index 620775d..44cc8cb 100644
--- a/docs/overrides/README.md
+++ b/docs/overrides/README.md
@@ -67,5 +67,14 @@ If you're a native speaker of any of these languages and would like to contribut
+
+
+
+
+
+
+
+
+
diff --git a/docs/overrides/README_RTD.md b/docs/overrides/README_RTD.md
index 0507e9f..4cd00c3 100644
--- a/docs/overrides/README_RTD.md
+++ b/docs/overrides/README_RTD.md
@@ -78,5 +78,14 @@ If builds fail on Read the Docs:
+
+
+
+
+
+
+
+
+
diff --git a/docs/overrides/partials/languages/README.md b/docs/overrides/partials/languages/README.md
index 28d6a6e..79ba2cf 100644
--- a/docs/overrides/partials/languages/README.md
+++ b/docs/overrides/partials/languages/README.md
@@ -82,5 +82,14 @@ If you're a native speaker, please contribute translations by:
+
+
+
+
+
+
+
+
+
diff --git a/docs/overrides/partials/languages/arc.html b/docs/overrides/partials/languages/arc.html
index 585fe45..1c3c607 100644
--- a/docs/overrides/partials/languages/arc.html
+++ b/docs/overrides/partials/languages/arc.html
@@ -71,5 +71,14 @@
+
+
+
+
+
+
+
+
+
diff --git a/docs/overrides/partials/languages/ha.html b/docs/overrides/partials/languages/ha.html
index 3cdb7ed..daf6d80 100644
--- a/docs/overrides/partials/languages/ha.html
+++ b/docs/overrides/partials/languages/ha.html
@@ -70,5 +70,14 @@
+
+
+
+
+
+
+
+
+
diff --git a/docs/overrides/partials/languages/sw.html b/docs/overrides/partials/languages/sw.html
index 44fa8bd..2d56a81 100644
--- a/docs/overrides/partials/languages/sw.html
+++ b/docs/overrides/partials/languages/sw.html
@@ -70,5 +70,14 @@
+
+
+
+
+
+
+
+
+
diff --git a/docs/overrides/partials/languages/yo.html b/docs/overrides/partials/languages/yo.html
index 805e716..f5a6a12 100644
--- a/docs/overrides/partials/languages/yo.html
+++ b/docs/overrides/partials/languages/yo.html
@@ -70,5 +70,14 @@
+
+
+
+
+
+
+
+
+
diff --git a/docs/reports/benchmarks/runs/disk_io-20260102-050947-ea3cad3.json b/docs/reports/benchmarks/runs/disk_io-20260102-050947-ea3cad3.json
new file mode 100644
index 0000000..66ac9c6
--- /dev/null
+++ b/docs/reports/benchmarks/runs/disk_io-20260102-050947-ea3cad3.json
@@ -0,0 +1,45 @@
+{
+ "meta": {
+ "benchmark": "disk_io",
+ "config": "example-config-performance",
+ "timestamp": "2026-01-02T05:09:47.440940+00:00",
+ "platform": {
+ "system": "Windows",
+ "release": "11",
+ "python": "3.13.3"
+ },
+ "git": {
+ "commit_hash": "ea3cad3c4d3f1d60b727f8878caa72c5584bb532",
+ "commit_hash_short": "ea3cad3",
+ "branch": "addscom",
+ "author": "Joseph Pollack",
+ "is_dirty": false
+ }
+ },
+ "results": [
+ {
+ "size_bytes": 262144,
+ "iterations": 10,
+ "write_elapsed_s": 1.0489899999956833,
+ "read_elapsed_s": 0.005929799997829832,
+ "write_throughput_bytes_per_s": 2499013.336648383,
+ "read_throughput_bytes_per_s": 442078991.021516
+ },
+ {
+ "size_bytes": 1048576,
+ "iterations": 10,
+ "write_elapsed_s": 0.03471130000252742,
+ "read_elapsed_s": 0.006363599997712299,
+ "write_throughput_bytes_per_s": 302084911.8078696,
+ "read_throughput_bytes_per_s": 1647771702.1449509
+ },
+ {
+ "size_bytes": 4194304,
+ "iterations": 10,
+ "write_elapsed_s": 0.06873649999761255,
+ "read_elapsed_s": 0.016081100002338644,
+ "write_throughput_bytes_per_s": 610200403.0094174,
+ "read_throughput_bytes_per_s": 2608219586.589245
+ }
+ ]
+}
\ No newline at end of file
diff --git a/docs/reports/benchmarks/runs/encryption-20260102-051353-ea3cad3.json b/docs/reports/benchmarks/runs/encryption-20260102-051353-ea3cad3.json
new file mode 100644
index 0000000..4b602a9
--- /dev/null
+++ b/docs/reports/benchmarks/runs/encryption-20260102-051353-ea3cad3.json
@@ -0,0 +1,571 @@
+{
+ "meta": {
+ "benchmark": "encryption",
+ "config": "performance",
+ "timestamp": "2026-01-02T05:13:53.907544+00:00",
+ "platform": {
+ "system": "Windows",
+ "release": "11",
+ "python": "3.13.3"
+ },
+ "git": {
+ "commit_hash": "ea3cad3c4d3f1d60b727f8878caa72c5584bb532",
+ "commit_hash_short": "ea3cad3",
+ "branch": "addscom",
+ "author": "Joseph Pollack",
+ "is_dirty": true
+ }
+ },
+ "results": [
+ {
+ "cipher": "RC4",
+ "operation": "encrypt",
+ "data_size_bytes": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.03451829999539768,
+ "throughput_bytes_per_s": 2966542.385159552
+ },
+ {
+ "cipher": "RC4",
+ "operation": "decrypt",
+ "data_size_bytes": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.0827722999965772,
+ "throughput_bytes_per_s": 1237128.8462956138
+ },
+ {
+ "cipher": "AES-128",
+ "operation": "encrypt",
+ "data_size_bytes": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.0001883000004454516,
+ "throughput_bytes_per_s": 543813062.9726905
+ },
+ {
+ "cipher": "AES-128",
+ "operation": "decrypt",
+ "data_size_bytes": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.0003646000041044317,
+ "throughput_bytes_per_s": 280855729.14768744
+ },
+ {
+ "cipher": "AES-256",
+ "operation": "encrypt",
+ "data_size_bytes": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.00021330000163288787,
+ "throughput_bytes_per_s": 480075008.04543525
+ },
+ {
+ "cipher": "AES-256",
+ "operation": "decrypt",
+ "data_size_bytes": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.00047730000369483605,
+ "throughput_bytes_per_s": 214540119.85608512
+ },
+ {
+ "cipher": "RC4",
+ "operation": "encrypt",
+ "data_size_bytes": 65536,
+ "iterations": 100,
+ "elapsed_s": 2.8039222000006703,
+ "throughput_bytes_per_s": 2337297.3757968154
+ },
+ {
+ "cipher": "RC4",
+ "operation": "decrypt",
+ "data_size_bytes": 65536,
+ "iterations": 100,
+ "elapsed_s": 5.526166100004048,
+ "throughput_bytes_per_s": 1185921.646472986
+ },
+ {
+ "cipher": "AES-128",
+ "operation": "encrypt",
+ "data_size_bytes": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.0073921000002883375,
+ "throughput_bytes_per_s": 886568092.9295287
+ },
+ {
+ "cipher": "AES-128",
+ "operation": "decrypt",
+ "data_size_bytes": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.014727400004630908,
+ "throughput_bytes_per_s": 444993685.09983265
+ },
+ {
+ "cipher": "AES-256",
+ "operation": "encrypt",
+ "data_size_bytes": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.00963239999691723,
+ "throughput_bytes_per_s": 680370416.7286892
+ },
+ {
+ "cipher": "AES-256",
+ "operation": "decrypt",
+ "data_size_bytes": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.018778899997414555,
+ "throughput_bytes_per_s": 348987427.4266484
+ },
+ {
+ "cipher": "RC4",
+ "operation": "encrypt",
+ "data_size_bytes": 1048576,
+ "iterations": 100,
+ "elapsed_s": 38.25981259999389,
+ "throughput_bytes_per_s": 2740672.075325762
+ },
+ {
+ "cipher": "RC4",
+ "operation": "decrypt",
+ "data_size_bytes": 1048576,
+ "iterations": 100,
+ "elapsed_s": 71.82708419999835,
+ "throughput_bytes_per_s": 1459861.5712706656
+ },
+ {
+ "cipher": "AES-128",
+ "operation": "encrypt",
+ "data_size_bytes": 1048576,
+ "iterations": 100,
+ "elapsed_s": 0.1789548000015202,
+ "throughput_bytes_per_s": 585944607.2366276
+ },
+ {
+ "cipher": "AES-128",
+ "operation": "decrypt",
+ "data_size_bytes": 1048576,
+ "iterations": 100,
+ "elapsed_s": 0.3451561000038055,
+ "throughput_bytes_per_s": 303797615.0467684
+ },
+ {
+ "cipher": "AES-256",
+ "operation": "encrypt",
+ "data_size_bytes": 1048576,
+ "iterations": 100,
+ "elapsed_s": 0.1957410000031814,
+ "throughput_bytes_per_s": 535695638.6158022
+ },
+ {
+ "cipher": "AES-256",
+ "operation": "decrypt",
+ "data_size_bytes": 1048576,
+ "iterations": 100,
+ "elapsed_s": 0.42559509999409784,
+ "throughput_bytes_per_s": 246378776.450796
+ },
+ {
+ "operation": "keypair_generation",
+ "key_size": 768,
+ "iterations": 100,
+ "elapsed_s": 0.03593610000098124,
+ "avg_latency_ms": 0.3514170002017636
+ },
+ {
+ "operation": "keypair_generation",
+ "key_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.024462199995468836,
+ "avg_latency_ms": 0.24316300012287684
+ },
+ {
+ "operation": "shared_secret",
+ "key_size": 768,
+ "iterations": 100,
+ "elapsed_s": 0.020352100000309292,
+ "avg_latency_ms": 0.20324500001152046
+ },
+ {
+ "operation": "shared_secret",
+ "key_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.023474100002204068,
+ "avg_latency_ms": 0.2344520005135564
+ },
+ {
+ "operation": "key_derivation",
+ "key_size": 0,
+ "iterations": 100,
+ "elapsed_s": 0.0034324000007472932,
+ "avg_latency_ms": 0.034095999581040815
+ },
+ {
+ "role": "initiator",
+ "dh_key_size": 768,
+ "iterations": 20,
+ "elapsed_s": 0.8071885999888764,
+ "avg_latency_ms": 40.35942999944382,
+ "success_rate": 100.0
+ },
+ {
+ "role": "initiator",
+ "dh_key_size": 1024,
+ "iterations": 20,
+ "elapsed_s": 1.2267373000140651,
+ "avg_latency_ms": 61.336865000703256,
+ "success_rate": 100.0
+ },
+ {
+ "operation": "read",
+ "stream_type": "plain",
+ "data_size_bytes": 1024,
+ "buffer_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.002212000013969373,
+ "throughput_bytes_per_s": 46292947.26641797,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "read",
+ "stream_type": "encrypted",
+ "data_size_bytes": 1024,
+ "buffer_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.04064449998259079,
+ "throughput_bytes_per_s": 2519406.070781062,
+ "overhead_ms": 0.38418099960836116
+ },
+ {
+ "operation": "write",
+ "stream_type": "plain",
+ "data_size_bytes": 1024,
+ "buffer_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.027512699947692454,
+ "throughput_bytes_per_s": 3721917.5215331237,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "write",
+ "stream_type": "encrypted",
+ "data_size_bytes": 1024,
+ "buffer_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.06074540007102769,
+ "throughput_bytes_per_s": 1685724.3491732196,
+ "overhead_ms": 0.3632270009984495
+ },
+ {
+ "operation": "read",
+ "stream_type": "plain",
+ "data_size_bytes": 1024,
+ "buffer_size": 16384,
+ "iterations": 100,
+ "elapsed_s": 0.002214099971752148,
+ "throughput_bytes_per_s": 46249040.83213769,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "read",
+ "stream_type": "encrypted",
+ "data_size_bytes": 1024,
+ "buffer_size": 16384,
+ "iterations": 100,
+ "elapsed_s": 0.039272700014407746,
+ "throughput_bytes_per_s": 2607409.2171516884,
+ "overhead_ms": 0.37091900027007796
+ },
+ {
+ "operation": "write",
+ "stream_type": "plain",
+ "data_size_bytes": 1024,
+ "buffer_size": 16384,
+ "iterations": 100,
+ "elapsed_s": 0.02435549998335773,
+ "throughput_bytes_per_s": 4204389.155220405,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "write",
+ "stream_type": "encrypted",
+ "data_size_bytes": 1024,
+ "buffer_size": 16384,
+ "iterations": 100,
+ "elapsed_s": 0.05822200001421152,
+ "throughput_bytes_per_s": 1758785.3384460341,
+ "overhead_ms": 0.3230630001053214
+ },
+ {
+ "operation": "read",
+ "stream_type": "plain",
+ "data_size_bytes": 1024,
+ "buffer_size": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.0023317000013776124,
+ "throughput_bytes_per_s": 43916455.77883096,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "read",
+ "stream_type": "encrypted",
+ "data_size_bytes": 1024,
+ "buffer_size": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.04363490007381188,
+ "throughput_bytes_per_s": 2346745.376448263,
+ "overhead_ms": 0.41184600107953884
+ },
+ {
+ "operation": "write",
+ "stream_type": "plain",
+ "data_size_bytes": 1024,
+ "buffer_size": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.027873400009411853,
+ "throughput_bytes_per_s": 3673753.469810758,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "write",
+ "stream_type": "encrypted",
+ "data_size_bytes": 1024,
+ "buffer_size": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.061382800005958416,
+ "throughput_bytes_per_s": 1668219.7617257612,
+ "overhead_ms": 0.3663179998693522
+ },
+ {
+ "operation": "read",
+ "stream_type": "plain",
+ "data_size_bytes": 65536,
+ "buffer_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.09153799997875467,
+ "throughput_bytes_per_s": 71594310.57616559,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "read",
+ "stream_type": "encrypted",
+ "data_size_bytes": 65536,
+ "buffer_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 2.5692752999675577,
+ "throughput_bytes_per_s": 2550758.1846455894,
+ "overhead_ms": 24.770973999766284
+ },
+ {
+ "operation": "write",
+ "stream_type": "plain",
+ "data_size_bytes": 65536,
+ "buffer_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.1477269000315573,
+ "throughput_bytes_per_s": 44362942.690870956,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "write",
+ "stream_type": "encrypted",
+ "data_size_bytes": 65536,
+ "buffer_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 3.220068699993135,
+ "throughput_bytes_per_s": 2035236.080526472,
+ "overhead_ms": 29.322591999880387
+ },
+ {
+ "operation": "read",
+ "stream_type": "plain",
+ "data_size_bytes": 65536,
+ "buffer_size": 16384,
+ "iterations": 100,
+ "elapsed_s": 0.009617199968488421,
+ "throughput_bytes_per_s": 681445745.2765286,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "read",
+ "stream_type": "encrypted",
+ "data_size_bytes": 65536,
+ "buffer_size": 16384,
+ "iterations": 100,
+ "elapsed_s": 2.118651700024202,
+ "throughput_bytes_per_s": 3093288.0567037687,
+ "overhead_ms": 21.109585000449442
+ },
+ {
+ "operation": "write",
+ "stream_type": "plain",
+ "data_size_bytes": 65536,
+ "buffer_size": 16384,
+ "iterations": 100,
+ "elapsed_s": 0.03806270001223311,
+ "throughput_bytes_per_s": 172179062.38637078,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "write",
+ "stream_type": "encrypted",
+ "data_size_bytes": 65536,
+ "buffer_size": 16384,
+ "iterations": 100,
+ "elapsed_s": 2.2931100000059814,
+ "throughput_bytes_per_s": 2857952.736668937,
+ "overhead_ms": 22.57959199967445
+ },
+ {
+ "operation": "read",
+ "stream_type": "plain",
+ "data_size_bytes": 65536,
+ "buffer_size": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.0027211999549763277,
+ "throughput_bytes_per_s": 2408349297.5278296,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "read",
+ "stream_type": "encrypted",
+ "data_size_bytes": 65536,
+ "buffer_size": 65536,
+ "iterations": 100,
+ "elapsed_s": 2.159935999996378,
+ "throughput_bytes_per_s": 3034163.9752339837,
+ "overhead_ms": 21.571500000500237
+ },
+ {
+ "operation": "write",
+ "stream_type": "plain",
+ "data_size_bytes": 65536,
+ "buffer_size": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.028122700001404155,
+ "throughput_bytes_per_s": 233035946.03906387,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "write",
+ "stream_type": "encrypted",
+ "data_size_bytes": 65536,
+ "buffer_size": 65536,
+ "iterations": 100,
+ "elapsed_s": 2.3325769000221044,
+ "throughput_bytes_per_s": 2809596.545321998,
+ "overhead_ms": 23.048445000240463
+ },
+ {
+ "connection_type": "plain",
+ "dh_key_size": 0,
+ "iterations": 10,
+ "elapsed_s": 0.007540400001744274,
+ "avg_latency_ms": 0.7540400001744274,
+ "overhead_ms": 0.0,
+ "overhead_percent": 0.0
+ },
+ {
+ "connection_type": "encrypted",
+ "dh_key_size": 768,
+ "iterations": 10,
+ "elapsed_s": 0.40264029998797923,
+ "avg_latency_ms": 40.26402999879792,
+ "overhead_ms": 39.509989998623496,
+ "overhead_percent": 5239.773750660959
+ },
+ {
+ "connection_type": "encrypted",
+ "dh_key_size": 1024,
+ "iterations": 10,
+ "elapsed_s": 0.63951300001645,
+ "avg_latency_ms": 63.951300001644995,
+ "overhead_ms": 63.19726000147057,
+ "overhead_percent": 8381.154844153034
+ },
+ {
+ "transfer_type": "plain",
+ "piece_size_bytes": 262144,
+ "iterations": 20,
+ "elapsed_s": 0.008156000003509689,
+ "throughput_bytes_per_s": 642824913.8969941,
+ "overhead_percent": 0.0
+ },
+ {
+ "transfer_type": "encrypted",
+ "piece_size_bytes": 262144,
+ "iterations": 20,
+ "elapsed_s": 3.413200799986953,
+ "throughput_bytes_per_s": 1536059.642321671,
+ "overhead_percent": 99.76104540923754
+ },
+ {
+ "transfer_type": "plain",
+ "piece_size_bytes": 524288,
+ "iterations": 20,
+ "elapsed_s": 0.010919600012130104,
+ "throughput_bytes_per_s": 960269605.8785881,
+ "overhead_percent": 0.0
+ },
+ {
+ "transfer_type": "encrypted",
+ "piece_size_bytes": 524288,
+ "iterations": 20,
+ "elapsed_s": 8.3785449999923,
+ "throughput_bytes_per_s": 1251501.3048219753,
+ "overhead_percent": 99.86967188202559
+ },
+ {
+ "transfer_type": "plain",
+ "piece_size_bytes": 1048576,
+ "iterations": 20,
+ "elapsed_s": 0.010977500016451813,
+ "throughput_bytes_per_s": 1910409471.0608335,
+ "overhead_percent": 0.0
+ },
+ {
+ "transfer_type": "encrypted",
+ "piece_size_bytes": 1048576,
+ "iterations": 20,
+ "elapsed_s": 13.699398600008863,
+ "throughput_bytes_per_s": 1530835.0835186613,
+ "overhead_percent": 99.91986874506706
+ },
+ {
+ "operation": "cipher",
+ "cipher_type": "RC4",
+ "dh_key_size": 0,
+ "memory_bytes": 192512,
+ "instances": 100,
+ "avg_bytes_per_instance": 1925
+ },
+ {
+ "operation": "cipher",
+ "cipher_type": "AES-128",
+ "dh_key_size": 0,
+ "memory_bytes": 0,
+ "instances": 100,
+ "avg_bytes_per_instance": 0
+ },
+ {
+ "operation": "cipher",
+ "cipher_type": "AES-256",
+ "dh_key_size": 0,
+ "memory_bytes": 0,
+ "instances": 100,
+ "avg_bytes_per_instance": 0
+ },
+ {
+ "operation": "handshake",
+ "cipher_type": "RC4",
+ "dh_key_size": 768,
+ "memory_bytes": 0,
+ "instances": 10,
+ "avg_bytes_per_instance": 0
+ },
+ {
+ "operation": "handshake",
+ "cipher_type": "RC4",
+ "dh_key_size": 1024,
+ "memory_bytes": 4096,
+ "instances": 10,
+ "avg_bytes_per_instance": 409
+ }
+ ]
+}
\ No newline at end of file
diff --git a/docs/reports/benchmarks/runs/hash_verify-20260102-051358-ea3cad3.json b/docs/reports/benchmarks/runs/hash_verify-20260102-051358-ea3cad3.json
new file mode 100644
index 0000000..3ff939f
--- /dev/null
+++ b/docs/reports/benchmarks/runs/hash_verify-20260102-051358-ea3cad3.json
@@ -0,0 +1,42 @@
+{
+ "meta": {
+ "benchmark": "hash_verify",
+ "config": "performance",
+ "timestamp": "2026-01-02T05:13:58.631748+00:00",
+ "platform": {
+ "system": "Windows",
+ "release": "11",
+ "python": "3.13.3"
+ },
+ "git": {
+ "commit_hash": "ea3cad3c4d3f1d60b727f8878caa72c5584bb532",
+ "commit_hash_short": "ea3cad3",
+ "branch": "addscom",
+ "author": "Joseph Pollack",
+ "is_dirty": true
+ }
+ },
+ "results": [
+ {
+ "size_bytes": 1048576,
+ "iterations": 64,
+ "elapsed_s": 9.470000077271834e-05,
+ "bytes_processed": 67108864,
+ "throughput_bytes_per_s": 708646921356.0245
+ },
+ {
+ "size_bytes": 4194304,
+ "iterations": 64,
+ "elapsed_s": 9.719999798107892e-05,
+ "bytes_processed": 268435456,
+ "throughput_bytes_per_s": 2761681703452.854
+ },
+ {
+ "size_bytes": 16777216,
+ "iterations": 64,
+ "elapsed_s": 8.779999916441739e-05,
+ "bytes_processed": 1073741824,
+ "throughput_bytes_per_s": 12229405856704.771
+ }
+ ]
+}
\ No newline at end of file
diff --git a/docs/reports/benchmarks/runs/loopback_throughput-20260102-051411-ea3cad3.json b/docs/reports/benchmarks/runs/loopback_throughput-20260102-051411-ea3cad3.json
new file mode 100644
index 0000000..74e1d00
--- /dev/null
+++ b/docs/reports/benchmarks/runs/loopback_throughput-20260102-051411-ea3cad3.json
@@ -0,0 +1,53 @@
+{
+ "meta": {
+ "benchmark": "loopback_throughput",
+ "config": "performance",
+ "timestamp": "2026-01-02T05:14:11.143094+00:00",
+ "platform": {
+ "system": "Windows",
+ "release": "11",
+ "python": "3.13.3"
+ },
+ "git": {
+ "commit_hash": "ea3cad3c4d3f1d60b727f8878caa72c5584bb532",
+ "commit_hash_short": "ea3cad3",
+ "branch": "addscom",
+ "author": "Joseph Pollack",
+ "is_dirty": true
+ }
+ },
+ "results": [
+ {
+ "payload_bytes": 16384,
+ "pipeline_depth": 8,
+ "duration_s": 3.000012800002878,
+ "bytes_transferred": 28100132864,
+ "throughput_bytes_per_s": 9366670990.19479,
+ "stall_percent": 11.11110535251912
+ },
+ {
+ "payload_bytes": 16384,
+ "pipeline_depth": 128,
+ "duration_s": 3.000014799996279,
+ "bytes_transferred": 61922738176,
+ "throughput_bytes_per_s": 20640810897.358505,
+ "stall_percent": 0.7751919667985651
+ },
+ {
+ "payload_bytes": 65536,
+ "pipeline_depth": 8,
+ "duration_s": 3.0000116000010166,
+ "bytes_transferred": 121204899840,
+ "throughput_bytes_per_s": 40401477060.94167,
+ "stall_percent": 11.111105770825153
+ },
+ {
+ "payload_bytes": 65536,
+ "pipeline_depth": 128,
+ "duration_s": 3.000033099997381,
+ "bytes_transferred": 151123525632,
+ "throughput_bytes_per_s": 50373952751.431946,
+ "stall_percent": 0.775179455227201
+ }
+ ]
+}
\ No newline at end of file
diff --git a/docs/reports/benchmarks/runs/piece_assembly-20260102-051413-ea3cad3.json b/docs/reports/benchmarks/runs/piece_assembly-20260102-051413-ea3cad3.json
new file mode 100644
index 0000000..05ce71b
--- /dev/null
+++ b/docs/reports/benchmarks/runs/piece_assembly-20260102-051413-ea3cad3.json
@@ -0,0 +1,35 @@
+{
+ "meta": {
+ "benchmark": "piece_assembly",
+ "config": "performance",
+ "timestamp": "2026-01-02T05:14:13.102422+00:00",
+ "platform": {
+ "system": "Windows",
+ "release": "11",
+ "python": "3.13.3"
+ },
+ "git": {
+ "commit_hash": "ea3cad3c4d3f1d60b727f8878caa72c5584bb532",
+ "commit_hash_short": "ea3cad3",
+ "branch": "addscom",
+ "author": "Joseph Pollack",
+ "is_dirty": true
+ }
+ },
+ "results": [
+ {
+ "piece_size_bytes": 1048576,
+ "block_size_bytes": 16384,
+ "blocks": 64,
+ "elapsed_s": 0.3159229000011692,
+ "throughput_bytes_per_s": 3319088.2965309555
+ },
+ {
+ "piece_size_bytes": 4194304,
+ "block_size_bytes": 16384,
+ "blocks": 256,
+ "elapsed_s": 0.31514900000183843,
+ "throughput_bytes_per_s": 13308955.446393713
+ }
+ ]
+}
\ No newline at end of file
diff --git a/docs/reports/benchmarks/timeseries/disk_io_timeseries.json b/docs/reports/benchmarks/timeseries/disk_io_timeseries.json
index 71c6c3c..4513987 100644
--- a/docs/reports/benchmarks/timeseries/disk_io_timeseries.json
+++ b/docs/reports/benchmarks/timeseries/disk_io_timeseries.json
@@ -41,6 +41,48 @@
"read_throughput_bytes_per_s": 3335059317.3461175
}
]
+ },
+ {
+ "timestamp": "2026-01-02T05:09:47.443872+00:00",
+ "git": {
+ "commit_hash": "ea3cad3c4d3f1d60b727f8878caa72c5584bb532",
+ "commit_hash_short": "ea3cad3",
+ "branch": "addscom",
+ "author": "Joseph Pollack",
+ "is_dirty": false
+ },
+ "platform": {
+ "system": "Windows",
+ "release": "11",
+ "python": "3.13.3"
+ },
+ "config": "example-config-performance",
+ "results": [
+ {
+ "size_bytes": 262144,
+ "iterations": 10,
+ "write_elapsed_s": 1.0489899999956833,
+ "read_elapsed_s": 0.005929799997829832,
+ "write_throughput_bytes_per_s": 2499013.336648383,
+ "read_throughput_bytes_per_s": 442078991.021516
+ },
+ {
+ "size_bytes": 1048576,
+ "iterations": 10,
+ "write_elapsed_s": 0.03471130000252742,
+ "read_elapsed_s": 0.006363599997712299,
+ "write_throughput_bytes_per_s": 302084911.8078696,
+ "read_throughput_bytes_per_s": 1647771702.1449509
+ },
+ {
+ "size_bytes": 4194304,
+ "iterations": 10,
+ "write_elapsed_s": 0.06873649999761255,
+ "read_elapsed_s": 0.016081100002338644,
+ "write_throughput_bytes_per_s": 610200403.0094174,
+ "read_throughput_bytes_per_s": 2608219586.589245
+ }
+ ]
}
]
}
\ No newline at end of file
diff --git a/docs/reports/benchmarks/timeseries/encryption_timeseries.json b/docs/reports/benchmarks/timeseries/encryption_timeseries.json
index f51c876..5010cc0 100644
--- a/docs/reports/benchmarks/timeseries/encryption_timeseries.json
+++ b/docs/reports/benchmarks/timeseries/encryption_timeseries.json
@@ -567,6 +567,574 @@
"avg_bytes_per_instance": 0
}
]
+ },
+ {
+ "timestamp": "2026-01-02T05:13:53.914384+00:00",
+ "git": {
+ "commit_hash": "ea3cad3c4d3f1d60b727f8878caa72c5584bb532",
+ "commit_hash_short": "ea3cad3",
+ "branch": "addscom",
+ "author": "Joseph Pollack",
+ "is_dirty": true
+ },
+ "platform": {
+ "system": "Windows",
+ "release": "11",
+ "python": "3.13.3"
+ },
+ "config": "performance",
+ "results": [
+ {
+ "cipher": "RC4",
+ "operation": "encrypt",
+ "data_size_bytes": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.03451829999539768,
+ "throughput_bytes_per_s": 2966542.385159552
+ },
+ {
+ "cipher": "RC4",
+ "operation": "decrypt",
+ "data_size_bytes": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.0827722999965772,
+ "throughput_bytes_per_s": 1237128.8462956138
+ },
+ {
+ "cipher": "AES-128",
+ "operation": "encrypt",
+ "data_size_bytes": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.0001883000004454516,
+ "throughput_bytes_per_s": 543813062.9726905
+ },
+ {
+ "cipher": "AES-128",
+ "operation": "decrypt",
+ "data_size_bytes": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.0003646000041044317,
+ "throughput_bytes_per_s": 280855729.14768744
+ },
+ {
+ "cipher": "AES-256",
+ "operation": "encrypt",
+ "data_size_bytes": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.00021330000163288787,
+ "throughput_bytes_per_s": 480075008.04543525
+ },
+ {
+ "cipher": "AES-256",
+ "operation": "decrypt",
+ "data_size_bytes": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.00047730000369483605,
+ "throughput_bytes_per_s": 214540119.85608512
+ },
+ {
+ "cipher": "RC4",
+ "operation": "encrypt",
+ "data_size_bytes": 65536,
+ "iterations": 100,
+ "elapsed_s": 2.8039222000006703,
+ "throughput_bytes_per_s": 2337297.3757968154
+ },
+ {
+ "cipher": "RC4",
+ "operation": "decrypt",
+ "data_size_bytes": 65536,
+ "iterations": 100,
+ "elapsed_s": 5.526166100004048,
+ "throughput_bytes_per_s": 1185921.646472986
+ },
+ {
+ "cipher": "AES-128",
+ "operation": "encrypt",
+ "data_size_bytes": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.0073921000002883375,
+ "throughput_bytes_per_s": 886568092.9295287
+ },
+ {
+ "cipher": "AES-128",
+ "operation": "decrypt",
+ "data_size_bytes": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.014727400004630908,
+ "throughput_bytes_per_s": 444993685.09983265
+ },
+ {
+ "cipher": "AES-256",
+ "operation": "encrypt",
+ "data_size_bytes": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.00963239999691723,
+ "throughput_bytes_per_s": 680370416.7286892
+ },
+ {
+ "cipher": "AES-256",
+ "operation": "decrypt",
+ "data_size_bytes": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.018778899997414555,
+ "throughput_bytes_per_s": 348987427.4266484
+ },
+ {
+ "cipher": "RC4",
+ "operation": "encrypt",
+ "data_size_bytes": 1048576,
+ "iterations": 100,
+ "elapsed_s": 38.25981259999389,
+ "throughput_bytes_per_s": 2740672.075325762
+ },
+ {
+ "cipher": "RC4",
+ "operation": "decrypt",
+ "data_size_bytes": 1048576,
+ "iterations": 100,
+ "elapsed_s": 71.82708419999835,
+ "throughput_bytes_per_s": 1459861.5712706656
+ },
+ {
+ "cipher": "AES-128",
+ "operation": "encrypt",
+ "data_size_bytes": 1048576,
+ "iterations": 100,
+ "elapsed_s": 0.1789548000015202,
+ "throughput_bytes_per_s": 585944607.2366276
+ },
+ {
+ "cipher": "AES-128",
+ "operation": "decrypt",
+ "data_size_bytes": 1048576,
+ "iterations": 100,
+ "elapsed_s": 0.3451561000038055,
+ "throughput_bytes_per_s": 303797615.0467684
+ },
+ {
+ "cipher": "AES-256",
+ "operation": "encrypt",
+ "data_size_bytes": 1048576,
+ "iterations": 100,
+ "elapsed_s": 0.1957410000031814,
+ "throughput_bytes_per_s": 535695638.6158022
+ },
+ {
+ "cipher": "AES-256",
+ "operation": "decrypt",
+ "data_size_bytes": 1048576,
+ "iterations": 100,
+ "elapsed_s": 0.42559509999409784,
+ "throughput_bytes_per_s": 246378776.450796
+ },
+ {
+ "operation": "keypair_generation",
+ "key_size": 768,
+ "iterations": 100,
+ "elapsed_s": 0.03593610000098124,
+ "avg_latency_ms": 0.3514170002017636
+ },
+ {
+ "operation": "keypair_generation",
+ "key_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.024462199995468836,
+ "avg_latency_ms": 0.24316300012287684
+ },
+ {
+ "operation": "shared_secret",
+ "key_size": 768,
+ "iterations": 100,
+ "elapsed_s": 0.020352100000309292,
+ "avg_latency_ms": 0.20324500001152046
+ },
+ {
+ "operation": "shared_secret",
+ "key_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.023474100002204068,
+ "avg_latency_ms": 0.2344520005135564
+ },
+ {
+ "operation": "key_derivation",
+ "key_size": 0,
+ "iterations": 100,
+ "elapsed_s": 0.0034324000007472932,
+ "avg_latency_ms": 0.034095999581040815
+ },
+ {
+ "role": "initiator",
+ "dh_key_size": 768,
+ "iterations": 20,
+ "elapsed_s": 0.8071885999888764,
+ "avg_latency_ms": 40.35942999944382,
+ "success_rate": 100.0
+ },
+ {
+ "role": "initiator",
+ "dh_key_size": 1024,
+ "iterations": 20,
+ "elapsed_s": 1.2267373000140651,
+ "avg_latency_ms": 61.336865000703256,
+ "success_rate": 100.0
+ },
+ {
+ "operation": "read",
+ "stream_type": "plain",
+ "data_size_bytes": 1024,
+ "buffer_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.002212000013969373,
+ "throughput_bytes_per_s": 46292947.26641797,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "read",
+ "stream_type": "encrypted",
+ "data_size_bytes": 1024,
+ "buffer_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.04064449998259079,
+ "throughput_bytes_per_s": 2519406.070781062,
+ "overhead_ms": 0.38418099960836116
+ },
+ {
+ "operation": "write",
+ "stream_type": "plain",
+ "data_size_bytes": 1024,
+ "buffer_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.027512699947692454,
+ "throughput_bytes_per_s": 3721917.5215331237,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "write",
+ "stream_type": "encrypted",
+ "data_size_bytes": 1024,
+ "buffer_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.06074540007102769,
+ "throughput_bytes_per_s": 1685724.3491732196,
+ "overhead_ms": 0.3632270009984495
+ },
+ {
+ "operation": "read",
+ "stream_type": "plain",
+ "data_size_bytes": 1024,
+ "buffer_size": 16384,
+ "iterations": 100,
+ "elapsed_s": 0.002214099971752148,
+ "throughput_bytes_per_s": 46249040.83213769,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "read",
+ "stream_type": "encrypted",
+ "data_size_bytes": 1024,
+ "buffer_size": 16384,
+ "iterations": 100,
+ "elapsed_s": 0.039272700014407746,
+ "throughput_bytes_per_s": 2607409.2171516884,
+ "overhead_ms": 0.37091900027007796
+ },
+ {
+ "operation": "write",
+ "stream_type": "plain",
+ "data_size_bytes": 1024,
+ "buffer_size": 16384,
+ "iterations": 100,
+ "elapsed_s": 0.02435549998335773,
+ "throughput_bytes_per_s": 4204389.155220405,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "write",
+ "stream_type": "encrypted",
+ "data_size_bytes": 1024,
+ "buffer_size": 16384,
+ "iterations": 100,
+ "elapsed_s": 0.05822200001421152,
+ "throughput_bytes_per_s": 1758785.3384460341,
+ "overhead_ms": 0.3230630001053214
+ },
+ {
+ "operation": "read",
+ "stream_type": "plain",
+ "data_size_bytes": 1024,
+ "buffer_size": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.0023317000013776124,
+ "throughput_bytes_per_s": 43916455.77883096,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "read",
+ "stream_type": "encrypted",
+ "data_size_bytes": 1024,
+ "buffer_size": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.04363490007381188,
+ "throughput_bytes_per_s": 2346745.376448263,
+ "overhead_ms": 0.41184600107953884
+ },
+ {
+ "operation": "write",
+ "stream_type": "plain",
+ "data_size_bytes": 1024,
+ "buffer_size": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.027873400009411853,
+ "throughput_bytes_per_s": 3673753.469810758,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "write",
+ "stream_type": "encrypted",
+ "data_size_bytes": 1024,
+ "buffer_size": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.061382800005958416,
+ "throughput_bytes_per_s": 1668219.7617257612,
+ "overhead_ms": 0.3663179998693522
+ },
+ {
+ "operation": "read",
+ "stream_type": "plain",
+ "data_size_bytes": 65536,
+ "buffer_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.09153799997875467,
+ "throughput_bytes_per_s": 71594310.57616559,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "read",
+ "stream_type": "encrypted",
+ "data_size_bytes": 65536,
+ "buffer_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 2.5692752999675577,
+ "throughput_bytes_per_s": 2550758.1846455894,
+ "overhead_ms": 24.770973999766284
+ },
+ {
+ "operation": "write",
+ "stream_type": "plain",
+ "data_size_bytes": 65536,
+ "buffer_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 0.1477269000315573,
+ "throughput_bytes_per_s": 44362942.690870956,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "write",
+ "stream_type": "encrypted",
+ "data_size_bytes": 65536,
+ "buffer_size": 1024,
+ "iterations": 100,
+ "elapsed_s": 3.220068699993135,
+ "throughput_bytes_per_s": 2035236.080526472,
+ "overhead_ms": 29.322591999880387
+ },
+ {
+ "operation": "read",
+ "stream_type": "plain",
+ "data_size_bytes": 65536,
+ "buffer_size": 16384,
+ "iterations": 100,
+ "elapsed_s": 0.009617199968488421,
+ "throughput_bytes_per_s": 681445745.2765286,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "read",
+ "stream_type": "encrypted",
+ "data_size_bytes": 65536,
+ "buffer_size": 16384,
+ "iterations": 100,
+ "elapsed_s": 2.118651700024202,
+ "throughput_bytes_per_s": 3093288.0567037687,
+ "overhead_ms": 21.109585000449442
+ },
+ {
+ "operation": "write",
+ "stream_type": "plain",
+ "data_size_bytes": 65536,
+ "buffer_size": 16384,
+ "iterations": 100,
+ "elapsed_s": 0.03806270001223311,
+ "throughput_bytes_per_s": 172179062.38637078,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "write",
+ "stream_type": "encrypted",
+ "data_size_bytes": 65536,
+ "buffer_size": 16384,
+ "iterations": 100,
+ "elapsed_s": 2.2931100000059814,
+ "throughput_bytes_per_s": 2857952.736668937,
+ "overhead_ms": 22.57959199967445
+ },
+ {
+ "operation": "read",
+ "stream_type": "plain",
+ "data_size_bytes": 65536,
+ "buffer_size": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.0027211999549763277,
+ "throughput_bytes_per_s": 2408349297.5278296,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "read",
+ "stream_type": "encrypted",
+ "data_size_bytes": 65536,
+ "buffer_size": 65536,
+ "iterations": 100,
+ "elapsed_s": 2.159935999996378,
+ "throughput_bytes_per_s": 3034163.9752339837,
+ "overhead_ms": 21.571500000500237
+ },
+ {
+ "operation": "write",
+ "stream_type": "plain",
+ "data_size_bytes": 65536,
+ "buffer_size": 65536,
+ "iterations": 100,
+ "elapsed_s": 0.028122700001404155,
+ "throughput_bytes_per_s": 233035946.03906387,
+ "overhead_ms": 0.0
+ },
+ {
+ "operation": "write",
+ "stream_type": "encrypted",
+ "data_size_bytes": 65536,
+ "buffer_size": 65536,
+ "iterations": 100,
+ "elapsed_s": 2.3325769000221044,
+ "throughput_bytes_per_s": 2809596.545321998,
+ "overhead_ms": 23.048445000240463
+ },
+ {
+ "connection_type": "plain",
+ "dh_key_size": 0,
+ "iterations": 10,
+ "elapsed_s": 0.007540400001744274,
+ "avg_latency_ms": 0.7540400001744274,
+ "overhead_ms": 0.0,
+ "overhead_percent": 0.0
+ },
+ {
+ "connection_type": "encrypted",
+ "dh_key_size": 768,
+ "iterations": 10,
+ "elapsed_s": 0.40264029998797923,
+ "avg_latency_ms": 40.26402999879792,
+ "overhead_ms": 39.509989998623496,
+ "overhead_percent": 5239.773750660959
+ },
+ {
+ "connection_type": "encrypted",
+ "dh_key_size": 1024,
+ "iterations": 10,
+ "elapsed_s": 0.63951300001645,
+ "avg_latency_ms": 63.951300001644995,
+ "overhead_ms": 63.19726000147057,
+ "overhead_percent": 8381.154844153034
+ },
+ {
+ "transfer_type": "plain",
+ "piece_size_bytes": 262144,
+ "iterations": 20,
+ "elapsed_s": 0.008156000003509689,
+ "throughput_bytes_per_s": 642824913.8969941,
+ "overhead_percent": 0.0
+ },
+ {
+ "transfer_type": "encrypted",
+ "piece_size_bytes": 262144,
+ "iterations": 20,
+ "elapsed_s": 3.413200799986953,
+ "throughput_bytes_per_s": 1536059.642321671,
+ "overhead_percent": 99.76104540923754
+ },
+ {
+ "transfer_type": "plain",
+ "piece_size_bytes": 524288,
+ "iterations": 20,
+ "elapsed_s": 0.010919600012130104,
+ "throughput_bytes_per_s": 960269605.8785881,
+ "overhead_percent": 0.0
+ },
+ {
+ "transfer_type": "encrypted",
+ "piece_size_bytes": 524288,
+ "iterations": 20,
+ "elapsed_s": 8.3785449999923,
+ "throughput_bytes_per_s": 1251501.3048219753,
+ "overhead_percent": 99.86967188202559
+ },
+ {
+ "transfer_type": "plain",
+ "piece_size_bytes": 1048576,
+ "iterations": 20,
+ "elapsed_s": 0.010977500016451813,
+ "throughput_bytes_per_s": 1910409471.0608335,
+ "overhead_percent": 0.0
+ },
+ {
+ "transfer_type": "encrypted",
+ "piece_size_bytes": 1048576,
+ "iterations": 20,
+ "elapsed_s": 13.699398600008863,
+ "throughput_bytes_per_s": 1530835.0835186613,
+ "overhead_percent": 99.91986874506706
+ },
+ {
+ "operation": "cipher",
+ "cipher_type": "RC4",
+ "dh_key_size": 0,
+ "memory_bytes": 192512,
+ "instances": 100,
+ "avg_bytes_per_instance": 1925
+ },
+ {
+ "operation": "cipher",
+ "cipher_type": "AES-128",
+ "dh_key_size": 0,
+ "memory_bytes": 0,
+ "instances": 100,
+ "avg_bytes_per_instance": 0
+ },
+ {
+ "operation": "cipher",
+ "cipher_type": "AES-256",
+ "dh_key_size": 0,
+ "memory_bytes": 0,
+ "instances": 100,
+ "avg_bytes_per_instance": 0
+ },
+ {
+ "operation": "handshake",
+ "cipher_type": "RC4",
+ "dh_key_size": 768,
+ "memory_bytes": 0,
+ "instances": 10,
+ "avg_bytes_per_instance": 0
+ },
+ {
+ "operation": "handshake",
+ "cipher_type": "RC4",
+ "dh_key_size": 1024,
+ "memory_bytes": 4096,
+ "instances": 10,
+ "avg_bytes_per_instance": 409
+ }
+ ]
}
]
}
\ No newline at end of file
diff --git a/docs/reports/benchmarks/timeseries/hash_verify_timeseries.json b/docs/reports/benchmarks/timeseries/hash_verify_timeseries.json
index 3d03e7a..b24187b 100644
--- a/docs/reports/benchmarks/timeseries/hash_verify_timeseries.json
+++ b/docs/reports/benchmarks/timeseries/hash_verify_timeseries.json
@@ -1,89 +1,11 @@
{
"entries": [
{
- "timestamp": "2025-12-09T13:52:18.131622+00:00",
+ "timestamp": "2026-01-02T05:13:58.634322+00:00",
"git": {
- "commit_hash": "862dc936e28b5c54448b586719c41c05e5a3a37f",
- "commit_hash_short": "862dc93",
- "branch": "dev",
- "author": "Joseph Pollack",
- "is_dirty": false
- },
- "platform": {
- "system": "Windows",
- "release": "11",
- "python": "3.13.3"
- },
- "config": "performance",
- "results": [
- {
- "size_bytes": 1048576,
- "iterations": 64,
- "elapsed_s": 9.670000144978985e-05,
- "bytes_processed": 67108864,
- "throughput_bytes_per_s": 693990310174.3525
- },
- {
- "size_bytes": 4194304,
- "iterations": 64,
- "elapsed_s": 8.69999967108015e-05,
- "bytes_processed": 268435456,
- "throughput_bytes_per_s": 3085465128146.0605
- },
- {
- "size_bytes": 16777216,
- "iterations": 64,
- "elapsed_s": 8.650000017951243e-05,
- "bytes_processed": 1073741824,
- "throughput_bytes_per_s": 12413200251695.68
- }
- ]
- },
- {
- "timestamp": "2025-12-31T15:56:19.831085+00:00",
- "git": {
- "commit_hash": "32b1ca9a87bb5fa5a113702986b04317e335c719",
- "commit_hash_short": "32b1ca9",
- "branch": "addssessionrefactor",
- "author": "Joseph Pollack",
- "is_dirty": false
- },
- "platform": {
- "system": "Windows",
- "release": "11",
- "python": "3.13.3"
- },
- "config": "performance",
- "results": [
- {
- "size_bytes": 1048576,
- "iterations": 64,
- "elapsed_s": 9.000000136438757e-05,
- "bytes_processed": 67108864,
- "throughput_bytes_per_s": 745654033140.4323
- },
- {
- "size_bytes": 4194304,
- "iterations": 64,
- "elapsed_s": 8.620000153314322e-05,
- "bytes_processed": 268435456,
- "throughput_bytes_per_s": 3114100362246.3823
- },
- {
- "size_bytes": 16777216,
- "iterations": 64,
- "elapsed_s": 8.899999738787301e-05,
- "bytes_processed": 1073741824,
- "throughput_bytes_per_s": 12064515230494.896
- }
- ]
- },
- {
- "timestamp": "2025-12-31T16:11:12.455669+00:00",
- "git": {
- "commit_hash": "ec4b34907b7d84bc411c3189fea26669e50d98e4",
- "commit_hash_short": "ec4b349",
- "branch": "addssessionrefactor",
+ "commit_hash": "ea3cad3c4d3f1d60b727f8878caa72c5584bb532",
+ "commit_hash_short": "ea3cad3",
+ "branch": "addscom",
"author": "Joseph Pollack",
"is_dirty": true
},
@@ -97,88 +19,23 @@
{
"size_bytes": 1048576,
"iterations": 64,
- "elapsed_s": 0.00010850000035134144,
- "bytes_processed": 67108864,
- "throughput_bytes_per_s": 618514873573.1805
- },
- {
- "size_bytes": 4194304,
- "iterations": 64,
- "elapsed_s": 9.800000043469481e-05,
- "bytes_processed": 268435456,
- "throughput_bytes_per_s": 2739137293972.5635
- },
- {
- "size_bytes": 16777216,
- "iterations": 64,
- "elapsed_s": 9.490000229561701e-05,
- "bytes_processed": 1073741824,
- "throughput_bytes_per_s": 11314455195219.643
- }
- ]
- },
- {
-<<<<<<< Updated upstream
- "timestamp": "2026-01-01T21:26:22.427564+00:00",
- "git": {
- "commit_hash": "a180ff317e02fa68b6ba45ac4bb8e80ee20116ec",
- "commit_hash_short": "a180ff3",
- "branch": "addssessionrefactor",
-=======
- "timestamp": "2026-01-01T21:33:24.328887+00:00",
- "git": {
- "commit_hash": "43a2215f6b9d7344d5a477b34370e0c1de833bbf",
- "commit_hash_short": "43a2215",
- "branch": "HEAD",
->>>>>>> Stashed changes
- "author": "Joseph Pollack",
- "is_dirty": false
- },
- "platform": {
- "system": "Windows",
- "release": "11",
- "python": "3.13.3"
- },
- "config": "performance",
- "results": [
- {
- "size_bytes": 1048576,
- "iterations": 64,
-<<<<<<< Updated upstream
- "elapsed_s": 9.810000119614415e-05,
- "bytes_processed": 67108864,
- "throughput_bytes_per_s": 684086270965.6902
-=======
- "elapsed_s": 0.0003040999981749337,
+ "elapsed_s": 9.470000077271834e-05,
"bytes_processed": 67108864,
- "throughput_bytes_per_s": 220680251242.21008
->>>>>>> Stashed changes
+ "throughput_bytes_per_s": 708646921356.0245
},
{
"size_bytes": 4194304,
"iterations": 64,
-<<<<<<< Updated upstream
- "elapsed_s": 9.230000068782829e-05,
+ "elapsed_s": 9.719999798107892e-05,
"bytes_processed": 268435456,
- "throughput_bytes_per_s": 2908293109421.384
-=======
- "elapsed_s": 0.00012789999891538173,
- "bytes_processed": 268435456,
- "throughput_bytes_per_s": 2098791698798.9666
->>>>>>> Stashed changes
+ "throughput_bytes_per_s": 2761681703452.854
},
{
"size_bytes": 16777216,
"iterations": 64,
-<<<<<<< Updated upstream
- "elapsed_s": 9.109999882639386e-05,
- "bytes_processed": 1073741824,
- "throughput_bytes_per_s": 11786408757767.307
-=======
- "elapsed_s": 9.259999933419749e-05,
+ "elapsed_s": 8.779999916441739e-05,
"bytes_processed": 1073741824,
- "throughput_bytes_per_s": 11595484143847.758
->>>>>>> Stashed changes
+ "throughput_bytes_per_s": 12229405856704.771
}
]
}
diff --git a/docs/reports/benchmarks/timeseries/loopback_throughput_timeseries.json b/docs/reports/benchmarks/timeseries/loopback_throughput_timeseries.json
index 6b75fa6..066e3e9 100644
--- a/docs/reports/benchmarks/timeseries/loopback_throughput_timeseries.json
+++ b/docs/reports/benchmarks/timeseries/loopback_throughput_timeseries.json
@@ -1,61 +1,11 @@
{
"entries": [
{
- "timestamp": "2025-12-09T13:52:30.586439+00:00",
+ "timestamp": "2026-01-02T05:14:11.144981+00:00",
"git": {
- "commit_hash": "862dc936e28b5c54448b586719c41c05e5a3a37f",
- "commit_hash_short": "862dc93",
- "branch": "dev",
- "author": "Joseph Pollack",
- "is_dirty": false
- },
- "platform": {
- "system": "Windows",
- "release": "11",
- "python": "3.13.3"
- },
- "config": "performance",
- "results": [
- {
- "payload_bytes": 16384,
- "pipeline_depth": 8,
- "duration_s": 3.000016300000425,
- "bytes_transferred": 28182183936,
- "throughput_bytes_per_s": 9394010271.20953,
- "stall_percent": 11.111105369284974
- },
- {
- "payload_bytes": 16384,
- "pipeline_depth": 128,
- "duration_s": 3.000051999999414,
- "bytes_transferred": 52992933888,
- "throughput_bytes_per_s": 17664005119.914707,
- "stall_percent": 0.7751935606383651
- },
- {
- "payload_bytes": 65536,
- "pipeline_depth": 8,
- "duration_s": 3.0000094000024546,
- "bytes_transferred": 114890899456,
- "throughput_bytes_per_s": 38296846488.516335,
- "stall_percent": 11.111105477341939
- },
- {
- "payload_bytes": 65536,
- "pipeline_depth": 128,
- "duration_s": 3.000038599999243,
- "bytes_transferred": 221845127168,
- "throughput_bytes_per_s": 73947424265.82643,
- "stall_percent": 0.7751935712223383
- }
- ]
- },
- {
- "timestamp": "2025-12-31T15:56:32.306398+00:00",
- "git": {
- "commit_hash": "32b1ca9a87bb5fa5a113702986b04317e335c719",
- "commit_hash_short": "32b1ca9",
- "branch": "addssessionrefactor",
+ "commit_hash": "ea3cad3c4d3f1d60b727f8878caa72c5584bb532",
+ "commit_hash_short": "ea3cad3",
+ "branch": "addscom",
"author": "Joseph Pollack",
"is_dirty": true
},
@@ -69,170 +19,34 @@
{
"payload_bytes": 16384,
"pipeline_depth": 8,
- "duration_s": 3.00001759999941,
- "bytes_transferred": 31751536640,
- "throughput_bytes_per_s": 10583783455.139145,
- "stall_percent": 11.111106014752735
+ "duration_s": 3.000012800002878,
+ "bytes_transferred": 28100132864,
+ "throughput_bytes_per_s": 9366670990.19479,
+ "stall_percent": 11.11110535251912
},
{
"payload_bytes": 16384,
"pipeline_depth": 128,
- "duration_s": 3.0000309999995807,
- "bytes_transferred": 62571364352,
- "throughput_bytes_per_s": 20856905929.30831,
- "stall_percent": 0.7751845337227097
+ "duration_s": 3.000014799996279,
+ "bytes_transferred": 61922738176,
+ "throughput_bytes_per_s": 20640810897.358505,
+ "stall_percent": 0.7751919667985651
},
{
"payload_bytes": 65536,
"pipeline_depth": 8,
"duration_s": 3.0000116000010166,
- "bytes_transferred": 126129930240,
- "throughput_bytes_per_s": 42043147513.148705,
- "stall_percent": 11.111075188761681
- },
- {
- "payload_bytes": 65536,
- "pipeline_depth": 128,
- "duration_s": 3.000052200000937,
- "bytes_transferred": 247966007296,
- "throughput_bytes_per_s": 82653897587.4895,
- "stall_percent": 0.7751714364313005
- }
- ]
- },
- {
- "timestamp": "2025-12-31T16:11:25.026493+00:00",
- "git": {
- "commit_hash": "ec4b34907b7d84bc411c3189fea26669e50d98e4",
- "commit_hash_short": "ec4b349",
- "branch": "addssessionrefactor",
- "author": "Joseph Pollack",
- "is_dirty": true
- },
- "platform": {
- "system": "Windows",
- "release": "11",
- "python": "3.13.3"
- },
- "config": "performance",
- "results": [
- {
- "payload_bytes": 16384,
- "pipeline_depth": 8,
- "duration_s": 3.000020299998141,
- "bytes_transferred": 27435073536,
- "throughput_bytes_per_s": 9144962631.091864,
- "stall_percent": 11.111105212923967
- },
- {
- "payload_bytes": 16384,
- "pipeline_depth": 128,
- "duration_s": 3.0000699999982317,
- "bytes_transferred": 41624010752,
- "throughput_bytes_per_s": 13874346515.922806,
- "stall_percent": 0.7751595859358157
- },
- {
- "payload_bytes": 65536,
- "pipeline_depth": 8,
- "duration_s": 3.0000199999994948,
- "bytes_transferred": 104454946816,
- "throughput_bytes_per_s": 34818083484.78263,
- "stall_percent": 11.111104914479984
- },
- {
- "payload_bytes": 65536,
- "pipeline_depth": 128,
- "duration_s": 3.0001693999984127,
- "bytes_transferred": 205192364032,
- "throughput_bytes_per_s": 68393592719.16731,
- "stall_percent": 0.7751672662645684
- }
- ]
- },
- {
-<<<<<<< Updated upstream
- "timestamp": "2026-01-01T21:26:34.928266+00:00",
- "git": {
- "commit_hash": "a180ff317e02fa68b6ba45ac4bb8e80ee20116ec",
- "commit_hash_short": "a180ff3",
- "branch": "addssessionrefactor",
-=======
- "timestamp": "2026-01-01T21:33:36.877184+00:00",
- "git": {
- "commit_hash": "43a2215f6b9d7344d5a477b34370e0c1de833bbf",
- "commit_hash_short": "43a2215",
- "branch": "HEAD",
->>>>>>> Stashed changes
- "author": "Joseph Pollack",
- "is_dirty": true
- },
- "platform": {
- "system": "Windows",
- "release": "11",
- "python": "3.13.3"
- },
- "config": "performance",
- "results": [
- {
- "payload_bytes": 16384,
- "pipeline_depth": 8,
-<<<<<<< Updated upstream
- "duration_s": 3.000015899997379,
- "bytes_transferred": 22009610240,
- "throughput_bytes_per_s": 7336497863.234401,
- "stall_percent": 11.111103758996506
-=======
- "duration_s": 3.000017399997887,
- "bytes_transferred": 28786163712,
- "throughput_bytes_per_s": 9595332251.079702,
- "stall_percent": 11.111105489757612
->>>>>>> Stashed changes
- },
- {
- "payload_bytes": 16384,
- "pipeline_depth": 128,
-<<<<<<< Updated upstream
- "duration_s": 3.000031100000342,
- "bytes_transferred": 50079989760,
- "throughput_bytes_per_s": 16693156867.605236,
- "stall_percent": 0.7751935468058812
-=======
- "duration_s": 3.0000443999997515,
- "bytes_transferred": 48896245760,
- "throughput_bytes_per_s": 16298507368.758959,
- "stall_percent": 0.7751754992010522
->>>>>>> Stashed changes
- },
- {
- "payload_bytes": 65536,
- "pipeline_depth": 8,
-<<<<<<< Updated upstream
- "duration_s": 3.000010800002201,
- "bytes_transferred": 112558080000,
- "throughput_bytes_per_s": 37519224930.762726,
- "stall_percent": 11.11108235844545
-=======
- "duration_s": 3.0000132999994094,
- "bytes_transferred": 119485759488,
- "throughput_bytes_per_s": 39828409923.39052,
- "stall_percent": 11.111105693990083
->>>>>>> Stashed changes
+ "bytes_transferred": 121204899840,
+ "throughput_bytes_per_s": 40401477060.94167,
+ "stall_percent": 11.111105770825153
},
{
"payload_bytes": 65536,
"pipeline_depth": 128,
-<<<<<<< Updated upstream
- "duration_s": 3.000025099998311,
- "bytes_transferred": 245232566272,
- "throughput_bytes_per_s": 81743504836.72223,
- "stall_percent": 0.7751935928926357
-=======
- "duration_s": 3.0000153000000864,
- "bytes_transferred": 228808589312,
- "throughput_bytes_per_s": 76269140798.0464,
- "stall_percent": 0.7751904937704253
->>>>>>> Stashed changes
+ "duration_s": 3.000033099997381,
+ "bytes_transferred": 151123525632,
+ "throughput_bytes_per_s": 50373952751.431946,
+ "stall_percent": 0.775179455227201
}
]
}
diff --git a/docs/reports/benchmarks/timeseries/piece_assembly_timeseries.json b/docs/reports/benchmarks/timeseries/piece_assembly_timeseries.json
index a1e30a6..ab0f153 100644
--- a/docs/reports/benchmarks/timeseries/piece_assembly_timeseries.json
+++ b/docs/reports/benchmarks/timeseries/piece_assembly_timeseries.json
@@ -1,11 +1,11 @@
{
"entries": [
{
- "timestamp": "2025-12-31T15:56:34.824768+00:00",
+ "timestamp": "2026-01-02T05:14:13.106994+00:00",
"git": {
- "commit_hash": "32b1ca9a87bb5fa5a113702986b04317e335c719",
- "commit_hash_short": "32b1ca9",
- "branch": "addssessionrefactor",
+ "commit_hash": "ea3cad3c4d3f1d60b727f8878caa72c5584bb532",
+ "commit_hash_short": "ea3cad3",
+ "branch": "addscom",
"author": "Joseph Pollack",
"is_dirty": true
},
@@ -20,97 +20,15 @@
"piece_size_bytes": 1048576,
"block_size_bytes": 16384,
"blocks": 64,
- "elapsed_s": 0.3204829000023892,
- "throughput_bytes_per_s": 3271862.5548888342
+ "elapsed_s": 0.3159229000011692,
+ "throughput_bytes_per_s": 3319088.2965309555
},
{
"piece_size_bytes": 4194304,
"block_size_bytes": 16384,
"blocks": 256,
- "elapsed_s": 0.30863529999987804,
- "throughput_bytes_per_s": 13589838.881040689
- }
- ]
- },
- {
- "timestamp": "2025-12-31T16:11:27.667582+00:00",
- "git": {
- "commit_hash": "ec4b34907b7d84bc411c3189fea26669e50d98e4",
- "commit_hash_short": "ec4b349",
- "branch": "addssessionrefactor",
- "author": "Joseph Pollack",
- "is_dirty": true
- },
- "platform": {
- "system": "Windows",
- "release": "11",
- "python": "3.13.3"
- },
- "config": "performance",
- "results": [
- {
- "piece_size_bytes": 1048576,
- "block_size_bytes": 16384,
- "blocks": 64,
- "elapsed_s": 0.3148627000009583,
- "throughput_bytes_per_s": 3330264.270733906
- },
- {
- "piece_size_bytes": 4194304,
- "block_size_bytes": 16384,
- "blocks": 256,
- "elapsed_s": 0.31750839999949676,
- "throughput_bytes_per_s": 13210056.804817284
- }
- ]
- },
- {
-<<<<<<< Updated upstream
- "timestamp": "2026-01-01T21:26:36.872152+00:00",
- "git": {
- "commit_hash": "a180ff317e02fa68b6ba45ac4bb8e80ee20116ec",
- "commit_hash_short": "a180ff3",
- "branch": "addssessionrefactor",
-=======
- "timestamp": "2026-01-01T21:33:38.852240+00:00",
- "git": {
- "commit_hash": "43a2215f6b9d7344d5a477b34370e0c1de833bbf",
- "commit_hash_short": "43a2215",
- "branch": "HEAD",
->>>>>>> Stashed changes
- "author": "Joseph Pollack",
- "is_dirty": true
- },
- "platform": {
- "system": "Windows",
- "release": "11",
- "python": "3.13.3"
- },
- "config": "performance",
- "results": [
- {
- "piece_size_bytes": 1048576,
- "block_size_bytes": 16384,
- "blocks": 64,
-<<<<<<< Updated upstream
- "elapsed_s": 0.3269073999981629,
- "throughput_bytes_per_s": 3207562.753262522
-=======
- "elapsed_s": 0.3274870999994164,
- "throughput_bytes_per_s": 3201884.898678051
->>>>>>> Stashed changes
- },
- {
- "piece_size_bytes": 4194304,
- "block_size_bytes": 16384,
- "blocks": 256,
-<<<<<<< Updated upstream
- "elapsed_s": 0.30781500000011874,
- "throughput_bytes_per_s": 13626054.610718718
-=======
- "elapsed_s": 0.30580449999979464,
- "throughput_bytes_per_s": 13715638.586099343
->>>>>>> Stashed changes
+ "elapsed_s": 0.31514900000183843,
+ "throughput_bytes_per_s": 13308955.446393713
}
]
}
diff --git a/tests/conftest.py b/tests/conftest.py
index 114023c..8457059 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -9,7 +9,7 @@
import random
import time
from pathlib import Path
-from typing import Any
+from typing import Any, Optional
import pytest
import pytest_asyncio
@@ -17,7 +17,7 @@
# #region agent log
# Debug logging helper
_DEBUG_LOG_PATH = Path(__file__).parent.parent / ".cursor" / "debug.log"
-def _debug_log(hypothesis_id: str, location: str, message: str, data: dict | None = None):
+def _debug_log(hypothesis_id: str, location: str, message: str, data: Optional[dict] = None):
"""Write debug log entry in NDJSON format."""
try:
# Ensure directory exists
@@ -336,6 +336,7 @@ def cleanup_singleton_resources():
# Only reset NetworkOptimizer if it exists and has active cleanup thread
if _network_optimizer is not None:
pool = _network_optimizer.connection_pool
+ # CRITICAL FIX: Check for connection_pool existence before accessing
if pool is not None and pool._cleanup_task is not None:
# #region agent log
_debug_log("A", "conftest.py:cleanup_singleton_resources", "NetworkOptimizer has cleanup task", {"thread_alive": pool._cleanup_task.is_alive()})
@@ -345,14 +346,31 @@ def cleanup_singleton_resources():
# #region agent log
_debug_log("A", "conftest.py:cleanup_singleton_resources", "Calling pool.stop()", {})
# #endregion
- # Call stop to properly shutdown the thread
+ # Call stop to properly shutdown the thread with timeout protection
try:
- pool.stop()
+ # CRITICAL FIX: Add timeout wrapper to prevent hanging
+ import threading
+ stop_completed = threading.Event()
+ def stop_with_timeout():
+ try:
+ pool.stop()
+ finally:
+ stop_completed.set()
+
+ stop_thread = threading.Thread(target=stop_with_timeout, daemon=True)
+ stop_thread.start()
+ stop_thread.join(timeout=2.0) # 2 second timeout
+
+ if not stop_completed.is_set():
+ # Timeout occurred, force cleanup
+ pool._shutdown_event.set()
+ pool._cleanup_task = None
+
# #region agent log
- _debug_log("A", "conftest.py:cleanup_singleton_resources", "pool.stop() completed, sleeping 0.1s", {})
+ _debug_log("A", "conftest.py:cleanup_singleton_resources", "pool.stop() completed, sleeping 0.5s", {})
# #endregion
- # Give thread a moment to respond to shutdown signal
- time.sleep(0.1)
+ # CRITICAL FIX: Increase sleep from 0.1s to 0.5s to ensure cleanup completes
+ time.sleep(0.5)
# #region agent log
_debug_log("A", "conftest.py:cleanup_singleton_resources", "Sleep completed", {})
# #endregion
@@ -367,9 +385,20 @@ def cleanup_singleton_resources():
_debug_log("A", "conftest.py:cleanup_singleton_resources", "Resetting NetworkOptimizer", {})
# #endregion
reset_network_optimizer()
+ # CRITICAL FIX: Explicitly clear pool reference
+ pool = None
# #region agent log
_debug_log("A", "conftest.py:cleanup_singleton_resources", "NetworkOptimizer reset completed", {})
# #endregion
+
+ # CRITICAL FIX: Force cleanup all ConnectionPool instances (not just singleton)
+ # This ensures any ConnectionPool instances created outside the singleton are also cleaned up
+ try:
+ from ccbt.utils.network_optimizer import force_cleanup_all_connection_pools
+ force_cleanup_all_connection_pools()
+ except Exception:
+ # Best effort - if import or cleanup fails, continue
+ pass
# Always reset MetricsCollector if it exists (running or not)
# This ensures clean state between tests to prevent state pollution
@@ -827,12 +856,49 @@ def create_interactive_cli(session, console=None):
console.print = Mock()
console.clear = Mock()
console.print_json = Mock()
+ # CRITICAL FIX: Rich Progress requires console.get_time method
+ import time
+ console.get_time = Mock(return_value=time.time)
adapter = LocalSessionAdapter(session)
executor = UnifiedCommandExecutor(adapter)
return InteractiveCLI(executor, adapter, console, session=session)
+@pytest.fixture
+def mock_config_manager():
+ """Fixture to provide a mocked ConfigManager for interactive CLI tests.
+
+ This fixture patches ConfigManager at the module level so that when
+ commands call ConfigManager(None), they receive the mocked instance
+ instead of creating a new one.
+
+ Also ensures config state is reset after each test.
+ """
+ from unittest.mock import Mock, MagicMock, patch
+ from ccbt.models import Config
+
+ # Create mock config with proper structure
+ mock_config = MagicMock(spec=Config)
+ mock_config.model_dump.return_value = {"network": {"port": 6881}}
+ # Create disk mock with backup_dir attribute
+ mock_disk = Mock()
+ mock_disk.backup_dir = "/tmp/backups"
+ mock_config.disk = mock_disk
+ mock_config.config_file = None
+
+ mock_cm = MagicMock()
+ mock_cm.config = mock_config
+ mock_cm.config_file = None
+
+ with patch('ccbt.cli.interactive.ConfigManager', return_value=mock_cm):
+ yield mock_cm
+
+ # Cleanup: reset config state after each test
+ from ccbt.config.config import reset_config
+ reset_config()
+
+
def create_test_torrent_dict(
name: str = "test_torrent",
info_hash: bytes = b"\x00" * 20,
diff --git a/tests/integration/test_connection_pool_integration.py b/tests/integration/test_connection_pool_integration.py
index 88a153d..14d45f8 100644
--- a/tests/integration/test_connection_pool_integration.py
+++ b/tests/integration/test_connection_pool_integration.py
@@ -82,13 +82,18 @@ async def mock_acquire(peer_info):
manager.connection_pool.acquire = mock_acquire
- # Mock the rest of connection process to avoid actual connection
- with patch.object(manager, '_disconnect_peer', new_callable=AsyncMock):
- # This will call acquire but fail later, which is fine for testing
- try:
- await manager._connect_to_peer(peer_info)
- except Exception:
- pass # Expected to fail without actual connection
+ # CRITICAL FIX: Mock asyncio.open_connection to prevent real network calls
+ # This prevents 30-second timeouts per connection attempt
+ with patch("asyncio.open_connection") as mock_open_conn:
+ mock_open_conn.side_effect = ConnectionError("Mocked connection failure")
+
+ # Mock the rest of connection process to avoid actual connection
+ with patch.object(manager, '_disconnect_peer', new_callable=AsyncMock):
+ # This will call acquire but fail later, which is fine for testing
+ try:
+ await manager._connect_to_peer(peer_info)
+ except Exception:
+ pass # Expected to fail without actual connection
# Verify acquire was called (would be called if we had proper mocking)
# The fact that we can call _connect_to_peer without error in setup
diff --git a/tests/integration/test_early_peer_acceptance.py b/tests/integration/test_early_peer_acceptance.py
index 0cc4bee..70aab13 100644
--- a/tests/integration/test_early_peer_acceptance.py
+++ b/tests/integration/test_early_peer_acceptance.py
@@ -8,6 +8,7 @@
import asyncio
import json
from pathlib import Path
+from typing import Optional
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
@@ -17,7 +18,7 @@
from ccbt.session.session import AsyncSessionManager, AsyncTorrentSession
# #region agent log
-def _debug_log(hypothesis_id: str, location: str, message: str, data: dict | None = None):
+def _debug_log(hypothesis_id: str, location: str, message: str, data: Optional[dict] = None):
"""Debug logging for test hang investigation."""
try:
log_path = Path(".cursor/debug.log")
@@ -44,7 +45,7 @@ class TestEarlyPeerAcceptance:
@pytest.mark.asyncio
async def test_incoming_peer_before_tracker_announce(self, tmp_path):
"""Test that incoming peers are queued and accepted even before tracker announce completes."""
- start_task: asyncio.Task | None = None
+ start_task: Optional[asyncio.Task] = None
with patch("ccbt.config.config.get_config") as mock_get_config:
from ccbt.config.config import Config
@@ -292,7 +293,7 @@ class TestEarlyDownloadStart:
@pytest.mark.asyncio
async def test_download_starts_on_first_tracker_response(self, tmp_path):
"""Test that download starts immediately when first tracker responds with peers."""
- start_task: asyncio.Task | None = None
+ start_task: Optional[asyncio.Task] = None
with patch("ccbt.config.config.get_config") as mock_get_config:
from ccbt.config.config import Config
@@ -416,7 +417,7 @@ async def mock_wait_for_starting_session(self, session):
@pytest.mark.asyncio
async def test_peer_manager_reused_when_already_exists(self, tmp_path):
"""Test that existing peer_manager is reused when connecting new peers."""
- start_task: asyncio.Task | None = None
+ start_task: Optional[asyncio.Task] = None
with patch("ccbt.config.config.get_config") as mock_get_config:
from ccbt.config.config import Config
diff --git a/tests/integration/test_private_torrents.py b/tests/integration/test_private_torrents.py
index 1f1081a..4b4a710 100644
--- a/tests/integration/test_private_torrents.py
+++ b/tests/integration/test_private_torrents.py
@@ -36,63 +36,71 @@ async def test_private_torrent_peer_source_validation(tmp_path: Path):
# Start the manager so _running is True (required for _connect_to_peer to work)
await peer_manager.start()
- try:
- # Test 1: Tracker peer should be accepted
- tracker_peer = PeerInfo(ip="192.168.1.1", port=6881, peer_source="tracker")
- # Should not raise exception about peer source
+ # CRITICAL FIX: Mock asyncio.open_connection to prevent real network calls
+ # This prevents 30-second timeouts per connection attempt (2 retries = 60s per peer)
+ # Without this mock, the test would timeout after 300+ seconds with 5 peers
+ with patch("asyncio.open_connection") as mock_open_conn:
+ # Mock connection to fail immediately with ConnectionError (simulates network failure)
+ # This allows the test to verify peer source validation without waiting for timeouts
+ mock_open_conn.side_effect = ConnectionError("Mocked connection failure")
+
try:
- await peer_manager._connect_to_peer(tracker_peer)
- # Connection will fail (no real network), but shouldn't raise PeerConnectionError
- # about peer source
- except PeerConnectionError as e:
- # If PeerConnectionError is raised, it should not be about peer source
- assert "Private torrents only accept tracker-provided peers" not in str(e)
- except Exception:
- # Other exceptions (network, etc.) are OK
- pass
+ # Test 1: Tracker peer should be accepted
+ tracker_peer = PeerInfo(ip="192.168.1.1", port=6881, peer_source="tracker")
+ # Should not raise exception about peer source
+ try:
+ await peer_manager._connect_to_peer(tracker_peer)
+ # Connection will fail (mocked network), but shouldn't raise PeerConnectionError
+ # about peer source
+ except PeerConnectionError as e:
+ # If PeerConnectionError is raised, it should not be about peer source
+ assert "Private torrents only accept tracker-provided peers" not in str(e)
+ except Exception:
+ # Other exceptions (network, etc.) are OK
+ pass
- # Test 2: DHT peer should be rejected
- dht_peer = PeerInfo(ip="192.168.1.2", port=6882, peer_source="dht")
- # The exception is logged but caught by the outer exception handler
- # Check that it raises the correct error by catching it directly
- try:
- await peer_manager._connect_to_peer(dht_peer)
- pytest.fail("Expected PeerConnectionError for DHT peer in private torrent")
- except PeerConnectionError as e:
- assert "Private torrents only accept tracker-provided peers" in str(e)
- assert "dht" in str(e).lower()
- except Exception:
- # Network errors are OK, but we should have gotten PeerConnectionError first
- pass
- finally:
- await peer_manager.stop()
+ # Test 2: DHT peer should be rejected
+ dht_peer = PeerInfo(ip="192.168.1.2", port=6882, peer_source="dht")
+ # The exception is logged but caught by the outer exception handler
+ # Check that it raises the correct error by catching it directly
+ try:
+ await peer_manager._connect_to_peer(dht_peer)
+ pytest.fail("Expected PeerConnectionError for DHT peer in private torrent")
+ except PeerConnectionError as e:
+ assert "Private torrents only accept tracker-provided peers" in str(e)
+ assert "dht" in str(e).lower()
+ except Exception:
+ # Network errors are OK, but we should have gotten PeerConnectionError first
+ pass
- # Test 3: PEX peer should be rejected
- pex_peer = PeerInfo(ip="192.168.1.3", port=6883, peer_source="pex")
- with pytest.raises(PeerConnectionError) as exc_info:
- await peer_manager._connect_to_peer(pex_peer)
- assert "Private torrents only accept tracker-provided peers" in str(exc_info.value)
- assert "pex" in str(exc_info.value).lower()
-
- # Test 4: LSD peer should be rejected
- lsd_peer = PeerInfo(ip="192.168.1.4", port=6884, peer_source="lsd")
- with pytest.raises(PeerConnectionError) as exc_info:
- await peer_manager._connect_to_peer(lsd_peer)
- assert "Private torrents only accept tracker-provided peers" in str(exc_info.value)
- assert "lsd" in str(exc_info.value).lower()
-
- # Test 5: Manual peer should be accepted
- manual_peer = PeerInfo(ip="192.168.1.5", port=6885, peer_source="manual")
- try:
- await peer_manager._connect_to_peer(manual_peer)
- # Connection will fail (no real network), but shouldn't raise PeerConnectionError
- # about peer source
- except PeerConnectionError as e:
- # If PeerConnectionError is raised, it should not be about peer source
- assert "Private torrents only accept tracker-provided peers" not in str(e)
- except Exception:
- # Other exceptions (network, etc.) are OK
- pass
+ # Test 3: PEX peer should be rejected
+ pex_peer = PeerInfo(ip="192.168.1.3", port=6883, peer_source="pex")
+ with pytest.raises(PeerConnectionError) as exc_info:
+ await peer_manager._connect_to_peer(pex_peer)
+ assert "Private torrents only accept tracker-provided peers" in str(exc_info.value)
+ assert "pex" in str(exc_info.value).lower()
+
+ # Test 4: LSD peer should be rejected
+ lsd_peer = PeerInfo(ip="192.168.1.4", port=6884, peer_source="lsd")
+ with pytest.raises(PeerConnectionError) as exc_info:
+ await peer_manager._connect_to_peer(lsd_peer)
+ assert "Private torrents only accept tracker-provided peers" in str(exc_info.value)
+ assert "lsd" in str(exc_info.value).lower()
+
+ # Test 5: Manual peer should be accepted
+ manual_peer = PeerInfo(ip="192.168.1.5", port=6885, peer_source="manual")
+ try:
+ await peer_manager._connect_to_peer(manual_peer)
+ # Connection will fail (mocked network), but shouldn't raise PeerConnectionError
+ # about peer source
+ except PeerConnectionError as e:
+ # If PeerConnectionError is raised, it should not be about peer source
+ assert "Private torrents only accept tracker-provided peers" not in str(e)
+ except Exception:
+ # Other exceptions (network, etc.) are OK
+ pass
+ finally:
+ await peer_manager.stop()
@pytest.mark.asyncio
@@ -272,11 +280,16 @@ async def test_private_torrent_tracker_only_peers(tmp_path: Path):
# Verify _is_private flag is set on peer manager
assert getattr(peer_manager, "_is_private", False) is True
- # Test that DHT peer would be rejected
- dht_peer = PeerInfo(ip="192.168.1.100", port=6881, peer_source="dht")
- with pytest.raises(PeerConnectionError) as exc_info:
- await peer_manager._connect_to_peer(dht_peer)
- assert "Private torrents only accept tracker-provided peers" in str(exc_info.value)
+ # CRITICAL FIX: Mock asyncio.open_connection to prevent real network calls
+ # This prevents 30-second timeouts per connection attempt
+ with patch("asyncio.open_connection") as mock_open_conn:
+ mock_open_conn.side_effect = ConnectionError("Mocked connection failure")
+
+ # Test that DHT peer would be rejected
+ dht_peer = PeerInfo(ip="192.168.1.100", port=6881, peer_source="dht")
+ with pytest.raises(PeerConnectionError) as exc_info:
+ await peer_manager._connect_to_peer(dht_peer)
+ assert "Private torrents only accept tracker-provided peers" in str(exc_info.value)
finally:
await session.stop()
@@ -296,20 +309,30 @@ async def test_non_private_torrent_allows_all_sources(tmp_path: Path):
# Create peer connection manager
peer_manager = AsyncPeerConnectionManager(torrent_data, MagicMock())
peer_manager._is_private = False # Explicitly mark as non-private
+ # Start the manager so _running is True (required for _connect_to_peer to work)
+ await peer_manager.start()
- # All peer sources should be accepted (no PeerConnectionError about source)
- for source in ["tracker", "dht", "pex", "lsd", "manual"]:
- peer = PeerInfo(ip="192.168.1.1", port=6881, peer_source=source)
- try:
- await peer_manager._connect_to_peer(peer)
- # Connection will fail (no real network), but shouldn't raise PeerConnectionError
- # about peer source
- except PeerConnectionError as e:
- # If PeerConnectionError is raised, it should not be about peer source
- assert "Private torrents only accept tracker-provided peers" not in str(e)
- except Exception:
- # Other exceptions (network, etc.) are OK
- pass
+ try:
+ # CRITICAL FIX: Mock asyncio.open_connection to prevent real network calls
+ # This prevents 30-second timeouts per connection attempt (5 sources = 150+ seconds)
+ with patch("asyncio.open_connection") as mock_open_conn:
+ mock_open_conn.side_effect = ConnectionError("Mocked connection failure")
+
+ # All peer sources should be accepted (no PeerConnectionError about source)
+ for source in ["tracker", "dht", "pex", "lsd", "manual"]:
+ peer = PeerInfo(ip="192.168.1.1", port=6881, peer_source=source)
+ try:
+ await peer_manager._connect_to_peer(peer)
+ # Connection will fail (mocked network), but shouldn't raise PeerConnectionError
+ # about peer source
+ except PeerConnectionError as e:
+ # If PeerConnectionError is raised, it should not be about peer source
+ assert "Private torrents only accept tracker-provided peers" not in str(e)
+ except Exception:
+ # Other exceptions (network, etc.) are OK
+ pass
+ finally:
+ await peer_manager.stop()
@pytest.mark.asyncio
diff --git a/tests/performance/bench_encryption.py b/tests/performance/bench_encryption.py
index f857aef..470192f 100644
--- a/tests/performance/bench_encryption.py
+++ b/tests/performance/bench_encryption.py
@@ -28,6 +28,7 @@
import time
from dataclasses import asdict, dataclass
from datetime import datetime, timezone
+from typing import Optional
from pathlib import Path
from unittest.mock import AsyncMock, MagicMock
@@ -1113,7 +1114,7 @@ def write_json(
return path
-def derive_config_name(config_file: str | None) -> str:
+def derive_config_name(config_file: Optional[str]) -> str:
"""Derive config name from config file path."""
if not config_file:
return "default"
diff --git a/tests/performance/bench_hash_verify.py b/tests/performance/bench_hash_verify.py
index 33543fe..b9c1a65 100644
--- a/tests/performance/bench_hash_verify.py
+++ b/tests/performance/bench_hash_verify.py
@@ -21,7 +21,7 @@
from dataclasses import asdict, dataclass
from datetime import datetime, timezone
from pathlib import Path
-from typing import List, Union
+from typing import List, Optional, Union
from ccbt.piece.piece_manager import PieceData, PieceManager # type: ignore
@@ -121,7 +121,7 @@ def write_json(output_dir: Path, benchmark: str, config_name: str, results: List
return path
-def derive_config_name(config_file: str | None) -> str:
+def derive_config_name(config_file: Optional[str]) -> str:
if not config_file:
return "default"
stem = Path(config_file).stem
diff --git a/tests/performance/bench_loopback_throughput.py b/tests/performance/bench_loopback_throughput.py
index 0e08133..44901e0 100644
--- a/tests/performance/bench_loopback_throughput.py
+++ b/tests/performance/bench_loopback_throughput.py
@@ -19,7 +19,7 @@
from dataclasses import asdict, dataclass
from datetime import datetime, timezone
from pathlib import Path
-from typing import List
+from typing import List, Optional
# Import bench_utils using relative import or direct import
try:
@@ -97,7 +97,7 @@ def write_json(output_dir: Path, benchmark: str, config_name: str, results: List
return path
-def derive_config_name(config_file: str | None) -> str:
+def derive_config_name(config_file: Optional[str]) -> str:
if not config_file:
return "default"
stem = Path(config_file).stem
diff --git a/tests/performance/bench_piece_assembly.py b/tests/performance/bench_piece_assembly.py
index c170811..bea5dc2 100644
--- a/tests/performance/bench_piece_assembly.py
+++ b/tests/performance/bench_piece_assembly.py
@@ -22,7 +22,7 @@
from dataclasses import asdict, dataclass
from datetime import datetime, timezone
from pathlib import Path
-from typing import List
+from typing import List, Optional
from ccbt.storage.file_assembler import AsyncFileAssembler # type: ignore
from ccbt.models import TorrentInfo, FileInfo # type: ignore
@@ -108,7 +108,7 @@ def write_json(output_dir: Path, benchmark: str, config_name: str, results: List
return path
-def derive_config_name(config_file: str | None) -> str:
+def derive_config_name(config_file: Optional[str]) -> str:
if not config_file:
return "default"
stem = Path(config_file).stem
diff --git a/tests/performance/bench_utils.py b/tests/performance/bench_utils.py
index 5826ea8..cf5ce7e 100644
--- a/tests/performance/bench_utils.py
+++ b/tests/performance/bench_utils.py
@@ -10,7 +10,7 @@
import sys
from datetime import datetime, timezone
from pathlib import Path
-from typing import Any, Dict, Literal
+from typing import Any, Dict, Literal, Optional
# Configure logging
logging.basicConfig(
@@ -101,7 +101,7 @@ def get_git_metadata() -> Dict[str, Any]:
def determine_record_mode(
- requested_mode: str | None, env_var: str | None = None
+ requested_mode: Optional[str], env_var: Optional[str] = None
) -> Literal["pre-commit", "commit", "both", "none"]:
"""Determine the actual recording mode based on context.
@@ -286,8 +286,8 @@ def record_benchmark_results(
config_name: str,
results: list[Any],
record_mode: str,
- output_base: Path | None = None,
-) -> tuple[Path | None, Path | None]:
+ output_base: Optional[Path] = None,
+) -> tuple[Optional[Path], Optional[Path]]:
"""Record benchmark results according to the specified mode.
Args:
@@ -312,8 +312,8 @@ def record_benchmark_results(
if actual_mode == "none":
return (None, None)
- per_run_path: Path | None = None
- timeseries_path: Path | None = None
+ per_run_path: Optional[Path] = None
+ timeseries_path: Optional[Path] = None
# Platform info
platform_info = {
diff --git a/tests/performance/test_webrtc_performance.py b/tests/performance/test_webrtc_performance.py
index 5b87374..41879e2 100644
--- a/tests/performance/test_webrtc_performance.py
+++ b/tests/performance/test_webrtc_performance.py
@@ -19,6 +19,7 @@
from dataclasses import asdict, dataclass
from datetime import datetime, timezone
from pathlib import Path
+from typing import Optional
from unittest.mock import AsyncMock, MagicMock
try:
@@ -80,9 +81,9 @@ class WebRTCBenchmarkResults:
platform: str
python_version: str
timestamp: str
- connection_establishment: ConnectionEstablishmentResult | None = None
- data_channel_throughput: DataChannelThroughputResult | None = None
- memory_usage: MemoryUsageResult | None = None
+ connection_establishment: Optional[ConnectionEstablishmentResult] = None
+ data_channel_throughput: Optional[DataChannelThroughputResult] = None
+ memory_usage: Optional[MemoryUsageResult] = None
def get_memory_usage_mb() -> float:
diff --git a/tests/scripts/analyze_coverage.py b/tests/scripts/analyze_coverage.py
index 1416af8..3ea14f6 100644
--- a/tests/scripts/analyze_coverage.py
+++ b/tests/scripts/analyze_coverage.py
@@ -5,6 +5,8 @@
line-level analysis of uncovered code.
"""
+from __future__ import annotations
+
import sys
import os
import xml.etree.ElementTree as ET
diff --git a/tests/scripts/bench_all.py b/tests/scripts/bench_all.py
index 7efe7d4..a303580 100644
--- a/tests/scripts/bench_all.py
+++ b/tests/scripts/bench_all.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import subprocess
import sys
from pathlib import Path
diff --git a/tests/scripts/upload_coverage.py b/tests/scripts/upload_coverage.py
index 32181cf..3eb0fbe 100644
--- a/tests/scripts/upload_coverage.py
+++ b/tests/scripts/upload_coverage.py
@@ -15,6 +15,7 @@
import subprocess
import sys
from pathlib import Path
+from typing import Optional
# Configure logging
logging.basicConfig(
@@ -27,8 +28,8 @@
def upload_to_codecov(
coverage_file: Path,
- flags: str | None = None,
- token: str | None = None,
+ flags: Optional[str] = None,
+ token: Optional[str] = None,
) -> int:
"""Upload coverage report to Codecov.
diff --git a/tests/unit/cli/test_advanced_commands_phase2_fixes.py b/tests/unit/cli/test_advanced_commands_phase2_fixes.py
index 1236a1f..543a121 100644
--- a/tests/unit/cli/test_advanced_commands_phase2_fixes.py
+++ b/tests/unit/cli/test_advanced_commands_phase2_fixes.py
@@ -288,6 +288,15 @@ def test_performance_command_execution(self, mock_get_config):
+
+
+
+
+
+
+
+
+
diff --git a/tests/unit/cli/test_interactive.py b/tests/unit/cli/test_interactive.py
index b38f0d2..3998b46 100644
--- a/tests/unit/cli/test_interactive.py
+++ b/tests/unit/cli/test_interactive.py
@@ -15,6 +15,29 @@
pytestmark = [pytest.mark.unit, pytest.mark.cli]
+def _create_mock_config_manager(mock_config=None, config_file=None):
+ """Helper function to create a properly structured mock ConfigManager.
+
+ Args:
+ mock_config: Optional mock config object. If None, creates a default one.
+ config_file: Optional config file path. Defaults to None.
+
+ Returns:
+ Mock ConfigManager instance with config and config_file attributes.
+ """
+ from unittest.mock import Mock
+
+ if mock_config is None:
+ mock_config = Mock()
+ mock_config.model_dump.return_value = {"network": {"port": 6881}}
+ mock_config.disk.backup_dir = "/tmp/backups"
+
+ mock_cm = Mock()
+ mock_cm.config = mock_config
+ mock_cm.config_file = config_file
+ return mock_cm
+
+
@pytest.fixture
def mock_session():
"""Create a mock AsyncSessionManager."""
@@ -56,8 +79,12 @@ def mock_console():
@pytest.fixture
-def interactive_cli(mock_session):
- """Create InteractiveCLI instance."""
+def interactive_cli(mock_session, mock_config_manager):
+ """Create InteractiveCLI instance.
+
+ Uses mock_config_manager fixture to ensure ConfigManager is patched
+ at module level for all commands that create ConfigManager(None) instances.
+ """
from ccbt.cli.interactive import InteractiveCLI
from ccbt.executor.executor import UnifiedCommandExecutor
from ccbt.executor.session_adapter import LocalSessionAdapter
@@ -839,8 +866,8 @@ async def test_cmd_auto_tune_apply(self, interactive_cli):
with patch("ccbt.config.config_conditional.ConditionalConfig", return_value=mock_cc):
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
with patch("ccbt.config.config.set_config") as mock_set_config:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
+ mock_config = MagicMock()
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_auto_tune(["apply"])
@@ -881,9 +908,9 @@ async def test_cmd_template_apply(self, interactive_cli):
with patch.object(ConfigTemplates, "apply_template", return_value=mock_new_dict):
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_template(["apply", "test"])
@@ -930,9 +957,9 @@ async def test_cmd_profile_apply(self, interactive_cli):
with patch.object(ConfigProfiles, "apply_profile", return_value=mock_new_dict):
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_profile(["apply", "test"])
@@ -957,10 +984,10 @@ async def test_cmd_config_backup_list(self, interactive_cli):
mock_cb_class.return_value = mock_cb
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.disk = MagicMock()
- mock_cm.config.disk.backup_dir = "/tmp"
+ mock_config = MagicMock()
+ mock_config.disk = MagicMock()
+ mock_config.disk.backup_dir = "/tmp"
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config_backup(["list"])
@@ -976,11 +1003,10 @@ async def test_cmd_config_backup_create(self, interactive_cli, tmp_path):
mock_cb_class.return_value = mock_cb
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.disk = MagicMock()
- mock_cm.config.disk.backup_dir = "/tmp"
- mock_cm.config_file = str(tmp_path / "config.toml")
+ mock_config = MagicMock()
+ mock_config.disk = MagicMock()
+ mock_config.disk.backup_dir = "/tmp"
+ mock_cm = _create_mock_config_manager(mock_config, config_file=str(tmp_path / "config.toml"))
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config_backup(["create", "test"])
@@ -996,11 +1022,10 @@ async def test_cmd_config_backup_create_failure(self, interactive_cli, tmp_path)
mock_cb_class.return_value = mock_cb
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.disk = MagicMock()
- mock_cm.config.disk.backup_dir = "/tmp"
- mock_cm.config_file = str(tmp_path / "config.toml")
+ mock_config = MagicMock()
+ mock_config.disk = MagicMock()
+ mock_config.disk.backup_dir = "/tmp"
+ mock_cm = _create_mock_config_manager(mock_config, config_file=str(tmp_path / "config.toml"))
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config_backup(["create", "test"])
@@ -1018,11 +1043,10 @@ async def test_cmd_config_backup_restore(self, interactive_cli, tmp_path):
mock_cb_class.return_value = mock_cb
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.disk = MagicMock()
- mock_cm.config.disk.backup_dir = "/tmp"
- mock_cm.config_file = str(tmp_path / "config.toml")
+ mock_config = MagicMock()
+ mock_config.disk = MagicMock()
+ mock_config.disk.backup_dir = "/tmp"
+ mock_cm = _create_mock_config_manager(mock_config, config_file=str(tmp_path / "config.toml"))
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config_backup(["restore", str(backup_file)])
@@ -1040,11 +1064,10 @@ async def test_cmd_config_backup_restore_failure(self, interactive_cli, tmp_path
mock_cb_class.return_value = mock_cb
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.disk = MagicMock()
- mock_cm.config.disk.backup_dir = "/tmp"
- mock_cm.config_file = str(tmp_path / "config.toml")
+ mock_config = MagicMock()
+ mock_config.disk = MagicMock()
+ mock_config.disk.backup_dir = "/tmp"
+ mock_cm = _create_mock_config_manager(mock_config, config_file=str(tmp_path / "config.toml"))
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config_backup(["restore", str(backup_file)])
@@ -1066,9 +1089,9 @@ async def test_cmd_config_diff(self, interactive_cli):
with patch("ccbt.config.config_diff.ConfigDiff", return_value=mock_diff):
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config_diff([])
@@ -1081,9 +1104,9 @@ async def test_cmd_config_export(self, interactive_cli, tmp_path):
output_file = tmp_path / "config.json"
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"test": "value"}) # Must be JSON-serializable
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"test": "value"}) # Must be JSON-serializable
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config_export(["json", str(output_file)])
@@ -1095,7 +1118,8 @@ async def test_cmd_config_export(self, interactive_cli, tmp_path):
async def test_cmd_config_export_no_file(self, interactive_cli):
"""Test cmd_config_export without file (lines 1531-1561)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
+ mock_config = MagicMock()
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm.export = Mock(return_value='{"test": "value"}')
mock_cm_class.return_value = mock_cm
@@ -1110,7 +1134,8 @@ async def test_cmd_config_import(self, interactive_cli, tmp_path):
import_file.write_text('{"network": {"listen_port": 6881}}')
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
+ mock_config = MagicMock()
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm.import_config = Mock()
mock_cm_class.return_value = mock_cm
@@ -1141,9 +1166,9 @@ async def test_cmd_config_schema(self, interactive_cli):
async def test_cmd_config_show_all(self, interactive_cli):
"""Test cmd_config show all (lines 1626-1655)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"test": "value"})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"test": "value"})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config(["show"])
@@ -1154,9 +1179,9 @@ async def test_cmd_config_show_all(self, interactive_cli):
async def test_cmd_config_show_section(self, interactive_cli):
"""Test cmd_config show section (lines 1626-1655)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {"listen_port": 6881}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {"listen_port": 6881}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config(["show", "network"])
@@ -1167,9 +1192,9 @@ async def test_cmd_config_show_section(self, interactive_cli):
async def test_cmd_config_show_key_not_found(self, interactive_cli):
"""Test cmd_config show with key not found (lines 1646-1651)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {"listen_port": 6881}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {"listen_port": 6881}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config(["show", "nonexistent.key"])
@@ -1180,9 +1205,9 @@ async def test_cmd_config_show_key_not_found(self, interactive_cli):
async def test_cmd_config_get(self, interactive_cli):
"""Test cmd_config get (lines 1656-1667)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {"listen_port": 6881}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {"listen_port": 6881}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config(["get", "network.listen_port"])
@@ -1193,9 +1218,9 @@ async def test_cmd_config_get(self, interactive_cli):
async def test_cmd_config_get_not_found(self, interactive_cli):
"""Test cmd_config get with key not found (lines 1656-1667)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {"listen_port": 6881}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {"listen_port": 6881}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config(["get", "nonexistent.key"])
@@ -1213,9 +1238,9 @@ async def test_cmd_config_get_no_args(self, interactive_cli):
async def test_cmd_config_set_bool(self, interactive_cli):
"""Test cmd_config set with bool value (lines 1668-1707)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
with patch("ccbt.models.Config") as mock_model_class:
@@ -1230,9 +1255,9 @@ async def test_cmd_config_set_bool(self, interactive_cli):
async def test_cmd_config_set_int(self, interactive_cli):
"""Test cmd_config set with int value (lines 1668-1707)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
with patch("ccbt.models.Config") as mock_model_class:
@@ -1247,9 +1272,9 @@ async def test_cmd_config_set_int(self, interactive_cli):
async def test_cmd_config_set_float(self, interactive_cli):
"""Test cmd_config set with float value (lines 1668-1707)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
with patch("ccbt.models.Config") as mock_model_class:
@@ -1264,9 +1289,9 @@ async def test_cmd_config_set_float(self, interactive_cli):
async def test_cmd_config_set_string(self, interactive_cli):
"""Test cmd_config set with string value (lines 1668-1707)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
with patch("ccbt.models.Config") as mock_model_class:
@@ -1281,9 +1306,9 @@ async def test_cmd_config_set_string(self, interactive_cli):
async def test_cmd_config_set_error(self, interactive_cli):
"""Test cmd_config set with error (lines 1706-1707)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
with patch("ccbt.models.Config", side_effect=Exception("Validation error")):
@@ -1345,8 +1370,27 @@ async def test_cmd_alerts(interactive_cli):
async def test_cmd_auto_tune(interactive_cli):
"""Test cmd_auto_tune command handler."""
if hasattr(interactive_cli, "cmd_auto_tune"):
- await interactive_cli.cmd_auto_tune([])
- assert True
+ from unittest.mock import patch, MagicMock, Mock
+ from ccbt.config.config_conditional import ConditionalConfig
+
+ mock_cc = MagicMock()
+ mock_tuned_config = MagicMock()
+ mock_tuned_config.model_dump = Mock(return_value={"test": "value"})
+ mock_cc.adjust_for_system = Mock(return_value=(mock_tuned_config, []))
+
+ with patch("ccbt.config.config_conditional.ConditionalConfig", return_value=mock_cc):
+ with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
+ mock_config = Mock()
+ # Create proper disk mock with read_ahead_kib attribute
+ mock_disk = Mock()
+ mock_disk.read_ahead_kib = 512
+ mock_config.disk = mock_disk
+ mock_cm = _create_mock_config_manager(mock_config)
+ mock_cm_class.return_value = mock_cm
+
+ await interactive_cli.cmd_auto_tune([])
+
+ assert interactive_cli.console.print.called
diff --git a/tests/unit/cli/test_interactive_commands_comprehensive.py b/tests/unit/cli/test_interactive_commands_comprehensive.py
index 320c1e5..3744a64 100644
--- a/tests/unit/cli/test_interactive_commands_comprehensive.py
+++ b/tests/unit/cli/test_interactive_commands_comprehensive.py
@@ -18,6 +18,27 @@
pytestmark = [pytest.mark.unit, pytest.mark.cli]
+def _create_mock_config_manager(mock_config=None, config_file=None):
+ """Helper function to create a properly structured mock ConfigManager.
+
+ Args:
+ mock_config: Optional mock config object. If None, creates a default one.
+ config_file: Optional config file path. Defaults to None.
+
+ Returns:
+ Mock ConfigManager instance with config and config_file attributes.
+ """
+ if mock_config is None:
+ mock_config = Mock()
+ mock_config.model_dump.return_value = {"network": {"port": 6881}}
+ mock_config.disk.backup_dir = "/tmp/backups"
+
+ mock_cm = Mock()
+ mock_cm.config = mock_config
+ mock_cm.config_file = config_file
+ return mock_cm
+
+
@pytest.fixture
def mock_session():
"""Create a mock AsyncSessionManager."""
@@ -57,8 +78,12 @@ def mock_console():
@pytest.fixture
-def interactive_cli(mock_session, mock_console):
- """Create an InteractiveCLI instance."""
+def interactive_cli(mock_session, mock_console, mock_config_manager):
+ """Create an InteractiveCLI instance.
+
+ Uses mock_config_manager fixture to ensure ConfigManager is patched
+ at module level for all commands that create ConfigManager(None) instances.
+ """
from tests.conftest import create_interactive_cli
cli = create_interactive_cli(mock_session, mock_console)
@@ -109,7 +134,7 @@ async def test_cmd_auto_tune_preview(interactive_cli):
with patch('ccbt.cli.interactive.ConfigManager') as mock_cm, \
patch('ccbt.config.config_conditional.ConditionalConfig') as mock_cc:
mock_config = Mock()
- mock_cm.return_value = Mock(config=mock_config)
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
mock_cc_instance = Mock()
# Return a mock config object with model_dump method
@@ -131,8 +156,7 @@ async def test_cmd_auto_tune_apply(interactive_cli):
patch('ccbt.config.config_conditional.ConditionalConfig') as mock_cc, \
patch('ccbt.config.config.set_config') as mock_set:
mock_config = Mock()
- mock_cm_instance = Mock(config=mock_config)
- mock_cm.return_value = mock_cm_instance
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
mock_cc_instance = Mock()
# Return a mock config object (could be dict or model)
@@ -152,7 +176,7 @@ async def test_cmd_auto_tune_with_warnings(interactive_cli):
with patch('ccbt.cli.interactive.ConfigManager') as mock_cm, \
patch('ccbt.config.config_conditional.ConditionalConfig') as mock_cc:
mock_config = Mock()
- mock_cm.return_value = Mock(config=mock_config)
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
mock_cc_instance = Mock()
# Return a mock config object with model_dump method
@@ -201,9 +225,14 @@ async def test_cmd_template_apply(interactive_cli):
patch('ccbt.config.config_templates.ConfigTemplates') as mock_templates, \
patch('ccbt.config.config.set_config') as mock_set, \
patch('ccbt.models.Config') as mock_config_model:
+ # CRITICAL FIX: Ensure mock ConfigManager has all required attributes
mock_config = Mock()
mock_config.model_dump.return_value = {"existing": "config"}
- mock_cm.return_value = Mock(config=mock_config)
+
+ mock_cm_instance = Mock()
+ mock_cm_instance.config = mock_config
+ mock_cm_instance.config_file = None
+ mock_cm.return_value = mock_cm_instance
mock_templates.apply_template.return_value = {"new": "config"}
mock_config_model.model_validate.return_value = Mock()
@@ -224,7 +253,7 @@ async def test_cmd_template_apply_with_strategy(interactive_cli):
patch('ccbt.models.Config') as mock_config_model:
mock_config = Mock()
mock_config.model_dump.return_value = {"existing": "config"}
- mock_cm.return_value = Mock(config=mock_config)
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
mock_templates.apply_template.return_value = {"new": "config"}
mock_config_model.model_validate.return_value = Mock()
@@ -276,9 +305,14 @@ async def test_cmd_profile_apply(interactive_cli):
patch('ccbt.config.config_templates.ConfigProfiles') as mock_profiles, \
patch('ccbt.config.config.set_config') as mock_set, \
patch('ccbt.models.Config') as mock_config_model:
+ # CRITICAL FIX: Ensure mock ConfigManager has all required attributes
mock_config = Mock()
mock_config.model_dump.return_value = {"existing": "config"}
- mock_cm.return_value = Mock(config=mock_config)
+
+ mock_cm_instance = Mock()
+ mock_cm_instance.config = mock_config
+ mock_cm_instance.config_file = None
+ mock_cm.return_value = mock_cm_instance
mock_profiles.apply_profile.return_value = {"new": "config"}
mock_config_model.model_validate.return_value = Mock()
@@ -305,7 +339,7 @@ async def test_cmd_config_backup_list(interactive_cli):
patch('ccbt.config.config_backup.ConfigBackup') as mock_backup:
mock_config = Mock()
mock_config.disk.backup_dir = "/backup/dir"
- mock_cm.return_value = Mock(config=mock_config)
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
mock_backup_instance = Mock()
mock_backup_instance.list_backups.return_value = [
@@ -326,7 +360,7 @@ async def test_cmd_config_backup_list_empty(interactive_cli):
patch('ccbt.config.config_backup.ConfigBackup') as mock_backup:
mock_config = Mock()
mock_config.disk.backup_dir = "/backup/dir"
- mock_cm.return_value = Mock(config=mock_config)
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
mock_backup_instance = Mock()
mock_backup_instance.list_backups.return_value = []
@@ -344,8 +378,7 @@ async def test_cmd_config_backup_create(interactive_cli):
patch('ccbt.config.config_backup.ConfigBackup') as mock_backup:
mock_config = Mock()
mock_config.disk.backup_dir = "/backup/dir"
- mock_config.config_file = "/path/to/config.toml"
- mock_cm.return_value = Mock(config=mock_config, config_file="/path/to/config.toml")
+ mock_cm.return_value = _create_mock_config_manager(mock_config, config_file="/path/to/config.toml")
mock_backup_instance = Mock()
mock_backup_instance.create_backup.return_value = (True, "/backup/file.tar.gz", [])
@@ -364,7 +397,7 @@ async def test_cmd_config_backup_create_with_description(interactive_cli):
patch('ccbt.config.config_backup.ConfigBackup') as mock_backup:
mock_config = Mock()
mock_config.disk.backup_dir = "/backup/dir"
- mock_cm.return_value = Mock(config=mock_config, config_file="/path/to/config.toml")
+ mock_cm.return_value = _create_mock_config_manager(mock_config, config_file="/path/to/config.toml")
mock_backup_instance = Mock()
mock_backup_instance.create_backup.return_value = (True, "/backup/file.tar.gz", [])
@@ -382,7 +415,7 @@ async def test_cmd_config_backup_create_no_config_file(interactive_cli):
with patch('ccbt.cli.interactive.ConfigManager') as mock_cm:
mock_config = Mock()
mock_config.disk.backup_dir = "/backup/dir"
- mock_cm.return_value = Mock(config=mock_config, config_file=None)
+ mock_cm.return_value = _create_mock_config_manager(mock_config, config_file=None)
await interactive_cli.cmd_config_backup(["create"])
@@ -396,8 +429,7 @@ async def test_cmd_config_backup_restore(interactive_cli):
patch('ccbt.config.config_backup.ConfigBackup') as mock_backup:
mock_config = Mock()
mock_config.disk.backup_dir = "/backup/dir"
- mock_cm_instance = Mock(config=mock_config, config_file="/path/to/config.toml")
- mock_cm.return_value = mock_cm_instance
+ mock_cm.return_value = _create_mock_config_manager(mock_config, config_file="/path/to/config.toml")
mock_backup_instance = Mock()
# restore_backup returns (ok: bool, msgs: list[str])
@@ -417,7 +449,7 @@ async def test_cmd_config_backup_restore_failure(interactive_cli):
patch('ccbt.config.config_backup.ConfigBackup') as mock_backup:
mock_config = Mock()
mock_config.disk.backup_dir = "/backup/dir"
- mock_cm.return_value = Mock(config=mock_config)
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
mock_backup_instance = Mock()
mock_backup_instance.restore_backup.return_value = (False, "error")
@@ -1036,7 +1068,7 @@ async def test_cmd_config_export_json(interactive_cli, tmp_path):
patch('pathlib.Path') as mock_path:
mock_config = Mock()
mock_config.model_dump.return_value = {"config": "data"}
- mock_cm.return_value = Mock(config=mock_config)
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
mock_path_instance = Mock()
mock_path.return_value = mock_path_instance
@@ -1056,7 +1088,7 @@ async def test_cmd_config_export_toml(interactive_cli):
tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.toml') as tmp:
mock_config = Mock()
mock_config.model_dump.return_value = {"config": "data"}
- mock_cm.return_value = Mock(config=mock_config)
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
tmp_path = tmp.name
@@ -1086,7 +1118,7 @@ async def test_cmd_config_export_yaml(interactive_cli):
tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.yaml') as tmp:
mock_config = Mock()
mock_config.model_dump.return_value = {"config": "data"}
- mock_cm.return_value = Mock(config=mock_config)
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
tmp_path = tmp.name
@@ -1110,7 +1142,7 @@ async def test_cmd_config_export_yaml_not_installed(interactive_cli, tmp_path):
with patch('ccbt.cli.interactive.ConfigManager') as mock_cm:
mock_config = Mock()
mock_config.model_dump.return_value = {"config": "data"}
- mock_cm.return_value = Mock(config=mock_config)
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
# Simulate import error
with patch.dict('sys.modules', {'yaml': None}):
@@ -1143,8 +1175,7 @@ async def test_cmd_config_import_json(interactive_cli, tmp_path):
patch('ccbt.config.config.set_config') as mock_set:
mock_config = Mock()
mock_config.model_dump.return_value = {"existing": "config"}
- mock_cm_instance = Mock(config=mock_config)
- mock_cm.return_value = mock_cm_instance
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
mock_path_instance = Mock()
mock_path_instance.read_text.return_value = '{"new": "config"}'
@@ -1186,7 +1217,7 @@ async def test_cmd_config_show_all(interactive_cli):
with patch('ccbt.cli.interactive.ConfigManager') as mock_cm:
mock_config = Mock()
mock_config.model_dump.return_value = {"network": {"port": 6881}}
- mock_cm.return_value = Mock(config=mock_config)
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
await interactive_cli.cmd_config(["show"])
@@ -1199,7 +1230,7 @@ async def test_cmd_config_show_section(interactive_cli):
with patch('ccbt.cli.interactive.ConfigManager') as mock_cm:
mock_config = Mock()
mock_config.model_dump.return_value = {"network": {"port": 6881}}
- mock_cm.return_value = Mock(config=mock_config)
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
await interactive_cli.cmd_config(["show", "network"])
@@ -1212,7 +1243,7 @@ async def test_cmd_config_show_key_not_found(interactive_cli):
with patch('ccbt.cli.interactive.ConfigManager') as mock_cm:
mock_config = Mock()
mock_config.model_dump.return_value = {"network": {"port": 6881}}
- mock_cm.return_value = Mock(config=mock_config)
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
await interactive_cli.cmd_config(["show", "nonexistent.key"])
@@ -1225,7 +1256,7 @@ async def test_cmd_config_get(interactive_cli):
with patch('ccbt.cli.interactive.ConfigManager') as mock_cm:
mock_config = Mock()
mock_config.model_dump.return_value = {"network": {"port": 6881}}
- mock_cm.return_value = Mock(config=mock_config)
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
await interactive_cli.cmd_config(["get", "network.port"])
@@ -1246,7 +1277,7 @@ async def test_cmd_config_get_key_not_found(interactive_cli):
with patch('ccbt.cli.interactive.ConfigManager') as mock_cm:
mock_config = Mock()
mock_config.model_dump.return_value = {"network": {"port": 6881}}
- mock_cm.return_value = Mock(config=mock_config)
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
await interactive_cli.cmd_config(["get", "nonexistent.key"])
@@ -1269,8 +1300,7 @@ async def test_cmd_config_set(interactive_cli):
patch('ccbt.config.config.set_config') as mock_set:
mock_config = Mock()
mock_config.model_dump.return_value = {"network": {"port": 6881}}
- mock_cm_instance = Mock(config=mock_config)
- mock_cm.return_value = mock_cm_instance
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
mock_config_model.return_value = Mock()
@@ -1288,8 +1318,7 @@ async def test_cmd_config_set_boolean_true(interactive_cli):
patch('ccbt.config.config.set_config') as mock_set:
mock_config = Mock()
mock_config.model_dump.return_value = {"network": {}}
- mock_cm_instance = Mock(config=mock_config)
- mock_cm.return_value = mock_cm_instance
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
mock_config_model.return_value = Mock()
@@ -1307,8 +1336,7 @@ async def test_cmd_config_set_boolean_false(interactive_cli):
patch('ccbt.config.config.set_config') as mock_set:
mock_config = Mock()
mock_config.model_dump.return_value = {"network": {}}
- mock_cm_instance = Mock(config=mock_config)
- mock_cm.return_value = mock_cm_instance
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
mock_config_model.return_value = Mock()
@@ -1326,8 +1354,7 @@ async def test_cmd_config_set_float(interactive_cli):
patch('ccbt.config.config.set_config') as mock_set:
mock_config = Mock()
mock_config.model_dump.return_value = {"network": {}}
- mock_cm_instance = Mock(config=mock_config)
- mock_cm.return_value = mock_cm_instance
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
mock_config_model.return_value = Mock()
@@ -1344,8 +1371,7 @@ async def test_cmd_config_set_error(interactive_cli):
patch('ccbt.models.Config') as mock_config_model:
mock_config = Mock()
mock_config.model_dump.return_value = {"network": {}}
- mock_cm_instance = Mock(config=mock_config)
- mock_cm.return_value = mock_cm_instance
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
# Make ConfigModel raise an error
mock_config_model.side_effect = ValueError("Invalid config")
@@ -1704,8 +1730,7 @@ async def test_cmd_config_import_yaml_not_installed(interactive_cli, tmp_path):
patch('pathlib.Path') as mock_path:
mock_config = Mock()
mock_config.model_dump.return_value = {"existing": "config"}
- mock_cm_instance = Mock(config=mock_config)
- mock_cm.return_value = mock_cm_instance
+ mock_cm.return_value = _create_mock_config_manager(mock_config)
mock_path_instance = Mock()
mock_path_instance.read_text.return_value = "status: error"
diff --git a/tests/unit/cli/test_interactive_comprehensive.py b/tests/unit/cli/test_interactive_comprehensive.py
index 1b4b557..e8e6f22 100644
--- a/tests/unit/cli/test_interactive_comprehensive.py
+++ b/tests/unit/cli/test_interactive_comprehensive.py
@@ -15,6 +15,29 @@
pytestmark = [pytest.mark.unit, pytest.mark.cli]
+def _create_mock_config_manager(mock_config=None, config_file=None):
+ """Helper function to create a properly structured mock ConfigManager.
+
+ Args:
+ mock_config: Optional mock config object. If None, creates a default one.
+ config_file: Optional config file path. Defaults to None.
+
+ Returns:
+ Mock ConfigManager instance with config and config_file attributes.
+ """
+ from unittest.mock import Mock
+
+ if mock_config is None:
+ mock_config = Mock()
+ mock_config.model_dump.return_value = {"network": {"port": 6881}}
+ mock_config.disk.backup_dir = "/tmp/backups"
+
+ mock_cm = Mock()
+ mock_cm.config = mock_config
+ mock_cm.config_file = config_file
+ return mock_cm
+
+
@pytest.fixture
def mock_session():
"""Create a mock AsyncSessionManager."""
@@ -46,8 +69,12 @@ def mock_session():
@pytest.fixture
-def interactive_cli(mock_session):
- """Create InteractiveCLI instance."""
+def interactive_cli(mock_session, mock_config_manager):
+ """Create InteractiveCLI instance.
+
+ Uses mock_config_manager fixture to ensure ConfigManager is patched
+ at module level for all commands that create ConfigManager(None) instances.
+ """
from ccbt.cli.interactive import InteractiveCLI
from tests.conftest import create_interactive_cli
@@ -795,8 +822,8 @@ async def test_cmd_auto_tune_preview(self, interactive_cli):
with patch("ccbt.config.config_conditional.ConditionalConfig", return_value=mock_cc):
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
+ mock_config = MagicMock()
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_auto_tune(["preview"])
@@ -814,8 +841,8 @@ async def test_cmd_auto_tune_apply(self, interactive_cli):
with patch("ccbt.config.config_conditional.ConditionalConfig", return_value=mock_cc):
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
with patch("ccbt.config.config.set_config") as mock_set_config:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
+ mock_config = MagicMock()
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_auto_tune(["apply"])
@@ -856,9 +883,9 @@ async def test_cmd_template_apply(self, interactive_cli):
with patch.object(ConfigTemplates, "apply_template", return_value=mock_new_dict):
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_template(["apply", "test"])
@@ -905,9 +932,9 @@ async def test_cmd_profile_apply(self, interactive_cli):
with patch.object(ConfigProfiles, "apply_profile", return_value=mock_new_dict):
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_profile(["apply", "test"])
@@ -932,10 +959,10 @@ async def test_cmd_config_backup_list(self, interactive_cli):
mock_cb_class.return_value = mock_cb
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.disk = MagicMock()
- mock_cm.config.disk.backup_dir = "/tmp"
+ mock_config = MagicMock()
+ mock_config.disk = MagicMock()
+ mock_config.disk.backup_dir = "/tmp"
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config_backup(["list"])
@@ -1041,9 +1068,9 @@ async def test_cmd_config_diff(self, interactive_cli):
with patch("ccbt.config.config_diff.ConfigDiff", return_value=mock_diff):
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config_diff([])
@@ -1056,9 +1083,9 @@ async def test_cmd_config_export(self, interactive_cli, tmp_path):
output_file = tmp_path / "config.json"
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"test": "value"}) # Must be JSON-serializable
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"test": "value"}) # Must be JSON-serializable
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config_export(["json", str(output_file)])
@@ -1070,7 +1097,8 @@ async def test_cmd_config_export(self, interactive_cli, tmp_path):
async def test_cmd_config_export_no_file(self, interactive_cli):
"""Test cmd_config_export without file (lines 1531-1561)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
+ mock_config = MagicMock()
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm.export = Mock(return_value='{"test": "value"}')
mock_cm_class.return_value = mock_cm
@@ -1085,7 +1113,8 @@ async def test_cmd_config_import(self, interactive_cli, tmp_path):
import_file.write_text('{"network": {"listen_port": 6881}}')
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
+ mock_config = MagicMock()
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm.import_config = Mock()
mock_cm_class.return_value = mock_cm
@@ -1129,9 +1158,9 @@ async def test_cmd_config_show_all(self, interactive_cli):
async def test_cmd_config_show_section(self, interactive_cli):
"""Test cmd_config show section (lines 1626-1655)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {"listen_port": 6881}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {"listen_port": 6881}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config(["show", "network"])
@@ -1142,9 +1171,9 @@ async def test_cmd_config_show_section(self, interactive_cli):
async def test_cmd_config_show_key_not_found(self, interactive_cli):
"""Test cmd_config show with key not found (lines 1646-1651)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {"listen_port": 6881}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {"listen_port": 6881}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config(["show", "nonexistent.key"])
@@ -1155,9 +1184,9 @@ async def test_cmd_config_show_key_not_found(self, interactive_cli):
async def test_cmd_config_get(self, interactive_cli):
"""Test cmd_config get (lines 1656-1667)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {"listen_port": 6881}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {"listen_port": 6881}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config(["get", "network.listen_port"])
@@ -1168,9 +1197,9 @@ async def test_cmd_config_get(self, interactive_cli):
async def test_cmd_config_get_not_found(self, interactive_cli):
"""Test cmd_config get with key not found (lines 1656-1667)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {"listen_port": 6881}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {"listen_port": 6881}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config(["get", "nonexistent.key"])
@@ -1188,9 +1217,9 @@ async def test_cmd_config_get_no_args(self, interactive_cli):
async def test_cmd_config_set_bool(self, interactive_cli):
"""Test cmd_config set with bool value (lines 1668-1707)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
with patch("ccbt.models.Config") as mock_model_class:
@@ -1205,9 +1234,9 @@ async def test_cmd_config_set_bool(self, interactive_cli):
async def test_cmd_config_set_int(self, interactive_cli):
"""Test cmd_config set with int value (lines 1668-1707)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
with patch("ccbt.models.Config") as mock_model_class:
@@ -1222,9 +1251,9 @@ async def test_cmd_config_set_int(self, interactive_cli):
async def test_cmd_config_set_float(self, interactive_cli):
"""Test cmd_config set with float value (lines 1668-1707)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
with patch("ccbt.models.Config") as mock_model_class:
@@ -1239,9 +1268,9 @@ async def test_cmd_config_set_float(self, interactive_cli):
async def test_cmd_config_set_string(self, interactive_cli):
"""Test cmd_config set with string value (lines 1668-1707)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
with patch("ccbt.models.Config") as mock_model_class:
@@ -1256,9 +1285,9 @@ async def test_cmd_config_set_string(self, interactive_cli):
async def test_cmd_config_set_error(self, interactive_cli):
"""Test cmd_config set with error (lines 1706-1707)."""
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.model_dump = Mock(return_value={"network": {}})
+ mock_config = MagicMock()
+ mock_config.model_dump = Mock(return_value={"network": {}})
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
with patch("ccbt.models.Config", side_effect=Exception("Validation error")):
diff --git a/tests/unit/cli/test_interactive_coverage.py b/tests/unit/cli/test_interactive_coverage.py
index 9278247..a0a729f 100644
--- a/tests/unit/cli/test_interactive_coverage.py
+++ b/tests/unit/cli/test_interactive_coverage.py
@@ -33,8 +33,12 @@ def mock_session():
@pytest.fixture
-def interactive_cli(mock_session):
- """Create InteractiveCLI instance."""
+def interactive_cli(mock_session, mock_config_manager):
+ """Create InteractiveCLI instance.
+
+ Uses mock_config_manager fixture to ensure ConfigManager is patched
+ at module level for all commands that create ConfigManager(None) instances.
+ """
from tests.conftest import create_interactive_cli
console = Console(file=open("nul", "w") if hasattr(open, "__call__") else None)
cli = create_interactive_cli(mock_session, console)
diff --git a/tests/unit/cli/test_interactive_enhanced.py b/tests/unit/cli/test_interactive_enhanced.py
index e2d2b83..97b87b8 100644
--- a/tests/unit/cli/test_interactive_enhanced.py
+++ b/tests/unit/cli/test_interactive_enhanced.py
@@ -8,6 +8,7 @@
import pytest
from rich.console import Console
+from typing import Optional
from ccbt.cli.interactive import InteractiveCLI
@@ -19,7 +20,7 @@ def __init__(self) -> None:
async def add_torrent(self, td: dict, resume: bool = False) -> str:
return "00" * 20
- async def get_torrent_status(self, ih: str) -> dict | None:
+ async def get_torrent_status(self, ih: str) -> Optional[dict]:
return self._status
async def pause_torrent(self, ih: str) -> bool:
diff --git a/tests/unit/cli/test_interactive_expanded.py b/tests/unit/cli/test_interactive_expanded.py
index 2de6a03..46220b8 100644
--- a/tests/unit/cli/test_interactive_expanded.py
+++ b/tests/unit/cli/test_interactive_expanded.py
@@ -62,8 +62,12 @@ def mock_console():
@pytest.fixture
-def interactive_cli(mock_session, mock_console):
- """Create an InteractiveCLI instance."""
+def interactive_cli(mock_session, mock_console, mock_config_manager):
+ """Create an InteractiveCLI instance.
+
+ Uses mock_config_manager fixture to ensure ConfigManager is patched
+ at module level for all commands that create ConfigManager(None) instances.
+ """
from tests.conftest import create_interactive_cli
cli = create_interactive_cli(mock_session, mock_console)
@@ -695,8 +699,31 @@ async def test_cmd_capabilities(interactive_cli):
async def test_cmd_auto_tune(interactive_cli):
"""Test cmd_auto_tune command handler."""
if hasattr(interactive_cli, "cmd_auto_tune"):
- await interactive_cli.cmd_auto_tune([])
- assert True
+ from unittest.mock import patch, MagicMock, Mock
+ from ccbt.config.config_conditional import ConditionalConfig
+
+ mock_cc = MagicMock()
+ mock_tuned_config = MagicMock()
+ mock_tuned_config.model_dump = Mock(return_value={"test": "value"})
+ mock_cc.adjust_for_system = Mock(return_value=(mock_tuned_config, []))
+
+ with patch("ccbt.config.config_conditional.ConditionalConfig", return_value=mock_cc):
+ with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
+ mock_config = Mock()
+ # Create proper disk mock with read_ahead_kib attribute
+ mock_disk = Mock()
+ mock_disk.read_ahead_kib = 512
+ mock_config.disk = mock_disk
+ mock_config.model_dump.return_value = {"network": {"port": 6881}}
+ # Create mock ConfigManager instance
+ mock_cm = MagicMock()
+ mock_cm.config = mock_config
+ mock_cm.config_file = None
+ mock_cm_class.return_value = mock_cm
+
+ await interactive_cli.cmd_auto_tune([])
+
+ assert interactive_cli.console.print.called
@pytest.mark.asyncio
diff --git a/tests/unit/cli/test_interactive_expanded_coverage.py b/tests/unit/cli/test_interactive_expanded_coverage.py
index f9999dc..8cf4c35 100644
--- a/tests/unit/cli/test_interactive_expanded_coverage.py
+++ b/tests/unit/cli/test_interactive_expanded_coverage.py
@@ -44,8 +44,12 @@ def mock_session():
@pytest.fixture
-def interactive_cli(mock_session):
- """Create InteractiveCLI instance."""
+def interactive_cli(mock_session, mock_config_manager):
+ """Create InteractiveCLI instance.
+
+ Uses mock_config_manager fixture to ensure ConfigManager is patched
+ at module level for all commands that create ConfigManager(None) instances.
+ """
from tests.conftest import create_interactive_cli
console = Mock(spec=Console)
diff --git a/tests/unit/cli/test_interactive_file_selection.py b/tests/unit/cli/test_interactive_file_selection.py
index 1800c51..2714a1c 100644
--- a/tests/unit/cli/test_interactive_file_selection.py
+++ b/tests/unit/cli/test_interactive_file_selection.py
@@ -127,8 +127,12 @@ def interactive_cli_with_layout(interactive_cli):
@pytest.fixture
-def interactive_cli(mock_session, mock_console):
- """Create an InteractiveCLI instance."""
+def interactive_cli(mock_session, mock_console, mock_config_manager):
+ """Create an InteractiveCLI instance.
+
+ Uses mock_config_manager fixture to ensure ConfigManager is patched
+ at module level for all commands that create ConfigManager(None) instances.
+ """
from tests.conftest import create_interactive_cli
return create_interactive_cli(mock_session, mock_console)
diff --git a/tests/unit/cli/test_interactive_final_coverage.py b/tests/unit/cli/test_interactive_final_coverage.py
index 7944270..332ee80 100644
--- a/tests/unit/cli/test_interactive_final_coverage.py
+++ b/tests/unit/cli/test_interactive_final_coverage.py
@@ -24,6 +24,29 @@
pytestmark = [pytest.mark.unit, pytest.mark.cli]
+def _create_mock_config_manager(mock_config=None, config_file=None):
+ """Helper function to create a properly structured mock ConfigManager.
+
+ Args:
+ mock_config: Optional mock config object. If None, creates a default one.
+ config_file: Optional config file path. Defaults to None.
+
+ Returns:
+ Mock ConfigManager instance with config and config_file attributes.
+ """
+ from unittest.mock import Mock
+
+ if mock_config is None:
+ mock_config = Mock()
+ mock_config.model_dump.return_value = {"network": {"port": 6881}}
+ mock_config.disk.backup_dir = "/tmp/backups"
+
+ mock_cm = Mock()
+ mock_cm.config = mock_config
+ mock_cm.config_file = config_file
+ return mock_cm
+
+
@pytest.fixture
def mock_session():
"""Create a mock AsyncSessionManager."""
@@ -38,8 +61,12 @@ def mock_session():
@pytest.fixture
-def interactive_cli(mock_session):
- """Create InteractiveCLI instance."""
+def interactive_cli(mock_session, mock_config_manager):
+ """Create InteractiveCLI instance.
+
+ Uses mock_config_manager fixture to ensure ConfigManager is patched
+ at module level for all commands that create ConfigManager(None) instances.
+ """
from ccbt.cli.interactive import InteractiveCLI
from tests.conftest import create_interactive_cli
@@ -221,8 +248,8 @@ async def test_cmd_auto_tune_with_warnings(self, interactive_cli):
with patch("ccbt.config.config_conditional.ConditionalConfig", return_value=mock_cc):
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
+ mock_config = MagicMock()
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_auto_tune(["preview"])
@@ -240,10 +267,10 @@ async def test_cmd_config_backup_list_empty(self, interactive_cli):
mock_cb_class.return_value = mock_cb
with patch("ccbt.cli.interactive.ConfigManager") as mock_cm_class:
- mock_cm = MagicMock()
- mock_cm.config = MagicMock()
- mock_cm.config.disk = MagicMock()
- mock_cm.config.disk.backup_dir = "/tmp"
+ mock_config = MagicMock()
+ mock_config.disk = MagicMock()
+ mock_config.disk.backup_dir = "/tmp"
+ mock_cm = _create_mock_config_manager(mock_config)
mock_cm_class.return_value = mock_cm
await interactive_cli.cmd_config_backup(["list"])
diff --git a/tests/unit/cli/test_main.py b/tests/unit/cli/test_main.py
index 793b83b..7b46b28 100644
--- a/tests/unit/cli/test_main.py
+++ b/tests/unit/cli/test_main.py
@@ -3,6 +3,8 @@
Target: 95%+ coverage for ccbt/__main__.py.
"""
+from __future__ import annotations
+
import argparse
import asyncio
import importlib
diff --git a/tests/unit/cli/test_simplification_regression.py b/tests/unit/cli/test_simplification_regression.py
index 9bcc419..e125c50 100644
--- a/tests/unit/cli/test_simplification_regression.py
+++ b/tests/unit/cli/test_simplification_regression.py
@@ -337,6 +337,15 @@ def test_no_regressions_in_existing_tests(self):
+
+
+
+
+
+
+
+
+
diff --git a/tests/unit/discovery/test_tracker_session_statistics.py b/tests/unit/discovery/test_tracker_session_statistics.py
index 2857fef..e69a530 100644
--- a/tests/unit/discovery/test_tracker_session_statistics.py
+++ b/tests/unit/discovery/test_tracker_session_statistics.py
@@ -301,6 +301,15 @@ def test_tracker_session_statistics_persistence(self):
+
+
+
+
+
+
+
+
+
diff --git a/tests/unit/protocols/test_bittorrent_v2_upgrade.py b/tests/unit/protocols/test_bittorrent_v2_upgrade.py
index 509688a..375bd8c 100644
--- a/tests/unit/protocols/test_bittorrent_v2_upgrade.py
+++ b/tests/unit/protocols/test_bittorrent_v2_upgrade.py
@@ -200,7 +200,7 @@ def test_check_extension_protocol_support_with_reserved_bytes(self):
connection.extension_protocol = None
connection.extension_manager = None
reserved_bytes = bytearray(RESERVED_BYTES_LEN)
- reserved_bytes[0] |= 0x10 # Set bit 5 for extension protocol
+ reserved_bytes[5] |= 0x10 # Set bit 4 in byte 5 for extension protocol
connection.reserved_bytes = bytes(reserved_bytes)
result = _check_extension_protocol_support(connection)
diff --git a/tests/unit/protocols/test_ipfs_connection.py b/tests/unit/protocols/test_ipfs_connection.py
index f3224ae..79d9a49 100644
--- a/tests/unit/protocols/test_ipfs_connection.py
+++ b/tests/unit/protocols/test_ipfs_connection.py
@@ -142,7 +142,8 @@ async def test_reconnect_ipfs_success(ipfs_protocol, mock_ipfs_client):
"""Test successful reconnection to IPFS."""
ipfs_protocol._connection_retries = 1
- with patch("ccbt.protocols.ipfs.ipfshttpclient.connect", return_value=mock_ipfs_client):
+ with patch("ccbt.protocols.ipfs.ipfshttpclient.connect", return_value=mock_ipfs_client), \
+ patch("asyncio.sleep"):
result = await ipfs_protocol._reconnect_ipfs()
assert result is True
diff --git a/tests/unit/protocols/test_ipfs_protocol_comprehensive.py b/tests/unit/protocols/test_ipfs_protocol_comprehensive.py
index f68ba90..a258bbc 100644
--- a/tests/unit/protocols/test_ipfs_protocol_comprehensive.py
+++ b/tests/unit/protocols/test_ipfs_protocol_comprehensive.py
@@ -288,10 +288,11 @@ async def mock_to_thread(func, *args, **kwargs):
# For add_peer or other calls, just return what's needed
return None
+ mock_send = patch.object(ipfs_protocol, "send_message", return_value=True)
with (
patch.object(ipfs_protocol, "_parse_multiaddr", side_effect=mock_parse_multiaddr),
patch.object(ipfs_protocol, "_setup_message_listener", return_value=None),
- patch.object(ipfs_protocol, "send_message", return_value=True) as mock_send,
+ mock_send,
patch("ccbt.protocols.ipfs.to_thread", side_effect=mock_to_thread),
):
result = await ipfs_protocol.connect_peer(peer_info)
diff --git a/tests/unit/protocols/test_protocol_base.py b/tests/unit/protocols/test_protocol_base.py
index 3895d2a..fdb6ad4 100644
--- a/tests/unit/protocols/test_protocol_base.py
+++ b/tests/unit/protocols/test_protocol_base.py
@@ -4,6 +4,7 @@
from __future__ import annotations
import pytest
+from typing import Optional
from ccbt.models import PeerInfo, TorrentInfo
from ccbt.protocols.base import (
@@ -51,7 +52,7 @@ async def send_message(self, peer_id: str, message: bytes) -> bool:
return True
return False
- async def receive_message(self, peer_id: str) -> bytes | None:
+ async def receive_message(self, peer_id: str) -> Optional[bytes]:
"""Receive message from peer."""
if peer_id in self.active_connections:
self.stats.messages_received += 1
diff --git a/tests/unit/protocols/test_protocol_base_comprehensive.py b/tests/unit/protocols/test_protocol_base_comprehensive.py
index e8ddb23..5e19490 100644
--- a/tests/unit/protocols/test_protocol_base_comprehensive.py
+++ b/tests/unit/protocols/test_protocol_base_comprehensive.py
@@ -17,6 +17,7 @@
import asyncio
import time
+from typing import Optional
from unittest.mock import AsyncMock, MagicMock, Mock, patch
import pytest
@@ -74,7 +75,7 @@ async def send_message(self, peer_id: str, message: bytes) -> bool:
return True
return False
- async def receive_message(self, peer_id: str) -> bytes | None:
+ async def receive_message(self, peer_id: str) -> Optional[bytes]:
"""Receive message from peer."""
if peer_id in self.active_connections:
self.stats.messages_received += 1
diff --git a/tests/unit/protocols/test_webrtc_manager.py b/tests/unit/protocols/test_webrtc_manager.py
index 0dfac45..71b0ac2 100644
--- a/tests/unit/protocols/test_webrtc_manager.py
+++ b/tests/unit/protocols/test_webrtc_manager.py
@@ -10,6 +10,7 @@
import asyncio
import pytest
+from typing import Optional
from unittest.mock import AsyncMock, MagicMock, Mock, patch
# Try to import aiortc, skip tests if not available
@@ -200,7 +201,7 @@ async def test_create_peer_connection_with_ice_callback(
peer_id = "test_peer_1"
callback_called = []
- async def ice_callback(peer_id: str, candidate: dict | None):
+ async def ice_callback(peer_id: str, candidate: Optional[dict]):
callback_called.append((peer_id, candidate))
from ccbt.protocols.webtorrent import webrtc_manager as webrtc_manager_module
diff --git a/tests/unit/protocols/test_webrtc_manager_coverage.py b/tests/unit/protocols/test_webrtc_manager_coverage.py
index ca3e6d3..01dcae1 100644
--- a/tests/unit/protocols/test_webrtc_manager_coverage.py
+++ b/tests/unit/protocols/test_webrtc_manager_coverage.py
@@ -6,6 +6,7 @@
from __future__ import annotations
import pytest
+from typing import Optional
from unittest.mock import AsyncMock, MagicMock, patch
# Try to import aiortc, skip tests if not available
@@ -66,7 +67,7 @@ async def test_create_peer_connection_ice_candidate_none(self, webrtc_manager, m
peer_id = "test_peer_1"
callback_called = []
- async def ice_callback(peer_id: str, candidate: dict | None):
+ async def ice_callback(peer_id: str, candidate: Optional[dict]):
callback_called.append((peer_id, candidate))
with patch.object(webrtc_manager_module, "RTCPeerConnection") as mock_pc_class:
diff --git a/tests/unit/proxy/conftest.py b/tests/unit/proxy/conftest.py
index 2cb9b37..b6c909a 100644
--- a/tests/unit/proxy/conftest.py
+++ b/tests/unit/proxy/conftest.py
@@ -2,6 +2,7 @@
from __future__ import annotations
+from typing import Optional
from unittest.mock import AsyncMock, MagicMock
@@ -29,7 +30,7 @@ def __await__(self):
return iter([])
-def create_async_response_mock(status: int = 200, headers: dict | None = None) -> AsyncMock:
+def create_async_response_mock(status: int = 200, headers: Optional[dict] = None) -> AsyncMock:
"""Create a properly configured async response mock.
Args:
diff --git a/tests/unit/security/test_rate_limiter_coverage_gaps.py b/tests/unit/security/test_rate_limiter_coverage_gaps.py
index c550546..221c355 100644
--- a/tests/unit/security/test_rate_limiter_coverage_gaps.py
+++ b/tests/unit/security/test_rate_limiter_coverage_gaps.py
@@ -142,7 +142,9 @@ async def test_get_peer_wait_time_when_limited(rate_limiter):
# Should calculate wait time based on remaining window
assert wait_time >= 0.0
assert wait_time <= 60.0
- assert wait_time == max(0.0, 60.0 - 30.0) # time_window - time_since_last
+ # Use approximate comparison due to floating point precision
+ expected_wait = max(0.0, 60.0 - 30.0) # time_window - time_since_last
+ assert abs(wait_time - expected_wait) < 0.01 # Allow small floating point differences
@pytest.mark.asyncio
diff --git a/tests/unit/session/test_announce_controller.py b/tests/unit/session/test_announce_controller.py
index e6b51f2..8dc75b3 100644
--- a/tests/unit/session/test_announce_controller.py
+++ b/tests/unit/session/test_announce_controller.py
@@ -2,7 +2,7 @@
import asyncio
from types import SimpleNamespace
-from typing import Any, List
+from typing import Any, List, Optional
from ccbt.config.config import get_config
from ccbt.session.announce import AnnounceController
@@ -29,7 +29,7 @@ async def announce_to_multiple( # type: ignore[override]
port: int = 6881,
uploaded: int = 0,
downloaded: int = 0,
- left: int | None = None,
+ left: Optional[int] = None,
event: str = "started",
) -> List[Any]:
# Return two peers across two responses
diff --git a/tests/unit/session/test_async_main_metrics.py b/tests/unit/session/test_async_main_metrics.py
index ecee710..f6b3a6f 100644
--- a/tests/unit/session/test_async_main_metrics.py
+++ b/tests/unit/session/test_async_main_metrics.py
@@ -25,9 +25,19 @@ async def test_metrics_attribute_initialized_as_none(self):
@pytest.mark.asyncio
async def test_metrics_initialized_on_start_when_enabled(self, mock_config_enabled):
"""Test metrics initialized when enabled in config."""
+ from unittest.mock import AsyncMock, MagicMock, patch
+
session = AsyncSessionManager()
-
- await session.start()
+
+ # Mock NAT manager to prevent hanging on discovery
+ mock_nat = MagicMock()
+ mock_nat.start = AsyncMock()
+ mock_nat.stop = AsyncMock()
+ mock_nat.map_listen_ports = AsyncMock()
+ mock_nat.wait_for_mapping = AsyncMock()
+
+ with patch.object(session, '_make_nat_manager', return_value=mock_nat):
+ await session.start()
# Check if metrics were initialized
# They may be None if dependencies missing or config disabled
@@ -36,7 +46,8 @@ async def test_metrics_initialized_on_start_when_enabled(self, mock_config_enabl
# If metrics enabled, should be initialized (if no errors)
# We can't assert it's not None because dependencies might be missing
# But we can assert it's either None or MetricsCollector
- assert session.metrics is None or hasattr(session.metrics, "get_all_metrics")
+ # MetricsCollector has methods like get_metrics_summary, get_torrent_metrics, etc.
+ assert session.metrics is None or hasattr(session.metrics, "get_metrics_summary")
await session.stop()
@@ -44,13 +55,26 @@ async def test_metrics_initialized_on_start_when_enabled(self, mock_config_enabl
async def test_metrics_not_initialized_when_disabled(self, mock_config_disabled):
"""Test metrics not initialized when disabled in config."""
from ccbt.monitoring import shutdown_metrics
+ from unittest.mock import AsyncMock, MagicMock, patch
# Ensure clean state
await shutdown_metrics()
+ # CRITICAL: Patch session.config directly to use mocked config
+ # The session manager caches config in __init__(), so we need to patch it
session = AsyncSessionManager()
-
- await session.start()
+ # Override the cached config with the mocked one
+ session.config = mock_config_disabled
+
+ # Mock NAT manager to prevent hanging on discovery
+ mock_nat = MagicMock()
+ mock_nat.start = AsyncMock()
+ mock_nat.stop = AsyncMock()
+ mock_nat.map_listen_ports = AsyncMock()
+ mock_nat.wait_for_mapping = AsyncMock()
+
+ with patch.object(session, '_make_nat_manager', return_value=mock_nat):
+ await session.start()
# Metrics should be None when disabled
assert session.metrics is None
@@ -63,9 +87,19 @@ async def test_metrics_not_initialized_when_disabled(self, mock_config_disabled)
@pytest.mark.asyncio
async def test_metrics_shutdown_on_stop(self, mock_config_enabled):
"""Test metrics shutdown when session stops."""
+ from unittest.mock import AsyncMock, MagicMock, patch
+
session = AsyncSessionManager()
-
- await session.start()
+
+ # Mock NAT manager to prevent hanging on discovery
+ mock_nat = MagicMock()
+ mock_nat.start = AsyncMock()
+ mock_nat.stop = AsyncMock()
+ mock_nat.map_listen_ports = AsyncMock()
+ mock_nat.wait_for_mapping = AsyncMock()
+
+ with patch.object(session, '_make_nat_manager', return_value=mock_nat):
+ await session.start()
# Track if metrics were set
had_metrics = session.metrics is not None
@@ -84,10 +118,20 @@ async def test_metrics_shutdown_on_stop(self, mock_config_enabled):
@pytest.mark.asyncio
async def test_metrics_shutdown_when_not_initialized(self):
"""Test shutdown when metrics were never initialized."""
+ from unittest.mock import AsyncMock, MagicMock, patch
+
session = AsyncSessionManager()
-
- # Start without metrics
- await session.start()
+
+ # Mock NAT manager to prevent hanging on discovery
+ mock_nat = MagicMock()
+ mock_nat.start = AsyncMock()
+ mock_nat.stop = AsyncMock()
+ mock_nat.map_listen_ports = AsyncMock()
+ mock_nat.wait_for_mapping = AsyncMock()
+
+ with patch.object(session, '_make_nat_manager', return_value=mock_nat):
+ # Start without metrics
+ await session.start()
# If metrics weren't initialized, stop should still work
await session.stop()
@@ -110,11 +154,21 @@ def raise_error():
monkeypatch.setattr(config_module, "get_config", raise_error)
+ from unittest.mock import AsyncMock, MagicMock, patch
+
session = AsyncSessionManager()
-
- # Should not raise, but metrics should be None
- # init_metrics() handles exceptions internally and returns None
- await session.start()
+
+ # Mock NAT manager to prevent hanging on discovery
+ mock_nat = MagicMock()
+ mock_nat.start = AsyncMock()
+ mock_nat.stop = AsyncMock()
+ mock_nat.map_listen_ports = AsyncMock()
+ mock_nat.wait_for_mapping = AsyncMock()
+
+ with patch.object(session, '_make_nat_manager', return_value=mock_nat):
+ # Should not raise, but metrics should be None
+ # init_metrics() handles exceptions internally and returns None
+ await session.start()
# Exception is caught in init_metrics() and returns None, so self.metrics is None
assert session.metrics is None
@@ -137,9 +191,19 @@ async def raise_error():
shutdown_called = True
raise Exception("Shutdown error")
+ from unittest.mock import AsyncMock, MagicMock, patch
+
# First start normally
session = AsyncSessionManager()
- await session.start()
+ # Mock NAT manager to prevent hanging on discovery
+ mock_nat = MagicMock()
+ mock_nat.start = AsyncMock()
+ mock_nat.stop = AsyncMock()
+ mock_nat.map_listen_ports = AsyncMock()
+ mock_nat.wait_for_mapping = AsyncMock()
+
+ with patch.object(session, '_make_nat_manager', return_value=mock_nat):
+ await session.start()
# Then patch shutdown to raise
monkeypatch.setattr(monitoring_module, "shutdown_metrics", raise_error)
@@ -163,42 +227,69 @@ async def raise_error():
@pytest.mark.asyncio
async def test_metrics_accessible_during_session(self, mock_config_enabled):
"""Test metrics are accessible via session.metrics during session."""
+ from unittest.mock import AsyncMock, MagicMock, patch
+
session = AsyncSessionManager()
-
- await session.start()
+
+ # Mock NAT manager to prevent hanging on discovery
+ mock_nat = MagicMock()
+ mock_nat.start = AsyncMock()
+ mock_nat.stop = AsyncMock()
+ mock_nat.map_listen_ports = AsyncMock()
+ mock_nat.wait_for_mapping = AsyncMock()
+
+ with patch.object(session, '_make_nat_manager', return_value=mock_nat):
+ await session.start()
if session.metrics is not None:
# Should be able to call methods
- all_metrics = session.metrics.get_all_metrics()
- assert isinstance(all_metrics, dict)
-
- stats = session.metrics.get_metrics_statistics()
- assert isinstance(stats, dict)
+ summary = session.metrics.get_metrics_summary()
+ assert isinstance(summary, dict)
await session.stop()
@pytest.mark.asyncio
async def test_multiple_start_stop_cycles(self, mock_config_enabled):
"""Test metrics handling across multiple start/stop cycles."""
+ from unittest.mock import AsyncMock, MagicMock, patch
+
+ # CRITICAL: Patch session.config directly to use mocked config
+ # The session manager caches config in __init__(), so we need to patch it
session = AsyncSessionManager()
-
- # First cycle
- await session.start()
- metrics1 = session.metrics
- await session.stop()
- assert session.metrics is None
-
- # Second cycle
- await session.start()
- metrics2 = session.metrics
- await session.stop()
- assert session.metrics is None
+ # Override the cached config with the mocked one
+ session.config = mock_config_enabled
+
+ # Mock NAT manager to prevent hanging on discovery
+ mock_nat = MagicMock()
+ mock_nat.start = AsyncMock()
+ mock_nat.stop = AsyncMock()
+ mock_nat.map_listen_ports = AsyncMock()
+ mock_nat.wait_for_mapping = AsyncMock()
+
+ with patch.object(session, '_make_nat_manager', return_value=mock_nat):
+ # First cycle
+ await session.start()
+ metrics1 = session.metrics
+ await session.stop()
+ assert session.metrics is None
+
+ # Second cycle
+ await session.start()
+ metrics2 = session.metrics
+ await session.stop()
+ assert session.metrics is None
# Metrics should be reinitialized on each start
- # (singleton means they might be the same instance)
+ # Note: Metrics() creates a new instance each time (not a singleton),
+ # so metrics1 and metrics2 will be different instances
+ # The important thing is that metrics are properly initialized and cleaned up
if metrics1 is not None and metrics2 is not None:
- # They should be the same singleton instance
- assert metrics1 is metrics2
+ # Both should be MetricsCollector instances
+ from ccbt.utils.metrics import MetricsCollector
+ assert isinstance(metrics1, MetricsCollector)
+ assert isinstance(metrics2, MetricsCollector)
+ # They will be different instances (not singletons)
+ # This is expected behavior - each start() creates a new Metrics instance
@pytest.fixture(scope="function")
diff --git a/tests/unit/session/test_checkpoint_controller.py b/tests/unit/session/test_checkpoint_controller.py
index 253e994..c7ffb70 100644
--- a/tests/unit/session/test_checkpoint_controller.py
+++ b/tests/unit/session/test_checkpoint_controller.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import asyncio
from pathlib import Path
from types import SimpleNamespace
diff --git a/tests/unit/session/test_checkpoint_persistence.py b/tests/unit/session/test_checkpoint_persistence.py
index e8b52ad..18db1ec 100644
--- a/tests/unit/session/test_checkpoint_persistence.py
+++ b/tests/unit/session/test_checkpoint_persistence.py
@@ -9,7 +9,7 @@
import asyncio
from pathlib import Path
from types import SimpleNamespace
-from typing import Any
+from typing import Any, Optional
import pytest
@@ -57,8 +57,8 @@ class FakeSession:
def __init__(
self,
info_hash: bytes,
- options: dict[str, Any] | None = None,
- session_manager: Any | None = None,
+ options: Optional[dict[str, Any]] = None,
+ session_manager: Optional[Any] = None,
) -> None:
self.info = SimpleNamespace(info_hash=info_hash, name="test_torrent")
self.options = options or {}
@@ -122,6 +122,10 @@ def __init__(self) -> None:
self._per_torrent_limits = {
info_hash: {"down_kib": 100, "up_kib": 50}
}
+
+ def get_per_torrent_limits(self, info_hash: bytes) -> dict[str, int] | None:
+ """Get per-torrent rate limits."""
+ return self._per_torrent_limits.get(info_hash)
session_manager = FakeSessionManager()
session = FakeSession(info_hash, session_manager=session_manager)