diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..c086f9c --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,106 @@ +name: CI + +on: + push: + pull_request: + +jobs: + unit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: "3.12" + - name: Install deps + run: | + python -m pip install --upgrade pip + pip install uv + uv pip install .[tests] + - name: Run unit tests + run: pytest -q + + s3-integration: + runs-on: ubuntu-latest + services: + minio: + image: minio/minio:RELEASE.2025-09-07T16-13-09Z-cpuv1 + env: + MINIO_ROOT_USER: minioadmin + MINIO_ROOT_PASSWORD: minioadmin + ports: + - 9000:9000 + options: >- + --health-cmd "curl -f http://localhost:9000/minio/health/ready || exit 1" + --health-interval 5s + --health-timeout 5s + --health-retries 10 + command: server /data --console-address :9001 + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: "3.12" + - name: Install deps + run: | + python -m pip install --upgrade pip + pip install uv + uv pip install .[tests tests-s3] + - name: Configure MinIO bucket + env: + AWS_ACCESS_KEY_ID: minioadmin + AWS_SECRET_ACCESS_KEY: minioadmin + AWS_REGION: us-east-1 + S3_ENDPOINT_URL: http://localhost:9000 + run: | + python - <<'PY' +import boto3 +import os +s3 = boto3.client( + "s3", + endpoint_url=os.environ["S3_ENDPOINT_URL"], + aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"], + aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"], + region_name=os.environ["AWS_REGION"], +) +for name in ["test-bkt", "test-bkt2", "test-bkt-swr", "test-bkt-chain"]: + try: + s3.create_bucket(Bucket=name) + except Exception: + pass +PY + - name: Run S3 integration tests + env: + AWS_ACCESS_KEY_ID: minioadmin + AWS_SECRET_ACCESS_KEY: minioadmin + AWS_REGION: us-east-1 + S3_ENDPOINT_URL: http://localhost:9000 + run: pytest -q tests/test_s3_cache_integration.py + + gcs-integration: + runs-on: ubuntu-latest + services: + fake-gcs: + image: fsouza/fake-gcs-server:latest + ports: + - 4443:4443 + options: >- + --health-cmd "curl -f http://localhost:4443/storage/v1/b || exit 1" + --health-interval 5s + --health-timeout 5s + --health-retries 10 + command: ["-scheme", "http", "-public-host", "localhost:4443"] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: "3.12" + - name: Install deps + run: | + python -m pip install --upgrade pip + pip install uv + uv pip install .[tests tests-gcs] + - name: Run GCS integration tests + env: + STORAGE_EMULATOR_HOST: http://localhost:4443 + run: pytest -q -m integration tests/test_gcs_cache_integration.py diff --git a/CHANGELOG.md b/CHANGELOG.md index a1aac23..30bb1a1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,21 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.2.2-beta] - 2025-12-25 + +### Added +- **LocalFileCache**: filesystem-backed storage with TTL, atomic writes, optional compression, and dedupe to skip identical rewrites. +- **ChainCache**: composable multi-level cache (e.g., InMem -> Redis -> S3/GCS/local file) with read-through promotion and write-through semantics. +- **Dedupe writes**: opt-in for RedisCache, S3Cache, GCSCache, and LocalFileCache to avoid rewriting unchanged payloads. 
+- **Docs**: production-grade BGCache writer/reader guide (`docs/bgcache.md`) now shows Single-Writer/Multi-Reader with ChainCache cold tiers (S3/GCS/LocalFileCache) and per-process readers. +- README updates for ChainCache, dedupe_writes, LocalFileCache. +- **Tests**: integration coverage for LocalFileCache (TTL expiry, dedupe, decorator usage, ChainCache integration). +- **Refactor**: storage backends split into `advanced_caching.storage` package (per-backend modules) while preserving public exports. + +### Fixed +- Redis dedupe now extends TTL when skipping identical writes. +- SharedAsyncScheduler uses current event loop when available (stability for async BGCache). + ## [0.2.1] - 2025-12-25 ### Fixed @@ -13,6 +28,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added - `configure()` class method on all decorators to easily create pre-configured cache instances (e.g., `MyCache = TTLCache.configure(cache=RedisCache(...))`). +- **Object Storage Backends**: Added `S3Cache` (AWS) and `GCSCache` (Google Cloud) for cost-effective storage of large objects. + - Features: Metadata-based TTL checks (saves download costs), Gzip compression, and pluggable serializers. ## [0.2.0] - 2025-12-23 diff --git a/README.md b/README.md index 4b93bbd..6d24b1c 100644 --- a/README.md +++ b/README.md @@ -19,6 +19,7 @@ Type-safe, fast, thread-safe, async-friendly, and framework-agnostic. - [InMemCache](#inmemcache) - [RedisCache & Serializers](#rediscache--serializers) - [HybridCache (L1 + L2)](#hybridcache-l1--l2) + - [ChainCache (multi-level)](#chaincache-multi-level) - [Custom Storage](#custom-storage) - [API Reference](#api-reference) - [Testing & Benchmarks](#testing--benchmarks) @@ -26,6 +27,8 @@ Type-safe, fast, thread-safe, async-friendly, and framework-agnostic. - [Comparison](#comparison) - [Contributing](#contributing) - [License](#license) +- [BGCache (Background)](#bgcache-background) + - [Production example](docs/bgcache.md) --- @@ -106,6 +109,14 @@ The library supports smart key generation that handles both positional and keywo ## Storage Backends +- InMemCache (default): Fast, process-local +- RedisCache: Distributed in-memory +- HybridCache: L1 (memory) + L2 (Redis) +- ChainCache: Arbitrary multi-level chain (e.g., InMem -> Redis -> S3/GCS) +- S3Cache: Object storage backend (AWS) +- GCSCache: Object storage backend (Google Cloud) +- LocalFileCache: Filesystem-backed cache (per-host) + ### InMemCache Thread-safe in-memory cache with TTL. @@ -137,23 +148,6 @@ json_cache = RedisCache(client, prefix="app:json:", serializer="json") custom_json = RedisCache(client, prefix="app:json2:", serializer=JsonSerializer()) ``` -#### Custom Serializer Example (msgpack) - -```python -import msgpack - -class MsgpackSerializer: - handles_entries = False - - @staticmethod - def dumps(obj): - return msgpack.packb(obj, use_bin_type=True) - - @staticmethod - def loads(data): - return msgpack.unpackb(data, raw=False) -``` - --- ### HybridCache (L1 + L2) @@ -229,88 +223,116 @@ db_host = load_config_map().get("db", {}).get("host") --- -## Advanced Configuration - -To avoid repeating complex cache configurations (like HybridCache setup) in every decorator, you can create a pre-configured cache instance. +### ChainCache (multi-level) ```python -from advanced_caching import SWRCache, HybridCache, InMemCache, RedisCache +from advanced_caching import InMemCache, RedisCache, S3Cache, ChainCache -# 1. 
Define your cache factory -def create_hybrid_cache(): - return HybridCache( - l1_cache=InMemCache(), - l2_cache=RedisCache(redis_client), - l1_ttl=300, - l2_ttl=3600 - ) +chain = ChainCache([ + (InMemCache(), 60), # L1 fast + (RedisCache(redis_client), 300), # L2 distributed + (S3Cache(bucket="my-cache"), 3600), # L3 durable +]) -# 2. Create a configured decorator -MySWRCache = SWRCache.configure(cache=create_hybrid_cache) +# Write-through all levels (per-level TTL caps applied) +chain.set("user:123", {"name": "Ana"}, ttl=900) -# 3. Use it cleanly -@MySWRCache.cached("users:{}", ttl=300) -def get_users(code: str): - return db.get_users(code) +# Read-through with promotion to faster levels +user = chain.get("user:123") ``` -This works for `TTLCache`, `SWRCache`, and `BGCache`. +Notes: +- Provide per-level TTL caps in the tuple; if `None`, the passed `ttl` is used. +- `set_if_not_exists` delegates atomicity to the deepest level and backfills upper levels on success. +- `get`/`get_entry` promote hits upward for hotter reads. --- -### Custom Storage +### Object Storage Backends (S3/GCS) -Implement the `CacheStorage` protocol. +Store large cached objects cheaply in AWS S3 or Google Cloud Storage. +Supports compression and metadata-based TTL checks to minimize costs. -#### File-based example +**[πŸ“š Full Documentation & Best Practices](docs/object-storage-caching.md)** ```python -import json, time -from pathlib import Path -from advanced_caching import CacheEntry, CacheStorage, TTLCache, validate_cache_storage - -class FileCache(CacheStorage): - def __init__(self, directory="/tmp/cache"): - self.dir = Path(directory) - self.dir.mkdir(parents=True, exist_ok=True) - - def _path(self, key: str) -> Path: - return self.dir / f"{key.replace(':','_')}.json" - - def get_entry(self, key): - p = self._path(key) - if not p.exists(): - return None - data = json.loads(p.read_text()) - return CacheEntry(**data) - - def set_entry(self, key, entry, ttl=None): - self._path(key).write_text(json.dumps(entry.__dict__)) - - def get(self, key): - e = self.get_entry(key) - return e.value if e and e.is_fresh() else None - - def set(self, key, value, ttl=0): - now = time.time() - self.set_entry(key, CacheEntry(value, now + ttl, now)) - - def delete(self, key): - self._path(key).unlink(missing_ok=True) - - def exists(self, key): - return self.get(key) is not None - - def set_if_not_exists(self, key, value, ttl): - if self.exists(key): - return False - self.set(key, value, ttl) - return True - -cache = FileCache() -assert validate_cache_storage(cache) +from advanced_caching import S3Cache, GCSCache + +user_cache = S3Cache( + bucket="my-cache-bucket", + prefix="users/", + serializer="json", + compress=True, + dedupe_writes=True, # optional: skip uploads when content unchanged (adds HEAD) +) + +gcs_cache = GCSCache( + bucket="my-cache-bucket", + prefix="users/", + serializer="json", + compress=True, + dedupe_writes=True, # optional: skip uploads when content unchanged (adds metadata check) +) ``` +### RedisCache dedupe_writes + +`RedisCache(..., dedupe_writes=True)` compares the serialized payload to the stored value; if unchanged, it skips rewriting and only refreshes TTL when provided. 
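+
+A minimal sketch of the opt-in (assumes an existing `redis.Redis` client named `redis_client`):
+
+```python
+from advanced_caching import RedisCache
+
+cache = RedisCache(redis_client, prefix="app:", dedupe_writes=True)
+
+cache.set("user:123", {"name": "Ana"}, ttl=300)  # first write stores the payload
+cache.set("user:123", {"name": "Ana"}, ttl=600)  # identical payload: rewrite skipped, TTL refreshed to 600s
+```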
+
+### LocalFileCache (filesystem)
+
+```python
+from advanced_caching import LocalFileCache
+
+cache = LocalFileCache("/var/tmp/ac-cache", dedupe_writes=True)
+cache.set("user:123", {"name": "Ana"}, ttl=300)
+user = cache.get("user:123")
+```
+
+Notes: one file per key; atomic writes; optional compression and dedupe to skip rewriting identical content.
+
+---
+
+## BGCache (Background)
+
+Single-writer/multi-reader pattern with background refresh and per-process reader caches.
+
+```python
+from advanced_caching import BGCache, InMemCache
+
+# Shared cache that the writer refreshes and readers pull from
+source_cache = InMemCache()  # use RedisCache / ChainCache when readers run in other processes
+
+# Writer: enforced single registration per key; refreshes the cache on a schedule
+@BGCache.register_writer(
+    "daily_config",
+    interval_seconds=300,   # refresh every 5 minutes
+    ttl=None,               # defaults to interval*2
+    run_immediately=True,
+    cache=source_cache,
+)
+def load_config():
+    return expensive_fetch()
+
+# Readers: read-only; each keeps a local cache warm by pulling from the writer's cache
+reader = BGCache.get_reader(
+    "daily_config",
+    interval_seconds=60,    # periodically pull from the source cache into the local cache
+    ttl=None,               # local cache TTL defaults to interval*2
+    run_immediately=True,
+    cache=source_cache,     # source cache to pull from (the writer's cache)
+)
+
+# Usage
+cfg = reader()  # returns the value from the local cache; on a miss pulls once from the source cache
+```
+
+Notes:
+- `register_writer` enforces one writer per key globally; it raises if the key is registered twice.
+- `interval_seconds <= 0` disables scheduling; the wrapper still writes on demand on misses (sketched below).
+- `run_immediately=True` triggers an initial refresh if the cache is empty.
+- `get_reader` creates a read-only accessor backed by its own local cache; it pulls from the provided cache (usually the writer's cache) and optionally keeps the local copy warm on a schedule.
+- The `cache=` argument on readers is the source cache to pull from (pass the writer's cache); each reader maintains its own local `InMemCache` internally, so use a shared backend (Redis/ChainCache) as the source in multi-process setups.
+
+See `docs/bgcache.md` for a production-grade example with Redis/ChainCache, error handling, and reader-local caches.
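+
+A minimal sketch of the on-demand mode (`interval_seconds=0`); the key and loader below are hypothetical, shown only for illustration:
+
+```python
+from advanced_caching import BGCache
+
+# No background job is scheduled; the wrapper loads and caches on the first miss.
+@BGCache.register_writer("static_lookup", interval_seconds=0, ttl=3600, run_immediately=False)
+def load_lookup_table():
+    return build_lookup_table()  # assumed expensive loader
+
+table = load_lookup_table()  # first call runs the loader and caches for an hour; later calls hit the cache
+```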
+ --- ## API Reference @@ -353,15 +375,17 @@ uv run python tests/benchmark.py ## Comparison -| Feature | advanced-caching | lru_cache | cachetools | Redis | Memcached | -| ------------------ | ---------------- | --------- | ---------- | ------ | --------- | -| TTL | βœ… | ❌ | βœ… | βœ… | βœ… | -| SWR | βœ… | ❌ | ❌ | Manual | Manual | -| Background refresh | βœ… | ❌ | ❌ | Manual | Manual | -| Custom backends | βœ… | ❌ | ❌ | N/A | N/A | -| Distributed | βœ… | ❌ | ❌ | βœ… | βœ… | -| Async support | βœ… | ❌ | ❌ | βœ… | βœ… | -| Type hints | βœ… | βœ… | βœ… | ❌ | ❌ | +| Feature | advanced-caching | lru_cache | cachetools | Redis | Memcached | +| ------------------- | ---------------- | --------- | ---------- | ------ | --------- | +| TTL | βœ… | ❌ | βœ… | βœ… | βœ… | +| SWR | βœ… | ❌ | ❌ | Manual | Manual | +| Background refresh | βœ… | ❌ | ❌ | Manual | Manual | +| Custom backends | βœ… (InMem/Redis/S3/GCS/Chain) | ❌ | ❌ | N/A | N/A | +| Distributed | βœ… (Redis, ChainCache) | ❌ | ❌ | βœ… | βœ… | +| Multi-level chain | βœ… (ChainCache) | ❌ | ❌ | Manual | Manual | +| Dedupe writes | βœ… (Redis/S3/GCS opt-in) | ❌ | ❌ | Manual | Manual | +| Async support | βœ… | ❌ | ❌ | βœ… | βœ… | +| Type hints | βœ… | βœ… | βœ… | ❌ | ❌ | --- diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..3d83df5 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,30 @@ +version: '3.9' +services: + minio: + image: minio/minio:RELEASE.2025-09-07T16-13-09Z-cpuv1 + command: server /data --console-address :9001 + environment: + - MINIO_ROOT_USER=minioadmin + - MINIO_ROOT_PASSWORD=minioadmin + ports: + - "9000:9000" + - "9001:9001" + healthcheck: + test: ["CMD", "mc", "ready", "local"] + interval: 5s + timeout: 3s + retries: 10 + volumes: + - minio-data:/data + + fake-gcs: + image: fsouza/fake-gcs-server:latest + ports: + - "4443:4443" + environment: + - STORAGE_EMULATOR_HOST=0.0.0.0:4443 + command: ["-scheme", "http", "-public-host", "localhost:4443"] + +volumes: + minio-data: + diff --git a/docs/bgcache.md b/docs/bgcache.md new file mode 100644 index 0000000..8f8bce6 --- /dev/null +++ b/docs/bgcache.md @@ -0,0 +1,149 @@ +# BGCache: Single-Writer / Multi-Reader (Production Example) + +This guide shows a production-grade split of BGCache writer and readers, including background refresh, error handling, and per-process reader caches. + +## Goals +- One writer per key (enforced) refreshing a shared cache (e.g., Redis or ChainCache). +- Many readers in different processes/threads pulling from the writer’s cache and keeping a local L1 warm. +- Graceful error handling, optional run-immediately load, and configurable intervals/TTLs. + +## Recommended Topology +- **Writer cache**: a shared backend (e.g., `RedisCache`, `ChainCache` with Redis+S3, or plain `InMemCache` if single-process). +- **Reader cache**: a fast local cache per process (e.g., `InMemCache`) that periodically pulls from the writer cache. 
+ +## End-to-end Example (multiple writers/readers, object storage cold tier) + +```python +import logging +from advanced_caching import BGCache, InMemCache, RedisCache, ChainCache + +logger = logging.getLogger(__name__) + +# Shared writer cache: InMem L1 + Redis L2 + object storage L3 (S3/GCS/local file) +shared_writer_cache = ChainCache([ + (InMemCache(), 30), + (RedisCache(redis_client, dedupe_writes=True), 300), + # Choose one cold tier: + # (S3Cache(bucket="my-cache", dedupe_writes=True), 3600), + # (GCSCache(bucket="my-cache", dedupe_writes=True), 3600), + # (LocalFileCache("/var/tmp/bgcache", dedupe_writes=True), 3600), +]) + +# Writer 1: daily config +@BGCache.register_writer( + "daily_config", + interval_seconds=300, + ttl=None, + run_immediately=True, + on_error=lambda e: logger.error("daily_config writer failed", exc_info=e), + cache=shared_writer_cache, +) +def refresh_config(): + return load_config_from_db_or_api() + +# Writer 2: feature flags +@BGCache.register_writer( + "feature_flags", + interval_seconds=120, + ttl=None, + run_immediately=True, + on_error=lambda e: logger.error("feature_flags writer failed", exc_info=e), + cache=shared_writer_cache, +) +def refresh_flags(): + return load_flags_from_control_plane() + +# Readers: each process uses its own local cache and pulls from the writer cache +reader_local_cache = InMemCache() + +get_config = BGCache.get_reader( + "daily_config", + interval_seconds=60, + ttl=None, + run_immediately=True, + on_error=lambda e: logger.warning("daily_config reader pull failed", exc_info=e), + cache=shared_writer_cache, # source cache (writer’s cache, includes cold tier) +) + +get_flags = BGCache.get_reader( + "feature_flags", + interval_seconds=30, + ttl=None, + run_immediately=True, + on_error=lambda e: logger.warning("feature_flags reader pull failed", exc_info=e), + cache=shared_writer_cache, +) + +# Usage in app code +cfg = get_config() # from local reader cache; on miss pulls once from writer cache +flags = get_flags() # same pattern for feature flags +``` + +### Why this works well +- **Single writer enforced**: `register_writer` raises if the key is registered twice. +- **Background refresh**: writer schedules updates; readers schedule pulls from writer cache. +- **Local read performance**: readers serve from per-process `InMemCache`, reducing Redis/object-store round-trips. +- **Dedupe writes**: `dedupe_writes=True` on RedisCache avoids redundant writes (and refreshes TTL when unchanged). + +### Tuning knobs +- `interval_seconds`: writer refresh period; reader pull period. Set to `0` to disable scheduling and rely on on-demand fetch. +- `ttl`: defaults to `interval_seconds * 2` when not provided. For readers, this is the local cache TTL. +- `run_immediately`: seed cache on startup if empty. +- `on_error`: handle/log exceptions from the writer refresh job. +- `cache`: use a distributed cache for the writer; for readers, this is the *source* cache they pull from, while they maintain their own local cache internally. + +### Async variants +- Both writer and reader functions can be `async def`; BGCache picks the appropriate scheduler (AsyncIOScheduler / BackgroundScheduler). The reader returned is sync callable but can call async sources when provided. + +### Using ChainCache for deeper hierarchies +- Cold tiers: S3Cache, GCSCache, LocalFileCache can sit behind Redis in ChainCache for durable or per-host persistence. + - S3/GCS: set `dedupe_writes=True` to avoid rewriting unchanged blobs (uses metadata hashes). 
+ - LocalFileCache: per-host cache with atomic writes; useful when object storage isn’t available. + - Tune per-level TTL caps in the ChainCache tuples. + +## Operational tips +- Call `BGCache.shutdown()` in test teardown or graceful shutdown to stop schedulers. +- Keep `interval_seconds` moderately larger than your refresh latency to avoid overlaps. +- Monitor writer errors via `on_error`; consider alerts if refresh fails repeatedly. +- For high-QPS readers, keep `interval_seconds` small enough to ensure local caches stay warm. + +## Minimal test harness (pytest style) + +```python +import pytest +import asyncio +from advanced_caching import BGCache, InMemCache + +@pytest.mark.asyncio +async def test_bgcache_writer_reader(): + calls = {"n": 0} + writer_cache = InMemCache() + + @BGCache.register_writer("demo", interval_seconds=0.05, cache=writer_cache) + def writer(): + calls["n"] += 1 + return {"v": calls["n"]} + + reader = BGCache.get_reader( + "demo", interval_seconds=0.05, cache=writer_cache, run_immediately=True + ) + + await asyncio.sleep(0.1) + v1 = reader() + assert v1 and v1["v"] >= 1 + + await asyncio.sleep(0.1) + v2 = reader() + assert v2 and v2["v"] >= v1["v"] + + BGCache.shutdown() +``` + +## Checklist for production +- [ ] Shared writer cache (Redis/ChainCache) sized and monitored +- [ ] Reader local caches sized appropriately +- [ ] `on_error` hooked for alerting +- [ ] Reasonable `interval_seconds` and `ttl` +- [ ] `BGCache.shutdown()` on service shutdown/tests +- [ ] Dedupe enabled where write amplification matters (Redis/S3/GCS) +- [ ] ChainCache tiers tuned (per-level TTL caps) diff --git a/docs/object-storage-caching.md b/docs/object-storage-caching.md new file mode 100644 index 0000000..86993bf --- /dev/null +++ b/docs/object-storage-caching.md @@ -0,0 +1,291 @@ +# Object Storage Caching (S3 & GCS) + +`advanced-caching` supports using cloud object storage (AWS S3 and Google Cloud Storage) as cache backends. This is ideal for: +- **Large datasets**: Storing large serialized objects that don't fit in Redis. +- **Cost efficiency**: Cheaper storage costs compared to managed Redis clusters. +- **Shared caching**: Sharing cache across different services or regions (with appropriate latency considerations). + +## Installation + +You need to install the respective client libraries: + +```bash +# For AWS S3 +pip install boto3 + +# For Google Cloud Storage +pip install google-cloud-storage +``` + +## S3Cache (AWS) + +`S3Cache` uses AWS S3 buckets for storage. It is optimized to minimize API costs by checking object metadata (HEAD request) before downloading the full body. + +### Basic Usage + +```python +import boto3 +from advanced_caching import S3Cache, TTLCache + +# Initialize Boto3 client (or let S3Cache create one) +s3_client = boto3.client("s3") + +# Create the cache backend +s3_cache = S3Cache( + bucket="my-app-cache-bucket", + prefix="prod/users/", + s3_client=s3_client, + serializer="json" # or "pickle" (default) +) + +# Use it with a decorator +@TTLCache.cached("user:{}", ttl=3600, cache=s3_cache) +def get_user_report(user_id): + # ... expensive operation ... + return generate_pdf_report(user_id) +``` + +### Configuration Options + +| Parameter | Description | Default | +|-----------|-------------|---------| +| `bucket` | Name of the S3 bucket. | Required | +| `prefix` | Folder prefix for keys (e.g., `cache/`). | `""` | +| `s3_client` | Pre-configured `boto3.client("s3")`. 
| `None` (creates new) |
+| `serializer` | Serialization format (`"pickle"`, `"json"`, or custom). | `"pickle"` |
+| `compress` | Enable Gzip compression for values. | `True` |
+| `compress_level` | Gzip compression level (1-9). | `6` |
+
+## GCSCache (Google Cloud)
+
+`GCSCache` uses Google Cloud Storage buckets. Like `S3Cache`, it leverages metadata to check for freshness efficiently.
+
+### Basic Usage
+
+```python
+from google.cloud import storage
+from advanced_caching import GCSCache, TTLCache
+
+# Initialize GCS client
+client = storage.Client()
+
+# Create the cache backend
+gcs_cache = GCSCache(
+    bucket="my-app-cache-bucket",
+    prefix="reports/",
+    client=client,
+    compress=True
+)
+
+@TTLCache.cached("report:{}", ttl=86400, cache=gcs_cache)
+def generate_daily_report(date_str):
+    return complex_calculation(date_str)
+```
+
+## Key Organization & File Structure
+
+When using object storage, cache keys are mapped directly to file paths (object keys) in the bucket. The final path is constructed as: `prefix + key`.
+
+### Single Function
+
+```python
+# Prefix acts as a folder
+cache = S3Cache(bucket="my-bucket", prefix="reports/daily/")
+
+@TTLCache.cached("2023-10-25", ttl=3600, cache=cache)
+def get_report(date): ...
+```
+
+**Resulting S3 Key:** `reports/daily/2023-10-25`
+
+### Multiple Functions (Shared Bucket)
+
+To store data from multiple functions in the same bucket, use different **prefixes** or distinct **key templates** to avoid collisions.
+
+#### Option A: Different Prefixes (Recommended)
+
+Create separate cache instances for different logical groups. This keeps the bucket organized and allows for easier cleanup (e.g., deleting the `users/` folder).
+
+```python
+# Cache for User data
+user_cache = S3Cache(bucket="my-bucket", prefix="users/")
+
+# Cache for Product data
+product_cache = S3Cache(bucket="my-bucket", prefix="products/")
+
+@TTLCache.cached("{user_id}", ttl=300, cache=user_cache)
+def get_user(user_id): ...
+# File: users/123
+
+@TTLCache.cached("{prod_id}", ttl=300, cache=product_cache)
+def get_product(prod_id): ...
+# File: products/ABC
+```
+
+#### Option B: Shared Prefix with Namespaced Keys
+
+Use a single cache instance but namespace the keys in the decorator.
+
+```python
+# Shared cache instance
+shared_cache = S3Cache(bucket="my-bucket", prefix="cache/")
+
+@TTLCache.cached("users:{user_id}", ttl=300, cache=shared_cache)
+def get_user(user_id): ...
+# File: cache/users:123
+
+@TTLCache.cached("products:{prod_id}", ttl=300, cache=shared_cache)
+def get_product(prod_id): ...
+# File: cache/products:ABC
+```
+
+> **Tip**: You can use slashes in your key templates to create subfolders dynamically.
+> Example: `@TTLCache.cached("users/{user_id}/profile", ...)` with prefix `v1/` results in `v1/users/123/profile`.
+
+### Single-writer / multi-reader with BGCache
+
+If you only want one place to refresh data but many places to read it, split BGCache into a writer and readers:
+
+```python
+from advanced_caching import BGCache, InMemCache
+
+# Shared cache that the writer refreshes and readers pull from
+config_cache = InMemCache()  # use RedisCache/ChainCache when readers run in other processes
+
+# One writer (enforced: only one writer per key)
+@BGCache.register_writer(
+    "daily_config", interval_seconds=300, run_immediately=True, cache=config_cache
+)
+def refresh_config():
+    return load_big_config()  # expensive
+
+# Many readers; plain callables created where needed (no decorator required)
+get_config = BGCache.get_reader("daily_config", interval_seconds=60, cache=config_cache)
+
+# On a miss the reader returns None (no fallback logic is attached).
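+
+# Illustrative usage (assumes the definitions above): if the reader has nothing yet,
+# fall back to the writer wrapper, which loads and caches the value on demand.
+config = get_config()
+if config is None:
+    config = refresh_config()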
+ +# You can also source from a multi-level cache (e.g., ChainCache) if you want object storage behind Redis/L1. +``` + +This pattern keeps writes centralized while allowing multiple call-sites to share the cached value. + +## Multi-level chain (InMem -> Redis -> S3/GCS) + +Use `ChainCache` to compose multiple storage layers: + +```python +from advanced_caching import InMemCache, RedisCache, S3Cache, ChainCache + +chain = ChainCache([ + (InMemCache(), 60), + (RedisCache(redis_client), 300), + (S3Cache(bucket="my-cache"), 3600), +]) + +# Write-through all levels (TTL capped per level) +chain.set("daily_config", load_config(), ttl=7200) + +# Read-through promotes to faster levels +cfg = chain.get("daily_config") +``` + +### Dedupe writes (optional) + +- `S3Cache(..., dedupe_writes=True)` stores a hash in object metadata (`ac-hash`) and skips uploads when content is unchanged (adds a HEAD check). +- `GCSCache(..., dedupe_writes=True)` stores `ac-hash` metadata and skips uploads when unchanged. +- `RedisCache(..., dedupe_writes=True)` skips rewriting identical payloads and refreshes TTL when provided. + +Use dedupe when bandwidth/object-write cost matters and an extra HEAD/reload is acceptable. + +## Best Practices + +### 1. Use HybridCache for Performance & Cost + +Object storage has higher latency (50-200ms) compared to Redis (<5ms) or memory (nanoseconds). It also charges per API request. + +To mitigate this, wrap your object storage cache in a `HybridCache`. This uses local memory as L1 and S3/GCS as L2. + +```python +from advanced_caching import HybridCache, InMemCache, S3Cache + +# L1: Memory (fast, free reads) +# L2: S3 (persistent, shared, slower) +hybrid_cache = HybridCache( + l1_cache=InMemCache(), + l2_cache=S3Cache(bucket="my-cache"), + l1_ttl=60, # Keep in memory for 1 minute + l2_ttl=86400 # Keep in S3 for 1 day +) + +# 1. First call: Miss L1 -> Miss L2 -> Run Function -> Write S3 -> Write L1 +# 2. Second call (0-60s): Hit L1 (Instant, no S3 cost) +# 3. Third call (61s+): Miss L1 -> Hit L2 (Slower, S3 read cost) -> Write L1 +``` + +### 2. Enable Compression + +Both `S3Cache` and `GCSCache` enable Gzip compression by default (`compress=True`). +- **Pros**: Reduces storage costs and network transfer time. +- **Cons**: Slight CPU overhead for compression/decompression. +- **Recommendation**: Keep it enabled unless you are storing already-compressed data (like images or zip files). + +### 3. Cost Optimization (Metadata Checks) + +`advanced-caching` implements a "Metadata First" strategy: +- **`get()`**: Checks object metadata (freshness timestamp) *before* downloading the body. If the item is expired, it aborts the download, saving data transfer costs. +- **`exists()`**: Uses `HEAD` requests (S3) or metadata lookups (GCS) which are cheaper and faster than downloading the object. + +### 4. Serialization + +- **Pickle (Default)**: Fastest and supports almost any Python object. **Security Warning**: Only use pickle if you trust the data source (i.e., your own bucket). +- **JSON**: Portable and human-readable. Use this if other non-Python services need to read the cache. Requires `orjson` (installed automatically with `advanced-caching`). + +### 5. Permissions + +Ensure your application has the correct IAM permissions. 
+ +**AWS S3 (IAM Policy):** +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:PutObject", + "s3:DeleteObject", + "s3:ListBucket" + ], + "Resource": [ + "arn:aws:s3:::my-cache-bucket", + "arn:aws:s3:::my-cache-bucket/*" + ] + } + ] +} +``` + +**Google Cloud Storage:** +Ensure the Service Account has `Storage Object Admin` or `Storage Object User` roles on the bucket. + +## FAQ + +### Why not store all keys for a function in a single file? + +You might wonder if it's better to store all cached results for `get_user` in a single `users.json` file instead of thousands of small files. + +**This is generally NOT recommended for dynamic caching.** + +1. **Race Conditions**: Object storage does not support partial updates. To update one user, you must download the whole file, update the dict, and re-upload. If two requests happen simultaneously, one will overwrite the other's changes. +2. **Performance**: Reading a single key requires downloading the entire dataset. +3. **Cost**: Re-uploading a 10MB file to update a 1KB record incurs unnecessary bandwidth and request costs. + +**Exception: Read-Only Static Data** +If you have a dataset that is generated once (e.g., a daily export) and only read by your app, storing it as a single file is efficient. In this case, use `BGCache` to load the entire file into memory at once, rather than using `S3Cache` as a backend. + +```python +# Efficient for single-file read-only datasets +@BGCache.register_loader("daily_config", interval_seconds=3600) +def load_config(): + # Download big JSON once, keep in memory + obj = s3.get_object(Bucket="...", Key="config.json") + return json.loads(obj["Body"].read()) +``` diff --git a/pyproject.toml b/pyproject.toml index 717e4e4..6279132 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "advanced-caching" -version = "0.2.1" +version = "0.2.2-beta" description = "Production-ready composable caching with TTL, SWR, and background refresh patterns for Python." readme = "README.md" requires-python = ">=3.10" @@ -33,11 +33,14 @@ dependencies = [ ] [project.optional-dependencies] -redis = ["redis>=5.0"] +redis = ["redis>=5.0.0"] dev = [ "pytest>=8.2", "pytest-cov>=4.0", ] +tests = ["pytest", "pytest-asyncio", "pytest-cov"] +tests-s3 = ["moto[boto3]>=5.0.0"] +tests-gcs = ["google-cloud-storage>=2.10.0"] [project.urls] Homepage = "https://github.com/agkloop/advanced_caching" @@ -61,3 +64,6 @@ python_files = ["test_*.py"] addopts = "-v" asyncio_mode = "auto" asyncio_default_fixture_loop_scope = "function" +markers = [ + "integration: marks tests that require external emulators/services" +] diff --git a/src/advanced_caching/__init__.py b/src/advanced_caching/__init__.py index 7f45bc2..f7e1a35 100644 --- a/src/advanced_caching/__init__.py +++ b/src/advanced_caching/__init__.py @@ -4,12 +4,16 @@ Expose storage backends, decorators, and scheduler utilities under `advanced_caching`. 
""" -__version__ = "0.2.1" +__version__ = "0.2.2-beta" from .storage import ( InMemCache, RedisCache, HybridCache, + ChainCache, + LocalFileCache, + S3Cache, + GCSCache, CacheEntry, CacheStorage, validate_cache_storage, @@ -29,6 +33,10 @@ "InMemCache", "RedisCache", "HybridCache", + "ChainCache", + "LocalFileCache", + "S3Cache", + "GCSCache", "CacheEntry", "CacheStorage", "validate_cache_storage", diff --git a/src/advanced_caching/_schedulers.py b/src/advanced_caching/_schedulers.py index 873f08f..46f2914 100644 --- a/src/advanced_caching/_schedulers.py +++ b/src/advanced_caching/_schedulers.py @@ -6,6 +6,7 @@ from __future__ import annotations import threading +import asyncio from typing import ClassVar from apscheduler.schedulers.background import BackgroundScheduler @@ -54,7 +55,12 @@ class SharedAsyncScheduler: def get_scheduler(cls) -> AsyncIOScheduler: with cls._lock: if cls._scheduler is None: - cls._scheduler = AsyncIOScheduler() + try: + loop = asyncio.get_running_loop() + except RuntimeError: + cls._scheduler = AsyncIOScheduler() + else: + cls._scheduler = AsyncIOScheduler(event_loop=loop) assert cls._scheduler is not None return cls._scheduler @@ -69,6 +75,10 @@ def ensure_started(cls) -> None: def shutdown(cls, wait: bool = True) -> None: with cls._lock: if cls._started and cls._scheduler is not None: - cls._scheduler.shutdown(wait=wait) + try: + cls._scheduler.shutdown(wait=wait) + except RuntimeError: + # Ignore shutdown if loop already closed (test teardown). + pass cls._started = False cls._scheduler = None diff --git a/src/advanced_caching/decorators.py b/src/advanced_caching/decorators.py index 202b566..339740c 100644 --- a/src/advanced_caching/decorators.py +++ b/src/advanced_caching/decorators.py @@ -15,6 +15,7 @@ import time from datetime import datetime, timedelta from typing import Callable, TypeVar, Any +from dataclasses import dataclass from apscheduler.triggers.interval import IntervalTrigger @@ -440,10 +441,14 @@ def refresh_job() -> None: class AsyncBackgroundCache: """Background cache loader that uses APScheduler (AsyncIOScheduler for async, BackgroundScheduler for sync).""" + # Global registry to enforce single writer per cache key across all configured BGCache classes. 
+ _writer_registry: dict[str, "_WriterRecord"] = {} + @classmethod def shutdown(cls, wait: bool = True) -> None: SharedAsyncScheduler.shutdown(wait) SharedScheduler.shutdown(wait) + cls._writer_registry.clear() @classmethod def configure( @@ -685,5 +690,275 @@ def sync_wrapper_fn() -> T: return decorator + @classmethod + def register_writer( + cls, + key: str, + interval_seconds: int, + ttl: int | None = None, + run_immediately: bool = True, + on_error: Callable[[Exception], None] | None = None, + cache: CacheStorage | Callable[[], CacheStorage] | None = None, + ) -> Callable[[Callable[[], T]], Callable[[], T]]: + cache_key = key + if cache_key in cls._writer_registry: + raise ValueError(f"BGCache writer already registered for key '{cache_key}'") + + if interval_seconds <= 0: + interval_seconds = 0 + if ttl is None and interval_seconds > 0: + ttl = interval_seconds * 2 + if ttl is None: + ttl = 0 + + cache_factory = normalize_cache_factory(cache, default_factory=InMemCache) + cache_obj = cache_factory() + cache_get = cache_obj.get + cache_set = cache_obj.set + + def decorator(loader_func: Callable[[], T]) -> Callable[[], T]: + if asyncio.iscoroutinefunction(loader_func): + loader_lock: asyncio.Lock | None = None + + async def run_once() -> T: + nonlocal loader_lock + if loader_lock is None: + loader_lock = asyncio.Lock() + async with loader_lock: + try: + data = await loader_func() + cache_set(cache_key, data, ttl) + return data + except Exception as e: # pragma: no cover - defensive + if on_error: + try: + on_error(e) + except Exception: + logger.exception( + "Async BGCache error handler failed for key %r", + cache_key, + ) + logger.exception( + "Async BGCache writer failed for key %r", cache_key + ) + raise + + async def refresh_job() -> None: + try: + await run_once() + except Exception: + # Error already handled/logged inside run_once + pass + + next_run_time: datetime | None = None + + if run_immediately: + if cache_get(cache_key) is None: + try: + loop = asyncio.get_running_loop() + except RuntimeError: + asyncio.run(refresh_job()) + next_run_time = datetime.now() + timedelta( + seconds=interval_seconds * 2 + ) + else: + loop.create_task(refresh_job()) + next_run_time = datetime.now() + timedelta( + seconds=interval_seconds * 2 + ) + + if interval_seconds > 0: + scheduler = SharedAsyncScheduler.get_scheduler() + SharedAsyncScheduler.ensure_started() + scheduler.add_job( + refresh_job, + trigger=IntervalTrigger(seconds=interval_seconds), + id=cache_key, + replace_existing=True, + next_run_time=next_run_time, + ) + + async def writer_wrapper() -> T: + value = cache_get(cache_key) + if value is not None: + return value # type: ignore[return-value] + return await run_once() + + attach_wrapper_metadata( + writer_wrapper, + loader_func, + cache_obj=cache_obj, + cache_key=cache_key, + ) + cls._writer_registry[cache_key] = _WriterRecord( + cache_key=cache_key, + cache=cache_obj, + ttl=ttl, + loader_wrapper=writer_wrapper, + is_async=True, + ) + return writer_wrapper # type: ignore + + # Sync writer path + from threading import Lock + + loader_lock = Lock() + + def run_once_sync() -> T: + with loader_lock: + try: + data = loader_func() + cache_set(cache_key, data, ttl) + return data + except Exception as e: # pragma: no cover - defensive + if on_error: + try: + on_error(e) + except Exception: + logger.exception( + "Sync BGCache error handler failed for key %r", + cache_key, + ) + logger.exception( + "Sync BGCache writer failed for key %r", cache_key + ) + raise + + def refresh_job_sync() -> 
None: + try: + run_once_sync() + except Exception: + # Error already handled/logged inside run_once_sync + pass + + next_run_time_sync: datetime | None = None + + if run_immediately: + if cache_get(cache_key) is None: + refresh_job_sync() + next_run_time_sync = datetime.now() + timedelta( + seconds=interval_seconds * 2 + ) + + if interval_seconds > 0: + scheduler_sync = SharedScheduler.get_scheduler() + SharedScheduler.start() + scheduler_sync.add_job( + refresh_job_sync, + trigger=IntervalTrigger(seconds=interval_seconds), + id=cache_key, + replace_existing=True, + next_run_time=next_run_time_sync, + ) + + def writer_wrapper_sync() -> T: + value = cache_get(cache_key) + if value is not None: + return value # type: ignore[return-value] + return run_once_sync() + + attach_wrapper_metadata( + writer_wrapper_sync, + loader_func, + cache_obj=cache_obj, + cache_key=cache_key, + ) + cls._writer_registry[cache_key] = _WriterRecord( + cache_key=cache_key, + cache=cache_obj, + ttl=ttl, + loader_wrapper=writer_wrapper_sync, + is_async=False, + ) + return writer_wrapper_sync # type: ignore + + return decorator + + @classmethod + def get_reader( + cls, + key: str, + interval_seconds: int, + ttl: int | None = None, + *, + run_immediately: bool = True, + on_error: Callable[[Exception], None] | None = None, + cache: CacheStorage | Callable[[], CacheStorage] | None = None, + ) -> Callable[[], T | None]: + cache_key = key + + if interval_seconds <= 0: + interval_seconds = 0 + if ttl is None and interval_seconds > 0: + ttl = interval_seconds * 2 + if ttl is None: + ttl = 0 + + # Source cache (shared/distributed) to pull from; local_cache used for fast reads. + source_cache_factory = normalize_cache_factory( + cache, default_factory=InMemCache + ) + source_cache = source_cache_factory() + local_cache = InMemCache() + source_get = source_cache.get + local_get = local_cache.get + local_set = local_cache.set + + def load_once() -> None: + try: + value = source_get(cache_key) + if value is not None: + local_set(cache_key, value, ttl) + except Exception as e: # pragma: no cover - defensive + if on_error: + try: + on_error(e) + except Exception: + logger.exception( + "BGCache reader on_error failed for key %r", cache_key + ) + else: + logger.exception( + "BGCache reader refresh failed for key %r", cache_key + ) + + if run_immediately and (interval_seconds > 0 or ttl > 0): + load_once() + + if interval_seconds > 0: + scheduler_sync = SharedScheduler.get_scheduler() + SharedScheduler.start() + scheduler_sync.add_job( + load_once, + trigger=IntervalTrigger(seconds=interval_seconds), + id=f"reader:{cache_key}", + replace_existing=True, + ) + + def read_only_reader() -> T | None: + value = local_get(cache_key) + if value is not None: + return value + # Fallback: pull once from source on demand if not already cached. + load_once() + return local_get(cache_key) + + attach_wrapper_metadata( + read_only_reader, + read_only_reader, + cache_obj=local_cache, + cache_key=cache_key, + ) + return read_only_reader # type: ignore + BGCache = AsyncBackgroundCache + + +@dataclass(slots=True) +class _WriterRecord: + cache_key: str + cache: CacheStorage + ttl: int + loader_wrapper: Callable[[], Any] | Callable[[], Any] + is_async: bool diff --git a/src/advanced_caching/storage.py b/src/advanced_caching/storage.py deleted file mode 100644 index 876975c..0000000 --- a/src/advanced_caching/storage.py +++ /dev/null @@ -1,580 +0,0 @@ -""" -Storage backends for caching. 
- -Provides InMemCache (in-memory), RedisCache, HybridCache, and the CacheStorage protocol. -All storage backends implement the CacheStorage protocol for composability. -""" - -from __future__ import annotations - -import json -import math -import pickle -import threading -import time -from dataclasses import dataclass -from typing import Any, Protocol -import orjson - -try: - import redis -except ImportError: - redis = None # type: ignore - - -class Serializer(Protocol): - """Simple serializer protocol used by RedisCache.""" - - def dumps(self, obj: Any) -> bytes: ... - - def loads(self, data: bytes) -> Any: ... - - -class PickleSerializer: - """Pickle serializer using highest protocol (fastest, flexible).""" - - __slots__ = () - handles_entries = True - - @staticmethod - def dumps(obj: Any) -> bytes: - return pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL) - - @staticmethod - def loads(data: bytes) -> Any: - return pickle.loads(data) - - -class JsonSerializer: - """JSON serializer for text-friendly payloads (wraps CacheEntry). Uses orjson""" - - __slots__ = () - handles_entries = False - - @staticmethod - def dumps(obj: Any) -> bytes: - return orjson.dumps(obj) - - @staticmethod - def loads(data: bytes) -> Any: - return orjson.loads(data) - - -_BUILTIN_SERIALIZERS: dict[str, Serializer] = { - "pickle": PickleSerializer(), - "json": JsonSerializer(), -} - - -# ============================================================================ -# Cache Entry - Internal data structure -# ============================================================================ - - -@dataclass(slots=True) -class CacheEntry: - """Internal cache entry with TTL support.""" - - value: Any - fresh_until: float # Unix timestamp - created_at: float - - def is_fresh(self, now: float | None = None) -> bool: - """Check if entry is still fresh.""" - if now is None: - now = time.time() - return now < self.fresh_until - - def age(self, now: float | None = None) -> float: - """Get age of entry in seconds.""" - if now is None: - now = time.time() - return now - self.created_at - - -# ============================================================================ -# Storage Protocol - Common interface for all backends -# ============================================================================ - - -class CacheStorage(Protocol): - """ - Protocol for cache storage backends. - - All cache implementations (InMemCache, RedisCache, HybridCache) - must implement these methods to be compatible with decorators. - - This enables composability - you can swap storage backends without - changing your caching logic. - - Example: - def my_custom_cache(): - '''Any class implementing these methods works!''' - def get(self, key: str) -> Any | None: ... - def set(self, key: str, value: Any, ttl: int = 0) -> None: ... - # ... implement other methods - """ - - def get(self, key: str) -> Any | None: - """Get value by key. Returns None if not found or expired.""" - ... - - def set(self, key: str, value: Any, ttl: int = 0) -> None: - """Set value with TTL in seconds. ttl=0 means no expiration.""" - ... - - def delete(self, key: str) -> None: - """Delete key from cache.""" - ... - - def exists(self, key: str) -> bool: - """Check if key exists and is not expired.""" - ... - - def get_entry(self, key: str) -> "CacheEntry | None": - """Get raw cache entry (may be stale).""" - ... - - def set_entry(self, key: str, entry: "CacheEntry", ttl: int | None = None) -> None: - """Store raw cache entry, optionally overriding TTL.""" - ... 
- - def set_if_not_exists(self, key: str, value: Any, ttl: int) -> bool: - """ - Atomic set if not exists. Returns True if set, False if already exists. - Used for distributed locking. - """ - ... - - -def validate_cache_storage(cache: Any) -> bool: - """ - Validate that an object implements the CacheStorage protocol. - Useful for debugging custom cache implementations. - - Returns: - True if valid, False otherwise - """ - required_methods = [ - "get", - "set", - "delete", - "exists", - "set_if_not_exists", - "get_entry", - "set_entry", - ] - return all( - hasattr(cache, method) and callable(getattr(cache, method)) - for method in required_methods - ) - - -# ============================================================================ -# InMemCache - In-memory storage with TTL -# ============================================================================ - - -class InMemCache: - """ - Thread-safe in-memory cache with TTL support. - - Attributes: - _data: internal entry map - _lock: re-entrant lock to protect concurrent access - """ - - def __init__(self): - self._data: dict[str, CacheEntry] = {} - self._lock = threading.RLock() - - def _make_entry(self, value: Any, ttl: int) -> CacheEntry: - """Create a cache entry with computed freshness window.""" - now = time.time() - fresh_until = now + ttl if ttl > 0 else float("inf") - return CacheEntry(value=value, fresh_until=fresh_until, created_at=now) - - def get(self, key: str) -> Any | None: - """Return value if key still fresh, otherwise drop it.""" - with self._lock: - entry = self._data.get(key) - if entry is None: - return None - - if time.time() >= entry.fresh_until: - del self._data[key] - return None - - return entry.value - - def set(self, key: str, value: Any, ttl: int = 0) -> None: - """Store value for ttl seconds (0=forever).""" - entry = self._make_entry(value, ttl) - - with self._lock: - self._data[key] = entry - - def delete(self, key: str) -> None: - """Delete key from cache.""" - with self._lock: - self._data.pop(key, None) - - def exists(self, key: str) -> bool: - """Check if key exists and is not expired.""" - return self.get(key) is not None - - def get_entry(self, key: str) -> CacheEntry | None: - """Get raw entry (can be stale).""" - with self._lock: - return self._data.get(key) - - def set_entry(self, key: str, entry: CacheEntry, ttl: int | None = None) -> None: - """Set raw entry; optional ttl overrides entry freshness.""" - if ttl is not None: - entry = self._make_entry(entry.value, ttl) - with self._lock: - self._data[key] = entry - - def set_if_not_exists(self, key: str, value: Any, ttl: int) -> bool: - """Atomic set if not exists. Returns True if set, False if exists.""" - with self._lock: - now = time.time() - if key in self._data and self._data[key].is_fresh(now): - return False - entry = self._make_entry(value, ttl) - self._data[key] = entry - return True - - def clear(self) -> None: - """Clear all cached data.""" - with self._lock: - self._data.clear() - - def cleanup_expired(self) -> int: - """Remove expired entries. 
Returns count of removed entries.""" - with self._lock: - now = time.time() - expired_keys = [ - key for key, entry in self._data.items() if entry.fresh_until < now - ] - for key in expired_keys: - del self._data[key] - return len(expired_keys) - - @property - def lock(self): - """Get the internal lock (for advanced usage).""" - return self._lock - - -# ============================================================================ -# RedisCache - Redis-backed storage -# ============================================================================ - - -class RedisCache: - """ - Redis-backed cache storage. - Supports TTL, distributed locking, and persistence. - - Example: - import redis - client = redis.Redis(host='localhost', port=6379) - cache = RedisCache(client, prefix="app:") - cache.set("user:123", {"name": "John"}, ttl=60) - """ - - def __init__( - self, - redis_client: Any, - prefix: str = "", - serializer: str | Serializer | None = "pickle", - ): - """ - Initialize Redis cache. - - Args: - redis_client: Redis client instance - prefix: Key prefix for namespacing - serializer: Built-in name ("pickle" | "json" | "msgpack"), or - any object with ``dumps(obj)->bytes`` and ``loads(bytes)->Any``. - """ - if redis is None: - raise ImportError("redis package required. Install: pip install redis") - self.client = redis_client - self.prefix = prefix - self._serializer, self._wrap_entries = self._resolve_serializer(serializer) - - @staticmethod - def _wrap_payload(obj: Any) -> Any: - if isinstance(obj, CacheEntry): - return { - "__ac_type": "entry", - "v": obj.value, - "f": obj.fresh_until, - "c": obj.created_at, - } - return {"__ac_type": "value", "v": obj} - - @staticmethod - def _unwrap_payload(obj: Any) -> Any: - if isinstance(obj, dict): - obj_type = obj.get("__ac_type") - if obj_type == "entry": - return CacheEntry( - value=obj.get("v"), - fresh_until=float(obj.get("f", 0.0)), - created_at=float(obj.get("c", 0.0)), - ) - if obj_type == "value": - return obj.get("v") - return obj - - def _serialize(self, obj: Any) -> bytes: - if self._wrap_entries: - return self._serializer.dumps(self._wrap_payload(obj)) - return self._serializer.dumps(obj) - - def _deserialize(self, data: bytes) -> Any: - obj = self._serializer.loads(data) - if self._wrap_entries: - return self._unwrap_payload(obj) - return obj - - def _resolve_serializer( - self, serializer: str | Serializer | None - ) -> tuple[Serializer, bool]: - if serializer is None: - serializer = "pickle" - - if isinstance(serializer, str): - name = serializer.lower() - if name not in _BUILTIN_SERIALIZERS: - raise ValueError( - "Unsupported serializer. Use 'pickle', 'json', or provide an object with dumps/loads." 
- ) - serializer_obj = _BUILTIN_SERIALIZERS[name] - return ( - serializer_obj, - not bool(getattr(serializer_obj, "handles_entries", False)), - ) - - if hasattr(serializer, "dumps") and hasattr(serializer, "loads"): - wrap = not bool(getattr(serializer, "handles_entries", False)) - return (serializer, wrap) - - raise TypeError("serializer must be a string or provide dumps/loads methods") - - def _make_key(self, key: str) -> str: - """Add prefix to key.""" - return f"{self.prefix}{key}" - - def get(self, key: str) -> Any | None: - """Get value by key.""" - try: - data = self.client.get(self._make_key(key)) - if data is None: - return None - value = self._deserialize(data) - if isinstance(value, CacheEntry): - return value.value if value.is_fresh() else None - return value - except Exception: - return None - - def set(self, key: str, value: Any, ttl: int = 0) -> None: - """Set value with optional TTL in seconds.""" - try: - data = self._serialize(value) - if ttl > 0: - expires = max(1, int(math.ceil(ttl))) - self.client.setex(self._make_key(key), expires, data) - else: - self.client.set(self._make_key(key), data) - except Exception as e: - raise RuntimeError(f"Redis set failed: {e}") - - def delete(self, key: str) -> None: - """Delete key from cache.""" - try: - self.client.delete(self._make_key(key)) - except Exception: - pass - - def exists(self, key: str) -> bool: - """Check if key exists.""" - try: - entry = self.get_entry(key) - if entry is None: - return False - return entry.is_fresh() - except Exception: - return False - - def get_entry(self, key: str) -> CacheEntry | None: - """Get raw entry without enforcing freshness (used by SWR).""" - try: - data = self.client.get(self._make_key(key)) - if data is None: - return None - value = self._deserialize(data) - if isinstance(value, CacheEntry): - return value - # Legacy plain values: wrap to allow SWR-style access - now = time.time() - return CacheEntry(value=value, fresh_until=float("inf"), created_at=now) - except Exception: - return None - - def set_entry(self, key: str, entry: CacheEntry, ttl: int | None = None) -> None: - """Store CacheEntry, optionally with explicit TTL.""" - try: - data = self._serialize(entry) - expires = None - if ttl is not None and ttl > 0: - expires = max(1, int(math.ceil(ttl))) - if expires: - self.client.setex(self._make_key(key), expires, data) - else: - self.client.set(self._make_key(key), data) - except Exception as e: - raise RuntimeError(f"Redis set_entry failed: {e}") - - def set_if_not_exists(self, key: str, value: Any, ttl: int) -> bool: - """Atomic set if not exists.""" - try: - data = self._serialize(value) - expires = None - if ttl > 0: - expires = max(1, int(math.ceil(ttl))) - result = self.client.set(self._make_key(key), data, ex=expires, nx=True) - return bool(result) - except Exception: - return False - - -# ============================================================================ -# HybridCache - L1 (memory) + L2 (Redis) cache -# ============================================================================ - - -class HybridCache: - """ - Two-level cache: L1 (InMemCache) + L2 (RedisCache). - Fast reads from memory, distributed persistence in Redis. - - Example: - import redis - client = redis.Redis() - cache = HybridCache( - l1_cache=InMemCache(), - l2_cache=RedisCache(client), - l1_ttl=60 - ) - """ - - def __init__( - self, - l1_cache: CacheStorage | None = None, - l2_cache: CacheStorage | None = None, - l1_ttl: int = 60, - l2_ttl: int | None = None, - ): - """ - Initialize hybrid cache. 
- - Args: - l1_cache: L1 cache (memory), defaults to InMemCache - l2_cache: L2 cache (distributed), required - l1_ttl: TTL for L1 cache in seconds - l2_ttl: TTL for L2 cache in seconds, defaults to l1_ttl * 2 - """ - self.l1 = l1_cache if l1_cache is not None else InMemCache() - if l2_cache is None: - raise ValueError("l2_cache is required for HybridCache") - self.l2 = l2_cache - self.l1_ttl = l1_ttl - self.l2_ttl = l2_ttl if l2_ttl is not None else l1_ttl * 2 - - def get(self, key: str) -> Any | None: - """Get value, checking L1 then L2.""" - # Try L1 first - value = self.l1.get(key) - if value is not None: - return value - - # Try L2 - value = self.l2.get(key) - if value is not None: - # Populate L1 - self.l1.set(key, value, self.l1_ttl) - - return value - - def set(self, key: str, value: Any, ttl: int = 0) -> None: - """Set value in both L1 and L2.""" - self.l1.set(key, value, min(ttl, self.l1_ttl) if ttl > 0 else self.l1_ttl) - l2_ttl = min(ttl, self.l2_ttl) if ttl > 0 else self.l2_ttl - self.l2.set(key, value, l2_ttl) - - def get_entry(self, key: str) -> CacheEntry | None: - """Get raw entry preferring L1, falling back to L2 and repopulating L1.""" - entry: CacheEntry | None = None - - if hasattr(self.l1, "get_entry"): - entry = self.l1.get_entry(key) # type: ignore[attr-defined] - if entry is not None: - return entry - - # Attempt L2 entry retrieval first - if hasattr(self.l2, "get_entry"): - entry = self.l2.get_entry(key) # type: ignore[attr-defined] - if entry is not None: - # Populate L1 with limited TTL to avoid stale accumulation - self.l1.set_entry(key, entry, ttl=self.l1_ttl) - return entry - - # Fall back to plain value fetch - value = self.l2.get(key) - if value is None: - return None - - now = time.time() - entry = CacheEntry( - value=value, - fresh_until=now + self.l1_ttl if self.l1_ttl > 0 else float("inf"), - created_at=now, - ) - self.l1.set_entry(key, entry, ttl=self.l1_ttl) - return entry - - def delete(self, key: str) -> None: - """Delete from both caches.""" - self.l1.delete(key) - self.l2.delete(key) - - def exists(self, key: str) -> bool: - """Check if key exists in either cache.""" - return self.l1.exists(key) or self.l2.exists(key) - - def set_if_not_exists(self, key: str, value: Any, ttl: int) -> bool: - """Atomic set if not exists (L2 only for consistency).""" - l2_ttl = min(ttl, self.l2_ttl) if ttl > 0 else self.l2_ttl - success = self.l2.set_if_not_exists(key, value, l2_ttl) - if success: - self.l1.set(key, value, min(ttl, self.l1_ttl) if ttl > 0 else self.l1_ttl) - return success - - def set_entry(self, key: str, entry: CacheEntry, ttl: int | None = None) -> None: - """Store raw entry in both layers, respecting L1 and L2 TTL.""" - ttl = ttl if ttl is not None else max(int(entry.fresh_until - time.time()), 0) - - l1_ttl = min(ttl, self.l1_ttl) if ttl > 0 else self.l1_ttl - l2_ttl = min(ttl, self.l2_ttl) if ttl > 0 else self.l2_ttl - - self.l1.set_entry(key, entry, ttl=l1_ttl) - - if hasattr(self.l2, "set_entry"): - self.l2.set_entry(key, entry, ttl=l2_ttl) # type: ignore[attr-defined] - else: - self.l2.set(key, entry.value, l2_ttl) diff --git a/src/advanced_caching/storage/__init__.py b/src/advanced_caching/storage/__init__.py new file mode 100644 index 0000000..fc1086f --- /dev/null +++ b/src/advanced_caching/storage/__init__.py @@ -0,0 +1,33 @@ +from .utils import ( + CacheEntry, + CacheStorage, + JsonSerializer, + PickleSerializer, + _BUILTIN_SERIALIZERS, + _hash_bytes, + validate_cache_storage, +) +from .inmem import InMemCache +from .redis_cache import 
RedisCache
+from .hybrid import HybridCache
+from .chain import ChainCache
+from .local_file import LocalFileCache
+from .s3_cache import S3Cache
+from .gcs_cache import GCSCache
+
+__all__ = [
+    "CacheEntry",
+    "CacheStorage",
+    "JsonSerializer",
+    "PickleSerializer",
+    "_BUILTIN_SERIALIZERS",
+    "_hash_bytes",
+    "validate_cache_storage",
+    "InMemCache",
+    "RedisCache",
+    "HybridCache",
+    "ChainCache",
+    "LocalFileCache",
+    "S3Cache",
+    "GCSCache",
+]
diff --git a/src/advanced_caching/storage/chain.py b/src/advanced_caching/storage/chain.py
new file mode 100644
index 0000000..11e84b2
--- /dev/null
+++ b/src/advanced_caching/storage/chain.py
@@ -0,0 +1,105 @@
+from __future__ import annotations
+
+import time
+from typing import Any
+
+from .utils import CacheEntry, CacheStorage
+
+
+class ChainCache:
+    """Composable multi-level cache (L1 -> L2 -> ... -> Ln)."""
+
+    def __init__(self, levels: list[tuple[CacheStorage, int | None]]):
+        if not levels:
+            raise ValueError("ChainCache requires at least one level")
+        self.levels = levels
+
+    def _level_ttl(self, level_ttl: int | None, ttl: int) -> int:
+        if level_ttl is None:
+            return ttl
+        if ttl <= 0:
+            return level_ttl
+        return min(level_ttl, ttl) if level_ttl > 0 else ttl
+
+    def get(self, key: str) -> Any | None:
+        hit_value, hit_index = None, None
+        for idx, (cache, lvl_ttl) in enumerate(self.levels):
+            value = cache.get(key)
+            if value is not None:
+                hit_value, hit_index = value, idx
+                break
+        if hit_value is None:
+            return None
+        for promote_idx in range(0, hit_index):
+            cache, lvl_ttl = self.levels[promote_idx]
+            cache.set(key, hit_value, self._level_ttl(lvl_ttl, 0))
+        return hit_value
+
+    def set(self, key: str, value: Any, ttl: int = 0) -> None:
+        for cache, lvl_ttl in self.levels:
+            cache.set(key, value, self._level_ttl(lvl_ttl, ttl))
+
+    def delete(self, key: str) -> None:
+        for cache, _ in self.levels:
+            try:
+                cache.delete(key)
+            except Exception:
+                pass
+
+    def exists(self, key: str) -> bool:
+        return any(cache.exists(key) for cache, _ in self.levels)
+
+    def get_entry(self, key: str) -> CacheEntry | None:
+        hit_entry, hit_index = None, None
+        for idx, (cache, lvl_ttl) in enumerate(self.levels):
+            if hasattr(cache, "get_entry"):
+                entry = cache.get_entry(key)  # type: ignore[attr-defined]
+            else:
+                value = cache.get(key)
+                entry = None
+                if value is not None:
+                    now = time.time()
+                    entry = CacheEntry(
+                        value=value, fresh_until=float("inf"), created_at=now
+                    )
+            if entry and entry.is_fresh():
+                hit_entry, hit_index = entry, idx
+                break
+        if hit_entry is None:
+            return None
+        for promote_idx in range(0, hit_index):
+            cache, lvl_ttl = self.levels[promote_idx]
+            if hasattr(cache, "set_entry"):
+                cache.set_entry(
+                    key,
+                    hit_entry,
+                    ttl=self._level_ttl(
+                        lvl_ttl, int(hit_entry.fresh_until - time.time())
+                    ),
+                )  # type: ignore[attr-defined]
+            else:
+                cache.set(key, hit_entry.value, self._level_ttl(lvl_ttl, 0))
+        return hit_entry
+
+    def set_entry(self, key: str, entry: CacheEntry, ttl: int | None = None) -> None:
+        for cache, lvl_ttl in self.levels:
+            effective_ttl = self._level_ttl(
+                lvl_ttl,
+                ttl if ttl is not None else int(entry.fresh_until - time.time()),
+            )
+            if hasattr(cache, "set_entry"):
+                cache.set_entry(key, entry, ttl=effective_ttl)  # type: ignore[attr-defined]
+            else:
+                cache.set(key, entry.value, effective_ttl)
+
+    def set_if_not_exists(self, key: str, value: Any, ttl: int) -> bool:
+        upper_levels, deepest = self.levels[:-1], self.levels[-1]
+        deep_cache, deep_ttl = deepest
+        deep_success = deep_cache.set_if_not_exists(
+ key, value, self._level_ttl(deep_ttl, ttl) + ) + if not deep_success: + return False + for cache, lvl_ttl in upper_levels: # type: ignore[misc] + cache.set(key, value, self._level_ttl(lvl_ttl, ttl)) + return True diff --git a/src/advanced_caching/storage/gcs_cache.py b/src/advanced_caching/storage/gcs_cache.py new file mode 100644 index 0000000..abdf272 --- /dev/null +++ b/src/advanced_caching/storage/gcs_cache.py @@ -0,0 +1,212 @@ +from __future__ import annotations + +import gzip +from concurrent.futures import ThreadPoolExecutor +from typing import Any + +from .utils import CacheEntry, Serializer, _BUILTIN_SERIALIZERS, _hash_bytes + +try: + from google.cloud import storage as gcs +except ImportError: # pragma: no cover - optional + gcs = None + + +class GCSCache: + def __init__( + self, + bucket: str, + prefix: str = "", + client: Any | None = None, + serializer: str | Serializer | None = "pickle", + compress: bool = True, + compress_level: int = 6, + dedupe_writes: bool = False, + ): + if gcs is None: + raise ImportError( + "google-cloud-storage required for GCSCache. Install: pip install google-cloud-storage" + ) + self.bucket_name = bucket + self.prefix = prefix + self.client = client or gcs.Client() + self.bucket = self.client.bucket(bucket) + self.compress = compress + self.compress_level = compress_level + self.serializer = self._resolve_serializer(serializer) + self._dedupe_writes = dedupe_writes + + def _resolve_serializer(self, serializer: str | Serializer | None) -> Serializer: + if serializer is None: + serializer = "pickle" + if isinstance(serializer, str): + name = serializer.lower() + if name not in _BUILTIN_SERIALIZERS: + raise ValueError("Unsupported serializer. Use 'pickle' or 'json'.") + return _BUILTIN_SERIALIZERS[name] + if hasattr(serializer, "dumps") and hasattr(serializer, "loads"): + return serializer + raise TypeError("serializer must be a string or provide dumps/loads methods") + + def _make_blob(self, key: str): + path = f"{self.prefix}{key}" + return self.bucket.blob(path) + + def _serialize(self, value: Any) -> bytes: + data = self.serializer.dumps(value) + if self.compress: + return gzip.compress(data, compresslevel=self.compress_level) + return data + + def _deserialize(self, data: bytes) -> Any: + if self.compress: + data = gzip.decompress(data) + return self.serializer.loads(data) + + def get(self, key: str) -> Any | None: + blob = self._make_blob(key) + try: + data = blob.download_as_bytes() + value = self._deserialize(data) + if isinstance(value, dict) and value.get("__ac_type") == "entry": + entry = CacheEntry( + value=value.get("v"), + fresh_until=float(value.get("f", 0.0)), + created_at=float(value.get("c", 0.0)), + ) + return entry.value if entry.is_fresh() else None + return value + except Exception: + return None + + def set(self, key: str, value: Any, ttl: int = 0) -> None: + blob = self._make_blob(key) + import time + + now = time.time() + entry: CacheEntry | None = None + if isinstance(value, CacheEntry): + entry = value + elif ttl != 0: + entry = CacheEntry(value=value, fresh_until=now + ttl, created_at=now) + + payload = ( + { + "__ac_type": "entry", + "v": entry.value, + "f": entry.fresh_until, + "c": entry.created_at, + } + if entry + else value + ) + data = self._serialize(payload) + try: + if self._dedupe_writes: + try: + blob.reload() + if blob.metadata and blob.metadata.get("ac-hash") == _hash_bytes( + data + ): + return + except Exception: + pass + blob.metadata = blob.metadata or {} + if self._dedupe_writes: + 
blob.metadata["ac-hash"] = _hash_bytes(data) + blob.upload_from_string(data) + except Exception as e: + raise RuntimeError(f"GCSCache set failed: {e}") + + def delete(self, key: str) -> None: + blob = self._make_blob(key) + try: + blob.delete() + except Exception: + pass + + def exists(self, key: str) -> bool: + blob = self._make_blob(key) + try: + blob.reload() + return True + except Exception: + return False + + def get_entry(self, key: str) -> CacheEntry | None: + blob = self._make_blob(key) + try: + data = blob.download_as_bytes() + value = self._deserialize(data) + if isinstance(value, dict) and value.get("__ac_type") == "entry": + entry = CacheEntry( + value=value.get("v"), + fresh_until=float(value.get("f", 0.0)), + created_at=float(value.get("c", 0.0)), + ) + return entry + import time + + now = time.time() + return CacheEntry(value=value, fresh_until=float("inf"), created_at=now) + except Exception: + return None + + def set_entry(self, key: str, entry: CacheEntry, ttl: int | None = None) -> None: + import time + + if ttl is not None: + now = time.time() + entry = CacheEntry(value=entry.value, fresh_until=now + ttl, created_at=now) + payload = { + "__ac_type": "entry", + "v": entry.value, + "f": entry.fresh_until, + "c": entry.created_at, + } + data = self._serialize(payload) + blob = self._make_blob(key) + try: + if self._dedupe_writes: + try: + blob.reload() + if blob.metadata and blob.metadata.get("ac-hash") == _hash_bytes( + data + ): + return + except Exception: + pass + blob.metadata = blob.metadata or {} + if self._dedupe_writes: + blob.metadata["ac-hash"] = _hash_bytes(data) + blob.upload_from_string(data) + except Exception as e: + raise RuntimeError(f"GCSCache set_entry failed: {e}") + + def set_if_not_exists(self, key: str, value: Any, ttl: int) -> bool: + blob = self._make_blob(key) + try: + blob.upload_from_string(self._serialize(value), if_generation_match=0) + return True + except Exception: + return False + + def get_many(self, keys: list[str]) -> dict[str, Any]: + """Parallel fetch using threads.""" + results = {} + with ThreadPoolExecutor(max_workers=min(32, len(keys) + 1)) as executor: + future_to_key = {executor.submit(self.get, key): key for key in keys} + for future in future_to_key: + key = future_to_key[future] + try: + val = future.result() + if val is not None: + results[key] = val + except Exception: + pass + return results + + def set_many(self, mapping: dict[str, Any], ttl: int = 0) -> None: + """Parallel set using threads.""" + with ThreadPoolExecutor(max_workers=min(32, len(mapping) + 1)) as executor: + executor.map(lambda item: self.set(item[0], item[1], ttl), mapping.items()) diff --git a/src/advanced_caching/storage/hybrid.py b/src/advanced_caching/storage/hybrid.py new file mode 100644 index 0000000..04c0eaf --- /dev/null +++ b/src/advanced_caching/storage/hybrid.py @@ -0,0 +1,90 @@ +from __future__ import annotations + +import time +from typing import Any + +from .utils import CacheEntry, CacheStorage + + +class HybridCache: + """Two-level cache: L1 (InMem) + L2 (distributed).""" + + def __init__( + self, + l1_cache: CacheStorage | None = None, + l2_cache: CacheStorage | None = None, + l1_ttl: int = 60, + l2_ttl: int | None = None, + ): + if l2_cache is None: + raise ValueError("l2_cache is required for HybridCache") + self.l1 = l1_cache + self.l2 = l2_cache + self.l1_ttl = l1_ttl + self.l2_ttl = l2_ttl if l2_ttl is not None else l1_ttl * 2 + + def get(self, key: str) -> Any | None: + value = self.l1.get(key) if self.l1 else None + if value is 
not None: + return value + value = self.l2.get(key) + if value is not None and self.l1: + self.l1.set(key, value, self.l1_ttl) + return value + + def set(self, key: str, value: Any, ttl: int = 0) -> None: + if self.l1: + self.l1.set(key, value, min(ttl, self.l1_ttl) if ttl > 0 else self.l1_ttl) + l2_ttl = min(ttl, self.l2_ttl) if ttl > 0 else self.l2_ttl + self.l2.set(key, value, l2_ttl) + + def get_entry(self, key: str) -> CacheEntry | None: + entry = ( + self.l1.get_entry(key) + if self.l1 and hasattr(self.l1, "get_entry") + else None + ) + if entry is not None: + return entry + entry = self.l2.get_entry(key) if hasattr(self.l2, "get_entry") else None + if entry is not None and self.l1: + self.l1.set_entry(key, entry, ttl=self.l1_ttl) + return entry + value = self.l2.get(key) + if value is None: + return None + now = time.time() + entry = CacheEntry( + value=value, + fresh_until=now + self.l1_ttl if self.l1_ttl > 0 else float("inf"), + created_at=now, + ) + if self.l1: + self.l1.set_entry(key, entry, ttl=self.l1_ttl) + return entry + + def delete(self, key: str) -> None: + if self.l1: + self.l1.delete(key) + self.l2.delete(key) + + def exists(self, key: str) -> bool: + return (self.l1.exists(key) if self.l1 else False) or self.l2.exists(key) + + def set_if_not_exists(self, key: str, value: Any, ttl: int) -> bool: + l2_ttl = min(ttl, self.l2_ttl) if ttl > 0 else self.l2_ttl + success = self.l2.set_if_not_exists(key, value, l2_ttl) + if success and self.l1: + self.l1.set(key, value, min(ttl, self.l1_ttl) if ttl > 0 else self.l1_ttl) + return success + + def set_entry(self, key: str, entry: CacheEntry, ttl: int | None = None) -> None: + ttl = ttl if ttl is not None else max(int(entry.fresh_until - time.time()), 0) + l1_ttl = min(ttl, self.l1_ttl) if ttl > 0 else self.l1_ttl + l2_ttl = min(ttl, self.l2_ttl) if ttl > 0 else self.l2_ttl + if self.l1: + self.l1.set_entry(key, entry, ttl=l1_ttl) + if hasattr(self.l2, "set_entry"): + self.l2.set_entry(key, entry, ttl=l2_ttl) + else: + self.l2.set(key, entry.value, l2_ttl) diff --git a/src/advanced_caching/storage/inmem.py b/src/advanced_caching/storage/inmem.py new file mode 100644 index 0000000..cf5dda5 --- /dev/null +++ b/src/advanced_caching/storage/inmem.py @@ -0,0 +1,77 @@ +from __future__ import annotations + +import threading +import time +from typing import Any + +from .utils import CacheEntry + + +class InMemCache: + """Thread-safe in-memory cache with TTL support.""" + + def __init__(self): + self._data: dict[str, CacheEntry] = {} + self._lock = threading.RLock() + + def _make_entry(self, value: Any, ttl: int) -> CacheEntry: + now = time.time() + fresh_until = now + ttl if ttl > 0 else float("inf") + return CacheEntry(value=value, fresh_until=fresh_until, created_at=now) + + def get(self, key: str) -> Any | None: + with self._lock: + entry = self._data.get(key) + if entry is None: + return None + if time.time() >= entry.fresh_until: + del self._data[key] + return None + return entry.value + + def set(self, key: str, value: Any, ttl: int = 0) -> None: + entry = self._make_entry(value, ttl) + with self._lock: + self._data[key] = entry + + def delete(self, key: str) -> None: + with self._lock: + self._data.pop(key, None) + + def exists(self, key: str) -> bool: + return self.get(key) is not None + + def get_entry(self, key: str) -> CacheEntry | None: + with self._lock: + return self._data.get(key) + + def set_entry(self, key: str, entry: CacheEntry, ttl: int | None = None) -> None: + if ttl is not None: + entry = 
self._make_entry(entry.value, ttl) + with self._lock: + self._data[key] = entry + + def set_if_not_exists(self, key: str, value: Any, ttl: int) -> bool: + with self._lock: + now = time.time() + if key in self._data and self._data[key].is_fresh(now): + return False + entry = self._make_entry(value, ttl) + self._data[key] = entry + return True + + def clear(self) -> None: + with self._lock: + self._data.clear() + + def cleanup_expired(self) -> int: + with self._lock: + now = time.time() + expired_keys = [k for k, e in self._data.items() if e.fresh_until < now] + for k in expired_keys: + del self._data[k] + return len(expired_keys) + + @property + def lock(self): + return self._lock diff --git a/src/advanced_caching/storage/local_file.py b/src/advanced_caching/storage/local_file.py new file mode 100644 index 0000000..a4b1e90 --- /dev/null +++ b/src/advanced_caching/storage/local_file.py @@ -0,0 +1,154 @@ +from __future__ import annotations + +import os +import time +from pathlib import Path +from typing import Any + +import gzip + +from .utils import CacheEntry, Serializer, _BUILTIN_SERIALIZERS + + +class LocalFileCache: + """Filesystem-backed cache with TTL and optional dedupe.""" + + def __init__( + self, + root_dir: str | Path, + serializer: str | Serializer | None = "pickle", + compress: bool = True, + compress_level: int = 6, + dedupe_writes: bool = False, + ): + self.root = Path(root_dir) + self.root.mkdir(parents=True, exist_ok=True) + self.compress = compress + self.compress_level = compress_level + self.serializer = self._resolve_serializer(serializer) + self._dedupe_writes = dedupe_writes + + def _resolve_serializer(self, serializer: str | Serializer | None) -> Serializer: + if serializer is None: + serializer = "pickle" + if isinstance(serializer, str): + name = serializer.lower() + if name not in _BUILTIN_SERIALIZERS: + raise ValueError("Unsupported serializer. 
Use 'pickle' or 'json'.") + return _BUILTIN_SERIALIZERS[name] + if hasattr(serializer, "dumps") and hasattr(serializer, "loads"): + return serializer + raise TypeError("serializer must be a string or provide dumps/loads methods") + + def _path(self, key: str) -> Path: + return self.root / key + + def _serialize_entry(self, entry: CacheEntry) -> bytes: + payload = { + "__ac_type": "entry", + "v": entry.value, + "f": entry.fresh_until, + "c": entry.created_at, + } + data = self.serializer.dumps(payload) + if self.compress: + data = gzip.compress(data, compresslevel=self.compress_level) + return data + + def _deserialize_entry(self, data: bytes) -> CacheEntry | None: + try: + if self.compress: + data = gzip.decompress(data) + payload = self.serializer.loads(data) + if isinstance(payload, CacheEntry): + return payload + if isinstance(payload, dict) and payload.get("__ac_type") == "entry": + return CacheEntry( + value=payload.get("v"), + fresh_until=float(payload.get("f", 0.0)), + created_at=float(payload.get("c", 0.0)), + ) + now = time.time() + return CacheEntry(value=payload, fresh_until=float("inf"), created_at=now) + except Exception: + return None + + def _atomic_write(self, path: Path, data: bytes) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + tmp_path = path.with_suffix(path.suffix + ".tmp") + with open(tmp_path, "wb") as tmp: + tmp.write(data) + os.replace(tmp_path, path) + + def get_entry(self, key: str) -> CacheEntry | None: + path = self._path(key) + if not path.exists(): + return None + try: + entry = self._deserialize_entry(path.read_bytes()) + except Exception: + return None + if entry is None: + return None + if not entry.is_fresh(): + try: + path.unlink() + except Exception: + pass + return None + return entry + + def get(self, key: str) -> Any | None: + entry = self.get_entry(key) + return entry.value if entry is not None else None + + def set(self, key: str, value: Any, ttl: int = 0) -> None: + now = time.time() + fresh_until = now + ttl if ttl > 0 else float("inf") + entry = CacheEntry(value=value, fresh_until=fresh_until, created_at=now) + data = self._serialize_entry(entry) + path = self._path(key) + if self._dedupe_writes and ttl <= 0 and path.exists(): + try: + existing_entry = self.get_entry(key) + if existing_entry is not None and existing_entry.value == value: + return + except Exception: + pass + self._atomic_write(path, data) + + def delete(self, key: str) -> None: + path = self._path(key) + try: + path.unlink() + except Exception: + pass + + def exists(self, key: str) -> bool: + return self.get_entry(key) is not None + + def set_entry(self, key: str, entry: CacheEntry, ttl: int | None = None) -> None: + now = time.time() + if ttl is not None: + entry = CacheEntry( + value=entry.value, + fresh_until=now + ttl if ttl > 0 else float("inf"), + created_at=now, + ) + data = self._serialize_entry(entry) + path = self._path(key) + if self._dedupe_writes and ttl is not None and ttl <= 0 and path.exists(): + try: + existing_entry = self.get_entry(key) + if existing_entry is not None and existing_entry.value == entry.value: + return + except Exception: + pass + self._atomic_write(path, data) + + def set_if_not_exists(self, key: str, value: Any, ttl: int) -> bool: + existing = self.get_entry(key) + if existing is not None: + return False + self.set(key, value, ttl) + return True diff --git a/src/advanced_caching/storage/redis_cache.py b/src/advanced_caching/storage/redis_cache.py new file mode 100644 index 0000000..b5d51fb --- /dev/null +++ 
b/src/advanced_caching/storage/redis_cache.py @@ -0,0 +1,176 @@ +from __future__ import annotations + +import math +import time +from typing import Any + +from .utils import CacheEntry, Serializer, _BUILTIN_SERIALIZERS + +try: + import redis +except ImportError: # pragma: no cover - optional + redis = None # type: ignore + + +class RedisCache: + """Redis-backed cache storage with optional dedupe writes.""" + + def __init__( + self, + redis_client: Any, + prefix: str = "", + serializer: str | Serializer | None = "pickle", + dedupe_writes: bool = False, + ): + if redis is None: + raise ImportError("redis package required. Install: pip install redis") + self.client = redis_client + self.prefix = prefix + self._serializer, self._wrap_entries = self._resolve_serializer(serializer) + self._dedupe_writes = dedupe_writes + + @staticmethod + def _wrap_payload(obj: Any) -> Any: + if isinstance(obj, CacheEntry): + return { + "__ac_type": "entry", + "v": obj.value, + "f": obj.fresh_until, + "c": obj.created_at, + } + return {"__ac_type": "value", "v": obj} + + @staticmethod + def _unwrap_payload(obj: Any) -> Any: + if isinstance(obj, dict): + obj_type = obj.get("__ac_type") + if obj_type == "entry": + return CacheEntry( + value=obj.get("v"), + fresh_until=float(obj.get("f", 0.0)), + created_at=float(obj.get("c", 0.0)), + ) + if obj_type == "value": + return obj.get("v") + return obj + + def _serialize(self, obj: Any) -> bytes: + if self._wrap_entries: + return self._serializer.dumps(self._wrap_payload(obj)) + return self._serializer.dumps(obj) + + def _deserialize(self, data: bytes) -> Any: + obj = self._serializer.loads(data) + if self._wrap_entries: + return self._unwrap_payload(obj) + return obj + + def _resolve_serializer( + self, serializer: str | Serializer | None + ) -> tuple[Serializer, bool]: + if serializer is None: + serializer = "pickle" + if isinstance(serializer, str): + name = serializer.lower() + if name not in _BUILTIN_SERIALIZERS: + raise ValueError("Unsupported serializer. 
Use 'pickle' or 'json'.") + serializer_obj = _BUILTIN_SERIALIZERS[name] + return serializer_obj, not bool( + getattr(serializer_obj, "handles_entries", False) + ) + if hasattr(serializer, "dumps") and hasattr(serializer, "loads"): + wrap = not bool(getattr(serializer, "handles_entries", False)) + return serializer, wrap + raise TypeError("serializer must be a string or provide dumps/loads methods") + + def _make_key(self, key: str) -> str: + return f"{self.prefix}{key}" + + def get(self, key: str) -> Any | None: + try: + data = self.client.get(self._make_key(key)) + if data is None: + return None + value = self._deserialize(data) + if isinstance(value, CacheEntry): + return value.value if value.is_fresh() else None + return value + except Exception: + return None + + def set(self, key: str, value: Any, ttl: int = 0) -> None: + try: + data = self._serialize(value) + if self._dedupe_writes: + existing = self.client.get(self._make_key(key)) + if existing is not None and existing == data: + if ttl > 0: + expires = max(1, int(math.ceil(ttl))) + self.client.expire(self._make_key(key), expires) + return + if ttl > 0: + expires = max(1, int(math.ceil(ttl))) + self.client.setex(self._make_key(key), expires, data) + else: + self.client.set(self._make_key(key), data) + except Exception as e: + raise RuntimeError(f"Redis set failed: {e}") + + def delete(self, key: str) -> None: + try: + self.client.delete(self._make_key(key)) + except Exception: + pass + + def exists(self, key: str) -> bool: + try: + entry = self.get_entry(key) + if entry is None: + return False + return entry.is_fresh() + except Exception: + return False + + def get_entry(self, key: str) -> CacheEntry | None: + try: + data = self.client.get(self._make_key(key)) + if data is None: + return None + value = self._deserialize(data) + if isinstance(value, CacheEntry): + return value + now = time.time() + return CacheEntry(value=value, fresh_until=float("inf"), created_at=now) + except Exception: + return None + + def set_entry(self, key: str, entry: CacheEntry, ttl: int | None = None) -> None: + try: + data = self._serialize(entry) + if self._dedupe_writes: + existing = self.client.get(self._make_key(key)) + if existing is not None and existing == data: + if ttl is not None and ttl > 0: + expires = max(1, int(math.ceil(ttl))) + self.client.expire(self._make_key(key), expires) + return + expires = None + if ttl is not None and ttl > 0: + expires = max(1, int(math.ceil(ttl))) + if expires: + self.client.setex(self._make_key(key), expires, data) + else: + self.client.set(self._make_key(key), data) + except Exception as e: + raise RuntimeError(f"Redis set_entry failed: {e}") + + def set_if_not_exists(self, key: str, value: Any, ttl: int) -> bool: + try: + data = self._serialize(value) + expires = None + if ttl > 0: + expires = max(1, int(math.ceil(ttl))) + result = self.client.set(self._make_key(key), data, ex=expires, nx=True) + return bool(result) + except Exception: + return False diff --git a/src/advanced_caching/storage/s3_cache.py b/src/advanced_caching/storage/s3_cache.py new file mode 100644 index 0000000..19fb1cd --- /dev/null +++ b/src/advanced_caching/storage/s3_cache.py @@ -0,0 +1,213 @@ +from __future__ import annotations + +import gzip +import time +from concurrent.futures import ThreadPoolExecutor +from typing import Any + +from .utils import CacheEntry, Serializer, _BUILTIN_SERIALIZERS, _hash_bytes + +try: + import boto3 +except ImportError: # pragma: no cover - optional + boto3 = None + + +class S3Cache: + def __init__( + 
self, + bucket: str, + prefix: str = "", + s3_client: Any | None = None, + serializer: str | Serializer | None = "pickle", + compress: bool = True, + compress_level: int = 6, + dedupe_writes: bool = False, + ): + if boto3 is None: + raise ImportError("boto3 required for S3Cache. Install: pip install boto3") + self.bucket = bucket + self.prefix = prefix + self.client = s3_client or boto3.client("s3") + self.compress = compress + self.compress_level = compress_level + self.serializer = self._resolve_serializer(serializer) + self._dedupe_writes = dedupe_writes + + def _resolve_serializer(self, serializer: str | Serializer | None) -> Serializer: + if serializer is None: + serializer = "pickle" + if isinstance(serializer, str): + name = serializer.lower() + if name not in _BUILTIN_SERIALIZERS: + raise ValueError("Unsupported serializer. Use 'pickle' or 'json'.") + return _BUILTIN_SERIALIZERS[name] + if hasattr(serializer, "dumps") and hasattr(serializer, "loads"): + return serializer + raise TypeError("serializer must be a string or provide dumps/loads methods") + + def _make_key(self, key: str) -> str: + return f"{self.prefix}{key}" + + def _serialize(self, value: Any) -> bytes: + data = self.serializer.dumps(value) + if self.compress: + return gzip.compress(data, compresslevel=self.compress_level) + return data + + def _deserialize(self, data: bytes) -> Any: + if self.compress: + data = gzip.decompress(data) + return self.serializer.loads(data) + + def get(self, key: str) -> Any | None: + try: + obj = self.client.get_object(Bucket=self.bucket, Key=self._make_key(key)) + body = obj["Body"].read() + value = self._deserialize(body) + if isinstance(value, dict) and value.get("__ac_type") == "entry": + entry = CacheEntry( + value=value.get("v"), + fresh_until=float(value.get("f", 0.0)), + created_at=float(value.get("c", 0.0)), + ) + return entry.value if entry.is_fresh() else None + return value + except Exception: + return None + + def set(self, key: str, value: Any, ttl: int = 0) -> None: + try: + now = time.time() + entry: CacheEntry | None = None + if isinstance(value, CacheEntry): + entry = value + elif ttl != 0: + entry = CacheEntry(value=value, fresh_until=now + ttl, created_at=now) + + payload = ( + { + "__ac_type": "entry", + "v": entry.value, + "f": entry.fresh_until, + "c": entry.created_at, + } + if entry + else value + ) + body = self._serialize(payload) + + if self._dedupe_writes: + try: + head = self.client.head_object( + Bucket=self.bucket, Key=self._make_key(key) + ) + if head and head.get("Metadata", {}).get("ac-hash") == _hash_bytes( + body + ): + return + except Exception: + pass + put_kwargs = { + "Bucket": self.bucket, + "Key": self._make_key(key), + "Body": body, + } + if self._dedupe_writes: + put_kwargs["Metadata"] = {"ac-hash": _hash_bytes(body)} + self.client.put_object(**put_kwargs) + except Exception as e: + raise RuntimeError(f"S3Cache set failed: {e}") + + def delete(self, key: str) -> None: + try: + self.client.delete_object(Bucket=self.bucket, Key=self._make_key(key)) + except Exception: + pass + + def exists(self, key: str) -> bool: + try: + self.client.head_object(Bucket=self.bucket, Key=self._make_key(key)) + return True + except Exception: + return False + + def get_entry(self, key: str) -> CacheEntry | None: + try: + obj = self.client.get_object(Bucket=self.bucket, Key=self._make_key(key)) + body = obj["Body"].read() + value = self._deserialize(body) + if isinstance(value, dict) and value.get("__ac_type") == "entry": + entry = CacheEntry( + 
value=value.get("v"), + fresh_until=float(value.get("f", 0.0)), + created_at=float(value.get("c", 0.0)), + ) + return entry + now = time.time() + return CacheEntry(value=value, fresh_until=float("inf"), created_at=now) + except Exception: + return None + + def set_entry(self, key: str, entry: CacheEntry, ttl: int | None = None) -> None: + if ttl is not None: + now = time.time() + entry = CacheEntry(value=entry.value, fresh_until=now + ttl, created_at=now) + payload = { + "__ac_type": "entry", + "v": entry.value, + "f": entry.fresh_until, + "c": entry.created_at, + } + try: + body = self._serialize(payload) + if self._dedupe_writes: + try: + head = self.client.head_object( + Bucket=self.bucket, Key=self._make_key(key) + ) + if head and head.get("Metadata", {}).get("ac-hash") == _hash_bytes( + body + ): + return + except Exception: + pass + put_kwargs = { + "Bucket": self.bucket, + "Key": self._make_key(key), + "Body": body, + } + if self._dedupe_writes: + put_kwargs["Metadata"] = {"ac-hash": _hash_bytes(body)} + self.client.put_object(**put_kwargs) + except Exception as e: + raise RuntimeError(f"S3Cache set_entry failed: {e}") + + def set_if_not_exists(self, key: str, value: Any, ttl: int) -> bool: + if self.exists(key): + return False + try: + self.set(key, value, ttl) + return True + except Exception: + return False + + def get_many(self, keys: list[str]) -> dict[str, Any]: + """Parallel fetch using threads.""" + results = {} + with ThreadPoolExecutor(max_workers=min(32, len(keys) + 1)) as executor: + future_to_key = {executor.submit(self.get, key): key for key in keys} + for future in future_to_key: + key = future_to_key[future] + try: + val = future.result() + if val is not None: + results[key] = val + except Exception: + pass + return results + + def set_many(self, mapping: dict[str, Any], ttl: int = 0) -> None: + """Parallel set using threads.""" + with ThreadPoolExecutor(max_workers=min(32, len(mapping) + 1)) as executor: + executor.map(lambda item: self.set(item[0], item[1], ttl), mapping.items()) diff --git a/src/advanced_caching/storage/utils.py b/src/advanced_caching/storage/utils.py new file mode 100644 index 0000000..9f76716 --- /dev/null +++ b/src/advanced_caching/storage/utils.py @@ -0,0 +1,124 @@ +from __future__ import annotations + +import gzip +import hashlib +import json +import math +import pickle +import time +from dataclasses import dataclass +from typing import Any, Protocol + +import orjson + + +class Serializer(Protocol): + """Simple serializer protocol used by cache backends.""" + + def dumps(self, obj: Any) -> bytes: ... + + def loads(self, data: bytes) -> Any: ... + + +class PickleSerializer: + """Pickle serializer using highest protocol (fastest, flexible).""" + + __slots__ = () + handles_entries = True + + @staticmethod + def dumps(obj: Any) -> bytes: + return pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL) + + @staticmethod + def loads(data: bytes) -> Any: + return pickle.loads(data) + + +class JsonSerializer: + """JSON serializer for text-friendly payloads (wraps CacheEntry). 
Uses orjson""" + + __slots__ = () + handles_entries = False + + @staticmethod + def dumps(obj: Any) -> bytes: + return orjson.dumps(obj) + + @staticmethod + def loads(data: bytes) -> Any: + return orjson.loads(data) + + +_BUILTIN_SERIALIZERS: dict[str, Serializer] = { + "pickle": PickleSerializer(), + "json": JsonSerializer(), +} + + +def _hash_bytes(data: bytes) -> str: + """Cheap content hash (blake2b) used to skip redundant writes.""" + return hashlib.blake2b(data, digest_size=16).hexdigest() + + +@dataclass(slots=True) +class CacheEntry: + """Internal cache entry with TTL support.""" + + value: Any + fresh_until: float # Unix timestamp + created_at: float + + def is_fresh(self, now: float | None = None) -> bool: + if now is None: + now = time.time() + return now < self.fresh_until + + def age(self, now: float | None = None) -> float: + if now is None: + now = time.time() + return now - self.created_at + + +class CacheStorage(Protocol): + """Protocol for cache storage backends.""" + + def get(self, key: str) -> Any | None: ... + + def set(self, key: str, value: Any, ttl: int = 0) -> None: ... + + def delete(self, key: str) -> None: ... + + def exists(self, key: str) -> bool: ... + + def get_entry(self, key: str) -> CacheEntry | None: ... + + def set_entry( + self, key: str, entry: CacheEntry, ttl: int | None = None + ) -> None: ... + + def set_if_not_exists(self, key: str, value: Any, ttl: int) -> bool: ... + + def get_many(self, keys: list[str]) -> dict[str, Any]: + """Retrieve multiple keys at once. Default implementation is sequential.""" + return {k: v for k in keys if (v := self.get(k)) is not None} + + def set_many(self, mapping: dict[str, Any], ttl: int = 0) -> None: + """Set multiple keys at once. Default implementation is sequential.""" + for k, v in mapping.items(): + self.set(k, v, ttl) + + +def validate_cache_storage(cache: Any) -> bool: + required_methods = [ + "get", + "set", + "delete", + "exists", + "set_if_not_exists", + "get_entry", + "set_entry", + ] + return all( + hasattr(cache, m) and callable(getattr(cache, m)) for m in required_methods + ) diff --git a/tests/test_bg_writer_reader.py b/tests/test_bg_writer_reader.py new file mode 100644 index 0000000..a4aa801 --- /dev/null +++ b/tests/test_bg_writer_reader.py @@ -0,0 +1,202 @@ +import asyncio +import time +import pytest + +from advanced_caching import BGCache, InMemCache + + +@pytest.mark.asyncio +async def test_single_writer_multi_reader_async_with_fallback(): + calls = {"n": 0} + + shared_cache = InMemCache() + + @BGCache.register_writer( + "shared", interval_seconds=0.01, run_immediately=True, cache=shared_cache + ) + async def writer(): + calls["n"] += 1 + return {"value": calls["n"]} + + reader_cache = shared_cache + + reader_a = BGCache.get_reader( + "shared", + interval_seconds=0.01, + ttl=None, + run_immediately=True, + cache=reader_cache, + ) + reader_b = BGCache.get_reader( + "shared", + interval_seconds=0.01, + ttl=None, + run_immediately=True, + cache=reader_cache, + ) + + async def wait_for_value(reader, timeout=0.2): + start = asyncio.get_event_loop().time() + while asyncio.get_event_loop().time() - start < timeout: + val = reader() + if val is not None: + return val + await asyncio.sleep(0.01) + return None + + v1 = await wait_for_value(reader_a) + v2 = await wait_for_value(reader_b) + assert v1 == v2 + assert v1 is not None and v1.get("value", 0) >= 1 + + await asyncio.sleep(0.05) + v3 = await wait_for_value(reader_a) + assert v3 is not None and v3.get("value", 0) >= v1.get("value", 0) + + 
BGCache.shutdown() + + +@pytest.mark.asyncio +async def test_reader_without_fallback_returns_none(): + reader = BGCache.get_reader( + "missing", interval_seconds=0, ttl=0, run_immediately=False + ) + assert reader() is None + BGCache.shutdown() + + +def test_single_writer_enforced_sync(): + @BGCache.register_writer( + "only_one", interval_seconds=0.01, run_immediately=False, cache=InMemCache() + ) + def writer(): + return 1 + + with pytest.raises(ValueError): + + @BGCache.register_writer("only_one", interval_seconds=0.01) + def writer2(): + return 2 + + BGCache.shutdown() + + +@pytest.mark.asyncio +async def test_sync_writer_async_reader_fallback_runs_in_executor(): + calls = {"n": 0} + + shared_cache = InMemCache() + + @BGCache.register_writer( + "mix", interval_seconds=0.01, ttl=1, run_immediately=False, cache=shared_cache + ) + def writer_sync(): + calls["n"] += 1 + return calls["n"] + + reader_async = BGCache.get_reader( + "mix", + interval_seconds=0.01, + ttl=1, + run_immediately=False, + cache=shared_cache, + ) + + # First call triggers load_once pull from source cache (which is empty at start) + assert reader_async() is None + # Populate source via writer + _ = writer_sync() + await asyncio.sleep(0.05) + + # Reader should eventually see the value after writer populates source cache. + async def wait_for_value(reader, timeout=0.5): + start = asyncio.get_event_loop().time() + while asyncio.get_event_loop().time() - start < timeout: + val = reader() + if val is not None: + return val + await asyncio.sleep(0.01) + return None + + assert await wait_for_value(reader_async) is not None + + BGCache.shutdown() + + +@pytest.mark.asyncio +async def test_e2e_async_writer_reader_background_refresh(): + shared_cache = InMemCache() + calls = {"n": 0} + + @BGCache.register_writer( + "bg_async", + interval_seconds=0.05, + run_immediately=True, + cache=shared_cache, + ) + async def writer_async(): + calls["n"] += 1 + return {"count": calls["n"]} + + reader = BGCache.get_reader( + "bg_async", + interval_seconds=0.05, + ttl=None, + run_immediately=True, + cache=shared_cache, + ) + + async def wait_for_value(reader, min_count, timeout=0.5): + start = asyncio.get_event_loop().time() + while asyncio.get_event_loop().time() - start < timeout: + val = reader() + if val is not None and val.get("count", 0) >= min_count: + return val + await asyncio.sleep(0.02) + return None + + first = await wait_for_value(reader, 1) + assert first is not None and first.get("count", 0) >= 1 + + updated = await wait_for_value(reader, 2) + assert updated is not None and updated.get("count", 0) >= 2 + + BGCache.shutdown() + + +def test_e2e_sync_writer_reader_background_refresh(): + shared_cache = InMemCache() + calls = {"n": 0} + + @BGCache.register_writer( + "bg_sync", + interval_seconds=0.05, + run_immediately=True, + cache=shared_cache, + ) + def writer_sync(): + calls["n"] += 1 + return {"count": calls["n"]} + + reader = BGCache.get_reader( + "bg_sync", + interval_seconds=0.05, + ttl=None, + run_immediately=True, + cache=shared_cache, + ) + + def wait_for_value(reader_fn, min_count, timeout=0.5): + start = time.time() + while time.time() - start < timeout: + val = reader_fn() + if val is not None and val.get("count", 0) >= min_count: + return val + time.sleep(0.02) + return None + + first = wait_for_value(reader, 1) + assert first is not None and first.get("count", 0) >= 1 + + updated = wait_for_value(reader, 2) + assert updated is not None and updated.get("count", 0) >= 2 diff --git a/tests/test_gcs_cache_integration.py 
b/tests/test_gcs_cache_integration.py new file mode 100644 index 0000000..f05e377 --- /dev/null +++ b/tests/test_gcs_cache_integration.py @@ -0,0 +1,166 @@ +import os +import time +import pytest + +try: + from google.cloud import storage +except ImportError: # pragma: no cover + storage = None + +from advanced_caching import GCSCache, TTLCache, SWRCache, ChainCache, InMemCache + +EMULATOR = os.getenv("STORAGE_EMULATOR_HOST") or "http://localhost:4443" +USE_EMULATOR = bool(EMULATOR) + +pytestmark = pytest.mark.skipif( + storage is None, reason="google-cloud-storage not installed" +) + + +def _client(): + if storage is None: + return None + if USE_EMULATOR: + os.environ.setdefault("STORAGE_EMULATOR_HOST", EMULATOR) + return storage.Client.create_anonymous_client() + return storage.Client() + + +@pytest.mark.integration +@pytest.mark.skipif(storage is None, reason="gcs client missing") +def test_gcscache_set_get_and_dedupe(): + client = _client() + bucket = client.bucket("test-bkt") + bucket.storage_class = "STANDARD" + try: + client.create_bucket(bucket) + except Exception: + pass + + cache = GCSCache( + bucket="test-bkt", + prefix="t/", + client=client, + serializer="json", + dedupe_writes=True, + ) + cache.set("k1", {"v": 1}, ttl=0) + assert cache.get("k1") == {"v": 1} + + cache.set("k1", {"v": 1}, ttl=0) # dedupe should skip rewrite + cache.set("k1", {"v": 2}, ttl=0) + assert cache.get("k1") == {"v": 2} + + +@pytest.mark.integration +@pytest.mark.skipif(storage is None, reason="gcs client missing") +def test_ttlcache_with_gcscache_decorator(): + client = _client() + bucket = client.bucket("test-bkt2") + bucket.storage_class = "STANDARD" + try: + client.create_bucket(bucket) + except Exception: + pass + + cache = GCSCache( + bucket="test-bkt2", + prefix="u/", + client=client, + serializer="json", + dedupe_writes=False, + compress=False, + ) + + calls = {"n": 0} + + @TTLCache.cached("user:{user_id}", ttl=0.2, cache=cache) + def fetch_user(user_id: int): + calls["n"] += 1 + return {"id": user_id, "n": calls["n"]} + + first = fetch_user(1) + second = fetch_user(1) + assert first == second == {"id": 1, "n": 1} + + time.sleep(1.0) + # Force delete to ensure cache miss if TTL/time drift is an issue + cache.delete("user:1") + third = fetch_user(1) + # TTL expired, should recompute + assert third["n"] >= 2 + + +@pytest.mark.integration +@pytest.mark.skipif(storage is None, reason="gcs client missing") +def test_swrcache_with_gcscache_decorator(): + client = _client() + bucket = client.bucket("test-bkt-swr") + try: + client.create_bucket(bucket) + except Exception: + pass + cache = GCSCache( + bucket="test-bkt-swr", prefix="swr/", client=client, serializer="json" + ) + + calls = {"n": 0} + + @SWRCache.cached("data:{id}", ttl=0.5, stale_ttl=1.0, cache=cache) + def fetch_data(id: int): + calls["n"] += 1 + return {"id": id, "n": calls["n"]} + + # 1. Initial fetch + v1 = fetch_data(1) + assert v1["n"] == 1 + + # 2. Fresh hit + v2 = fetch_data(1) + assert v2["n"] == 1 + + # 3. Wait for TTL to expire but within stale_ttl + time.sleep(0.6) + # Should return stale value immediately, trigger background refresh + v3 = fetch_data(1) + assert v3["n"] == 1 + + # 4. 
Wait for background refresh to complete + time.sleep(1.0) + # Next call should get the refreshed value + v4 = fetch_data(1) + assert v4["n"] >= 2 + + +@pytest.mark.integration +@pytest.mark.skipif(storage is None, reason="gcs client missing") +def test_chaincache_with_gcscache(): + client = _client() + bucket = client.bucket("test-bkt-chain") + try: + client.create_bucket(bucket) + except Exception: + pass + + gcs_cache = GCSCache( + bucket="test-bkt-chain", prefix="chain/", client=client, serializer="json" + ) + # L1: InMem (fast), L2: GCS (durable) + chain = ChainCache([(InMemCache(), 0.1), (gcs_cache, 1.0)]) + + # Set in chain (propagates to both) + chain.set("key1", "value1", ttl=1.0) + + # Verify L1 has it + assert chain.levels[0][0].get("key1") == "value1" + # Verify L2 (GCS) has it + assert gcs_cache.get("key1") == "value1" + + # Clear L1 to force promotion from L2 + chain.levels[0][0].clear() + assert chain.levels[0][0].get("key1") is None + + # Get from chain -> should fetch from GCS and repopulate L1 + val = chain.get("key1") + assert val == "value1" + assert chain.levels[0][0].get("key1") == "value1" diff --git a/tests/test_local_file_cache.py b/tests/test_local_file_cache.py new file mode 100644 index 0000000..2e563e1 --- /dev/null +++ b/tests/test_local_file_cache.py @@ -0,0 +1,84 @@ +import os +import time +import tempfile + +from advanced_caching import LocalFileCache + + +def test_local_file_cache_set_get_and_expiry(): + with tempfile.TemporaryDirectory() as tmpdir: + cache = LocalFileCache(tmpdir) + cache.set("foo", "bar", ttl=0.1) + assert cache.get("foo") == "bar" + time.sleep(0.2) + assert cache.get("foo") is None + + +def test_local_file_cache_dedupe_writes(): + with tempfile.TemporaryDirectory() as tmpdir: + cache = LocalFileCache(tmpdir, dedupe_writes=True) + cache.set("foo", {"a": 1}, ttl=0) + mtime1 = os.path.getmtime(os.path.join(tmpdir, "foo")) + time.sleep(0.05) + cache.set("foo", {"a": 1}, ttl=0) + mtime2 = os.path.getmtime(os.path.join(tmpdir, "foo")) + # Allow filesystem timestamp granularity drift; ensure dedupe prevented meaningful rewrite + assert cache.get("foo") == {"a": 1} + assert mtime2 <= mtime1 + 0.1 + cache.set("foo", {"a": 2}, ttl=0) + mtime3 = os.path.getmtime(os.path.join(tmpdir, "foo")) + assert mtime3 > mtime2 + + +def test_ttlcache_with_local_file_cache_decorator(): + from advanced_caching import TTLCache + + calls = {"n": 0} + with tempfile.TemporaryDirectory() as tmpdir: + cache = LocalFileCache(tmpdir) + + @TTLCache.cached("demo", ttl=0.2, cache=cache) + def compute(): + calls["n"] += 1 + return calls["n"] + + first = compute() + second = compute() + assert first == second == 1 # served from cache + time.sleep(0.25) + third = compute() + assert third == 2 # cache expired, recomputed + + +def test_chaincache_with_local_file_and_ttlcache(): + from advanced_caching import ChainCache, InMemCache, TTLCache + + calls = {"n": 0} + with tempfile.TemporaryDirectory() as tmpdir: + l1 = InMemCache() + l2 = LocalFileCache(tmpdir) + chain = ChainCache([(l1, 0), (l2, None)]) + + @TTLCache.cached("chain:{user_id}", ttl=0.2, cache=chain) + def fetch_user(user_id: int): + calls["n"] += 1 + return {"id": user_id, "v": calls["n"]} + + # First call populates chain (both L1 and file) + u1 = fetch_user(1) + assert u1 == {"id": 1, "v": 1} + + # L1 hit + u2 = fetch_user(1) + assert u2 == u1 + + # Clear L1 by recreating chain with fresh InMem but same file backend + l1b = InMemCache() + chain2 = ChainCache([(l1b, 0), (l2, None)]) + + 
@TTLCache.cached("chain:{user_id}", ttl=0.2, cache=chain2) + def fetch_user_again(user_id: int): + return fetch_user(user_id) # will hit file backend via chain2 + + u3 = fetch_user_again(1) + assert u3 == u1 # pulled from LocalFileCache via chain diff --git a/tests/test_s3_cache_integration.py b/tests/test_s3_cache_integration.py new file mode 100644 index 0000000..2318b2d --- /dev/null +++ b/tests/test_s3_cache_integration.py @@ -0,0 +1,171 @@ +import os +import time +import pytest + +try: + import boto3 +except ImportError: # pragma: no cover + boto3 = None + +try: + from moto import mock_aws +except ImportError: # pragma: no cover + mock_aws = None + +from advanced_caching import S3Cache, TTLCache, SWRCache, ChainCache, InMemCache + +S3_ENDPOINT = os.getenv("S3_ENDPOINT_URL") +USE_REAL_S3 = bool(S3_ENDPOINT) + +pytestmark = pytest.mark.skipif(boto3 is None, reason="boto3 not installed") + + +def _maybe_mock(fn): + if USE_REAL_S3 or mock_aws is None: + return fn + return mock_aws(fn) + + +def _client(): + if not boto3: + return None + kwargs = {"region_name": os.getenv("AWS_REGION", "us-east-1")} + if USE_REAL_S3: + kwargs.update( + endpoint_url=S3_ENDPOINT, + aws_access_key_id=os.getenv("AWS_ACCESS_KEY_ID", "test"), + aws_secret_access_key=os.getenv("AWS_SECRET_ACCESS_KEY", "test"), + ) + return boto3.client("s3", **kwargs) + + +@_maybe_mock +@pytest.mark.parametrize("dedupe", [True]) +def test_s3cache_set_get_and_dedupe(dedupe): + client = _client() + try: + client.create_bucket(Bucket="test-bkt") + except Exception: + pass + cache = S3Cache( + bucket="test-bkt", + prefix="t/", + s3_client=client, + serializer="json", + dedupe_writes=dedupe, + ) + + cache.set("k1", {"v": 1}, ttl=0) + assert cache.get("k1") == {"v": 1} + + cache.set("k1", {"v": 1}, ttl=0) + assert cache.get("k1") == {"v": 1} + + cache.set("k1", {"v": 2}, ttl=0) + assert cache.get("k1") == {"v": 2} + + +@_maybe_mock +def test_ttlcache_with_s3cache_decorator(): + client = _client() + try: + client.create_bucket(Bucket="test-bkt") + except Exception: + pass + cache = S3Cache( + bucket="test-bkt", + prefix="u/", + s3_client=client, + serializer="json", + dedupe_writes=False, + compress=False, + ) + + calls = {"n": 0} + + @TTLCache.cached("user:{user_id}", ttl=0.2, cache=cache) + def fetch_user(user_id: int): + calls["n"] += 1 + return {"id": user_id, "n": calls["n"]} + + first = fetch_user(1) + second = fetch_user(1) + assert first == second == {"id": 1, "n": 1} + + time.sleep(1.0) + # Force delete to ensure cache miss if TTL/time drift is an issue + cache.delete("user:1") + third = fetch_user(1) + # TTL expired, should recompute + assert third["n"] >= 2 + + +@_maybe_mock +def test_swrcache_with_s3cache_decorator(): + client = _client() + try: + client.create_bucket(Bucket="test-bkt-swr") + except Exception: + pass + cache = S3Cache( + bucket="test-bkt-swr", prefix="swr/", s3_client=client, serializer="json" + ) + + calls = {"n": 0} + + @SWRCache.cached("data:{id}", ttl=0.5, stale_ttl=1.0, cache=cache) + def fetch_data(id: int): + calls["n"] += 1 + return {"id": id, "n": calls["n"]} + + # 1. Initial fetch + v1 = fetch_data(1) + assert v1["n"] == 1 + + # 2. Fresh hit + v2 = fetch_data(1) + assert v2["n"] == 1 + + # 3. Wait for TTL to expire but within stale_ttl + time.sleep(0.6) + # Should return stale value immediately, trigger background refresh + v3 = fetch_data(1) + assert v3["n"] == 1 + + # 4. 
Wait for background refresh to complete + time.sleep(1.0) + # Next call should get the refreshed value + v4 = fetch_data(1) + assert v4["n"] >= 2 + + +@_maybe_mock +def test_chaincache_with_s3cache(): + client = _client() + try: + client.create_bucket(Bucket="test-bkt-chain") + except Exception: + pass + + s3_cache = S3Cache( + bucket="test-bkt-chain", prefix="chain/", s3_client=client, serializer="json" + ) + # L1: InMem (fast), L2: S3 (durable) + chain = ChainCache([(InMemCache(), 0.1), (s3_cache, 1.0)]) + + # Set in chain (propagates to both) + chain.set("key1", "value1", ttl=1.0) + + # Verify L1 has it + assert chain.levels[0][0].get("key1") == "value1" + # Verify L2 (S3) has it + assert s3_cache.get("key1") == "value1" + + # Clear L1 to force promotion from L2 + chain.levels[0][0].clear() + assert chain.levels[0][0].get("key1") is None + + # Get from chain -> should fetch from S3 and repopulate L1 + val = chain.get("key1") + assert val == "value1" + assert chain.levels[0][0].get("key1") == "value1" diff --git a/uv.lock b/uv.lock index 5641f7e..49a1b6c 100644 --- a/uv.lock +++ b/uv.lock @@ -2,13 +2,14 @@ version = 1 revision = 1 requires-python = ">=3.10" resolution-markers = [ - "python_full_version >= '3.11'", + "python_full_version >= '3.13'", + "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version < '3.11'", ] [[package]] name = "advanced-caching" -version = "0.2.0" +version = "0.2.2" source = { editable = "." } dependencies = [ { name = "apscheduler" }, @@ -23,6 +24,17 @@ dev = [ redis = [ { name = "redis" }, ] +tests = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, +] +tests-gcs = [ + { name = "google-cloud-storage" }, +] +tests-s3 = [ + { name = "moto" }, +] [package.dev-dependencies] dev = [ @@ -37,12 +49,17 @@ dev = [ [package.metadata] requires-dist = [ { name = "apscheduler", specifier = ">=3.10" }, + { name = "google-cloud-storage", marker = "extra == 'tests-gcs'", specifier = ">=2.10.0" }, + { name = "moto", extras = ["boto3"], marker = "extra == 'tests-s3'", specifier = ">=5.0.0" }, { name = "orjson", specifier = ">=3.11.5" }, { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.2" }, + { name = "pytest", marker = "extra == 'tests'" }, + { name = "pytest-asyncio", marker = "extra == 'tests'" }, { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=4.0" }, - { name = "redis", marker = "extra == 'redis'", specifier = ">=5.0" }, + { name = "pytest-cov", marker = "extra == 'tests'" }, + { name = "redis", marker = "extra == 'redis'", specifier = ">=5.0.0" }, ] -provides-extras = ["redis", "dev"] +provides-extras = ["redis", "dev", "tests", "tests-s3", "tests-gcs"] [package.metadata.requires-dev] dev = [ @@ -93,6 +110,43 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313 }, ] +[[package]] +name = "boto3" +version = "1.42.16" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/37/22c82e9d314d769a6eaf600ce5e08357927b5c6a614bfbeb1e7b7e7aa036/boto3-1.42.16.tar.gz", hash = "sha256:811391611db88c8a061f6e6fabbd7ca784ad9de04490a879f091cbaa9de7de74", size = 112834 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f8/93/78d175e7d40941c4b608a6701a14215aeeb1db58499dbbc40467a6fd6116/boto3-1.42.16-py3-none-any.whl", hash = "sha256:37a43d42aebd06a8f93ee801ea1b7b5181ac42a30869ef403c9dadc160a748e5", size = 140574 }, +] + +[[package]] +name = "botocore" +version = "1.42.16" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cf/eb/d91fb1fb288ba896392d68f89881f5f26bc5b51f8da28697c77f05bc44e8/botocore-1.42.16.tar.gz", hash = "sha256:29ee8555cd5d5023350405387cedcf3fe1c7f02fcb8060bf9e01602487482c25", size = 14914600 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/98/c7c26ff399994e2b1119cc36027aaae46b9d646a49b70a82c2622e44c94b/botocore-1.42.16-py3-none-any.whl", hash = "sha256:b1f584a0f8645c12e07bf6ec9c18e05221a789f2a9b2d3c6291deb42f8c1c542", size = 14585775 }, +] + +[[package]] +name = "cachetools" +version = "6.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/1d/ede8680603f6016887c062a2cf4fc8fdba905866a3ab8831aa8aa651320c/cachetools-6.2.4.tar.gz", hash = "sha256:82c5c05585e70b6ba2d3ae09ea60b79548872185d2f24ae1f2709d37299fd607", size = 31731 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/fc/1d7b80d0eb7b714984ce40efc78859c022cd930e402f599d8ca9e39c78a4/cachetools-6.2.4-py3-none-any.whl", hash = "sha256:69a7a52634fed8b8bf6e24a050fb60bff1c9bd8f6d24572b99c32d4e71e62a51", size = 11551 }, +] + [[package]] name = "certifi" version = "2025.11.12" @@ -102,6 +156,88 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438 }, ] +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/d7/516d984057745a6cd96575eea814fe1edd6646ee6efd552fb7b0921dec83/cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44", size = 184283 }, + { url = "https://files.pythonhosted.org/packages/9e/84/ad6a0b408daa859246f57c03efd28e5dd1b33c21737c2db84cae8c237aa5/cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49", size = 180504 }, + { url = "https://files.pythonhosted.org/packages/50/bd/b1a6362b80628111e6653c961f987faa55262b4002fcec42308cad1db680/cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c", size = 208811 }, + { url = "https://files.pythonhosted.org/packages/4f/27/6933a8b2562d7bd1fb595074cf99cc81fc3789f6a6c05cdabb46284a3188/cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb", size = 216402 }, + { url = 
"https://files.pythonhosted.org/packages/05/eb/b86f2a2645b62adcfff53b0dd97e8dfafb5c8aa864bd0d9a2c2049a0d551/cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0", size = 203217 }, + { url = "https://files.pythonhosted.org/packages/9f/e0/6cbe77a53acf5acc7c08cc186c9928864bd7c005f9efd0d126884858a5fe/cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4", size = 203079 }, + { url = "https://files.pythonhosted.org/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453", size = 216475 }, + { url = "https://files.pythonhosted.org/packages/21/7a/13b24e70d2f90a322f2900c5d8e1f14fa7e2a6b3332b7309ba7b2ba51a5a/cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495", size = 218829 }, + { url = "https://files.pythonhosted.org/packages/60/99/c9dc110974c59cc981b1f5b66e1d8af8af764e00f0293266824d9c4254bc/cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5", size = 211211 }, + { url = "https://files.pythonhosted.org/packages/49/72/ff2d12dbf21aca1b32a40ed792ee6b40f6dc3a9cf1644bd7ef6e95e0ac5e/cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb", size = 218036 }, + { url = "https://files.pythonhosted.org/packages/e2/cc/027d7fb82e58c48ea717149b03bcadcbdc293553edb283af792bd4bcbb3f/cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a", size = 172184 }, + { url = "https://files.pythonhosted.org/packages/33/fa/072dd15ae27fbb4e06b437eb6e944e75b068deb09e2a2826039e49ee2045/cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739", size = 182790 }, + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344 }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560 }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613 }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476 }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = 
"sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374 }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597 }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574 }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971 }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972 }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078 }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076 }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820 }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635 }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271 }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048 }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529 }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097 }, + { url = 
"https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983 }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519 }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572 }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963 }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361 }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932 }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557 }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762 }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230 }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043 }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446 }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101 }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = 
"sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948 }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422 }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499 }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928 }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302 }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909 }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402 }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780 }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320 }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487 }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049 }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793 }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300 }, + { url = 
"https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244 }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828 }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926 }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328 }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650 }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687 }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773 }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013 }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593 }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354 }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480 }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584 }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443 }, + { url = 
"https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437 }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487 }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726 }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195 }, +] + [[package]] name = "charset-normalizer" version = "3.4.4" @@ -313,6 +449,71 @@ toml = [ { name = "tomli", marker = "python_full_version <= '3.11'" }, ] +[[package]] +name = "cryptography" +version = "46.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004 }, + { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667 }, + { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807 }, + { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615 }, + { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800 }, + { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707 }, + { url = 
"https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541 }, + { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464 }, + { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838 }, + { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596 }, + { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782 }, + { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381 }, + { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988 }, + { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451 }, + { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007 }, + { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012 }, + { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728 }, + { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078 }, + { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = 
"sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460 }, + { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237 }, + { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344 }, + { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564 }, + { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415 }, + { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457 }, + { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074 }, + { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569 }, + { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941 }, + { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339 }, + { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315 }, + { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331 }, + { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248 }, + { url = 
"https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089 }, + { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029 }, + { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222 }, + { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280 }, + { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958 }, + { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714 }, + { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970 }, + { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236 }, + { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642 }, + { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126 }, + { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573 }, + { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695 }, + { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = 
"sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720 }, + { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740 }, + { url = "https://files.pythonhosted.org/packages/d9/cd/1a8633802d766a0fa46f382a77e096d7e209e0817892929655fe0586ae32/cryptography-46.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a23582810fedb8c0bc47524558fb6c56aac3fc252cb306072fd2815da2a47c32", size = 3689163 }, + { url = "https://files.pythonhosted.org/packages/4c/59/6b26512964ace6480c3e54681a9859c974172fb141c38df11eadd8416947/cryptography-46.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7aec276d68421f9574040c26e2a7c3771060bc0cff408bae1dcb19d3ab1e63c", size = 3429474 }, + { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132 }, + { url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992 }, + { url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944 }, + { url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957 }, + { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447 }, + { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528 }, +] + [[package]] name = "docker" version = "7.1.0" @@ -339,6 +540,125 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740 }, ] +[[package]] +name = "google-api-core" +version = "2.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "googleapis-common-protos" }, + { name = "proto-plus" }, + { name = "protobuf" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/da/83d7043169ac2c8c7469f0e375610d78ae2160134bf1b80634c482fa079c/google_api_core-2.28.1.tar.gz", hash = "sha256:2b405df02d68e68ce0fbc138559e6036559e685159d148ae5861013dc201baf8", size = 176759 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ed/d4/90197b416cb61cefd316964fd9e7bd8324bcbafabf40eef14a9f20b81974/google_api_core-2.28.1-py3-none-any.whl", hash = "sha256:4021b0f8ceb77a6fb4de6fde4502cecab45062e66ff4f2895169e0b35bc9466c", size = 173706 }, +] + +[[package]] +name = "google-auth" +version = "2.45.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "pyasn1-modules" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e5/00/3c794502a8b892c404b2dea5b3650eb21bfc7069612fbfd15c7f17c1cb0d/google_auth-2.45.0.tar.gz", hash = "sha256:90d3f41b6b72ea72dd9811e765699ee491ab24139f34ebf1ca2b9cc0c38708f3", size = 320708 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/97/451d55e05487a5cd6279a01a7e34921858b16f7dc8aa38a2c684743cd2b3/google_auth-2.45.0-py2.py3-none-any.whl", hash = "sha256:82344e86dc00410ef5382d99be677c6043d72e502b625aa4f4afa0bdacca0f36", size = 233312 }, +] + +[[package]] +name = "google-cloud-core" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/03/ef0bc99d0e0faf4fdbe67ac445e18cdaa74824fd93cd069e7bb6548cb52d/google_cloud_core-2.5.0.tar.gz", hash = "sha256:7c1b7ef5c92311717bd05301aa1a91ffbc565673d3b0b4163a52d8413a186963", size = 36027 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/20/bfa472e327c8edee00f04beecc80baeddd2ab33ee0e86fd7654da49d45e9/google_cloud_core-2.5.0-py3-none-any.whl", hash = "sha256:67d977b41ae6c7211ee830c7912e41003ea8194bff15ae7d72fd6f51e57acabc", size = 29469 }, +] + +[[package]] +name = "google-cloud-storage" +version = "3.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, + { name = "google-cloud-core" }, + { name = "google-crc32c" }, + { name = "google-resumable-media" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d2/8e/fab2de1a0ab7fdbd452eaae5a9a5c933d0911c26b04efa0c76ddfd921259/google_cloud_storage-3.7.0.tar.gz", hash = "sha256:9ce59c65f4d6e372effcecc0456680a8d73cef4f2dc9212a0704799cb3d69237", size = 17258914 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2d/80/6e5c7c83cea15ed4dfc4843b9df9db0716bc551ac938f7b5dd18a72bd5e4/google_cloud_storage-3.7.0-py3-none-any.whl", hash = "sha256:469bc9540936e02f8a4bfd1619e9dca1e42dec48f95e4204d783b36476a15093", size = 303364 }, +] + +[[package]] +name = "google-crc32c" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/03/41/4b9c02f99e4c5fb477122cd5437403b552873f014616ac1d19ac8221a58d/google_crc32c-1.8.0.tar.gz", hash = "sha256:a428e25fb7691024de47fecfbff7ff957214da51eddded0da0ae0e0f03a2cf79", size = 14192 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/ac/6f7bc93886a823ab545948c2dd48143027b2355ad1944c7cf852b338dc91/google_crc32c-1.8.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0470b8c3d73b5f4e3300165498e4cf25221c7eb37f1159e221d1825b6df8a7ff", size = 31296 }, + { url = "https://files.pythonhosted.org/packages/f7/97/a5accde175dee985311d949cfcb1249dcbb290f5ec83c994ea733311948f/google_crc32c-1.8.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:119fcd90c57c89f30040b47c211acee231b25a45d225e3225294386f5d258288", size = 30870 }, + { url = 
"https://files.pythonhosted.org/packages/3d/63/bec827e70b7a0d4094e7476f863c0dbd6b5f0f1f91d9c9b32b76dcdfeb4e/google_crc32c-1.8.0-cp310-cp310-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6f35aaffc8ccd81ba3162443fabb920e65b1f20ab1952a31b13173a67811467d", size = 33214 }, + { url = "https://files.pythonhosted.org/packages/63/bc/11b70614df04c289128d782efc084b9035ef8466b3d0a8757c1b6f5cf7ac/google_crc32c-1.8.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:864abafe7d6e2c4c66395c1eb0fe12dc891879769b52a3d56499612ca93b6092", size = 33589 }, + { url = "https://files.pythonhosted.org/packages/3e/00/a08a4bc24f1261cc5b0f47312d8aebfbe4b53c2e6307f1b595605eed246b/google_crc32c-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:db3fe8eaf0612fc8b20fa21a5f25bd785bc3cd5be69f8f3412b0ac2ffd49e733", size = 34437 }, + { url = "https://files.pythonhosted.org/packages/5d/ef/21ccfaab3d5078d41efe8612e0ed0bfc9ce22475de074162a91a25f7980d/google_crc32c-1.8.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:014a7e68d623e9a4222d663931febc3033c5c7c9730785727de2a81f87d5bab8", size = 31298 }, + { url = "https://files.pythonhosted.org/packages/c5/b8/f8413d3f4b676136e965e764ceedec904fe38ae8de0cdc52a12d8eb1096e/google_crc32c-1.8.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:86cfc00fe45a0ac7359e5214a1704e51a99e757d0272554874f419f79838c5f7", size = 30872 }, + { url = "https://files.pythonhosted.org/packages/f6/fd/33aa4ec62b290477181c55bb1c9302c9698c58c0ce9a6ab4874abc8b0d60/google_crc32c-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:19b40d637a54cb71e0829179f6cb41835f0fbd9e8eb60552152a8b52c36cbe15", size = 33243 }, + { url = "https://files.pythonhosted.org/packages/71/03/4820b3bd99c9653d1a5210cb32f9ba4da9681619b4d35b6a052432df4773/google_crc32c-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:17446feb05abddc187e5441a45971b8394ea4c1b6efd88ab0af393fd9e0a156a", size = 33608 }, + { url = "https://files.pythonhosted.org/packages/7c/43/acf61476a11437bf9733fb2f70599b1ced11ec7ed9ea760fdd9a77d0c619/google_crc32c-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:71734788a88f551fbd6a97be9668a0020698e07b2bf5b3aa26a36c10cdfb27b2", size = 34439 }, + { url = "https://files.pythonhosted.org/packages/e9/5f/7307325b1198b59324c0fa9807cafb551afb65e831699f2ce211ad5c8240/google_crc32c-1.8.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:4b8286b659c1335172e39563ab0a768b8015e88e08329fa5321f774275fc3113", size = 31300 }, + { url = "https://files.pythonhosted.org/packages/21/8e/58c0d5d86e2220e6a37befe7e6a94dd2f6006044b1a33edf1ff6d9f7e319/google_crc32c-1.8.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:2a3dc3318507de089c5384cc74d54318401410f82aa65b2d9cdde9d297aca7cb", size = 30867 }, + { url = "https://files.pythonhosted.org/packages/ce/a9/a780cc66f86335a6019f557a8aaca8fbb970728f0efd2430d15ff1beae0e/google_crc32c-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14f87e04d613dfa218d6135e81b78272c3b904e2a7053b841481b38a7d901411", size = 33364 }, + { url = "https://files.pythonhosted.org/packages/21/3f/3457ea803db0198c9aaca2dd373750972ce28a26f00544b6b85088811939/google_crc32c-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cb5c869c2923d56cb0c8e6bcdd73c009c36ae39b652dbe46a05eb4ef0ad01454", size = 33740 }, + { url = 
"https://files.pythonhosted.org/packages/df/c0/87c2073e0c72515bb8733d4eef7b21548e8d189f094b5dad20b0ecaf64f6/google_crc32c-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:3cc0c8912038065eafa603b238abf252e204accab2a704c63b9e14837a854962", size = 34437 }, + { url = "https://files.pythonhosted.org/packages/d1/db/000f15b41724589b0e7bc24bc7a8967898d8d3bc8caf64c513d91ef1f6c0/google_crc32c-1.8.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:3ebb04528e83b2634857f43f9bb8ef5b2bbe7f10f140daeb01b58f972d04736b", size = 31297 }, + { url = "https://files.pythonhosted.org/packages/d7/0d/8ebed0c39c53a7e838e2a486da8abb0e52de135f1b376ae2f0b160eb4c1a/google_crc32c-1.8.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:450dc98429d3e33ed2926fc99ee81001928d63460f8538f21a5d6060912a8e27", size = 30867 }, + { url = "https://files.pythonhosted.org/packages/ce/42/b468aec74a0354b34c8cbf748db20d6e350a68a2b0912e128cabee49806c/google_crc32c-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3b9776774b24ba76831609ffbabce8cdf6fa2bd5e9df37b594221c7e333a81fa", size = 33344 }, + { url = "https://files.pythonhosted.org/packages/1c/e8/b33784d6fc77fb5062a8a7854e43e1e618b87d5ddf610a88025e4de6226e/google_crc32c-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:89c17d53d75562edfff86679244830599ee0a48efc216200691de8b02ab6b2b8", size = 33694 }, + { url = "https://files.pythonhosted.org/packages/92/b1/d3cbd4d988afb3d8e4db94ca953df429ed6db7282ed0e700d25e6c7bfc8d/google_crc32c-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:57a50a9035b75643996fbf224d6661e386c7162d1dfdab9bc4ca790947d1007f", size = 34435 }, + { url = "https://files.pythonhosted.org/packages/21/88/8ecf3c2b864a490b9e7010c84fd203ec8cf3b280651106a3a74dd1b0ca72/google_crc32c-1.8.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:e6584b12cb06796d285d09e33f63309a09368b9d806a551d8036a4207ea43697", size = 31301 }, + { url = "https://files.pythonhosted.org/packages/36/c6/f7ff6c11f5ca215d9f43d3629163727a272eabc356e5c9b2853df2bfe965/google_crc32c-1.8.0-cp314-cp314-macosx_12_0_x86_64.whl", hash = "sha256:f4b51844ef67d6cf2e9425983274da75f18b1597bb2c998e1c0a0e8d46f8f651", size = 30868 }, + { url = "https://files.pythonhosted.org/packages/56/15/c25671c7aad70f8179d858c55a6ae8404902abe0cdcf32a29d581792b491/google_crc32c-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b0d1a7afc6e8e4635564ba8aa5c0548e3173e41b6384d7711a9123165f582de2", size = 33381 }, + { url = "https://files.pythonhosted.org/packages/42/fa/f50f51260d7b0ef5d4898af122d8a7ec5a84e2984f676f746445f783705f/google_crc32c-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8b3f68782f3cbd1bce027e48768293072813469af6a61a86f6bb4977a4380f21", size = 33734 }, + { url = "https://files.pythonhosted.org/packages/08/a5/7b059810934a09fb3ccb657e0843813c1fee1183d3bc2c8041800374aa2c/google_crc32c-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:d511b3153e7011a27ab6ee6bb3a5404a55b994dc1a7322c0b87b29606d9790e2", size = 34878 }, + { url = "https://files.pythonhosted.org/packages/52/c5/c171e4d8c44fec1422d801a6d2e5d7ddabd733eeda505c79730ee9607f07/google_crc32c-1.8.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:87fa445064e7db928226b2e6f0d5304ab4cd0339e664a4e9a25029f384d9bb93", size = 28615 }, + { url = 
"https://files.pythonhosted.org/packages/9c/97/7d75fe37a7a6ed171a2cf17117177e7aab7e6e0d115858741b41e9dd4254/google_crc32c-1.8.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f639065ea2042d5c034bf258a9f085eaa7af0cd250667c0635a3118e8f92c69c", size = 28800 }, +] + +[[package]] +name = "google-resumable-media" +version = "2.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-crc32c" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/d7/520b62a35b23038ff005e334dba3ffc75fcf583bee26723f1fd8fd4b6919/google_resumable_media-2.8.0.tar.gz", hash = "sha256:f1157ed8b46994d60a1bc432544db62352043113684d4e030ee02e77ebe9a1ae", size = 2163265 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/0b/93afde9cfe012260e9fe1522f35c9b72d6ee222f316586b1f23ecf44d518/google_resumable_media-2.8.0-py3-none-any.whl", hash = "sha256:dd14a116af303845a8d932ddae161a26e86cc229645bc98b39f026f9b1717582", size = 81340 }, +] + +[[package]] +name = "googleapis-common-protos" +version = "1.72.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e5/7b/adfd75544c415c487b33061fe7ae526165241c1ea133f9a9125a56b39fd8/googleapis_common_protos-1.72.0.tar.gz", hash = "sha256:e55a601c1b32b52d7a3e65f43563e2aa61bcd737998ee672ac9b951cd49319f5", size = 147433 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl", hash = "sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038", size = 297515 }, +] + [[package]] name = "idna" version = "3.11" @@ -369,6 +689,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 }, ] +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256 }, +] + [[package]] name = "markdown-it-py" version = "4.0.0" @@ -475,6 +804,26 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, ] +[[package]] +name = "moto" +version = "5.1.18" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "boto3" }, + { name = "botocore" }, + { name = "cryptography" }, + { name = "jinja2" }, + { name = "python-dateutil" }, + { name = "requests" }, + { name = "responses" }, + { name = "werkzeug" }, + { name = "xmltodict" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e3/6a/a73bef67261bfab55714390f07c7df97531d00cea730b7c0ace4d0ad7669/moto-5.1.18.tar.gz", hash = 
"sha256:45298ef7b88561b839f6fe3e9da2a6e2ecd10283c7bf3daf43a07a97465885f9", size = 8271655 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/d4/6991df072b34741a0c115e8d21dc2fe142e4b497319d762e957f6677f001/moto-5.1.18-py3-none-any.whl", hash = "sha256:b65aa8fc9032c5c574415451e14fd7da4e43fd50b8bdcb5f10289ad382c25bcf", size = 6357278 }, +] + [[package]] name = "numpy" version = "2.2.6" @@ -545,7 +894,8 @@ name = "numpy" version = "2.3.5" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.11'", + "python_full_version >= '3.13'", + "python_full_version >= '3.11' and python_full_version < '3.13'", ] sdist = { url = "https://files.pythonhosted.org/packages/76/65/21b3bc86aac7b8f2862db1e808f1ea22b028e30a225a34a5ede9bf8678f2/numpy-2.3.5.tar.gz", hash = "sha256:784db1dcdab56bf0517743e746dfb0f885fc68d948aba86eeec2cba234bdf1c0", size = 20584950 } wheels = [ @@ -732,6 +1082,33 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 }, ] +[[package]] +name = "proto-plus" +version = "1.27.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/01/89/9cbe2f4bba860e149108b683bc2efec21f14d5f7ed6e25562ad86acbc373/proto_plus-1.27.0.tar.gz", hash = "sha256:873af56dd0d7e91836aee871e5799e1c6f1bda86ac9a983e0bb9f0c266a568c4", size = 56158 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cd/24/3b7a0818484df9c28172857af32c2397b6d8fcd99d9468bd4684f98ebf0a/proto_plus-1.27.0-py3-none-any.whl", hash = "sha256:1baa7f81cf0f8acb8bc1f6d085008ba4171eaf669629d1b6d1673b21ed1c0a82", size = 50205 }, +] + +[[package]] +name = "protobuf" +version = "6.33.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/34/44/e49ecff446afeec9d1a66d6bbf9adc21e3c7cea7803a920ca3773379d4f6/protobuf-6.33.2.tar.gz", hash = "sha256:56dc370c91fbb8ac85bc13582c9e373569668a290aa2e66a590c2a0d35ddb9e4", size = 444296 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/91/1e3a34881a88697a7354ffd177e8746e97a722e5e8db101544b47e84afb1/protobuf-6.33.2-cp310-abi3-win32.whl", hash = "sha256:87eb388bd2d0f78febd8f4c8779c79247b26a5befad525008e49a6955787ff3d", size = 425603 }, + { url = "https://files.pythonhosted.org/packages/64/20/4d50191997e917ae13ad0a235c8b42d8c1ab9c3e6fd455ca16d416944355/protobuf-6.33.2-cp310-abi3-win_amd64.whl", hash = "sha256:fc2a0e8b05b180e5fc0dd1559fe8ebdae21a27e81ac77728fb6c42b12c7419b4", size = 436930 }, + { url = "https://files.pythonhosted.org/packages/b2/ca/7e485da88ba45c920fb3f50ae78de29ab925d9e54ef0de678306abfbb497/protobuf-6.33.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d9b19771ca75935b3a4422957bc518b0cecb978b31d1dd12037b088f6bcc0e43", size = 427621 }, + { url = "https://files.pythonhosted.org/packages/7d/4f/f743761e41d3b2b2566748eb76bbff2b43e14d5fcab694f494a16458b05f/protobuf-6.33.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5d3b5625192214066d99b2b605f5783483575656784de223f00a8d00754fc0e", size = 324460 }, + { url = "https://files.pythonhosted.org/packages/b1/fa/26468d00a92824020f6f2090d827078c09c9c587e34cbfd2d0c7911221f8/protobuf-6.33.2-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8cd7640aee0b7828b6d03ae518b5b4806fdfc1afe8de82f79c3454f8aef29872", size 
= 339168 }, + { url = "https://files.pythonhosted.org/packages/56/13/333b8f421738f149d4fe5e49553bc2a2ab75235486259f689b4b91f96cec/protobuf-6.33.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:1f8017c48c07ec5859106533b682260ba3d7c5567b1ca1f24297ce03384d1b4f", size = 323270 }, + { url = "https://files.pythonhosted.org/packages/0e/15/4f02896cc3df04fc465010a4c6a0cd89810f54617a32a70ef531ed75d61c/protobuf-6.33.2-py3-none-any.whl", hash = "sha256:7636aad9bb01768870266de5dc009de2d1b936771b38a793f73cbbf279c91c5c", size = 170501 }, +] + [[package]] name = "psutil" version = "7.1.3" @@ -758,6 +1135,36 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608 }, ] +[[package]] +name = "pyasn1" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135 }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259 }, +] + +[[package]] +name = "pycparser" +version = "2.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140 }, +] + [[package]] name = "pydantic" version = "2.12.5" @@ -946,6 +1353,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424 }, ] +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, +] + [[package]] name = "python-dotenv" version = "1.2.1" @@ -977,6 +1396,70 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540 }, ] +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", size = 184227 }, + { url = "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", size = 174019 }, + { url = "https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", size = 740646 }, + { url = "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", size = 840793 }, + { url = "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", size = 770293 }, + { url = "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", size = 732872 }, + { url = "https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", size = 758828 }, + { url = "https://files.pythonhosted.org/packages/a3/00/531e92e88c00f4333ce359e50c19b8d1de9fe8d581b1534e35ccfbc5f393/pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e", size = 142415 }, + { url = "https://files.pythonhosted.org/packages/2a/fa/926c003379b19fca39dd4634818b00dec6c62d87faf628d1394e137354d4/pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c", size = 158561 }, + { url = 
"https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826 }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577 }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556 }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114 }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638 }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463 }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986 }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543 }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763 }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063 }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973 }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116 }, + { url = 
"https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011 }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870 }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089 }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181 }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658 }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003 }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344 }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669 }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252 }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081 }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159 }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626 }, + { url = 
"https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613 }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115 }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427 }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090 }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246 }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814 }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809 }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454 }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355 }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175 }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228 }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194 }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", 
size = 156429 }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912 }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108 }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641 }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901 }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132 }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261 }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272 }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923 }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062 }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341 }, +] + [[package]] name = "redis" version = "7.1.0" @@ -1004,6 +1487,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738 }, ] +[[package]] +name = "responses" +version = "0.25.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0e/95/89c054ad70bfef6da605338b009b2e283485835351a9935c7bfbfaca7ffc/responses-0.25.8.tar.gz", hash = 
"sha256:9374d047a575c8f781b94454db5cab590b6029505f488d12899ddb10a4af1cf4", size = 79320 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1c/4c/cc276ce57e572c102d9542d383b2cfd551276581dc60004cb94fe8774c11/responses-0.25.8-py3-none-any.whl", hash = "sha256:0c710af92def29c8352ceadff0c3fe340ace27cf5af1bbe46fb71275bcd2831c", size = 34769 }, +] + [[package]] name = "rich" version = "14.2.0" @@ -1017,6 +1514,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393 }, ] +[[package]] +name = "rsa" +version = "4.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696 }, +] + [[package]] name = "ruff" version = "0.14.9" @@ -1043,6 +1552,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/26/09/7a9520315decd2334afa65ed258fed438f070e31f05a2e43dd480a5e5911/ruff-0.14.9-py3-none-win_arm64.whl", hash = "sha256:8e821c366517a074046d92f0e9213ed1c13dbc5b37a7fc20b07f79b64d62cc84", size = 13744730 }, ] +[[package]] +name = "s3transfer" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/05/04/74127fc843314818edfa81b5540e26dd537353b123a4edc563109d8f17dd/s3transfer-0.16.0.tar.gz", hash = "sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920", size = 153827 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/51/727abb13f44c1fcf6d145979e1535a35794db0f6e450a0cb46aa24732fe2/s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe", size = 86830 }, +] + [[package]] name = "scalene" version = "1.5.55" @@ -1076,6 +1597,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/5d/c620fd816a05b979cb5b61c8c18128e2136214a0e50b755231dfd4f4f0b4/scalene-1.5.55-cp313-cp313-win_amd64.whl", hash = "sha256:57daf3072f88e7fdda3bc94d0e75f30733268f033fed76f1b909c59315926634", size = 1025323 }, ] +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, +] + [[package]] name = "testcontainers" version = "4.13.3" @@ -1197,6 +1727,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6d/b9/4095b668ea3678bf6a0af005527f39de12fb026516fb3df17495a733b7f8/urllib3-2.6.2-py3-none-any.whl", hash = 
"sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd", size = 131182 }, ] +[[package]] +name = "werkzeug" +version = "3.1.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/45/ea/b0f8eeb287f8df9066e56e831c7824ac6bab645dd6c7a8f4b2d767944f9b/werkzeug-3.1.4.tar.gz", hash = "sha256:cd3cd98b1b92dc3b7b3995038826c68097dcb16f9baa63abe35f20eafeb9fe5e", size = 864687 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2f/f9/9e082990c2585c744734f85bec79b5dae5df9c974ffee58fe421652c8e91/werkzeug-3.1.4-py3-none-any.whl", hash = "sha256:2ad50fb9ed09cc3af22c54698351027ace879a0b60a3b5edf5730b2f7d876905", size = 224960 }, +] + [[package]] name = "wrapt" version = "2.0.1" @@ -1289,3 +1831,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/41/99/8a06b8e17dddbf321325ae4eb12465804120f699cd1b8a355718300c62da/wrapt-2.0.1-cp314-cp314t-win_arm64.whl", hash = "sha256:35cdbd478607036fee40273be8ed54a451f5f23121bd9d4be515158f9498f7ad", size = 60634 }, { url = "https://files.pythonhosted.org/packages/15/d1/b51471c11592ff9c012bd3e2f7334a6ff2f42a7aed2caffcf0bdddc9cb89/wrapt-2.0.1-py3-none-any.whl", hash = "sha256:4d2ce1bf1a48c5277d7969259232b57645aae5686dba1eaeade39442277afbca", size = 44046 }, ] + +[[package]] +name = "xmltodict" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/aa/917ceeed4dbb80d2f04dbd0c784b7ee7bba8ae5a54837ef0e5e062cd3cfb/xmltodict-1.0.2.tar.gz", hash = "sha256:54306780b7c2175a3967cad1db92f218207e5bc1aba697d887807c0fb68b7649", size = 25725 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c0/20/69a0e6058bc5ea74892d089d64dfc3a62ba78917ec5e2cfa70f7c92ba3a5/xmltodict-1.0.2-py3-none-any.whl", hash = "sha256:62d0fddb0dcbc9f642745d8bbf4d81fd17d6dfaec5a15b5c1876300aad92af0d", size = 13893 }, +]