From 4ce3d88313f5ec75a111cd58fd27a377d64db5f8 Mon Sep 17 00:00:00 2001 From: Nightknight3000 Date: Tue, 21 Apr 2026 19:13:31 +0200 Subject: [PATCH 1/4] Staging Release (#45) * feat: propagate partner node statuses and log stop endpoint * feat: implement internal po logging * feat: add JSON formatter for structured logging in po_logging.py * fix: add delay in status loop to prevent busy waiting * test: add unit tests * fix: use environment variable for NGINX image version with a default fallback * fix: enhance code base * docs: enhance docstrings across multiple modules for clarity and completeness * feat: bump po version to 0.5.0 --------- Co-authored-by: davidhieber Co-authored-by: Nightknight3000 --- README.md | 162 ++- poetry.lock | 1291 +++++++++++++---------- pyproject.toml | 19 +- src/api/api.py | 236 ++++- src/api/oauth.py | 37 +- src/k8s/kubernetes.py | 261 ++++- src/k8s/utils.py | 92 +- src/main.py | 29 +- src/resources/analysis/entity.py | 45 +- src/resources/database/db_models.py | 7 + src/resources/database/entity.py | 89 +- src/resources/log/entity.py | 39 +- src/resources/utils.py | 200 +++- src/status/constants.py | 7 + src/status/status.py | 253 +++-- src/utils/hub_client.py | 119 ++- src/utils/other.py | 74 +- src/utils/po_logging.py | 75 ++ src/utils/token.py | 38 +- tests/conftest.py | 305 ++++++ tests/test_api_api.py | 310 ++++++ tests/test_api_oauth.py | 48 + tests/test_database_entity.py | 355 +++++++ tests/test_database_models.py | 169 +++ tests/test_k8s_kubernetes.py | 420 ++++++++ tests/test_k8s_utils.py | 287 +++++ tests/test_main.py | 85 ++ tests/test_resources_analysis_entity.py | 321 ++++++ tests/test_resources_log_entity.py | 224 ++++ tests/test_resources_utils.py | 696 ++++++++++++ tests/test_status_constants.py | 67 ++ tests/test_status_status.py | 360 +++++++ tests/test_utils_hub_client.py | 370 +++++++ tests/test_utils_other.py | 110 ++ tests/test_utils_po_logging.py | 225 ++++ tests/test_utils_token.py | 303 ++++++ 36 files changed, 6859 insertions(+), 869 deletions(-) create mode 100644 src/utils/po_logging.py create mode 100644 tests/conftest.py create mode 100644 tests/test_api_api.py create mode 100644 tests/test_api_oauth.py create mode 100644 tests/test_database_entity.py create mode 100644 tests/test_database_models.py create mode 100644 tests/test_k8s_kubernetes.py create mode 100644 tests/test_k8s_utils.py create mode 100644 tests/test_main.py create mode 100644 tests/test_resources_analysis_entity.py create mode 100644 tests/test_resources_log_entity.py create mode 100644 tests/test_resources_utils.py create mode 100644 tests/test_status_constants.py create mode 100644 tests/test_status_status.py create mode 100644 tests/test_utils_hub_client.py create mode 100644 tests/test_utils_other.py create mode 100644 tests/test_utils_po_logging.py create mode 100644 tests/test_utils_token.py diff --git a/README.md b/README.md index 65d2632..8df7798 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,162 @@ # PrivateAIM Node Pod Orchestration -Given an analysis image, pods should be able to be created, executed, monitored and queried. The pod must receive a token which it can use to access node-side services (result submission, analysis meta information, data source access). Isolation (e.g. Token generation on pod spawn) +Kubernetes-based orchestration service for the [FLAME](https://privateaim.de) (Federated Learning in a Medical Ecosystem) platform. 
It manages the full lifecycle of analysis pods within a FLAME node: creation, execution, monitoring, cleanup, and archival. + +Each analysis runs in a dedicated Kubernetes deployment with an nginx reverse-proxy sidecar that provides secure, token-authenticated egress to node-side services (FLAME Hub, result store, data sources). + +## Features + +- REST API to create, stop, delete, and inspect analysis pods +- Per-analysis Kubernetes `Deployment`, `Service`, `NetworkPolicy`, `ConfigMap`, and Harbor pull secret +- Background status loop that syncs pod state with the FLAME Hub and auto-restarts stuck pods (up to 10 retries) +- Archival of completed analyses to a separate database table +- Structured JSON logging with custom log levels (`ACTION`, `STATUS_LOOP`) +- Keycloak OAuth2 / JWT authentication on all endpoints except `/po/healthz` + +## Tech Stack + +| Component | Choice | +|------------------|-------------------------------------------| +| Language | Python 3.10+ | +| Web framework | FastAPI + Uvicorn (port 8000) | +| Orchestration | Kubernetes Python client | +| Database | PostgreSQL + SQLAlchemy 2.0 | +| Auth | Keycloak (OAuth2 / JWT) | +| Package manager | Poetry | +| Lint / format | Ruff (via pre-commit) | +| Container base | `python:3.11-alpine` (non-root UID 10000) | ## Requirements -kubernetes -docker -## Installation -### Kubernetes +- Kubernetes cluster with RBAC to manage `Deployment`, `Service`, `NetworkPolicy`, `ConfigMap`, and `Secret` resources +- PostgreSQL database +- Keycloak realm with a configured client +- Access to a [FLAME Hub](https://github.com/PrivateAIM/hub) instance +- Harbor registry for analysis images +- Docker (for local image builds) + +## Quick Start + +### Local development + +```bash +# Install dependencies +poetry install + +# Copy and fill in environment variables +cp .env.template .env + +# Run the service +poetry run python -m src.main +``` + +The API is then available at `http://localhost:8000/po/` with Swagger docs at `http://localhost:8000/api/docs`. + +### Docker + +```bash +docker build -t node-pod-orchestration:latest . +docker run --env-file .env -p 8000:8000 node-pod-orchestration:latest +``` + +### Tests & linting + +```bash +pytest +poetry run ruff check --fix src/ +poetry run ruff format src/ +``` + +## Architecture + +Two threads are started at boot: + +1. **FastAPI server** — exposes the REST API under `/po`. +2. **Status monitoring loop** — periodically reconciles pod state with the FLAME Hub and the Kubernetes cluster. + +Analyses move through the following states: + +``` +STARTING → STARTED → EXECUTING → EXECUTED | STOPPED | FAILED + ↘ STUCK (transient, auto-restart up to 10x) +``` + +Deployments are named `analysis-{analysis_id}-{restart_counter}`. + +## API + +Base path: `/po` — all endpoints require a valid Keycloak bearer token except `GET /po/healthz`. 
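+
+For a quick smoke test, fetch a token via the OAuth2 client-credentials grant and pass it as a bearer token on any endpoint from the table below. This is a minimal sketch: the client id/secret are placeholders, `KEYCLOAK_URL` and `KEYCLOAK_REALM` are the variables described under Configuration, and older Keycloak versions may need an `/auth` prefix in the token path.
+
+```bash
+# Obtain an access token from the standard Keycloak token endpoint
+TOKEN=$(curl -s -X POST \
+  "${KEYCLOAK_URL}/realms/${KEYCLOAK_REALM}/protocol/openid-connect/token" \
+  -d "grant_type=client_credentials" \
+  -d "client_id=<your-client-id>" \
+  -d "client_secret=<your-client-secret>" | jq -r .access_token)
+
+# Query the status of all analyses
+curl -H "Authorization: Bearer ${TOKEN}" http://localhost:8000/po/status
+```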
+ +| Method | Path | Purpose | +|--------|--------------------------------|------------------------------------| +| POST | `/po/` | Create a new analysis | +| GET | `/po/history` · `/po/history/{id}` | Analysis history | +| GET | `/po/logs` · `/po/logs/{id}` | Analysis logs | +| POST | `/po/stream_logs` | Stream live pod logs | +| GET | `/po/status` · `/po/status/{id}` | Status and progress | +| GET | `/po/pods` · `/po/pods/{id}` | Raw pod info | +| PUT | `/po/stop` · `/po/stop/{id}` | Stop analyses | +| DELETE | `/po/delete` · `/po/delete/{id}` | Delete analyses | +| DELETE | `/po/cleanup/{cleanup_type}` | Bulk cleanup by type | +| GET | `/po/healthz` | Liveness probe (no auth) | + +Interactive docs: `/api/docs` (Swagger), `/api/redoc` (ReDoc). + +## Configuration + +Configuration is supplied via environment variables. See `.env.template` for the full list. + +### Required + +| Variable | Description | +|----------|-------------| +| `POSTGRES_HOST`, `POSTGRES_USER`, `POSTGRES_PASSWORD`, `POSTGRES_DB` | PostgreSQL connection | +| `KEYCLOAK_URL`, `KEYCLOAK_REALM` | Keycloak instance | +| `RESULT_CLIENT_ID`, `RESULT_CLIENT_SECRET` | Result-service OAuth client | +| `HUB_CLIENT_ID`, `HUB_CLIENT_SECRET`, `HUB_URL_CORE`, `HUB_URL_AUTH` | FLAME Hub access | +| `HARBOR_URL`, `HARBOR_USER`, `HARBOR_PW` | Harbor registry for analysis images | +| `NODE_NAME` | Logical node identifier | + +### Optional + +| Variable | Description | +|----------|-------------| +| `NODE_KEY`, `NODE_KEY_PW` | Node private key (path + passphrase) | +| `PO_HTTP_PROXY`, `PO_HTTPS_PROXY` | Outbound proxy | +| `HUB_LOGGING` | Enable Hub client logging | +| `EXTRA_CA_CERTS` | Additional CA bundle path | +| `STATUS_LOOP_INTERVAL` | Status-loop interval in seconds | + +## Project Layout + +``` +src/ +├── main.py # Entry point: config load + API and status threads +├── api/ +│ ├── api.py # FastAPI app + all REST endpoints +│ └── oauth.py # Keycloak JWT validation +├── k8s/ +│ ├── kubernetes.py # K8s resource creation +│ └── utils.py # K8s lookup and deletion +├── resources/ +│ ├── database/ # SQLAlchemy models + CRUD wrapper +│ ├── analysis/ # Analysis Pydantic models +│ ├── log/ # Log entity models +│ └── utils.py # Analysis lifecycle business logic +├── status/ +│ ├── status.py # Background status loop +│ └── constants.py # Status enums and timeouts +└── utils/ # Logging, tokens, Hub client, helpers +tests/ # Pytest suite (see tests/TEST_PLAN.md) +``` + +## Development Conventions + +- **Commits:** Conventional Commits (enforced by pre-commit). +- **Hooks:** ruff check/format, trailing whitespace, large-file check, YAML/TOML validation. +- **Logs:** Single JSON object per line via `src/utils/po_logging.py`; use `get_logger()` in new modules. +- **CI/CD:** GitHub Actions builds and pushes the image to GHCR on push to `main`, `canary`, and `new_hub`. + +## License + +Apache 2.0 — see [LICENSE](LICENSE). \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index 79d5a3b..bd4743c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,12 +1,24 @@ -# This file is automatically @generated by Poetry 2.3.2 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. + +[[package]] +name = "annotated-doc" +version = "0.0.4" +description = "Document parameters, class attributes, return types, and variables inline, with Annotated." 
+category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320"}, + {file = "annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4"}, +] [[package]] name = "annotated-types" version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" +category = "main" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -16,9 +28,9 @@ files = [ name = "anyio" version = "4.13.0" description = "High-level concurrency and networking framework on top of asyncio or Trio" +category = "main" optional = false python-versions = ">=3.10" -groups = ["main"] files = [ {file = "anyio-4.13.0-py3-none-any.whl", hash = "sha256:08b310f9e24a9594186fd75b4f73f4a4152069e3853f1ed8bfbf58369f4ad708"}, {file = "anyio-4.13.0.tar.gz", hash = "sha256:334b70e641fd2221c1505b3890c69882fe4a2df910cba14d97019b90b24439dc"}, @@ -36,9 +48,9 @@ trio = ["trio (>=0.32.0)"] name = "certifi" version = "2026.2.25" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa"}, {file = "certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7"}, @@ -48,10 +60,9 @@ files = [ name = "cffi" version = "2.0.0" description = "Foreign Function Interface for Python calling C code." +category = "main" optional = false python-versions = ">=3.9" -groups = ["main"] -markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, @@ -146,9 +157,9 @@ pycparser = {version = "*", markers = "implementation_name != \"PyPy\""} name = "cfgv" version = "3.5.0" description = "Validate configuration and produce human readable error messages." +category = "dev" optional = false python-versions = ">=3.10" -groups = ["dev"] files = [ {file = "cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0"}, {file = "cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132"}, @@ -158,9 +169,9 @@ files = [ name = "charset-normalizer" version = "3.4.7" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+category = "main" optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "charset_normalizer-3.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cdd68a1fb318e290a2077696b7eb7a21a49163c455979c639bf5a5dcdc46617d"}, {file = "charset_normalizer-3.4.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e17b8d5d6a8c47c85e68ca8379def1303fd360c3e22093a807cd34a71cd082b8"}, @@ -295,14 +306,14 @@ files = [ [[package]] name = "click" -version = "8.3.1" +version = "8.3.2" description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=3.10" -groups = ["main"] files = [ - {file = "click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6"}, - {file = "click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a"}, + {file = "click-8.3.2-py3-none-any.whl", hash = "sha256:1924d2c27c5653561cd2cae4548d1406039cb79b858b747cfea24924bbc1616d"}, + {file = "click-8.3.2.tar.gz", hash = "sha256:14162b8b3b3550a7d479eafa77dfd3c38d9dc8951f6f69c78913a8f9a7540fd5"}, ] [package.dependencies] @@ -312,82 +323,213 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "platform_system == \"Windows\"", dev = "sys_platform == \"win32\""} + +[[package]] +name = "coverage" +version = "7.13.5" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.10" +files = [ + {file = "coverage-7.13.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0723d2c96324561b9aa76fb982406e11d93cdb388a7a7da2b16e04719cf7ca5"}, + {file = "coverage-7.13.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52f444e86475992506b32d4e5ca55c24fc88d73bcbda0e9745095b28ef4dc0cf"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:704de6328e3d612a8f6c07000a878ff38181ec3263d5a11da1db294fa6a9bdf8"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a1a6d79a14e1ec1832cabc833898636ad5f3754a678ef8bb4908515208bf84f4"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79060214983769c7ba3f0cee10b54c97609dca4d478fa1aa32b914480fd5738d"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:356e76b46783a98c2a2fe81ec79df4883a1e62895ea952968fb253c114e7f930"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0cef0cdec915d11254a7f549c1170afecce708d30610c6abdded1f74e581666d"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dc022073d063b25a402454e5712ef9e007113e3a676b96c5f29b2bda29352f40"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:9b74db26dfea4f4e50d48a4602207cd1e78be33182bc9cbf22da94f332f99878"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ad146744ca4fd09b50c482650e3c1b1f4dfa1d4792e0a04a369c7f23336f0400"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:c555b48be1853fe3997c11c4bd521cdd9a9612352de01fa4508f16ec341e6fe0"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7034b5c56a58ae5e85f23949d52c14aca2cfc6848a31764995b7de88f13a1ea0"}, + {file = "coverage-7.13.5-cp310-cp310-win32.whl", hash = "sha256:eb7fdf1ef130660e7415e0253a01a7d5a88c9c4d158bcf75cbbd922fd65a5b58"}, + {file = "coverage-7.13.5-cp310-cp310-win_amd64.whl", hash = "sha256:3e1bb5f6c78feeb1be3475789b14a0f0a5b47d505bfc7267126ccbd50289999e"}, + {file = "coverage-7.13.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66a80c616f80181f4d643b0f9e709d97bcea413ecd9631e1dedc7401c8e6695d"}, + {file = "coverage-7.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:145ede53ccbafb297c1c9287f788d1bc3efd6c900da23bf6931b09eafc931587"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0672854dc733c342fa3e957e0605256d2bf5934feeac328da9e0b5449634a642"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ec10e2a42b41c923c2209b846126c6582db5e43a33157e9870ba9fb70dc7854b"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be3d4bbad9d4b037791794ddeedd7d64a56f5933a2c1373e18e9e568b9141686"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4d2afbc5cc54d286bfb54541aa50b64cdb07a718227168c87b9e2fb8f25e1743"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3ad050321264c49c2fa67bb599100456fc51d004b82534f379d16445da40fb75"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7300c8a6d13335b29bb76d7651c66af6bd8658517c43499f110ddc6717bfc209"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:eb07647a5738b89baab047f14edd18ded523de60f3b30e75c2acc826f79c839a"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9adb6688e3b53adffefd4a52d72cbd8b02602bfb8f74dcd862337182fd4d1a4e"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7c8d4bc913dd70b93488d6c496c77f3aff5ea99a07e36a18f865bca55adef8bd"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0e3c426ffc4cd952f54ee9ffbdd10345709ecc78a3ecfd796a57236bfad0b9b8"}, + {file = "coverage-7.13.5-cp311-cp311-win32.whl", hash = "sha256:259b69bb83ad9894c4b25be2528139eecba9a82646ebdda2d9db1ba28424a6bf"}, + {file = "coverage-7.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:258354455f4e86e3e9d0d17571d522e13b4e1e19bf0f8596bcf9476d61e7d8a9"}, + {file = "coverage-7.13.5-cp311-cp311-win_arm64.whl", hash = "sha256:bff95879c33ec8da99fc9b6fe345ddb5be6414b41d6d1ad1c8f188d26f36e028"}, + {file = "coverage-7.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:460cf0114c5016fa841214ff5564aa4864f11948da9440bc97e21ad1f4ba1e01"}, + {file = "coverage-7.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e223ce4b4ed47f065bfb123687686512e37629be25cc63728557ae7db261422"}, + {file = 
"coverage-7.13.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e3370441f4513c6252bf042b9c36d22491142385049243253c7e48398a15a9f"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:03ccc709a17a1de074fb1d11f217342fb0d2b1582ed544f554fc9fc3f07e95f5"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f4818d065964db3c1c66dc0fbdac5ac692ecbc875555e13374fdbe7eedb4376"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:012d5319e66e9d5a218834642d6c35d265515a62f01157a45bcc036ecf947256"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8dd02af98971bdb956363e4827d34425cb3df19ee550ef92855b0acb9c7ce51c"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f08fd75c50a760c7eb068ae823777268daaf16a80b918fa58eea888f8e3919f5"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:843ea8643cf967d1ac7e8ecd4bb00c99135adf4816c0c0593fdcc47b597fcf09"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9d44d7aa963820b1b971dbecd90bfe5fe8f81cff79787eb6cca15750bd2f79b9"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:7132bed4bd7b836200c591410ae7d97bf7ae8be6fc87d160b2bd881df929e7bf"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a698e363641b98843c517817db75373c83254781426e94ada3197cabbc2c919c"}, + {file = "coverage-7.13.5-cp312-cp312-win32.whl", hash = "sha256:bdba0a6b8812e8c7df002d908a9a2ea3c36e92611b5708633c50869e6d922fdf"}, + {file = "coverage-7.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:d2c87e0c473a10bffe991502eac389220533024c8082ec1ce849f4218dded810"}, + {file = "coverage-7.13.5-cp312-cp312-win_arm64.whl", hash = "sha256:bf69236a9a81bdca3bff53796237aab096cdbf8d78a66ad61e992d9dac7eb2de"}, + {file = "coverage-7.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ec4af212df513e399cf11610cc27063f1586419e814755ab362e50a85ea69c1"}, + {file = "coverage-7.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:941617e518602e2d64942c88ec8499f7fbd49d3f6c4327d3a71d43a1973032f3"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:da305e9937617ee95c2e39d8ff9f040e0487cbf1ac174f777ed5eddd7a7c1f26"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:78e696e1cc714e57e8b25760b33a8b1026b7048d270140d25dafe1b0a1ee05a3"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02ca0eed225b2ff301c474aeeeae27d26e2537942aa0f87491d3e147e784a82b"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:04690832cbea4e4663d9149e05dba142546ca05cb1848816760e7f58285c970a"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0590e44dd2745c696a778f7bab6aa95256de2cbc8b8cff4f7db8ff09813d6969"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d7cfad2d6d81dd298ab6b89fe72c3b7b05ec7544bdda3b707ddaecff8d25c161"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:e092b9499de38ae0fbfbc603a74660eb6ff3e869e507b50d85a13b6db9863e15"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:48c39bc4a04d983a54a705a6389512883d4a3b9862991b3617d547940e9f52b1"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2d3807015f138ffea1ed9afeeb8624fd781703f2858b62a8dd8da5a0994c57b6"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee2aa19e03161671ec964004fb74b2257805d9710bf14a5c704558b9d8dbaf17"}, + {file = "coverage-7.13.5-cp313-cp313-win32.whl", hash = "sha256:ce1998c0483007608c8382f4ff50164bfc5bd07a2246dd272aa4043b75e61e85"}, + {file = "coverage-7.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:631efb83f01569670a5e866ceb80fe483e7c159fac6f167e6571522636104a0b"}, + {file = "coverage-7.13.5-cp313-cp313-win_arm64.whl", hash = "sha256:f4cd16206ad171cbc2470dbea9103cf9a7607d5fe8c242fdf1edf36174020664"}, + {file = "coverage-7.13.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0428cbef5783ad91fe240f673cc1f76b25e74bbfe1a13115e4aa30d3f538162d"}, + {file = "coverage-7.13.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e0b216a19534b2427cc201a26c25da4a48633f29a487c61258643e89d28200c0"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:972a9cd27894afe4bc2b1480107054e062df08e671df7c2f18c205e805ccd806"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4b59148601efcd2bac8c4dbf1f0ad6391693ccf7a74b8205781751637076aee3"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:505d7083c8b0c87a8fa8c07370c285847c1f77739b22e299ad75a6af6c32c5c9"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:60365289c3741e4db327e7baff2a4aaacf22f788e80fa4683393891b70a89fbd"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1b88c69c8ef5d4b6fe7dea66d6636056a0f6a7527c440e890cf9259011f5e606"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5b13955d31d1633cf9376908089b7cebe7d15ddad7aeaabcbe969a595a97e95e"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f70c9ab2595c56f81a89620e22899eea8b212a4041bd728ac6f4a28bf5d3ddd0"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:084b84a8c63e8d6fc7e3931b316a9bcafca1458d753c539db82d31ed20091a87"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:ad14385487393e386e2ea988b09d62dd42c397662ac2dabc3832d71253eee479"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f2c47b36fe7709a6e83bfadf4eefb90bd25fbe4014d715224c4316f808e59a2"}, + {file = "coverage-7.13.5-cp313-cp313t-win32.whl", hash = "sha256:67e9bc5449801fad0e5dff329499fb090ba4c5800b86805c80617b4e29809b2a"}, + {file = "coverage-7.13.5-cp313-cp313t-win_amd64.whl", hash = "sha256:da86cdcf10d2519e10cabb8ac2de03da1bcb6e4853790b7fbd48523332e3a819"}, + {file = "coverage-7.13.5-cp313-cp313t-win_arm64.whl", hash = "sha256:0ecf12ecb326fe2c339d93fc131816f3a7367d223db37817208905c89bded911"}, + {file = "coverage-7.13.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fbabfaceaeb587e16f7008f7795cd80d20ec548dc7f94fbb0d4ec2e038ce563f"}, + {file = "coverage-7.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash 
= "sha256:9bb2a28101a443669a423b665939381084412b81c3f8c0fcfbac57f4e30b5b8e"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bd3a2fbc1c6cccb3c5106140d87cc6a8715110373ef42b63cf5aea29df8c217a"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6c36ddb64ed9d7e496028d1d00dfec3e428e0aabf4006583bb1839958d280510"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:380e8e9084d8eb38db3a9176a1a4f3c0082c3806fa0dc882d1d87abc3c789247"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e808af52a0513762df4d945ea164a24b37f2f518cbe97e03deaa0ee66139b4d6"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e301d30dd7e95ae068671d746ba8c34e945a82682e62918e41b2679acd2051a0"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:800bc829053c80d240a687ceeb927a94fd108bbdc68dfbe505d0d75ab578a882"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:0b67af5492adb31940ee418a5a655c28e48165da5afab8c7fa6fd72a142f8740"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9136ff29c3a91e25b1d1552b5308e53a1e0653a23e53b6366d7c2dcbbaf8a16"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:cff784eef7f0b8f6cb28804fbddcfa99f89efe4cc35fb5627e3ac58f91ed3ac0"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:68a4953be99b17ac3c23b6efbc8a38330d99680c9458927491d18700ef23ded0"}, + {file = "coverage-7.13.5-cp314-cp314-win32.whl", hash = "sha256:35a31f2b1578185fbe6aa2e74cea1b1d0bbf4c552774247d9160d29b80ed56cc"}, + {file = "coverage-7.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:2aa055ae1857258f9e0045be26a6d62bdb47a72448b62d7b55f4820f361a2633"}, + {file = "coverage-7.13.5-cp314-cp314-win_arm64.whl", hash = "sha256:1b11eef33edeae9d142f9b4358edb76273b3bfd30bc3df9a4f95d0e49caf94e8"}, + {file = "coverage-7.13.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:10a0c37f0b646eaff7cce1874c31d1f1ccb297688d4c747291f4f4c70741cc8b"}, + {file = "coverage-7.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b5db73ba3c41c7008037fa731ad5459fc3944cb7452fc0aa9f822ad3533c583c"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:750db93a81e3e5a9831b534be7b1229df848b2e125a604fe6651e48aa070e5f9"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ddb4f4a5479f2539644be484da179b653273bca1a323947d48ab107b3ed1f29"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8a7a2049c14f413163e2bdabd37e41179b1d1ccb10ffc6ccc4b7a718429c607"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1c85e0b6c05c592ea6d8768a66a254bfb3874b53774b12d4c89c481eb78cb90"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:777c4d1eff1b67876139d24288aaf1817f6c03d6bae9c5cc8d27b83bcfe38fe3"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:6697e29b93707167687543480a40f0db8f356e86d9f67ddf2e37e2dfd91a9dab"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8fdf453a942c3e4d99bd80088141c4c6960bb232c409d9c3558e2dbaa3998562"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:32ca0c0114c9834a43f045a87dcebd69d108d8ffb666957ea65aa132f50332e2"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8769751c10f339021e2638cd354e13adeac54004d1941119b2c96fe5276d45ea"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cec2d83125531bd153175354055cdb7a09987af08a9430bd173c937c6d0fba2a"}, + {file = "coverage-7.13.5-cp314-cp314t-win32.whl", hash = "sha256:0cd9ed7a8b181775459296e402ca4fb27db1279740a24e93b3b41942ebe4b215"}, + {file = "coverage-7.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:301e3b7dfefecaca37c9f1aa6f0049b7d4ab8dd933742b607765d757aca77d43"}, + {file = "coverage-7.13.5-cp314-cp314t-win_arm64.whl", hash = "sha256:9dacc2ad679b292709e0f5fc1ac74a6d4d5562e424058962c7bb0c658ad25e45"}, + {file = "coverage-7.13.5-py3-none-any.whl", hash = "sha256:34b02417cf070e173989b3db962f7ed56d2f644307b2cf9d5a0f258e13084a61"}, + {file = "coverage-7.13.5.tar.gz", hash = "sha256:c81f6515c4c40141f83f502b07bbfa5c240ba25bbe73da7b33f1e5b6120ff179"}, +] + +[package.extras] +toml = ["tomli"] [[package]] name = "cryptography" -version = "44.0.3" +version = "46.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "main" optional = false -python-versions = "!=3.9.0,!=3.9.1,>=3.7" -groups = ["main"] +python-versions = "!=3.9.0,!=3.9.1,>=3.8" files = [ - {file = "cryptography-44.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:962bc30480a08d133e631e8dfd4783ab71cc9e33d5d7c1e192f0b7c06397bb88"}, - {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffc61e8f3bf5b60346d89cd3d37231019c17a081208dfbbd6e1605ba03fa137"}, - {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58968d331425a6f9eedcee087f77fd3c927c88f55368f43ff7e0a19891f2642c"}, - {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e28d62e59a4dbd1d22e747f57d4f00c459af22181f0b2f787ea83f5a876d7c76"}, - {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af653022a0c25ef2e3ffb2c673a50e5a0d02fecc41608f4954176f1933b12359"}, - {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:157f1f3b8d941c2bd8f3ffee0af9b049c9665c39d3da9db2dc338feca5e98a43"}, - {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:c6cd67722619e4d55fdb42ead64ed8843d64638e9c07f4011163e46bc512cf01"}, - {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b424563394c369a804ecbee9b06dfb34997f19d00b3518e39f83a5642618397d"}, - {file = "cryptography-44.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c91fc8e8fd78af553f98bc7f2a1d8db977334e4eea302a4bfd75b9461c2d8904"}, - {file = "cryptography-44.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:25cd194c39fa5a0aa4169125ee27d1172097857b27109a45fadc59653ec06f44"}, - {file = "cryptography-44.0.3-cp37-abi3-win32.whl", hash = "sha256:3be3f649d91cb182c3a6bd336de8b61a0a71965bd13d1a04a0e15b39c3d5809d"}, - {file = "cryptography-44.0.3-cp37-abi3-win_amd64.whl", hash = 
"sha256:3883076d5c4cc56dbef0b898a74eb6992fdac29a7b9013870b34efe4ddb39a0d"}, - {file = "cryptography-44.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:5639c2b16764c6f76eedf722dbad9a0914960d3489c0cc38694ddf9464f1bb2f"}, - {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ffef566ac88f75967d7abd852ed5f182da252d23fac11b4766da3957766759"}, - {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:192ed30fac1728f7587c6f4613c29c584abdc565d7417c13904708db10206645"}, - {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7d5fe7195c27c32a64955740b949070f21cba664604291c298518d2e255931d2"}, - {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3f07943aa4d7dad689e3bb1638ddc4944cc5e0921e3c227486daae0e31a05e54"}, - {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb90f60e03d563ca2445099edf605c16ed1d5b15182d21831f58460c48bffb93"}, - {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ab0b005721cc0039e885ac3503825661bd9810b15d4f374e473f8c89b7d5460c"}, - {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3bb0847e6363c037df8f6ede57d88eaf3410ca2267fb12275370a76f85786a6f"}, - {file = "cryptography-44.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0cc66c74c797e1db750aaa842ad5b8b78e14805a9b5d1348dc603612d3e3ff5"}, - {file = "cryptography-44.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6866df152b581f9429020320e5eb9794c8780e90f7ccb021940d7f50ee00ae0b"}, - {file = "cryptography-44.0.3-cp39-abi3-win32.whl", hash = "sha256:c138abae3a12a94c75c10499f1cbae81294a6f983b3af066390adee73f433028"}, - {file = "cryptography-44.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:5d186f32e52e66994dce4f766884bcb9c68b8da62d61d9d215bfe5fb56d21334"}, - {file = "cryptography-44.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:cad399780053fb383dc067475135e41c9fe7d901a97dd5d9c5dfb5611afc0d7d"}, - {file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:21a83f6f35b9cc656d71b5de8d519f566df01e660ac2578805ab245ffd8523f8"}, - {file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fc3c9babc1e1faefd62704bb46a69f359a9819eb0292e40df3fb6e3574715cd4"}, - {file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:e909df4053064a97f1e6565153ff8bb389af12c5c8d29c343308760890560aff"}, - {file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:dad80b45c22e05b259e33ddd458e9e2ba099c86ccf4e88db7bbab4b747b18d06"}, - {file = "cryptography-44.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:479d92908277bed6e1a1c69b277734a7771c2b78633c224445b5c60a9f4bc1d9"}, - {file = "cryptography-44.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:896530bc9107b226f265effa7ef3f21270f18a2026bc09fed1ebd7b66ddf6375"}, - {file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9b4d4a5dbee05a2c390bf212e78b99434efec37b17a4bff42f50285c5c8c9647"}, - {file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02f55fb4f8b79c1221b0961488eaae21015b69b210e18c386b69de182ebb1259"}, - {file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dd3db61b8fe5be220eee484a17233287d0be6932d056cf5738225b9c05ef4fff"}, - {file = 
"cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:978631ec51a6bbc0b7e58f23b68a8ce9e5f09721940933e9c217068388789fe5"}, - {file = "cryptography-44.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:5d20cc348cca3a8aa7312f42ab953a56e15323800ca3ab0706b8cd452a3a056c"}, - {file = "cryptography-44.0.3.tar.gz", hash = "sha256:fe19d8bc5536a91a24a8133328880a41831b6c5df54599a8417b62fe015d3053"}, + {file = "cryptography-46.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:ea42cbe97209df307fdc3b155f1b6fa2577c0defa8f1f7d3be7d31d189108ad4"}, + {file = "cryptography-46.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b36a4695e29fe69215d75960b22577197aca3f7a25b9cf9d165dcfe9d80bc325"}, + {file = "cryptography-46.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ad9ef796328c5e3c4ceed237a183f5d41d21150f972455a9d926593a1dcb308"}, + {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:73510b83623e080a2c35c62c15298096e2a5dc8d51c3b4e1740211839d0dea77"}, + {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cbd5fb06b62bd0721e1170273d3f4d5a277044c47ca27ee257025146c34cbdd1"}, + {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:420b1e4109cc95f0e5700eed79908cef9268265c773d3a66f7af1eef53d409ef"}, + {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:24402210aa54baae71d99441d15bb5a1919c195398a87b563df84468160a65de"}, + {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8a469028a86f12eb7d2fe97162d0634026d92a21f3ae0ac87ed1c4a447886c83"}, + {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9694078c5d44c157ef3162e3bf3946510b857df5a3955458381d1c7cfc143ddb"}, + {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:42a1e5f98abb6391717978baf9f90dc28a743b7d9be7f0751a6f56a75d14065b"}, + {file = "cryptography-46.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91bbcb08347344f810cbe49065914fe048949648f6bd5c2519f34619142bbe85"}, + {file = "cryptography-46.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5d1c02a14ceb9148cc7816249f64f623fbfee39e8c03b3650d842ad3f34d637e"}, + {file = "cryptography-46.0.7-cp311-abi3-win32.whl", hash = "sha256:d23c8ca48e44ee015cd0a54aeccdf9f09004eba9fc96f38c911011d9ff1bd457"}, + {file = "cryptography-46.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:397655da831414d165029da9bc483bed2fe0e75dde6a1523ec2fe63f3c46046b"}, + {file = "cryptography-46.0.7-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:d151173275e1728cf7839aaa80c34fe550c04ddb27b34f48c232193df8db5842"}, + {file = "cryptography-46.0.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:db0f493b9181c7820c8134437eb8b0b4792085d37dbb24da050476ccb664e59c"}, + {file = "cryptography-46.0.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ebd6daf519b9f189f85c479427bbd6e9c9037862cf8fe89ee35503bd209ed902"}, + {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:b7b412817be92117ec5ed95f880defe9cf18a832e8cafacf0a22337dc1981b4d"}, + {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:fbfd0e5f273877695cb93baf14b185f4878128b250cc9f8e617ea0c025dfb022"}, + {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = 
"sha256:ffca7aa1d00cf7d6469b988c581598f2259e46215e0140af408966a24cf086ce"}, + {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:60627cf07e0d9274338521205899337c5d18249db56865f943cbe753aa96f40f"}, + {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:80406c3065e2c55d7f49a9550fe0c49b3f12e5bfff5dedb727e319e1afb9bf99"}, + {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:c5b1ccd1239f48b7151a65bc6dd54bcfcc15e028c8ac126d3fada09db0e07ef1"}, + {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:d5f7520159cd9c2154eb61eb67548ca05c5774d39e9c2c4339fd793fe7d097b2"}, + {file = "cryptography-46.0.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fcd8eac50d9138c1d7fc53a653ba60a2bee81a505f9f8850b6b2888555a45d0e"}, + {file = "cryptography-46.0.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:65814c60f8cc400c63131584e3e1fad01235edba2614b61fbfbfa954082db0ee"}, + {file = "cryptography-46.0.7-cp314-cp314t-win32.whl", hash = "sha256:fdd1736fed309b4300346f88f74cd120c27c56852c3838cab416e7a166f67298"}, + {file = "cryptography-46.0.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e06acf3c99be55aa3b516397fe42f5855597f430add9c17fa46bf2e0fb34c9bb"}, + {file = "cryptography-46.0.7-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:462ad5cb1c148a22b2e3bcc5ad52504dff325d17daf5df8d88c17dda1f75f2a4"}, + {file = "cryptography-46.0.7-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:84d4cced91f0f159a7ddacad249cc077e63195c36aac40b4150e7a57e84fffe7"}, + {file = "cryptography-46.0.7-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:128c5edfe5e5938b86b03941e94fac9ee793a94452ad1365c9fc3f4f62216832"}, + {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5e51be372b26ef4ba3de3c167cd3d1022934bc838ae9eaad7e644986d2a3d163"}, + {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cdf1a610ef82abb396451862739e3fc93b071c844399e15b90726ef7470eeaf2"}, + {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1d25aee46d0c6f1a501adcddb2d2fee4b979381346a78558ed13e50aa8a59067"}, + {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:cdfbe22376065ffcf8be74dc9a909f032df19bc58a699456a21712d6e5eabfd0"}, + {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:abad9dac36cbf55de6eb49badd4016806b3165d396f64925bf2999bcb67837ba"}, + {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:935ce7e3cfdb53e3536119a542b839bb94ec1ad081013e9ab9b7cfd478b05006"}, + {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:35719dc79d4730d30f1c2b6474bd6acda36ae2dfae1e3c16f2051f215df33ce0"}, + {file = "cryptography-46.0.7-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:7bbc6ccf49d05ac8f7d7b5e2e2c33830d4fe2061def88210a126d130d7f71a85"}, + {file = "cryptography-46.0.7-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a1529d614f44b863a7b480c6d000fe93b59acee9c82ffa027cfadc77521a9f5e"}, + {file = "cryptography-46.0.7-cp38-abi3-win32.whl", hash = "sha256:f247c8c1a1fb45e12586afbb436ef21ff1e80670b2861a90353d9b025583d246"}, + {file = "cryptography-46.0.7-cp38-abi3-win_amd64.whl", hash = "sha256:506c4ff91eff4f82bdac7633318a526b1d1309fc07ca76a3ad182cb5b686d6d3"}, + {file = "cryptography-46.0.7-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:fc9ab8856ae6cf7c9358430e49b368f3108f050031442eaeb6b9d87e4dcf4e4f"}, + {file = "cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d3b99c535a9de0adced13d159c5a9cf65c325601aa30f4be08afd680643e9c15"}, + {file = "cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d02c738dacda7dc2a74d1b2b3177042009d5cab7c7079db74afc19e56ca1b455"}, + {file = "cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:04959522f938493042d595a736e7dbdff6eb6cc2339c11465b3ff89343b65f65"}, + {file = "cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3986ac1dee6def53797289999eabe84798ad7817f3e97779b5061a95b0ee4968"}, + {file = "cryptography-46.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:258514877e15963bd43b558917bc9f54cf7cf866c38aa576ebf47a77ddbc43a4"}, + {file = "cryptography-46.0.7.tar.gz", hash = "sha256:e4cfd68c5f3e0bfdad0d38e023239b96a2fe84146481852dffbcca442c245aa5"}, ] [package.dependencies] -cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} +cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9\" and platform_python_implementation != \"PyPy\""} +typing-extensions = {version = ">=4.13.2", markers = "python_full_version < \"3.11\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] +docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"] docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] -pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +nox = ["nox[uv] (>=2024.4.15)"] +pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi (>=2024)", "cryptography-vectors (==44.0.3)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test = ["certifi (>=2024)", "cryptography-vectors (==46.0.7)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] test-randomorder = ["pytest-randomly"] [[package]] name = "distlib" version = "0.4.0" description = "Distribution utilities" +category = "dev" optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16"}, {file = "distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d"}, @@ -397,9 +539,9 @@ files = [ name = "dnspython" version = "2.8.0" description = "DNS toolkit" +category = "main" optional = false python-versions = ">=3.10" -groups = ["main"] files = [ {file = "dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af"}, {file = "dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f"}, @@ -412,15 +554,15 @@ doh = ["h2 (>=4.2.0)", "httpcore (>=1.0.0)", "httpx (>=0.28.0)"] doq = ["aioquic (>=1.2.0)"] idna = ["idna (>=3.10)"] trio = ["trio (>=0.30)"] -wmi = ["wmi (>=1.5.1) ; platform_system == \"Windows\""] +wmi = ["wmi (>=1.5.1)"] [[package]] name = "docker" version = "7.1.0" description = "A Python library for the Docker Engine 
API." +category = "main" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, @@ -437,13 +579,25 @@ docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] ssh = ["paramiko (>=2.4.3)"] websockets = ["websocket-client (>=1.3.0)"] +[[package]] +name = "durationpy" +version = "0.10" +description = "Module for converting between datetime.timedelta and Go's Duration strings." +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286"}, + {file = "durationpy-0.10.tar.gz", hash = "sha256:1fa6893409a6e739c9c72334fc65cca1f355dbdd93405d30f726deb5bde42fba"}, +] + [[package]] name = "email-validator" version = "2.3.0" description = "A robust email address syntax and deliverability validation library." +category = "main" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4"}, {file = "email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426"}, @@ -457,10 +611,9 @@ idna = ">=2.0.0" name = "exceptiongroup" version = "1.3.1" description = "Backport of PEP 654 (exception groups)" +category = "main" optional = false python-versions = ">=3.7" -groups = ["main", "dev"] -markers = "python_version == \"3.10\"" files = [ {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"}, {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"}, @@ -474,46 +627,50 @@ test = ["pytest (>=6)"] [[package]] name = "fastapi" -version = "0.109.2" +version = "0.135.4" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +category = "main" optional = false -python-versions = ">=3.8" -groups = ["main"] +python-versions = ">=3.10" files = [ - {file = "fastapi-0.109.2-py3-none-any.whl", hash = "sha256:2c9bab24667293b501cad8dd388c05240c850b58ec5876ee3283c47d6e1e3a4d"}, - {file = "fastapi-0.109.2.tar.gz", hash = "sha256:f3817eac96fe4f65a2ebb4baa000f394e55f5fccdaf7f75250804bc58f354f73"}, + {file = "fastapi-0.135.4-py3-none-any.whl", hash = "sha256:539d3531f8aba9b286ab44658344553f4a4adc218529137501e5d97be071a78b"}, + {file = "fastapi-0.135.4.tar.gz", hash = "sha256:d87c41b0a7bcaa6f14629d73fe48e360821605c7b6d518caacbc00dcf8fa5e0e"}, ] [package.dependencies] -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.36.3,<0.37.0" +annotated-doc = ">=0.0.2" +pydantic = ">=2.9.0" +starlette = ">=0.46.0" typing-extensions = ">=4.8.0" +typing-inspection = ">=0.4.2" [package.extras] -all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx 
(>=0.23.0,<1.0.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] +standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[standard-no-fastapi-cloud-cli] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] [[package]] name = "filelock" -version = "3.25.2" +version = "3.29.0" description = "A platform independent file lock." +category = "dev" optional = false python-versions = ">=3.10" -groups = ["dev"] files = [ - {file = "filelock-3.25.2-py3-none-any.whl", hash = "sha256:ca8afb0da15f229774c9ad1b455ed96e85a81373065fb10446672f64444ddf70"}, - {file = "filelock-3.25.2.tar.gz", hash = "sha256:b64ece2b38f4ca29dd3e810287aa8c48182bbecd1ae6e9ae126c9b35f1382694"}, + {file = "filelock-3.29.0-py3-none-any.whl", hash = "sha256:96f5f6344709aa1572bbf631c640e4ebeeb519e08da902c39a001882f30ac258"}, + {file = "filelock-3.29.0.tar.gz", hash = "sha256:69974355e960702e789734cb4871f884ea6fe50bd8404051a3530bc07809cf90"}, ] [[package]] name = "flame-hub-client" -version = "0.2.13" +version = "0.2.15" description = "HTTP client for interacting with FLAME Hub services." +category = "main" optional = false python-versions = "<4,>=3.10" -groups = ["main"] files = [ - {file = "flame_hub_client-0.2.13-py3-none-any.whl", hash = "sha256:14a130cdf4910b2f20a48071ef83df3b8202fc74a5e8055707104c6c46c23f8a"}, - {file = "flame_hub_client-0.2.13.tar.gz", hash = "sha256:bafdb58c194f079d4ac3e8b5a12d194b4bd21bba5a542a6f8719da95bf4cb1cd"}, + {file = "flame_hub_client-0.2.15-py3-none-any.whl", hash = "sha256:a31f04dd0e55cbdf0f12984613b3834da0cf294fa0e6ae5988a1335133e88734"}, + {file = "flame_hub_client-0.2.15.tar.gz", hash = "sha256:dd296472c05a92bf546e6ec2b9c68a68bc00848607539e77ba12dc756a912a7c"}, ] [package.dependencies] @@ -521,96 +678,73 @@ httpcore = ">=1.0.9,<2.0.0" httpx = ">=0.28.0,<0.29.0" pydantic = {version = ">=2.12.5,<3.0.0", extras = ["email"]} -[[package]] -name = "google-auth" -version = "2.49.1" -description = "Google Authentication Library" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "google_auth-2.49.1-py3-none-any.whl", hash = "sha256:195ebe3dca18eddd1b3db5edc5189b76c13e96f29e73043b923ebcf3f1a860f7"}, - {file = "google_auth-2.49.1.tar.gz", hash = "sha256:16d40da1c3c5a0533f57d268fe72e0ebb0ae1cc3b567024122651c045d879b64"}, -] - -[package.dependencies] -cryptography = ">=38.0.3" -pyasn1-modules = ">=0.2.1" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0)", "requests (>=2.20.0,<3.0.0)"] -cryptography = ["cryptography (>=38.0.3)"] -enterprise-cert = ["pyopenssl"] -pyjwt = ["pyjwt (>=2.0)"] -pyopenssl = ["pyopenssl (>=20.0.0)"] -reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0)"] -rsa = ["rsa (>=3.1.4,<5)"] -testing = ["aiohttp (<3.10.0)", "aiohttp (>=3.6.2,<4.0.0)", "aioresponses", "flask", "freezegun", "grpcio", "packaging", "pyjwt (>=2.0)", "pyopenssl (<24.3.0)", "pyopenssl (>=20.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-localserver", "pyu2f (>=0.1.5)", "requests (>=2.20.0,<3.0.0)", 
"responses", "urllib3"] -urllib3 = ["packaging", "urllib3"] - [[package]] name = "greenlet" -version = "3.3.2" +version = "3.4.0" description = "Lightweight in-process concurrent programming" +category = "main" optional = false python-versions = ">=3.10" -groups = ["main"] -markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"" files = [ - {file = "greenlet-3.3.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9bc885b89709d901859cf95179ec9f6bb67a3d2bb1f0e88456461bd4b7f8fd0d"}, - {file = "greenlet-3.3.2-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b568183cf65b94919be4438dc28416b234b678c608cafac8874dfeeb2a9bbe13"}, - {file = "greenlet-3.3.2-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:527fec58dc9f90efd594b9b700662ed3fb2493c2122067ac9c740d98080a620e"}, - {file = "greenlet-3.3.2-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:508c7f01f1791fbc8e011bd508f6794cb95397fdb198a46cb6635eb5b78d85a7"}, - {file = "greenlet-3.3.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ad0c8917dd42a819fe77e6bdfcb84e3379c0de956469301d9fd36427a1ca501f"}, - {file = "greenlet-3.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:97245cc10e5515dbc8c3104b2928f7f02b6813002770cfaffaf9a6e0fc2b94ef"}, - {file = "greenlet-3.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8c1fdd7d1b309ff0da81d60a9688a8bd044ac4e18b250320a96fc68d31c209ca"}, - {file = "greenlet-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:5d0e35379f93a6d0222de929a25ab47b5eb35b5ef4721c2b9cbcc4036129ff1f"}, - {file = "greenlet-3.3.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:c56692189a7d1c7606cb794be0a8381470d95c57ce5be03fb3d0ef57c7853b86"}, - {file = "greenlet-3.3.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ebd458fa8285960f382841da585e02201b53a5ec2bac6b156fc623b5ce4499f"}, - {file = "greenlet-3.3.2-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a443358b33c4ec7b05b79a7c8b466f5d275025e750298be7340f8fc63dff2a55"}, - {file = "greenlet-3.3.2-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4375a58e49522698d3e70cc0b801c19433021b5c37686f7ce9c65b0d5c8677d2"}, - {file = "greenlet-3.3.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e2cd90d413acbf5e77ae41e5d3c9b3ac1d011a756d7284d7f3f2b806bbd6358"}, - {file = "greenlet-3.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:442b6057453c8cb29b4fb36a2ac689382fc71112273726e2423f7f17dc73bf99"}, - {file = "greenlet-3.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45abe8eb6339518180d5a7fa47fa01945414d7cca5ecb745346fc6a87d2750be"}, - {file = "greenlet-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e692b2dae4cc7077cbb11b47d258533b48c8fde69a33d0d8a82e2fe8d8531d5"}, - {file = "greenlet-3.3.2-cp311-cp311-win_arm64.whl", hash = "sha256:02b0a8682aecd4d3c6c18edf52bc8e51eacdd75c8eac52a790a210b06aa295fd"}, - {file = "greenlet-3.3.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ac8d61d4343b799d1e526db579833d72f23759c71e07181c2d2944e429eb09cd"}, - {file = "greenlet-3.3.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ceec72030dae6ac0c8ed7591b96b70410a8be370b6a477b1dbc072856ad02bd"}, - {file = 
"greenlet-3.3.2-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2a5be83a45ce6188c045bcc44b0ee037d6a518978de9a5d97438548b953a1ac"}, - {file = "greenlet-3.3.2-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ae9e21c84035c490506c17002f5c8ab25f980205c3e61ddb3a2a2a2e6c411fcb"}, - {file = "greenlet-3.3.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43e99d1749147ac21dde49b99c9abffcbc1e2d55c67501465ef0930d6e78e070"}, - {file = "greenlet-3.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c956a19350e2c37f2c48b336a3afb4bff120b36076d9d7fb68cb44e05d95b79"}, - {file = "greenlet-3.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6c6f8ba97d17a1e7d664151284cb3315fc5f8353e75221ed4324f84eb162b395"}, - {file = "greenlet-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:34308836d8370bddadb41f5a7ce96879b72e2fdfb4e87729330c6ab52376409f"}, - {file = "greenlet-3.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:d3a62fa76a32b462a97198e4c9e99afb9ab375115e74e9a83ce180e7a496f643"}, - {file = "greenlet-3.3.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:aa6ac98bdfd716a749b84d4034486863fd81c3abde9aa3cf8eff9127981a4ae4"}, - {file = "greenlet-3.3.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab0c7e7901a00bc0a7284907273dc165b32e0d109a6713babd04471327ff7986"}, - {file = "greenlet-3.3.2-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d248d8c23c67d2291ffd47af766e2a3aa9fa1c6703155c099feb11f526c63a92"}, - {file = "greenlet-3.3.2-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ccd21bb86944ca9be6d967cf7691e658e43417782bce90b5d2faeda0ff78a7dd"}, - {file = "greenlet-3.3.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b6997d360a4e6a4e936c0f9625b1c20416b8a0ea18a8e19cabbefc712e7397ab"}, - {file = "greenlet-3.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:64970c33a50551c7c50491671265d8954046cb6e8e2999aacdd60e439b70418a"}, - {file = "greenlet-3.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1a9172f5bf6bd88e6ba5a84e0a68afeac9dc7b6b412b245dd64f52d83c81e55b"}, - {file = "greenlet-3.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:a7945dd0eab63ded0a48e4dcade82939783c172290a7903ebde9e184333ca124"}, - {file = "greenlet-3.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:394ead29063ee3515b4e775216cb756b2e3b4a7e55ae8fd884f17fa579e6b327"}, - {file = "greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8d1658d7291f9859beed69a776c10822a0a799bc4bfe1bd4272bb60e62507dab"}, - {file = "greenlet-3.3.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18cb1b7337bca281915b3c5d5ae19f4e76d35e1df80f4ad3c1a7be91fadf1082"}, - {file = "greenlet-3.3.2-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2e47408e8ce1c6f1ceea0dffcdf6ebb85cc09e55c7af407c99f1112016e45e9"}, - {file = "greenlet-3.3.2-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e3cb43ce200f59483eb82949bf1835a99cf43d7571e900d7c8d5c62cdf25d2f9"}, - {file = "greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63d10328839d1973e5ba35e98cccbca71b232b14051fd957b6f8b6e8e80d0506"}, - {file = "greenlet-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e4ab3cfb02993c8cc248ea73d7dae6cec0253e9afa311c9b37e603ca9fad2ce"}, - {file = "greenlet-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = 
"sha256:94ad81f0fd3c0c0681a018a976e5c2bd2ca2d9d94895f23e7bb1af4e8af4e2d5"}, - {file = "greenlet-3.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:8c4dd0f3997cf2512f7601563cc90dfb8957c0cff1e3a1b23991d4ea1776c492"}, - {file = "greenlet-3.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:cd6f9e2bbd46321ba3bbb4c8a15794d32960e3b0ae2cc4d49a1a53d314805d71"}, - {file = "greenlet-3.3.2-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:e26e72bec7ab387ac80caa7496e0f908ff954f31065b0ffc1f8ecb1338b11b54"}, - {file = "greenlet-3.3.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b466dff7a4ffda6ca975979bab80bdadde979e29fc947ac3be4451428d8b0e4"}, - {file = "greenlet-3.3.2-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8bddc5b73c9720bea487b3bffdb1840fe4e3656fba3bd40aa1489e9f37877ff"}, - {file = "greenlet-3.3.2-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:59b3e2c40f6706b05a9cd299c836c6aa2378cabe25d021acd80f13abf81181cf"}, - {file = "greenlet-3.3.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26b0f4428b871a751968285a1ac9648944cea09807177ac639b030bddebcea4"}, - {file = "greenlet-3.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fb39a11ee2e4d94be9a76671482be9398560955c9e568550de0224e41104727"}, - {file = "greenlet-3.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:20154044d9085151bc309e7689d6f7ba10027f8f5a8c0676ad398b951913d89e"}, - {file = "greenlet-3.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c04c5e06ec3e022cbfe2cd4a846e1d4e50087444f875ff6d2c2ad8445495cf1a"}, - {file = "greenlet-3.3.2.tar.gz", hash = "sha256:2eaf067fc6d886931c7962e8c6bede15d2f01965560f3359b27c80bde2d151f2"}, + {file = "greenlet-3.4.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d18eae9a7fb0f499efcd146b8c9750a2e1f6e0e93b5a382b3481875354a430e6"}, + {file = "greenlet-3.4.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:636d2f95c309e35f650e421c23297d5011716be15d966e6328b367c9fc513a82"}, + {file = "greenlet-3.4.0-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:234582c20af9742583c3b2ddfbdbb58a756cfff803763ffaae1ac7990a9fac31"}, + {file = "greenlet-3.4.0-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ac6a5f618be581e1e0713aecec8e54093c235e5fa17d6d8eb7ffc487e2300508"}, + {file = "greenlet-3.4.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:523677e69cd4711b5a014e37bc1fb3a29947c3e3a5bb6a527e1cc50312e5a398"}, + {file = "greenlet-3.4.0-cp310-cp310-manylinux_2_39_riscv64.whl", hash = "sha256:d336d46878e486de7d9458653c722875547ac8d36a1cff9ffaf4a74a3c1f62eb"}, + {file = "greenlet-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b45e45fe47a19051a396abb22e19e7836a59ee6c5a90f3be427343c37908d65b"}, + {file = "greenlet-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5434271357be07f3ad0936c312645853b7e689e679e29310e2de09a9ea6c3adf"}, + {file = "greenlet-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:a19093fbad824ed7c0f355b5ff4214bffda5f1a7f35f29b31fcaa240cc0135ab"}, + {file = "greenlet-3.4.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:805bebb4945094acbab757d34d6e1098be6de8966009ab9ca54f06ff492def58"}, + {file = "greenlet-3.4.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:439fc2f12b9b512d9dfa681c5afe5f6b3232c708d13e6f02c845e0d9f4c2d8c6"}, + {file = 
"greenlet-3.4.0-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a70ed1cb0295bee1df57b63bf7f46b4e56a5c93709eea769c1fec1bb23a95875"}, + {file = "greenlet-3.4.0-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8c5696c42e6bb5cfb7c6ff4453789081c66b9b91f061e5e9367fa15792644e76"}, + {file = "greenlet-3.4.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c660bce1940a1acae5f51f0a064f1bc785d07ea16efcb4bc708090afc4d69e83"}, + {file = "greenlet-3.4.0-cp311-cp311-manylinux_2_39_riscv64.whl", hash = "sha256:89995ce5ddcd2896d89615116dd39b9703bfa0c07b583b85b89bf1b5d6eddf81"}, + {file = "greenlet-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ee407d4d1ca9dc632265aee1c8732c4a2d60adff848057cdebfe5fe94eb2c8a2"}, + {file = "greenlet-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:956215d5e355fffa7c021d168728321fd4d31fd730ac609b1653b450f6a4bc71"}, + {file = "greenlet-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:5cb614ace7c27571270354e9c9f696554d073f8aa9319079dcba466bbdead711"}, + {file = "greenlet-3.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:04403ac74fe295a361f650818de93be11b5038a78f49ccfb64d3b1be8fbf1267"}, + {file = "greenlet-3.4.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:1a54a921561dd9518d31d2d3db4d7f80e589083063ab4d3e2e950756ef809e1a"}, + {file = "greenlet-3.4.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:16dec271460a9a2b154e3b1c2fa1050ce6280878430320e85e08c166772e3f97"}, + {file = "greenlet-3.4.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:90036ce224ed6fe75508c1907a77e4540176dcf0744473627785dd519c6f9996"}, + {file = "greenlet-3.4.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6f0def07ec9a71d72315cf26c061aceee53b306c36ed38c35caba952ea1b319d"}, + {file = "greenlet-3.4.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a1c4f6b453006efb8310affb2d132832e9bbb4fc01ce6df6b70d810d38f1f6dc"}, + {file = "greenlet-3.4.0-cp312-cp312-manylinux_2_39_riscv64.whl", hash = "sha256:0e1254cf0cbaa17b04320c3a78575f29f3c161ef38f59c977108f19ffddaf077"}, + {file = "greenlet-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9b2d9a138ffa0e306d0e2b72976d2fb10b97e690d40ab36a472acaab0838e2de"}, + {file = "greenlet-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8424683caf46eb0eb6f626cb95e008e8cc30d0cb675bdfa48200925c79b38a08"}, + {file = "greenlet-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0a53fb071531d003b075c444014ff8f8b1a9898d36bb88abd9ac7b3524648a2"}, + {file = "greenlet-3.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:f38b81880ba28f232f1f675893a39cf7b6db25b31cc0a09bb50787ecf957e85e"}, + {file = "greenlet-3.4.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:43748988b097f9c6f09364f260741aa73c80747f63389824435c7a50bfdfd5c1"}, + {file = "greenlet-3.4.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5566e4e2cd7a880e8c27618e3eab20f3494452d12fd5129edef7b2f7aa9a36d1"}, + {file = "greenlet-3.4.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1054c5a3c78e2ab599d452f23f7adafef55062a783a8e241d24f3b633ba6ff82"}, + {file = "greenlet-3.4.0-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:98eedd1803353daf1cd9ef23eef23eda5a4d22f99b1f998d273a8b78b70dd47f"}, + {file = "greenlet-3.4.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:f82cb6cddc27dd81c96b1506f4aa7def15070c3b2a67d4e46fd19016aacce6cf"}, + {file = "greenlet-3.4.0-cp313-cp313-manylinux_2_39_riscv64.whl", hash = "sha256:b7857e2202aae67bc5725e0c1f6403c20a8ff46094ece015e7d474f5f7020b55"}, + {file = "greenlet-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:227a46251ecba4ff46ae742bc5ce95c91d5aceb4b02f885487aff269c127a729"}, + {file = "greenlet-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5b99e87be7eba788dd5b75ba1cde5639edffdec5f91fe0d734a249535ec3408c"}, + {file = "greenlet-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:849f8bc17acd6295fcb5de8e46d55cc0e52381c56eaf50a2afd258e97bc65940"}, + {file = "greenlet-3.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:9390ad88b652b1903814eaabd629ca184db15e0eeb6fe8a390bbf8b9106ae15a"}, + {file = "greenlet-3.4.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:10a07aca6babdd18c16a3f4f8880acfffc2b88dfe431ad6aa5f5740759d7d75e"}, + {file = "greenlet-3.4.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:076e21040b3a917d3ce4ad68fb5c3c6b32f1405616c4a57aa83120979649bd3d"}, + {file = "greenlet-3.4.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e82689eea4a237e530bb5cb41b180ef81fa2160e1f89422a67be7d90da67f615"}, + {file = "greenlet-3.4.0-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:06c2d3b89e0c62ba50bd7adf491b14f39da9e7e701647cb7b9ff4c99bee04b19"}, + {file = "greenlet-3.4.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4df3b0b2289ec686d3c821a5fee44259c05cfe824dd5e6e12c8e5f5df23085cf"}, + {file = "greenlet-3.4.0-cp314-cp314-manylinux_2_39_riscv64.whl", hash = "sha256:070b8bac2ff3b4d9e0ff36a0d19e42103331d9737e8504747cd1e659f76297bd"}, + {file = "greenlet-3.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8bff29d586ea415688f4cec96a591fcc3bf762d046a796cdadc1fdb6e7f2d5bf"}, + {file = "greenlet-3.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8a569c2fb840c53c13a2b8967c63621fafbd1a0e015b9c82f408c33d626a2fda"}, + {file = "greenlet-3.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:207ba5b97ea8b0b60eb43ffcacf26969dd83726095161d676aac03ff913ee50d"}, + {file = "greenlet-3.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:f8296d4e2b92af34ebde81085a01690f26a51eb9ac09a0fcadb331eb36dbc802"}, + {file = "greenlet-3.4.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:d70012e51df2dbbccfaf63a40aaf9b40c8bed37c3e3a38751c926301ce538ece"}, + {file = "greenlet-3.4.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a58bec0751f43068cd40cff31bb3ca02ad6000b3a51ca81367af4eb5abc480c8"}, + {file = "greenlet-3.4.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05fa0803561028f4b2e3b490ee41216a842eaee11aed004cc343a996d9523aa2"}, + {file = "greenlet-3.4.0-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c4cd56a9eb7a6444edbc19062f7b6fbc8f287c663b946e3171d899693b1c19fa"}, + {file = "greenlet-3.4.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e60d38719cb80b3ab5e85f9f1aed4960acfde09868af6762ccb27b260d68f4ed"}, + {file = "greenlet-3.4.0-cp314-cp314t-manylinux_2_39_riscv64.whl", hash = "sha256:1f85f204c4d54134ae850d401fa435c89cd667d5ce9dc567571776b45941af72"}, + {file = "greenlet-3.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7f50c804733b43eded05ae694691c9aa68bca7d0a867d67d4a3f514742a2d53f"}, + {file = 
"greenlet-3.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2d4f0635dc4aa638cda4b2f5a07ae9a2cff9280327b581a3fcb6f317b4fbc38a"}, + {file = "greenlet-3.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:1a4a48f24681300c640f143ba7c404270e1ebbbcf34331d7104a4ff40f8ea705"}, + {file = "greenlet-3.4.0.tar.gz", hash = "sha256:f50a96b64dafd6169e595a5c56c9146ef80333e67d4476a65a9c55f400fc22ff"}, ] [package.extras] @@ -621,9 +755,9 @@ test = ["objgraph", "psutil", "setuptools"] name = "h11" version = "0.16.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, @@ -633,9 +767,9 @@ files = [ name = "httpcore" version = "1.0.9" description = "A minimal low-level HTTP client." +category = "main" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, @@ -648,16 +782,16 @@ h11 = ">=0.16" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" version = "0.28.1" description = "The next generation HTTP client." +category = "main" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -666,26 +800,26 @@ files = [ [package.dependencies] anyio = "*" certifi = "*" -httpcore = "==1.*" +httpcore = ">=1.0.0,<2.0.0" idna = "*" [package.extras] -brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "identify" -version = "2.6.18" +version = "2.6.19" description = "File identification library for Python" +category = "dev" optional = false python-versions = ">=3.10" -groups = ["dev"] files = [ - {file = "identify-2.6.18-py2.py3-none-any.whl", hash = "sha256:8db9d3c8ea9079db92cafb0ebf97abdc09d52e97f4dcf773a2e694048b7cd737"}, - {file = "identify-2.6.18.tar.gz", hash = "sha256:873ac56a5e3fd63e7438a7ecbc4d91aca692eb3fefa4534db2b7913f3fc352fd"}, + {file = "identify-2.6.19-py2.py3-none-any.whl", hash = "sha256:20e6a87f786f768c092a721ad107fc9df0eb89347be9396cadf3f4abbd1fb78a"}, + {file = "identify-2.6.19.tar.gz", hash = "sha256:6be5020c38fcb07da56c53733538a3081ea5aa70d36a156f83044bfbf9173842"}, ] [package.extras] @@ -693,26 +827,26 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.11" +version = "3.12" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = 
">=3.8" -groups = ["main"] files = [ - {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, - {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, + {file = "idna-3.12-py3-none-any.whl", hash = "sha256:60ffaa1858fac94c9c124728c24fcde8160f3fb4a7f79aa8cdd33a9d1af60a67"}, + {file = "idna-3.12.tar.gz", hash = "sha256:724e9952cc9e2bd7550ea784adb098d837ab5267ef67a1ab9cf7846bdbdd8254"}, ] [package.extras] -all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] +all = ["mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] [[package]] name = "iniconfig" version = "2.3.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.10" -groups = ["dev"] files = [ {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, @@ -722,9 +856,9 @@ files = [ name = "kong-admin-client" version = "3.5.0" description = "Kong Admin API" +category = "main" optional = false python-versions = "^3.7" -groups = ["main"] files = [] develop = false @@ -742,38 +876,38 @@ resolved_reference = "0524678992903864694c706ed50c0d8cad80f45a" [[package]] name = "kubernetes" -version = "28.1.0" +version = "35.0.0" description = "Kubernetes python client" +category = "main" optional = false python-versions = ">=3.6" -groups = ["main"] files = [ - {file = "kubernetes-28.1.0-py2.py3-none-any.whl", hash = "sha256:10f56f8160dcb73647f15fafda268e7f60cf7dbc9f8e46d52fcd46d3beb0c18d"}, - {file = "kubernetes-28.1.0.tar.gz", hash = "sha256:1468069a573430fb1cb5ad22876868f57977930f80a6749405da31cd6086a7e9"}, + {file = "kubernetes-35.0.0-py2.py3-none-any.whl", hash = "sha256:39e2b33b46e5834ef6c3985ebfe2047ab39135d41de51ce7641a7ca5b372a13d"}, + {file = "kubernetes-35.0.0.tar.gz", hash = "sha256:3d00d344944239821458b9efd484d6df9f011da367ecb155dadf9513f05f09ee"}, ] [package.dependencies] -certifi = ">=14.5.14" -google-auth = ">=1.0.1" -oauthlib = ">=3.2.2" +certifi = ">=14.05.14" +durationpy = ">=0.7" python-dateutil = ">=2.5.3" pyyaml = ">=5.4.1" requests = "*" requests-oauthlib = "*" six = ">=1.9.0" -urllib3 = ">=1.24.2,<2.0" -websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" +urllib3 = ">=1.24.2,<2.6.0 || >2.6.0" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.0 || >=0.43.0" [package.extras] adal = ["adal (>=1.0.2)"] +google-auth = ["google-auth (>=1.0.1)"] [[package]] name = "nodeenv" version = "1.10.0" description = "Node.js virtual environment builder" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["dev"] files = [ {file = "nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827"}, {file = "nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb"}, @@ -783,9 +917,9 @@ files = [ name = "oauthlib" version = "3.3.1" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +category = "main" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1"}, {file = 
"oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9"}, @@ -798,35 +932,35 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "packaging" -version = "26.0" +version = "26.1" description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ - {file = "packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529"}, - {file = "packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4"}, + {file = "packaging-26.1-py3-none-any.whl", hash = "sha256:5d9c0669c6285e491e0ced2eee587eaf67b670d94a19e94e3984a481aba6802f"}, + {file = "packaging-26.1.tar.gz", hash = "sha256:f042152b681c4bfac5cae2742a55e103d27ab2ec0f3d88037136b6bfe7c9c5de"}, ] [[package]] name = "platformdirs" -version = "4.9.4" +version = "4.9.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +category = "dev" optional = false python-versions = ">=3.10" -groups = ["dev"] files = [ - {file = "platformdirs-4.9.4-py3-none-any.whl", hash = "sha256:68a9a4619a666ea6439f2ff250c12a853cd1cbd5158d258bd824a7df6be2f868"}, - {file = "platformdirs-4.9.4.tar.gz", hash = "sha256:1ec356301b7dc906d83f371c8f487070e99d3ccf9e501686456394622a01a934"}, + {file = "platformdirs-4.9.6-py3-none-any.whl", hash = "sha256:e61adb1d5e5cb3441b4b7710bea7e4c12250ca49439228cc1021c00dcfac0917"}, + {file = "platformdirs-4.9.6.tar.gz", hash = "sha256:3bfa75b0ad0db84096ae777218481852c0ebc6c727b3168c1b9e0118e458cf0a"}, ] [[package]] name = "pluggy" version = "1.6.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, @@ -838,14 +972,14 @@ testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.8.0" +version = "4.5.1" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+category = "dev" optional = false -python-versions = ">=3.9" -groups = ["dev"] +python-versions = ">=3.10" files = [ - {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, - {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, + {file = "pre_commit-4.5.1-py2.py3-none-any.whl", hash = "sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77"}, + {file = "pre_commit-4.5.1.tar.gz", hash = "sha256:eb545fcff725875197837263e977ea257a402056661f09dae08e4b149b030a61"}, ] [package.dependencies] @@ -857,116 +991,88 @@ virtualenv = ">=20.10.0" [[package]] name = "psycopg2-binary" -version = "2.9.11" +version = "2.9.12" description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ - {file = "psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c"}, - {file = "psycopg2_binary-2.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6fe6b47d0b42ce1c9f1fa3e35bb365011ca22e39db37074458f27921dca40f2"}, - {file = "psycopg2_binary-2.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c0e4262e089516603a09474ee13eabf09cb65c332277e39af68f6233911087"}, - {file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c47676e5b485393f069b4d7a811267d3168ce46f988fa602658b8bb901e9e64d"}, - {file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a28d8c01a7b27a1e3265b11250ba7557e5f72b5ee9e5f3a2fa8d2949c29bf5d2"}, - {file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5f3f2732cf504a1aa9e9609d02f79bea1067d99edf844ab92c247bbca143303b"}, - {file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:865f9945ed1b3950d968ec4690ce68c55019d79e4497366d36e090327ce7db14"}, - {file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91537a8df2bde69b1c1db01d6d944c831ca793952e4f57892600e96cee95f2cd"}, - {file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4dca1f356a67ecb68c81a7bc7809f1569ad9e152ce7fd02c2f2036862ca9f66b"}, - {file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:0da4de5c1ac69d94ed4364b6cbe7190c1a70d325f112ba783d83f8440285f152"}, - {file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37d8412565a7267f7d79e29ab66876e55cb5e8e7b3bbf94f8206f6795f8f7e7e"}, - {file = "psycopg2_binary-2.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:c665f01ec8ab273a61c62beeb8cce3014c214429ced8a308ca1fc410ecac3a39"}, - {file = "psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10"}, - {file = "psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a"}, - {file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4"}, - {file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7"}, - {file = 
"psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee"}, - {file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a311f1edc9967723d3511ea7d2708e2c3592e3405677bf53d5c7246753591fbb"}, - {file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f"}, - {file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94"}, - {file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00ce1830d971f43b667abe4a56e42c1e2d594b32da4802e44a73bacacb25535f"}, - {file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908"}, - {file = "psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03"}, - {file = "psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4"}, - {file = "psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc"}, - {file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a"}, - {file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e"}, - {file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db"}, - {file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757"}, - {file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3"}, - {file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a"}, - {file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34"}, - {file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d"}, - {file = "psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d"}, - {file = "psycopg2_binary-2.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b8fb3db325435d34235b044b199e56cdf9ff41223a4b9752e8576465170bb38c"}, - {file = "psycopg2_binary-2.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:366df99e710a2acd90efed3764bb1e28df6c675d33a7fb40df9b7281694432ee"}, - {file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c55b385daa2f92cb64b12ec4536c66954ac53654c7f15a203578da4e78105c0"}, - {file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = 
"sha256:c0377174bf1dd416993d16edc15357f6eb17ac998244cca19bc67cdc0e2e5766"}, - {file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5c6ff3335ce08c75afaed19e08699e8aacf95d4a260b495a4a8545244fe2ceb3"}, - {file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:84011ba3109e06ac412f95399b704d3d6950e386b7994475b231cf61eec2fc1f"}, - {file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba34475ceb08cccbdd98f6b46916917ae6eeb92b5ae111df10b544c3a4621dc4"}, - {file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b31e90fdd0f968c2de3b26ab014314fe814225b6c324f770952f7d38abf17e3c"}, - {file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:d526864e0f67f74937a8fce859bd56c979f5e2ec57ca7c627f5f1071ef7fee60"}, - {file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04195548662fa544626c8ea0f06561eb6203f1984ba5b4562764fbeb4c3d14b1"}, - {file = "psycopg2_binary-2.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:efff12b432179443f54e230fdf60de1f6cc726b6c832db8701227d089310e8aa"}, - {file = "psycopg2_binary-2.9.11-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:92e3b669236327083a2e33ccfa0d320dd01b9803b3e14dd986a4fc54aa00f4e1"}, - {file = "psycopg2_binary-2.9.11-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e0deeb03da539fa3577fcb0b3f2554a97f7e5477c246098dbb18091a4a01c16f"}, - {file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b52a3f9bb540a3e4ec0f6ba6d31339727b2950c9772850d6545b7eae0b9d7c5"}, - {file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:db4fd476874ccfdbb630a54426964959e58da4c61c9feba73e6094d51303d7d8"}, - {file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47f212c1d3be608a12937cc131bd85502954398aaa1320cb4c14421a0ffccf4c"}, - {file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e35b7abae2b0adab776add56111df1735ccc71406e56203515e228a8dc07089f"}, - {file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fcf21be3ce5f5659daefd2b3b3b6e4727b028221ddc94e6c1523425579664747"}, - {file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:9bd81e64e8de111237737b29d68039b9c813bdf520156af36d26819c9a979e5f"}, - {file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:32770a4d666fbdafab017086655bcddab791d7cb260a16679cc5a7338b64343b"}, - {file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3cb3a676873d7506825221045bd70e0427c905b9c8ee8d6acd70cfcbd6e576d"}, - {file = "psycopg2_binary-2.9.11-cp314-cp314-win_amd64.whl", hash = "sha256:4012c9c954dfaccd28f94e84ab9f94e12df76b4afb22331b1f0d3154893a6316"}, - {file = "psycopg2_binary-2.9.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:20e7fb94e20b03dcc783f76c0865f9da39559dcc0c28dd1a3fce0d01902a6b9c"}, - {file = "psycopg2_binary-2.9.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4bdab48575b6f870f465b397c38f1b415520e9879fdf10a53ee4f49dcbdf8a21"}, - {file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9d3a9edcfbe77a3ed4bc72836d466dfce4174beb79eda79ea155cc77237ed9e8"}, - {file = 
"psycopg2_binary-2.9.11-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:44fc5c2b8fa871ce7f0023f619f1349a0aa03a0857f2c96fbc01c657dcbbdb49"}, - {file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9c55460033867b4622cda1b6872edf445809535144152e5d14941ef591980edf"}, - {file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:2d11098a83cca92deaeaed3d58cfd150d49b3b06ee0d0852be466bf87596899e"}, - {file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:691c807d94aecfbc76a14e1408847d59ff5b5906a04a23e12a89007672b9e819"}, - {file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:8b81627b691f29c4c30a8f322546ad039c40c328373b11dff7490a3e1b517855"}, - {file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:b637d6d941209e8d96a072d7977238eea128046effbf37d1d8b2c0764750017d"}, - {file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:41360b01c140c2a03d346cec3280cf8a71aa07d94f3b1509fa0161c366af66b4"}, - {file = "psycopg2_binary-2.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:875039274f8a2361e5207857899706da840768e2a775bf8c65e82f60b197df02"}, + {file = "psycopg2_binary-2.9.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b818ceff717f98851a64bffd4c5eb5b3059ae280276dcecc52ac658dcf006a4"}, + {file = "psycopg2_binary-2.9.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fa0d7caca8635c56e373055094eeda3208d901d55dd0ff5abc1d4e47f82b56"}, + {file = "psycopg2_binary-2.9.12-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:864c261b3690e1207d14bbfe0a61e27567981b80c47a778561e49f676f7ce433"}, + {file = "psycopg2_binary-2.9.12-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c5ee5213445dd45312459029b8c4c0a695461eb517b753d2582315bd07995f5e"}, + {file = "psycopg2_binary-2.9.12-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6f9cae1f848779b5b01f417e762c40d026ea93eb0648249a604728cda991dde3"}, + {file = "psycopg2_binary-2.9.12-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:63a3ebbd543d3d1eda088ac99164e8c5bac15293ee91f20281fd17d050aee1c4"}, + {file = "psycopg2_binary-2.9.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d6fcbba8c9fed08a73b8ac61ea79e4821e45b1e92bb466230c5e746bbf3d5256"}, + {file = "psycopg2_binary-2.9.12-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:36512911ebb2b60a0c3e44d0bb5048c1980aced91235d133b7874f3d1d93487c"}, + {file = "psycopg2_binary-2.9.12-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:8ffdb59fe88f99589e34354a130217aa1fd2d615612402d6edc8b3dbc7a44463"}, + {file = "psycopg2_binary-2.9.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a46fe069b65255df410f856d842bc235f90e22ffdf532dda625fd4213d3fd9b1"}, + {file = "psycopg2_binary-2.9.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab29414b25dcb698bf26bf213e3348abdcd07bbd5de032a5bec15bd75b298b03"}, + {file = "psycopg2_binary-2.9.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5c8ce6c61bd1b1f6b9c24ee32211599f6166af2c55abb19456090a21fd16554b"}, + {file = "psycopg2_binary-2.9.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b4a9eaa6e7f4ff91bec10aa3fb296878e75187bced5cc4bafe17dc40915e1326"}, + {file = "psycopg2_binary-2.9.12-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = 
"sha256:c6528cefc8e50fcc6f4a107e27a672058b36cc5736d665476aeb413ba88dbb06"}, + {file = "psycopg2_binary-2.9.12-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e4e184b1fb6072bf05388aa41c697e1b2d01b3473f107e7ec44f186a32cfd0b8"}, + {file = "psycopg2_binary-2.9.12-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4766ab678563054d3f1d064a4db19cc4b5f9e3a8d9018592a8285cf200c248f3"}, + {file = "psycopg2_binary-2.9.12-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5a0253224780c978746cb9be55a946bcdaf40fe3519c0f622924cdabdafe2c39"}, + {file = "psycopg2_binary-2.9.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0dc9228d47c46bda253d2ecd6bb93b56a9f2d7ad33b684a1fa3622bf74ffe30c"}, + {file = "psycopg2_binary-2.9.12-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f921f3cd87035ef7df233383011d7a53ea1d346224752c1385f1edfd790ceb6a"}, + {file = "psycopg2_binary-2.9.12-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:3d999bd982a723113c1a45b55a7a6a90d64d0ed2278020ed625c490ff7bef96c"}, + {file = "psycopg2_binary-2.9.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29d4d134bd0ab46ffb04e94aa3c5fa3ef582e9026609165e2f758ff76fc3a3be"}, + {file = "psycopg2_binary-2.9.12-cp311-cp311-win_amd64.whl", hash = "sha256:cb4a1dacdd48077150dc762a9e5ddbf32c256d66cb46f80839391aa458774936"}, + {file = "psycopg2_binary-2.9.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5cdc05117180c5fa9c40eea8ea559ce64d73824c39d928b7da9fb5f6a9392433"}, + {file = "psycopg2_binary-2.9.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d3227a3bc228c10d21011a99245edca923e4e8bf461857e869a507d9a41fe9f6"}, + {file = "psycopg2_binary-2.9.12-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:995ce929eede89db6254b50827e2b7fd61e50d11f0b116b29fffe4a2e53c4580"}, + {file = "psycopg2_binary-2.9.12-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9fe06d93e72f1c048e731a2e3e7854a5bfaa58fc736068df90b352cefe66f03f"}, + {file = "psycopg2_binary-2.9.12-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:40e7b28b63aaf737cb3a1edc3a9bbc9a9f4ad3dcb7152e8c1130e4050eddcb7d"}, + {file = "psycopg2_binary-2.9.12-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:89d19a9f7899e8eb0656a2b3a08e0da04c720a06db6e0033eab5928aabe60fa9"}, + {file = "psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:612b965daee295ae2da8f8218ce1d274645dc76ef3f1abf6a0a94fd57eff876d"}, + {file = "psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b9a339b79d37c1b45f3235265f07cdeb0cb5ad7acd2ac7720a5920989c17c24e"}, + {file = "psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:3471336e1acfd9c7fe507b8bad5af9317b6a89294f9eb37bd9a030bb7bebcdc6"}, + {file = "psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7af18183109e23502c8b2ae7f6926c0882766f35b5175a4cd737ad825e4d7a1b"}, + {file = "psycopg2_binary-2.9.12-cp312-cp312-win_amd64.whl", hash = "sha256:398fcd4db988c7d7d3713e2b8e18939776fd3fb447052daae4f24fa39daede4c"}, + {file = "psycopg2_binary-2.9.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7c729a73c7b1b84de3582f73cdd27d905121dc2c531f3d9a3c32a3011033b965"}, + {file = "psycopg2_binary-2.9.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4413d0caef93c5cf50b96863df4c2efe8c269bf2267df353225595e7e15e8df7"}, + {file = 
"psycopg2_binary-2.9.12-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:4dfcf8e45ebb0c663be34a3442f65e17311f3367089cd4e5e3a3e8e62c978777"}, + {file = "psycopg2_binary-2.9.12-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c41321a14dd74aceb6a9a643b9253a334521babfa763fa873e33d89cfa122fb5"}, + {file = "psycopg2_binary-2.9.12-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83946ba43979ebfdc99a3cd0ee775c89f221df026984ba19d46133d8d75d3cd9"}, + {file = "psycopg2_binary-2.9.12-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:411e85815652d13560fbe731878daa5d92378c4995a22302071890ec3397d019"}, + {file = "psycopg2_binary-2.9.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c8ad4c08e00f7679559eaed7aff1edfffc60c086b976f93972f686384a95e2c"}, + {file = "psycopg2_binary-2.9.12-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:00814e40fa23c2b37ef0a1e3c749d89982c73a9cb5046137f0752a22d432e82f"}, + {file = "psycopg2_binary-2.9.12-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:98062447aebc20ed20add1f547a364fd0ef8933640d5372ff1873f8deb9b61be"}, + {file = "psycopg2_binary-2.9.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:66a7685d7e548f10fb4ce32fb01a7b7f4aa702134de92a292c7bd9e0d3dbd290"}, + {file = "psycopg2_binary-2.9.12-cp313-cp313-win_amd64.whl", hash = "sha256:b6937f5fe4e180aeee87de907a2fa982ded6f7f15d7218f78a083e4e1d68f2a0"}, + {file = "psycopg2_binary-2.9.12-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:6f3b3de8a74ef8db215f22edffb19e32dc6fa41340456de7ec99efdc8a7b3ec2"}, + {file = "psycopg2_binary-2.9.12-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1006fb62f0f0bc5ce256a832356c6262e91be43f5e4eb15b5eaf38079464caf2"}, + {file = "psycopg2_binary-2.9.12-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:840066105706cd2eb29b9a1c2329620056582a4bf3e8169dec5c447042d0869f"}, + {file = "psycopg2_binary-2.9.12-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:863f5d12241ebe1c76a72a04c2113b6dc905f90b9cef0e9be0efd994affd9354"}, + {file = "psycopg2_binary-2.9.12-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a99eaab34a9010f1a086b126de467466620a750634d114d20455f3a824aae033"}, + {file = "psycopg2_binary-2.9.12-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ffdd7dc5463ccd61845ac37b7012d0f35a1548df9febe14f8dd549be4a0bc81e"}, + {file = "psycopg2_binary-2.9.12-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:54a0dfecab1b48731f934e06139dfe11e24219fb6d0ceb32177cf0375f14c7b5"}, + {file = "psycopg2_binary-2.9.12-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:96937c9c5d891f772430f418a7a8b4691a90c3e6b93cf72b5bd7cad8cbca32a5"}, + {file = "psycopg2_binary-2.9.12-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:77b348775efd4cdab410ec6609d81ccecd1139c90265fa583a7255c8064bc03d"}, + {file = "psycopg2_binary-2.9.12-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:527e6342b3e44c2f0544f6b8e927d60de7f163f5723b8f1dfa7d2a84298738cd"}, + {file = "psycopg2_binary-2.9.12-cp314-cp314-win_amd64.whl", hash = "sha256:f12ae41fcafadb39b2785e64a40f9db05d6de2ac114077457e0e7c597f3af980"}, + {file = "psycopg2_binary-2.9.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ee2d84ef5eb6c04702d2e9c372ad557fb027f26a5d82804f749dfb14c7fdd2ab"}, + {file = "psycopg2_binary-2.9.12-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:cfa2517c94ea3af6deb46f81e1bbd884faa63e28481eb2f889989dd8d95e5f03"}, + {file = "psycopg2_binary-2.9.12-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:ba3df2fc42a1cfa45b72cf096d4acb2b885937eedc61461081d53538d4a82a86"}, + {file = "psycopg2_binary-2.9.12-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:718e1fc18edf573b02cb8aea868de8d8d33f99ce9620206aa9144b67b0985e94"}, + {file = "psycopg2_binary-2.9.12-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5c7cb4cbf894a1d36c720d713de507952c7c58f66d30834708f03dbe5c822ccf"}, + {file = "psycopg2_binary-2.9.12-cp39-cp39-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:049366c6d884bdcd65d66e6ca1fdbebe670b56c6c9ba46f164e6667e90881964"}, + {file = "psycopg2_binary-2.9.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fb1828cf3da68f99e45ebce1355d65d2d12b6a78fb5dfb16247aad6bdef5f5d2"}, + {file = "psycopg2_binary-2.9.12-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:127467c6e476dd876634f17c3d870530e73ff454ff99bff73d36e80af28e1115"}, + {file = "psycopg2_binary-2.9.12-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:ace94261f43850e9e79f6c56636c5e0147978ab79eda5e5e5ebf13ae146fc8fe"}, + {file = "psycopg2_binary-2.9.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a7e39a65b7d2a20e4ba2e0aaad1960b61cc2888d6ab047769f8347bd3c9ad915"}, + {file = "psycopg2_binary-2.9.12-cp39-cp39-win_amd64.whl", hash = "sha256:f625abb7020e4af3432d95342daa1aa0db3fa369eed19807aa596367ba791b10"}, + {file = "psycopg2_binary-2.9.12.tar.gz", hash = "sha256:5ac9444edc768c02a6b6a591f070b8aae28ff3a99be57560ac996001580f294c"}, ] -[[package]] -name = "pyasn1" -version = "0.6.3" -description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "pyasn1-0.6.3-py3-none-any.whl", hash = "sha256:a80184d120f0864a52a073acc6fc642847d0be408e7c7252f31390c0f4eadcde"}, - {file = "pyasn1-0.6.3.tar.gz", hash = "sha256:697a8ecd6d98891189184ca1fa05d1bb00e2f84b5977c481452050549c8a72cf"}, -] - -[[package]] -name = "pyasn1-modules" -version = "0.4.2" -description = "A collection of ASN.1-based protocols modules" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a"}, - {file = "pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6"}, -] - -[package.dependencies] -pyasn1 = ">=0.6.1,<0.7.0" - [[package]] name = "pycparser" version = "3.0" description = "C parser in Python" +category = "main" optional = false python-versions = ">=3.10" -groups = ["main"] -markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\"" files = [ {file = "pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992"}, {file = "pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29"}, @@ -974,168 +1080,182 @@ files = [ [[package]] name = "pydantic" -version = "2.12.5" +version = "2.13.3" description = "Data validation using Python type hints" +category = "main" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ - {file = "pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d"}, - {file = 
"pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49"}, + {file = "pydantic-2.13.3-py3-none-any.whl", hash = "sha256:6db14ac8dfc9a1e57f87ea2c0de670c251240f43cb0c30a5130e9720dc612927"}, + {file = "pydantic-2.13.3.tar.gz", hash = "sha256:af09e9d1d09f4e7fe37145c1f577e1d61ceb9a41924bf0094a36506285d0a84d"}, ] [package.dependencies] annotated-types = ">=0.6.0" email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} -pydantic-core = "2.41.5" +pydantic-core = "2.46.3" typing-extensions = ">=4.14.1" typing-inspection = ">=0.4.2" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.41.5" +version = "2.46.3" description = "Core functionality for Pydantic validation and serialization" +category = "main" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ - {file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"}, - {file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"}, - {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"}, - {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"}, - {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"}, - {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"}, - {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"}, - {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"}, - {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"}, - {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"}, - {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"}, - {file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"}, - {file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"}, - {file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"}, - {file = "pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"}, - {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"}, - {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"}, - {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"}, - {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"}, - {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"}, - {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"}, - {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"}, - {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"}, - {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"}, - {file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"}, - {file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"}, - {file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"}, - {file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"}, - {file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"}, - {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"}, - {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"}, - {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"}, - {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"}, - {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"}, - {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"}, - {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"}, - {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"}, - {file = 
"pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"}, - {file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"}, - {file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"}, - {file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"}, - {file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"}, - {file = "pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"}, - {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"}, - {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"}, - {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"}, - {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"}, - {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"}, - {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"}, - {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"}, - {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"}, - {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"}, - {file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"}, - {file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"}, - {file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"}, - {file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"}, - {file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"}, - {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"}, - {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"}, - {file = 
"pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"}, - {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"}, - {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"}, - {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"}, - {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"}, - {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"}, - {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"}, - {file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"}, - {file = "pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"}, - {file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"}, - {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"}, - {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"}, - {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"}, - {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"}, - {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"}, - {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"}, - {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"}, - {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"}, - {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"}, - {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"}, - {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"}, - {file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"}, - {file = 
"pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"}, - {file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"}, - {file = "pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf"}, - {file = "pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5"}, - {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d"}, - {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60"}, - {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82"}, - {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5"}, - {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3"}, - {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425"}, - {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504"}, - {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5"}, - {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3"}, - {file = "pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460"}, - {file = "pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b"}, - {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034"}, - {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c"}, - {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2"}, - {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad"}, - {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd"}, - {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc"}, - {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56"}, - {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b"}, - {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"}, - {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"}, - {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"}, - {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"}, - {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"}, - {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"}, - {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"}, - {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"}, - {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"}, - {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"}, - {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"}, - {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"}, - {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"}, - {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"}, - {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"}, - {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"}, - {file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"}, + {file = "pydantic_core-2.46.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1da3786b8018e60349680720158cc19161cc3b4bdd815beb0a321cd5ce1ad5b1"}, + {file = "pydantic_core-2.46.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc0988cb29d21bf4a9d5cf2ef970b5c0e38d8d8e107a493278c05dc6c1dda69f"}, + {file = "pydantic_core-2.46.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f9067c3bfadd04c55484b89c0d267981b2f3512850f6f66e1e74204a4e4ce3"}, + {file = 
"pydantic_core-2.46.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a642ac886ecf6402d9882d10c405dcf4b902abeb2972cd5fb4a48c83cd59279a"}, + {file = "pydantic_core-2.46.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79f561438481f28681584b89e2effb22855e2179880314bcddbf5968e935e807"}, + {file = "pydantic_core-2.46.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57a973eae4665352a47cf1a99b4ee864620f2fe663a217d7a8da68a1f3a5bfda"}, + {file = "pydantic_core-2.46.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83d002b97072a53ea150d63e0a3adfae5670cef5aa8a6e490240e482d3b22e57"}, + {file = "pydantic_core-2.46.3-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:b40ddd51e7c44b28cfaef746c9d3c506d658885e0a46f9eeef2ee815cbf8e045"}, + {file = "pydantic_core-2.46.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac5ec7fb9b87f04ee839af2d53bcadea57ded7d229719f56c0ed895bff987943"}, + {file = "pydantic_core-2.46.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a3b11c812f61b3129c4905781a2601dfdfdea5fe1e6c1cfb696b55d14e9c054f"}, + {file = "pydantic_core-2.46.3-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1108da631e602e5b3c38d6d04fe5bb3bfa54349e6918e3ca6cf570b2e2b2f9d4"}, + {file = "pydantic_core-2.46.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:de885175515bcfa98ae618c1df7a072f13d179f81376c8007112af20567fd08a"}, + {file = "pydantic_core-2.46.3-cp310-cp310-win32.whl", hash = "sha256:d11058e3201527d41bc6b545c79187c9e4bf85e15a236a6007f0e991518882b7"}, + {file = "pydantic_core-2.46.3-cp310-cp310-win_amd64.whl", hash = "sha256:3612edf65c8ea67ac13616c4d23af12faef1ae435a8a93e5934c2a0cbbdd1fd6"}, + {file = "pydantic_core-2.46.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ab124d49d0459b2373ecf54118a45c28a1e6d4192a533fbc915e70f556feb8e5"}, + {file = "pydantic_core-2.46.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cca67d52a5c7a16aed2b3999e719c4bcf644074eac304a5d3d62dd70ae7d4b2c"}, + {file = "pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c024e08c0ba23e6fd68c771a521e9d6a792f2ebb0fa734296b36394dc30390e"}, + {file = "pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6645ce7eec4928e29a1e3b3d5c946621d105d3e79f0c9cddf07c2a9770949287"}, + {file = "pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a712c7118e6c5ea96562f7b488435172abb94a3c53c22c9efc1412264a45cbbe"}, + {file = "pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69a868ef3ff206343579021c40faf3b1edc64b1cc508ff243a28b0a514ccb050"}, + {file = "pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc7e8c32db809aa0f6ea1d6869ebc8518a65d5150fdfad8bcae6a49ae32a22e2"}, + {file = "pydantic_core-2.46.3-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:3481bd1341dc85779ee506bc8e1196a277ace359d89d28588a9468c3ecbe63fa"}, + {file = "pydantic_core-2.46.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8690eba565c6d68ffd3a8655525cbdd5246510b44a637ee2c6c03a7ebfe64d3c"}, + {file = "pydantic_core-2.46.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4de88889d7e88d50d40ee5b39d5dac0bcaef9ba91f7e536ac064e6b2834ecccf"}, + {file = "pydantic_core-2.46.3-cp311-cp311-musllinux_1_1_armv7l.whl", hash = 
"sha256:e480080975c1ef7f780b8f99ed72337e7cc5efea2e518a20a692e8e7b278eb8b"}, + {file = "pydantic_core-2.46.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de3a5c376f8cd94da9a1b8fd3dd1c16c7a7b216ed31dc8ce9fd7a22bf13b836e"}, + {file = "pydantic_core-2.46.3-cp311-cp311-win32.whl", hash = "sha256:fc331a5314ffddd5385b9ee9d0d2fee0b13c27e0e02dad71b1ae5d6561f51eeb"}, + {file = "pydantic_core-2.46.3-cp311-cp311-win_amd64.whl", hash = "sha256:b5b9c6cf08a8a5e502698f5e153056d12c34b8fb30317e0c5fd06f45162a6346"}, + {file = "pydantic_core-2.46.3-cp311-cp311-win_arm64.whl", hash = "sha256:5dfd51cf457482f04ec49491811a2b8fd5b843b64b11eecd2d7a1ee596ea78a6"}, + {file = "pydantic_core-2.46.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b11b59b3eee90a80a36701ddb4576d9ae31f93f05cb9e277ceaa09e6bf074a67"}, + {file = "pydantic_core-2.46.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:af8653713055ea18a3abc1537fe2ebc42f5b0bbb768d1eb79fd74eb47c0ac089"}, + {file = "pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75a519dab6d63c514f3a81053e5266c549679e4aa88f6ec57f2b7b854aceb1b0"}, + {file = "pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6cd87cb1575b1ad05ba98894c5b5c96411ef678fa2f6ed2576607095b8d9789"}, + {file = "pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f80a55484b8d843c8ada81ebf70a682f3f00a3d40e378c06cf17ecb44d280d7d"}, + {file = "pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3861f1731b90c50a3266316b9044f5c9b405eecb8e299b0a7120596334e4fe9c"}, + {file = "pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb528e295ed31570ac3dcc9bfdd6e0150bc11ce6168ac87a8082055cf1a67395"}, + {file = "pydantic_core-2.46.3-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:367508faa4973b992b271ba1494acaab36eb7e8739d1e47be5035fb1ea225396"}, + {file = "pydantic_core-2.46.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ad3c826fe523e4becf4fe39baa44286cff85ef137c729a2c5e269afbfd0905d"}, + {file = "pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ec638c5d194ef8af27db69f16c954a09797c0dc25015ad6123eb2c73a4d271ca"}, + {file = "pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:28ed528c45446062ee66edb1d33df5d88828ae167de76e773a3c7f64bd14e976"}, + {file = "pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aed19d0c783886d5bd86d80ae5030006b45e28464218747dcf83dabfdd092c7b"}, + {file = "pydantic_core-2.46.3-cp312-cp312-win32.whl", hash = "sha256:06d5d8820cbbdb4147578c1fe7ffcd5b83f34508cb9f9ab76e807be7db6ff0a4"}, + {file = "pydantic_core-2.46.3-cp312-cp312-win_amd64.whl", hash = "sha256:c3212fda0ee959c1dd04c60b601ec31097aaa893573a3a1abd0a47bcac2968c1"}, + {file = "pydantic_core-2.46.3-cp312-cp312-win_arm64.whl", hash = "sha256:f1f8338dd7a7f31761f1f1a3c47503a9a3b34eea3c8b01fa6ee96408affb5e72"}, + {file = "pydantic_core-2.46.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:12bc98de041458b80c86c56b24df1d23832f3e166cbaff011f25d187f5c62c37"}, + {file = "pydantic_core-2.46.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:85348b8f89d2c3508b65b16c3c33a4da22b8215138d8b996912bb1532868885f"}, + {file = "pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1105677a6df914b1fb71a81b96c8cce7726857e1717d86001f29be06a25ee6f8"}, + {file = 
"pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87082cd65669a33adeba5470769e9704c7cf026cc30afb9cc77fd865578ebaad"}, + {file = "pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60e5f66e12c4f5212d08522963380eaaeac5ebd795826cfd19b2dfb0c7a52b9c"}, + {file = "pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6cdf19bf84128d5e7c37e8a73a0c5c10d51103a650ac585d42dd6ae233f2b7f"}, + {file = "pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031bb17f4885a43773c8c763089499f242aee2ea85cf17154168775dccdecf35"}, + {file = "pydantic_core-2.46.3-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:bcf2a8b2982a6673693eae7348ef3d8cf3979c1d63b54fca7c397a635cc68687"}, + {file = "pydantic_core-2.46.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28e8cf2f52d72ced402a137145923a762cbb5081e48b34312f7a0c8f55928ec3"}, + {file = "pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:17eaface65d9fc5abb940003020309c1bf7a211f5f608d7870297c367e6f9022"}, + {file = "pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:93fd339f23408a07e98950a89644f92c54d8729719a40b30c0a30bb9ebc55d23"}, + {file = "pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:23cbdb3aaa74dfe0837975dbf69b469753bbde8eacace524519ffdb6b6e89eb7"}, + {file = "pydantic_core-2.46.3-cp313-cp313-win32.whl", hash = "sha256:610eda2e3838f401105e6326ca304f5da1e15393ae25dacae5c5c63f2c275b13"}, + {file = "pydantic_core-2.46.3-cp313-cp313-win_amd64.whl", hash = "sha256:68cc7866ed863db34351294187f9b729964c371ba33e31c26f478471c52e1ed0"}, + {file = "pydantic_core-2.46.3-cp313-cp313-win_arm64.whl", hash = "sha256:f64b5537ac62b231572879cd08ec05600308636a5d63bcbdb15063a466977bec"}, + {file = "pydantic_core-2.46.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:afa3aa644f74e290cdede48a7b0bee37d1c35e71b05105f6b340d484af536d9b"}, + {file = "pydantic_core-2.46.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ced3310e51aa425f7f77da8bbbb5212616655bedbe82c70944320bc1dbe5e018"}, + {file = "pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e29908922ce9da1a30b4da490bd1d3d82c01dcfdf864d2a74aacee674d0bfa34"}, + {file = "pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0c9ff69140423eea8ed2d5477df3ba037f671f5e897d206d921bc9fdc39613e7"}, + {file = "pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b675ab0a0d5b1c8fdb81195dc5bcefea3f3c240871cdd7ff9a2de8aa50772eb2"}, + {file = "pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0087084960f209a9a4af50ecd1fb063d9ad3658c07bb81a7a53f452dacbfb2ba"}, + {file = "pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed42e6cc8e1b0e2b9b96e2276bad70ae625d10d6d524aed0c93de974ae029f9f"}, + {file = "pydantic_core-2.46.3-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:f1771ce258afb3e4201e67d154edbbae712a76a6081079fe247c2f53c6322c22"}, + {file = "pydantic_core-2.46.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a7610b6a5242a6c736d8ad47fd5fff87fcfe8f833b281b1c409c3d6835d9227f"}, + {file = "pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = 
"sha256:ff5e7783bcc5476e1db448bf268f11cb257b1c276d3e89f00b5727be86dd0127"}, + {file = "pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:9d2e32edcc143bc01e95300671915d9ca052d4f745aa0a49c48d4803f8a85f2c"}, + {file = "pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6e42d83d1c6b87fa56b521479cff237e626a292f3b31b6345c15a99121b454c1"}, + {file = "pydantic_core-2.46.3-cp314-cp314-win32.whl", hash = "sha256:07bc6d2a28c3adb4f7c6ae46aa4f2d2929af127f587ed44057af50bf1ce0f505"}, + {file = "pydantic_core-2.46.3-cp314-cp314-win_amd64.whl", hash = "sha256:8940562319bc621da30714617e6a7eaa6b98c84e8c685bcdc02d7ed5e7c7c44e"}, + {file = "pydantic_core-2.46.3-cp314-cp314-win_arm64.whl", hash = "sha256:5dcbbcf4d22210ced8f837c96db941bdb078f419543472aca5d9a0bb7cddc7df"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:d0fe3dce1e836e418f912c1ad91c73357d03e556a4d286f441bf34fed2dbeecf"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9ce92e58abc722dac1bf835a6798a60b294e48eb0e625ec9fd994b932ac5feee"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a03e6467f0f5ab796a486146d1b887b2dc5e5f9b3288898c1b1c3ad974e53e4a"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2798b6ba041b9d70acfb9071a2ea13c8456dd1e6a5555798e41ba7b0790e329c"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9be3e221bdc6d69abf294dcf7aff6af19c31a5cdcc8f0aa3b14be29df4bd03b1"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f13936129ce841f2a5ddf6f126fea3c43cd128807b5a59588c37cf10178c2e64"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28b5f2ef03416facccb1c6ef744c69793175fd27e44ef15669201601cf423acb"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:830d1247d77ad23852314f069e9d7ddafeec5f684baf9d7e7065ed46a049c4e6"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0793c90c1a3c74966e7975eaef3ed30ebdff3260a0f815a62a22adc17e4c01c"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:d2d0aead851b66f5245ec0c4fb2612ef457f8bbafefdf65a2bf9d6bac6140f47"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:2f40e4246676beb31c5ce77c38a55ca4e465c6b38d11ea1bd935420568e0b1ab"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:cf489cf8986c543939aeee17a09c04d6ffb43bfef8ca16fcbcc5cfdcbed24dba"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-win32.whl", hash = "sha256:ffe0883b56cfc05798bf994164d2b2ff03efe2d22022a2bb080f3b626176dd56"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-win_amd64.whl", hash = "sha256:706d9d0ce9cf4593d07270d8e9f53b161f90c57d315aeec4fb4fd7a8b10240d8"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-win_arm64.whl", hash = "sha256:77706aeb41df6a76568434701e0917da10692da28cb69d5fb6919ce5fdb07374"}, + {file = "pydantic_core-2.46.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:fa3eb7c2995aa443687a825bc30395c8521b7c6ec201966e55debfd1128bcceb"}, + {file = "pydantic_core-2.46.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d08782c4045f90724b44c95d35ebec0d67edb8a957a2ac81d5a8e4b8a200495"}, + {file = 
"pydantic_core-2.46.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:831eb19aa789a97356979e94c981e5667759301fb708d1c0d5adf1bc0098b873"}, + {file = "pydantic_core-2.46.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4335e87c7afa436a0dfa899e138d57a72f8aad542e2cf19c36fb428461caabd0"}, + {file = "pydantic_core-2.46.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99421e7684a60f7f3550a1d159ade5fdff1954baedb6bdd407cba6a307c9f27d"}, + {file = "pydantic_core-2.46.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd81f6907932ebac3abbe41378dac64b2380db1287e2aa64d8d88f78d170f51a"}, + {file = "pydantic_core-2.46.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f247596366f4221af52beddd65af1218797771d6989bc891a0b86ccaa019168"}, + {file = "pydantic_core-2.46.3-cp39-cp39-manylinux_2_31_riscv64.whl", hash = "sha256:6dff8cc884679df229ebc6d8eb2321ea6f8e091bc7d4886d4dc2e0e71452843c"}, + {file = "pydantic_core-2.46.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68ef2f623dda6d5a9067ac014e406c020c780b2a358930a7e5c1b73702900720"}, + {file = "pydantic_core-2.46.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d56bdb4af1767cc15b0386b3c581fdfe659bb9ee4a4f776e92c1cd9d074000d6"}, + {file = "pydantic_core-2.46.3-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:91249bcb7c165c2fb2a2f852dbc5c91636e2e218e75d96dfdd517e4078e173dd"}, + {file = "pydantic_core-2.46.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b068543bdb707f5d935dab765d99227aa2545ef2820935f2e5dd801795c7dbd"}, + {file = "pydantic_core-2.46.3-cp39-cp39-win32.whl", hash = "sha256:dcda6583921c05a40533f982321532f2d8db29326c7b95c4026941fa5074bd79"}, + {file = "pydantic_core-2.46.3-cp39-cp39-win_amd64.whl", hash = "sha256:a35cc284c8dd7edae8a31533713b4d2467dfe7c4f1b5587dd4031f28f90d1d13"}, + {file = "pydantic_core-2.46.3-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:9715525891ed524a0a1eb6d053c74d4d4ad5017677fb00af0b7c2644a31bae46"}, + {file = "pydantic_core-2.46.3-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:9d2f400712a99a013aff420ef1eb9be077f8189a36c1e3ef87660b4e1088a874"}, + {file = "pydantic_core-2.46.3-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd2aab0e2e9dc2daf36bd2686c982535d5e7b1d930a1344a7bb6e82baab42a76"}, + {file = "pydantic_core-2.46.3-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e9d76736da5f362fabfeea6a69b13b7f2be405c6d6966f06b2f6bfff7e64531"}, + {file = "pydantic_core-2.46.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:b12dd51f1187c2eb489af8e20f880362db98e954b54ab792fa5d92e8bcc6b803"}, + {file = "pydantic_core-2.46.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:f00a0961b125f1a47af7bcc17f00782e12f4cd056f83416006b30111d941dfa3"}, + {file = "pydantic_core-2.46.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57697d7c056aca4bbb680200f96563e841a6386ac1129370a0102592f4dddff5"}, + {file = "pydantic_core-2.46.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd35aa21299def8db7ef4fe5c4ff862941a9a158ca7b63d61e66fe67d30416b4"}, + {file = "pydantic_core-2.46.3-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:13afdd885f3d71280cf286b13b310ee0f7ccfefd1dbbb661514a474b726e2f25"}, + {file = "pydantic_core-2.46.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f91c0aff3e3ee0928edd1232c57f643a7a003e6edf1860bc3afcdc749cb513f3"}, + {file = "pydantic_core-2.46.3-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6529d1d128321a58d30afcc97b49e98836542f68dd41b33c2e972bb9e5290536"}, + {file = "pydantic_core-2.46.3-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:975c267cff4f7e7272eacbe50f6cc03ca9a3da4c4fbd66fffd89c94c1e311aa1"}, + {file = "pydantic_core-2.46.3-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2b8e4f2bbdf71415c544b4b1138b8060db7b6611bc927e8064c769f64bed651c"}, + {file = "pydantic_core-2.46.3-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e61ea8e9fff9606d09178f577ff8ccdd7206ff73d6552bcec18e1033c4254b85"}, + {file = "pydantic_core-2.46.3-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b504bda01bafc69b6d3c7a0c7f039dcf60f47fab70e06fe23f57b5c75bdc82b8"}, + {file = "pydantic_core-2.46.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:b00b76f7142fc60c762ce579bd29c8fa44aaa56592dd3c54fab3928d0d4ca6ff"}, + {file = "pydantic_core-2.46.3.tar.gz", hash = "sha256:41c178f65b8c29807239d47e6050262eb6bf84eb695e41101e62e38df4a5bc2c"}, ] [package.dependencies] typing-extensions = ">=4.14.1" +[[package]] +name = "pygments" +version = "2.20.0" +description = "Pygments is a syntax highlighting package written in Python." +category = "dev" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176"}, + {file = "pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + [[package]] name = "pyjwt" version = "2.12.1" description = "JSON Web Token implementation in Python" +category = "main" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "pyjwt-2.12.1-py3-none-any.whl", hash = "sha256:28ca37c070cad8ba8cd9790cd940535d40274d22f80ab87f3ac6a713e6e8454c"}, {file = "pyjwt-2.12.1.tar.gz", hash = "sha256:c74a7a2adf861c04d002db713dd85f84beb242228e671280bf709d765b03672b"}, @@ -1152,34 +1272,35 @@ tests = ["coverage[toml] (==7.10.7)", "pytest (>=8.4.2,<9.0.0)"] [[package]] name = "pytest" -version = "7.4.4" +version = "9.0.3" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false -python-versions = ">=3.7" -groups = ["dev"] +python-versions = ">=3.10" files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, + {file = "pytest-9.0.3-py3-none-any.whl", hash = "sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9"}, + {file = "pytest-9.0.3.tar.gz", hash = "sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c"}, ] [package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} 
+exceptiongroup = {version = ">=1", markers = "python_version < \"3.11\""} +iniconfig = ">=1.0.1" +packaging = ">=22" +pluggy = ">=1.5,<2" +pygments = ">=2.7.2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] [[package]] name = "python-dateutil" version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -1190,14 +1311,14 @@ six = ">=1.5" [[package]] name = "python-discovery" -version = "1.2.1" +version = "1.2.2" description = "Python interpreter discovery" +category = "dev" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ - {file = "python_discovery-1.2.1-py3-none-any.whl", hash = "sha256:b6a957b24c1cd79252484d3566d1b49527581d46e789aaf43181005e56201502"}, - {file = "python_discovery-1.2.1.tar.gz", hash = "sha256:180c4d114bff1c32462537eac5d6a332b768242b76b69c0259c7d14b1b680c9e"}, + {file = "python_discovery-1.2.2-py3-none-any.whl", hash = "sha256:e1ae95d9af875e78f15e19aed0c6137ab1bb49c200f21f5061786490c9585c7a"}, + {file = "python_discovery-1.2.2.tar.gz", hash = "sha256:876e9c57139eb757cb5878cbdd9ae5379e5d96266c99ef731119e04fffe533bb"}, ] [package.dependencies] @@ -1210,14 +1331,14 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.5.4)", "pytest (>=8.3.5)", "pyt [[package]] name = "python-dotenv" -version = "0.21.1" +version = "1.2.2" description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" optional = false -python-versions = ">=3.7" -groups = ["main"] +python-versions = ">=3.10" files = [ - {file = "python-dotenv-0.21.1.tar.gz", hash = "sha256:1c93de8f636cde3ce377292818d0e440b6e45a82f215c3744979151fa8151c49"}, - {file = "python_dotenv-0.21.1-py3-none-any.whl", hash = "sha256:41e12e0318bebc859fcc4d97d4db8d20ad21721a6aa5047dd59f090391cb549a"}, + {file = "python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a"}, + {file = "python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3"}, ] [package.extras] @@ -1227,10 +1348,9 @@ cli = ["click (>=5.0)"] name = "pywin32" version = "311" description = "Python for Window Extensions" +category = "main" optional = false python-versions = "*" -groups = ["main"] -markers = "sys_platform == \"win32\"" files = [ {file = "pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3"}, {file = "pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b"}, @@ -1258,9 +1378,9 @@ files = [ name = "pyyaml" version = "6.0.3" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.8" -groups = ["main", "dev"] files = [ {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = 
"sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, @@ -1341,9 +1461,9 @@ files = [ name = "requests" version = "2.33.1" description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=3.10" -groups = ["main"] files = [ {file = "requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a"}, {file = "requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517"}, @@ -1363,9 +1483,9 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<8)"] name = "requests-oauthlib" version = "2.0.0" description = "OAuthlib authentication support for Requests." +category = "main" optional = false python-versions = ">=3.4" -groups = ["main"] files = [ {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, @@ -1380,38 +1500,39 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] [[package]] name = "ruff" -version = "0.1.15" +version = "0.15.11" description = "An extremely fast Python linter and code formatter, written in Rust." +category = "dev" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ - {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, - {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, - {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, - {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, - {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, - {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, + {file = "ruff-0.15.11-py3-none-linux_armv6l.whl", hash = "sha256:e927cfff503135c558eb581a0c9792264aae9507904eb27809cdcff2f2c847b7"}, + {file = "ruff-0.15.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7a1b5b2938d8f890b76084d4fa843604d787a912541eae85fd7e233398bbb73e"}, + {file = "ruff-0.15.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d4176f3d194afbdaee6e41b9ccb1a2c287dba8700047df474abfbe773825d1cb"}, + {file = "ruff-0.15.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b17c886fb88203ced3afe7f14e8d5ae96e9d2f4ccc0ee66aa19f2c2675a27e4"}, + {file = "ruff-0.15.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:49fafa220220afe7758a487b048de4c8f9f767f37dfefad46b9dd06759d003eb"}, + {file = "ruff-0.15.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2ab8427e74a00d93b8bda1307b1e60970d40f304af38bccb218e056c220120d"}, + {file = "ruff-0.15.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:195072c0c8e1fc8f940652073df082e37a5d9cb43b4ab1e4d0566ab8977a13b7"}, + {file = "ruff-0.15.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a3a0996d486af3920dec930a2e7daed4847dfc12649b537a9335585ada163e9e"}, + {file = "ruff-0.15.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bef2cb556d509259f1fe440bb9cd33c756222cf0a7afe90d15edf0866702431"}, + {file = "ruff-0.15.11-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:030d921a836d7d4a12cf6e8d984a88b66094ccb0e0f17ddd55067c331191bf19"}, + {file = "ruff-0.15.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0e783b599b4577788dbbb66b9addcef87e9a8832f4ce0c19e34bf55543a2f890"}, + {file = "ruff-0.15.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ae90592246625ba4a34349d68ec28d4400d75182b71baa196ddb9f82db025ef5"}, + {file = "ruff-0.15.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1f111d62e3c983ed20e0ca2e800f8d77433a5b1161947df99a5c2a3fb60514f0"}, + {file = "ruff-0.15.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:06f483d6646f59eaffba9ae30956370d3a886625f511a3108994000480621d1c"}, + {file = "ruff-0.15.11-py3-none-win32.whl", hash = "sha256:476a2aa56b7da0b73a3ee80b6b2f0e19cce544245479adde7baa65466664d5f3"}, + {file = "ruff-0.15.11-py3-none-win_amd64.whl", hash = "sha256:8b6756d88d7e234fb0c98c91511aae3cd519d5e3ed271cae31b20f39cb2a12a3"}, + {file = "ruff-0.15.11-py3-none-win_arm64.whl", hash = "sha256:063fed18cc1bbe0ee7393957284a6fe8b588c6a406a285af3ee3f46da2391ee4"}, + {file = "ruff-0.15.11.tar.gz", hash = "sha256:f092b21708bf0e7437ce9ada249dfe688ff9a0954fc94abab05dcea7dcd29c33"}, ] [[package]] name = "six" version = "1.17.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, 
{file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -1419,75 +1540,75 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.48" +version = "2.0.49" description = "Database Abstraction Library" +category = "main" optional = false python-versions = ">=3.7" -groups = ["main"] files = [ - {file = "sqlalchemy-2.0.48-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7001dc9d5f6bb4deb756d5928eaefe1930f6f4179da3924cbd95ee0e9f4dce89"}, - {file = "sqlalchemy-2.0.48-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1a89ce07ad2d4b8cfc30bd5889ec40613e028ed80ef47da7d9dd2ce969ad30e0"}, - {file = "sqlalchemy-2.0.48-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10853a53a4a00417a00913d270dddda75815fcb80675874285f41051c094d7dd"}, - {file = "sqlalchemy-2.0.48-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fac0fa4e4f55f118fd87177dacb1c6522fe39c28d498d259014020fec9164c29"}, - {file = "sqlalchemy-2.0.48-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3713e21ea67bca727eecd4a24bf68bcd414c403faae4989442be60994301ded0"}, - {file = "sqlalchemy-2.0.48-cp310-cp310-win32.whl", hash = "sha256:d404dc897ce10e565d647795861762aa2d06ca3f4a728c5e9a835096c7059018"}, - {file = "sqlalchemy-2.0.48-cp310-cp310-win_amd64.whl", hash = "sha256:841a94c66577661c1f088ac958cd767d7c9bf507698f45afffe7a4017049de76"}, - {file = "sqlalchemy-2.0.48-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b4c575df7368b3b13e0cebf01d4679f9a28ed2ae6c1cd0b1d5beffb6b2007dc"}, - {file = "sqlalchemy-2.0.48-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e83e3f959aaa1c9df95c22c528096d94848a1bc819f5d0ebf7ee3df0ca63db6c"}, - {file = "sqlalchemy-2.0.48-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f7b7243850edd0b8b97043f04748f31de50cf426e939def5c16bedb540698f7"}, - {file = "sqlalchemy-2.0.48-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:82745b03b4043e04600a6b665cb98697c4339b24e34d74b0a2ac0a2488b6f94d"}, - {file = "sqlalchemy-2.0.48-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e5e088bf43f6ee6fec7dbf1ef7ff7774a616c236b5c0cb3e00662dd71a56b571"}, - {file = "sqlalchemy-2.0.48-cp311-cp311-win32.whl", hash = "sha256:9c7d0a77e36b5f4b01ca398482230ab792061d243d715299b44a0b55c89fe617"}, - {file = "sqlalchemy-2.0.48-cp311-cp311-win_amd64.whl", hash = "sha256:583849c743e0e3c9bb7446f5b5addeacedc168d657a69b418063dfdb2d90081c"}, - {file = "sqlalchemy-2.0.48-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:348174f228b99f33ca1f773e85510e08927620caa59ffe7803b37170df30332b"}, - {file = "sqlalchemy-2.0.48-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53667b5f668991e279d21f94ccfa6e45b4e3f4500e7591ae59a8012d0f010dcb"}, - {file = "sqlalchemy-2.0.48-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34634e196f620c7a61d18d5cf7dc841ca6daa7961aed75d532b7e58b309ac894"}, - {file = "sqlalchemy-2.0.48-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:546572a1793cc35857a2ffa1fe0e58571af1779bcc1ffa7c9fb0839885ed69a9"}, - {file = "sqlalchemy-2.0.48-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:07edba08061bc277bfdc772dd2a1a43978f5a45994dd3ede26391b405c15221e"}, - {file = "sqlalchemy-2.0.48-cp312-cp312-win32.whl", hash = 
"sha256:908a3fa6908716f803b86896a09a2c4dde5f5ce2bb07aacc71ffebb57986ce99"}, - {file = "sqlalchemy-2.0.48-cp312-cp312-win_amd64.whl", hash = "sha256:68549c403f79a8e25984376480959975212a670405e3913830614432b5daa07a"}, - {file = "sqlalchemy-2.0.48-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e3070c03701037aa418b55d36532ecb8f8446ed0135acb71c678dbdf12f5b6e4"}, - {file = "sqlalchemy-2.0.48-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2645b7d8a738763b664a12a1542c89c940daa55196e8d73e55b169cc5c99f65f"}, - {file = "sqlalchemy-2.0.48-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b19151e76620a412c2ac1c6f977ab1b9fa7ad43140178345136456d5265b32ed"}, - {file = "sqlalchemy-2.0.48-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5b193a7e29fd9fa56e502920dca47dffe60f97c863494946bd698c6058a55658"}, - {file = "sqlalchemy-2.0.48-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:36ac4ddc3d33e852da9cb00ffb08cea62ca05c39711dc67062ca2bb1fae35fd8"}, - {file = "sqlalchemy-2.0.48-cp313-cp313-win32.whl", hash = "sha256:389b984139278f97757ea9b08993e7b9d1142912e046ab7d82b3fbaeb0209131"}, - {file = "sqlalchemy-2.0.48-cp313-cp313-win_amd64.whl", hash = "sha256:d612c976cbc2d17edfcc4c006874b764e85e990c29ce9bd411f926bbfb02b9a2"}, - {file = "sqlalchemy-2.0.48-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69f5bc24904d3bc3640961cddd2523e361257ef68585d6e364166dfbe8c78fae"}, - {file = "sqlalchemy-2.0.48-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd08b90d211c086181caed76931ecfa2bdfc83eea3cfccdb0f82abc6c4b876cb"}, - {file = "sqlalchemy-2.0.48-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1ccd42229aaac2df431562117ac7e667d702e8e44afdb6cf0e50fa3f18160f0b"}, - {file = "sqlalchemy-2.0.48-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0dcbc588cd5b725162c076eb9119342f6579c7f7f55057bb7e3c6ff27e13121"}, - {file = "sqlalchemy-2.0.48-cp313-cp313t-win32.whl", hash = "sha256:9764014ef5e58aab76220c5664abb5d47d5bc858d9debf821e55cfdd0f128485"}, - {file = "sqlalchemy-2.0.48-cp313-cp313t-win_amd64.whl", hash = "sha256:e2f35b4cccd9ed286ad62e0a3c3ac21e06c02abc60e20aa51a3e305a30f5fa79"}, - {file = "sqlalchemy-2.0.48-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e2d0d88686e3d35a76f3e15a34e8c12d73fc94c1dea1cd55782e695cc14086dd"}, - {file = "sqlalchemy-2.0.48-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49b7bddc1eebf011ea5ab722fdbe67a401caa34a350d278cc7733c0e88fecb1f"}, - {file = "sqlalchemy-2.0.48-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:426c5ca86415d9b8945c7073597e10de9644802e2ff502b8e1f11a7a2642856b"}, - {file = "sqlalchemy-2.0.48-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:288937433bd44e3990e7da2402fabc44a3c6c25d3704da066b85b89a85474ae0"}, - {file = "sqlalchemy-2.0.48-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8183dc57ae7d9edc1346e007e840a9f3d6aa7b7f165203a99e16f447150140d2"}, - {file = "sqlalchemy-2.0.48-cp314-cp314-win32.whl", hash = "sha256:1182437cb2d97988cfea04cf6cdc0b0bb9c74f4d56ec3d08b81e23d621a28cc6"}, - {file = "sqlalchemy-2.0.48-cp314-cp314-win_amd64.whl", hash = "sha256:144921da96c08feb9e2b052c5c5c1d0d151a292c6135623c6b2c041f2a45f9e0"}, - {file = "sqlalchemy-2.0.48-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash 
= "sha256:5aee45fd2c6c0f2b9cdddf48c48535e7471e42d6fb81adfde801da0bd5b93241"}, - {file = "sqlalchemy-2.0.48-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7cddca31edf8b0653090cbb54562ca027c421c58ddde2c0685f49ff56a1690e0"}, - {file = "sqlalchemy-2.0.48-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7a936f1bb23d370b7c8cc079d5fce4c7d18da87a33c6744e51a93b0f9e97e9b3"}, - {file = "sqlalchemy-2.0.48-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e004aa9248e8cb0a5f9b96d003ca7c1c0a5da8decd1066e7b53f59eb8ce7c62b"}, - {file = "sqlalchemy-2.0.48-cp314-cp314t-win32.whl", hash = "sha256:b8438ec5594980d405251451c5b7ea9aa58dda38eb7ac35fb7e4c696712ee24f"}, - {file = "sqlalchemy-2.0.48-cp314-cp314t-win_amd64.whl", hash = "sha256:d854b3970067297f3a7fbd7a4683587134aa9b3877ee15aa29eea478dc68f933"}, - {file = "sqlalchemy-2.0.48-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f8649a14caa5f8a243628b1d61cf530ad9ae4578814ba726816adb1121fc493e"}, - {file = "sqlalchemy-2.0.48-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6bb85c546591569558571aa1b06aba711b26ae62f111e15e56136d69920e1616"}, - {file = "sqlalchemy-2.0.48-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a6b764fb312bd35e47797ad2e63f0d323792837a6ac785a4ca967019357d2bc7"}, - {file = "sqlalchemy-2.0.48-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:7c998f2ace8bf76b453b75dbcca500d4f4b9dd3908c13e89b86289b37784848b"}, - {file = "sqlalchemy-2.0.48-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d64177f443594c8697369c10e4bbcac70ef558e0f7921a1de7e4a3d1734bcf67"}, - {file = "sqlalchemy-2.0.48-cp38-cp38-win32.whl", hash = "sha256:01f6bbd4308b23240cf7d3ef117557c8fd097ec9549d5d8a52977544e35b40ad"}, - {file = "sqlalchemy-2.0.48-cp38-cp38-win_amd64.whl", hash = "sha256:858e433f12b0e5b3ed2f8da917433b634f4937d0e8793e5cb33c54a1a01df565"}, - {file = "sqlalchemy-2.0.48-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4599a95f9430ae0de82b52ff0d27304fe898c17cb5f4099f7438a51b9998ac77"}, - {file = "sqlalchemy-2.0.48-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f27f9da0a7d22b9f981108fd4b62f8b5743423388915a563e651c20d06c1f457"}, - {file = "sqlalchemy-2.0.48-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d8fcccbbc0c13c13702c471da398b8cd72ba740dca5859f148ae8e0e8e0d3e7e"}, - {file = "sqlalchemy-2.0.48-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a5b429eb84339f9f05e06083f119ad814e6d85e27ecbdf9c551dfdbb128eaf8a"}, - {file = "sqlalchemy-2.0.48-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:bcb8ebbf2e2c36cfe01a94f2438012c6a9d494cf80f129d9753bcdf33bfc35a6"}, - {file = "sqlalchemy-2.0.48-cp39-cp39-win32.whl", hash = "sha256:e214d546c8ecb5fc22d6e6011746082abf13a9cf46eefb45769c7b31407c97b5"}, - {file = "sqlalchemy-2.0.48-cp39-cp39-win_amd64.whl", hash = "sha256:b8fc3454b4f3bd0a368001d0e968852dad45a873f8b4babd41bc302ec851a099"}, - {file = "sqlalchemy-2.0.48-py3-none-any.whl", hash = "sha256:a66fe406437dd65cacd96a72689a3aaaecaebbcd62d81c5ac1c0fdbeac835096"}, - {file = "sqlalchemy-2.0.48.tar.gz", hash = "sha256:5ca74f37f3369b45e1f6b7b06afb182af1fd5dde009e4ffd831830d98cbe5fe7"}, + {file = "sqlalchemy-2.0.49-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:42e8804962f9e6f4be2cbaedc0c3718f08f60a16910fa3d86da5a1e3b1bfe60f"}, + {file = 
"sqlalchemy-2.0.49-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc992c6ed024c8c3c592c5fc9846a03dd68a425674900c70122c77ea16c5fb0b"}, + {file = "sqlalchemy-2.0.49-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6eb188b84269f357669b62cb576b5b918de10fb7c728a005fa0ebb0b758adce1"}, + {file = "sqlalchemy-2.0.49-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:62557958002b69699bdb7f5137c6714ca1133f045f97b3903964f47db97ea339"}, + {file = "sqlalchemy-2.0.49-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da9b91bca419dc9b9267ffadde24eae9b1a6bffcd09d0a207e5e3af99a03ce0d"}, + {file = "sqlalchemy-2.0.49-cp310-cp310-win32.whl", hash = "sha256:5e61abbec255be7b122aa461021daa7c3f310f3e743411a67079f9b3cc91ece3"}, + {file = "sqlalchemy-2.0.49-cp310-cp310-win_amd64.whl", hash = "sha256:0c98c59075b890df8abfcc6ad632879540f5791c68baebacb4f833713b510e75"}, + {file = "sqlalchemy-2.0.49-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5070135e1b7409c4161133aa525419b0062088ed77c92b1da95366ec5cbebbe"}, + {file = "sqlalchemy-2.0.49-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ac7a3e245fd0310fd31495eb61af772e637bdf7d88ee81e7f10a3f271bff014"}, + {file = "sqlalchemy-2.0.49-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d4e5a0ceba319942fa6b585cf82539288a61e314ef006c1209f734551ab9536"}, + {file = "sqlalchemy-2.0.49-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3ddcb27fb39171de36e207600116ac9dfd4ae46f86c82a9bf3934043e80ebb88"}, + {file = "sqlalchemy-2.0.49-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:32fe6a41ad97302db2931f05bb91abbcc65b5ce4c675cd44b972428dd2947700"}, + {file = "sqlalchemy-2.0.49-cp311-cp311-win32.whl", hash = "sha256:46d51518d53edfbe0563662c96954dc8fcace9832332b914375f45a99b77cc9a"}, + {file = "sqlalchemy-2.0.49-cp311-cp311-win_amd64.whl", hash = "sha256:951d4a210744813be63019f3df343bf233b7432aadf0db54c75802247330d3af"}, + {file = "sqlalchemy-2.0.49-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4bbccb45260e4ff1b7db0be80a9025bb1e6698bdb808b83fff0000f7a90b2c0b"}, + {file = "sqlalchemy-2.0.49-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb37f15714ec2652d574f021d479e78cd4eb9d04396dca36568fdfffb3487982"}, + {file = "sqlalchemy-2.0.49-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb9ec6436a820a4c006aad1ac351f12de2f2dbdaad171692ee457a02429b672"}, + {file = "sqlalchemy-2.0.49-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8d6efc136f44a7e8bc8088507eaabbb8c2b55b3dbb63fe102c690da0ddebe55e"}, + {file = "sqlalchemy-2.0.49-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e06e617e3d4fd9e51d385dfe45b077a41e9d1b033a7702551e3278ac597dc750"}, + {file = "sqlalchemy-2.0.49-cp312-cp312-win32.whl", hash = "sha256:83101a6930332b87653886c01d1ee7e294b1fe46a07dd9a2d2b4f91bcc88eec0"}, + {file = "sqlalchemy-2.0.49-cp312-cp312-win_amd64.whl", hash = "sha256:618a308215b6cececb6240b9abde545e3acdabac7ae3e1d4e666896bf5ba44b4"}, + {file = "sqlalchemy-2.0.49-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df2d441bacf97022e81ad047e1597552eb3f83ca8a8f1a1fdd43cd7fe3898120"}, + {file = "sqlalchemy-2.0.49-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8e20e511dc15265fb433571391ba313e10dd8ea7e509d51686a51313b4ac01a2"}, + 
{file = "sqlalchemy-2.0.49-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47604cb2159f8bbd5a1ab48a714557156320f20871ee64d550d8bf2683d980d3"}, + {file = "sqlalchemy-2.0.49-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:22d8798819f86720bc646ab015baff5ea4c971d68121cb36e2ebc2ee43ead2b7"}, + {file = "sqlalchemy-2.0.49-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9b1c058c171b739e7c330760044803099c7fff11511e3ab3573e5327116a9c33"}, + {file = "sqlalchemy-2.0.49-cp313-cp313-win32.whl", hash = "sha256:a143af2ea6672f2af3f44ed8f9cd020e9cc34c56f0e8db12019d5d9ecf41cb3b"}, + {file = "sqlalchemy-2.0.49-cp313-cp313-win_amd64.whl", hash = "sha256:12b04d1db2663b421fe072d638a138460a51d5a862403295671c4f3987fb9148"}, + {file = "sqlalchemy-2.0.49-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24bd94bb301ec672d8f0623eba9226cc90d775d25a0c92b5f8e4965d7f3a1518"}, + {file = "sqlalchemy-2.0.49-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a51d3db74ba489266ef55c7a4534eb0b8db9a326553df481c11e5d7660c8364d"}, + {file = "sqlalchemy-2.0.49-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:55250fe61d6ebfd6934a272ee16ef1244e0f16b7af6cd18ab5b1fc9f08631db0"}, + {file = "sqlalchemy-2.0.49-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:46796877b47034b559a593d7e4b549aba151dae73f9e78212a3478161c12ab08"}, + {file = "sqlalchemy-2.0.49-cp313-cp313t-win32.whl", hash = "sha256:9c4969a86e41454f2858256c39bdfb966a20961e9b58bf8749b65abf447e9a8d"}, + {file = "sqlalchemy-2.0.49-cp313-cp313t-win_amd64.whl", hash = "sha256:b9870d15ef00e4d0559ae10ee5bc71b654d1f20076dbe8bc7ed19b4c0625ceba"}, + {file = "sqlalchemy-2.0.49-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:233088b4b99ebcbc5258c755a097aa52fbf90727a03a5a80781c4b9c54347a2e"}, + {file = "sqlalchemy-2.0.49-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57ca426a48eb2c682dae8204cd89ea8ab7031e2675120a47924fabc7caacbc2a"}, + {file = "sqlalchemy-2.0.49-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:685e93e9c8f399b0c96a624799820176312f5ceef958c0f88215af4013d29066"}, + {file = "sqlalchemy-2.0.49-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e0400fa22f79acc334d9a6b185dc00a44a8e6578aa7e12d0ddcd8434152b187"}, + {file = "sqlalchemy-2.0.49-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a05977bffe9bffd2229f477fa75eabe3192b1b05f408961d1bebff8d1cd4d401"}, + {file = "sqlalchemy-2.0.49-cp314-cp314-win32.whl", hash = "sha256:0f2fa354ba106eafff2c14b0cc51f22801d1e8b2e4149342023bd6f0955de5f5"}, + {file = "sqlalchemy-2.0.49-cp314-cp314-win_amd64.whl", hash = "sha256:77641d299179c37b89cf2343ca9972c88bb6eef0d5fc504a2f86afd15cd5adf5"}, + {file = "sqlalchemy-2.0.49-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c1dc3368794d522f43914e03312202523cc89692f5389c32bea0233924f8d977"}, + {file = "sqlalchemy-2.0.49-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c821c47ecfe05cc32140dcf8dc6fd5d21971c86dbd56eabfe5ba07a64910c01"}, + {file = "sqlalchemy-2.0.49-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9c04bff9a5335eb95c6ecf1c117576a0aa560def274876fd156cfe5510fccc61"}, + {file = "sqlalchemy-2.0.49-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = 
"sha256:7f605a456948c35260e7b2a39f8952a26f077fd25653c37740ed186b90aaa68a"}, + {file = "sqlalchemy-2.0.49-cp314-cp314t-win32.whl", hash = "sha256:6270d717b11c5476b0cbb21eedc8d4dbb7d1a956fd6c15a23e96f197a6193158"}, + {file = "sqlalchemy-2.0.49-cp314-cp314t-win_amd64.whl", hash = "sha256:275424295f4256fd301744b8f335cff367825d270f155d522b30c7bf49903ee7"}, + {file = "sqlalchemy-2.0.49-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8a97ac839c2c6672c4865e48f3cbad7152cee85f4233fb4ca6291d775b9b954a"}, + {file = "sqlalchemy-2.0.49-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c338ec6ec01c0bc8e735c58b9f5d51e75bacb6ff23296658826d7cfdfdb8678a"}, + {file = "sqlalchemy-2.0.49-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:566df36fd0e901625523a5a1835032f1ebdd7f7886c54584143fa6c668b4df3b"}, + {file = "sqlalchemy-2.0.49-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d99945830a6f3e9638d89a28ed130b1eb24c91255e4f24366fbe699b983f29e4"}, + {file = "sqlalchemy-2.0.49-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:01146546d84185f12721a1d2ce0c6673451a7894d1460b592d378ca4871a0c72"}, + {file = "sqlalchemy-2.0.49-cp38-cp38-win32.whl", hash = "sha256:69469ce8ce7a8df4d37620e3163b71238719e1e2e5048d114a1b6ce0fbf8c662"}, + {file = "sqlalchemy-2.0.49-cp38-cp38-win_amd64.whl", hash = "sha256:b95b2f470c1b2683febd2e7eab1d3f0e078c91dbdd0b00e9c645d07a413bb99f"}, + {file = "sqlalchemy-2.0.49-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43d044780732d9e0381ac8d5316f95d7f02ef04d6e4ef6dc82379f09795d993f"}, + {file = "sqlalchemy-2.0.49-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d6be30b2a75362325176c036d7fb8d19e8846c77e87683ffaa8177b35135613"}, + {file = "sqlalchemy-2.0.49-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d898cc2c76c135ef65517f4ddd7a3512fb41f23087b0650efb3418b8389a3cd1"}, + {file = "sqlalchemy-2.0.49-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:059d7151fff513c53a4638da8778be7fce81a0c4854c7348ebd0c4078ddf28fe"}, + {file = "sqlalchemy-2.0.49-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:334edbcff10514ad1d66e3a70b339c0a29886394892490119dbb669627b17717"}, + {file = "sqlalchemy-2.0.49-cp39-cp39-win32.whl", hash = "sha256:74ab4ee7794d7ed1b0c37e7333640e0f0a626fc7b398c07a7aef52f484fddde3"}, + {file = "sqlalchemy-2.0.49-cp39-cp39-win_amd64.whl", hash = "sha256:88690f4e1f0fbf5339bedbb127e240fec1fd3070e9934c0b7bef83432f779d2f"}, + {file = "sqlalchemy-2.0.49-py3-none-any.whl", hash = "sha256:ec44cfa7ef1a728e88ad41674de50f6db8cfdb3e2af84af86e0041aaf02d43d0"}, + {file = "sqlalchemy-2.0.49.tar.gz", hash = "sha256:d15950a57a210e36dd4cec1aac22787e2a4d57ba9318233e2ef8b2daf9ff2d5f"}, ] [package.dependencies] @@ -1521,30 +1642,30 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "starlette" -version = "0.36.3" +version = "1.0.0" description = "The little ASGI library that shines." 
+category = "main" optional = false -python-versions = ">=3.8" -groups = ["main"] +python-versions = ">=3.10" files = [ - {file = "starlette-0.36.3-py3-none-any.whl", hash = "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044"}, - {file = "starlette-0.36.3.tar.gz", hash = "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080"}, + {file = "starlette-1.0.0-py3-none-any.whl", hash = "sha256:d3ec55e0bb321692d275455ddfd3df75fff145d009685eb40dc91fc66b03d38b"}, + {file = "starlette-1.0.0.tar.gz", hash = "sha256:6a4beaf1f81bb472fd19ea9b918b50dc3a77a6f2e190a12954b25e6ed5eea149"}, ] [package.dependencies] -anyio = ">=3.4.0,<5" +anyio = ">=3.6.2,<5" +typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\""} [package.extras] -full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] [[package]] name = "tomli" version = "2.4.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.8" -groups = ["dev"] -markers = "python_version == \"3.10\"" files = [ {file = "tomli-2.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f8f0fc26ec2cc2b965b7a3b87cd19c5c6b8c5e5f436b984e85f486d652285c30"}, {file = "tomli-2.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ab97e64ccda8756376892c53a72bd1f964e519c77236368527f758fbc36a53a"}, @@ -1599,9 +1720,9 @@ files = [ name = "truststore" version = "0.10.4" description = "Verify certificates using native system trust stores" +category = "main" optional = false python-versions = ">=3.10" -groups = ["main"] files = [ {file = "truststore-0.10.4-py3-none-any.whl", hash = "sha256:adaeaecf1cbb5f4de3b1959b42d41f6fab57b2b1666adb59e89cb0b53361d981"}, {file = "truststore-0.10.4.tar.gz", hash = "sha256:9d91bd436463ad5e4ee4aba766628dd6cd7010cf3e2461756b3303710eebc301"}, @@ -1611,22 +1732,21 @@ files = [ name = "typing-extensions" version = "4.15.0" description = "Backported and Experimental Type Hints for Python 3.9+" +category = "main" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] -markers = {dev = "python_version == \"3.10\""} [[package]] name = "typing-inspection" version = "0.4.2" description = "Runtime typing introspection tools" +category = "main" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, @@ -1637,31 +1757,32 @@ typing-extensions = ">=4.12.0" [[package]] name = "urllib3" -version = "1.26.20" +version = "2.6.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -groups = ["main"] +python-versions = ">=3.9" files = [ - {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, - {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, + {file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"}, + {file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"}, ] [package.extras] -brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.2.0)", "brotlicffi (>=1.2.0.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["backports-zstd (>=1.0.0)"] [[package]] name = "uvicorn" -version = "0.27.1" +version = "0.44.0" description = "The lightning-fast ASGI server." +category = "main" optional = false -python-versions = ">=3.8" -groups = ["main"] +python-versions = ">=3.10" files = [ - {file = "uvicorn-0.27.1-py3-none-any.whl", hash = "sha256:5c89da2f3895767472a35556e539fd59f7edbe9b1e9c0e1c99eebeadc61838e4"}, - {file = "uvicorn-0.27.1.tar.gz", hash = "sha256:3d9a267296243532db80c83a959a3400502165ade2c1338dea4e67915fd4745a"}, + {file = "uvicorn-0.44.0-py3-none-any.whl", hash = "sha256:ce937c99a2cc70279556967274414c087888e8cec9f9c94644dfca11bd3ced89"}, + {file = "uvicorn-0.44.0.tar.gz", hash = "sha256:6c942071b68f07e178264b9152f1f16dfac5da85880c4ce06366a96d70d4f31e"}, ] [package.dependencies] @@ -1670,34 +1791,34 @@ h11 = ">=0.8" typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} [package.extras] -standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] +standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1)", "watchfiles (>=0.20)", "websockets (>=10.4)"] [[package]] name = "virtualenv" -version = "21.2.0" +version = "21.2.4" description = "Virtual Python Environment builder" +category = "dev" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ - {file = "virtualenv-21.2.0-py3-none-any.whl", hash = "sha256:1bd755b504931164a5a496d217c014d098426cddc79363ad66ac78125f9d908f"}, - {file = "virtualenv-21.2.0.tar.gz", hash = "sha256:1720dc3a62ef5b443092e3f499228599045d7fea4c79199770499df8becf9098"}, + {file = "virtualenv-21.2.4-py3-none-any.whl", hash = "sha256:29d21e941795206138d0f22f4e45ff7050e5da6c6472299fb7103318763861ac"}, + {file = "virtualenv-21.2.4.tar.gz", hash = "sha256:b294ef68192638004d72524ce7ef303e9d0cf5a44c95ce2e54a7500a6381cada"}, ] 
[package.dependencies] distlib = ">=0.3.7,<1" filelock = {version = ">=3.24.2,<4", markers = "python_version >= \"3.10\""} platformdirs = ">=3.9.1,<5" -python-discovery = ">=1" +python-discovery = ">=1.2.2" typing-extensions = {version = ">=4.13.2", markers = "python_version < \"3.11\""} [[package]] name = "websocket-client" version = "1.9.0" description = "WebSocket client for Python with low level API options" +category = "main" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef"}, {file = "websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98"}, @@ -1709,6 +1830,6 @@ optional = ["python-socks", "wsaccel"] test = ["pytest", "websockets"] [metadata] -lock-version = "2.1" +lock-version = "2.0" python-versions = ">=3.10,<4.0" -content-hash = "3fa365f32b710e5b3d3ba837a48edb8767aab297bc196f65375917cf0f57e101" +content-hash = "89ba097b2444a09843cabd2693b3ee180bc527e3cfe81430f493978ffa495792" diff --git a/pyproject.toml b/pyproject.toml index 6777b1f..f4ff622 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "Node-Pod-Orchestration" -version = "0.4.0.1" +version = "0.5.0" description = "" authors = ["Alexander Röhl ", "David Hieber "] license = "Apache 2.0" @@ -9,23 +9,24 @@ packages = [{ include = "src" }] [tool.poetry.dependencies] python = ">=3.10,<4.0" -kubernetes = "^28.1.0" -fastapi = "^0.109.0" -uvicorn = "^0.27.0" +kubernetes = "^35.0.0" +fastapi = "^0.135.3" +uvicorn = "^0.44.0" docker = "^7.0.0" psycopg2-binary = "^2.9.9" sqlalchemy = "^2.0.26" PyJWT = "^2.10.1" -python-dotenv = "^0.21.0" +python-dotenv = "^1.2.2" kong-admin-client = {git = "https://github.com/PrivateAIM/kong-admin-python-client.git"} flame-hub-client = "^0.2.13" -cryptography="^44.0.3" +cryptography="^46.0.7" truststore = "^0.10.4" [tool.poetry.group.dev.dependencies] -pytest = "^7.4.3" -ruff = "^0.1.8" -pre-commit = "^3.6.0" +pytest = "^9.0.3" +ruff = "^0.15.10" +pre-commit = "^4.5.1" +coverage = "^7.13.5" [build-system] requires = ["poetry-core"] diff --git a/src/api/api.py b/src/api/api.py index 4f3e926..be62605 100644 --- a/src/api/api.py +++ b/src/api/api.py @@ -20,10 +20,33 @@ delete_analysis, cleanup, stream_logs) +from src.utils.po_logging import get_logger +logger = get_logger() class PodOrchestrationAPI: + """FastAPI application exposing the Pod Orchestration REST endpoints. + + Constructs a FastAPI app, wires up all routes under the ``/po`` prefix, + enables CORS, initializes the FLAME Hub client used for status/log + forwarding, and finally blocks on ``uvicorn.run``. All endpoints except + ``/po/healthz`` require a valid Keycloak access token. + + Attributes: + database: Database wrapper used for persistence. + hub_client: Initialized FLAME Hub core client (``None`` on failure). + node_id: This node's id in the FLAME Hub, resolved from the client id. + enable_hub_logging: Whether logs are forwarded to the Hub. + namespace: Kubernetes namespace the API operates within. + """ + def __init__(self, database: Database, namespace: str = 'default'): + """Build the FastAPI app, register routes, and start the uvicorn server. + + Args: + database: Database wrapper used by all handlers. + namespace: Kubernetes namespace the API operates within. 
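+
+        Example (illustrative; mirrors how ``start_po_api`` in ``src/main.py``
+        launches it; note that the constructor blocks on ``uvicorn.run``)::
+
+            from src.k8s.utils import get_current_namespace
+            from src.resources.database.entity import Database
+
+            PodOrchestrationAPI(Database(), namespace=get_current_namespace())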
+ """ self.database = database client_id, client_secret, hub_url_core, hub_auth, enable_hub_logging, http_proxy, https_proxy = extract_hub_envs() @@ -43,7 +66,7 @@ def __init__(self, database: Database, namespace: str = 'default'): openapi_url="/api/v1/openapi.json") origins = [ - "http://localhost:8080/", + "http://localhost:8080", ] app.add_middleware( @@ -139,63 +162,171 @@ def __init__(self, database: Database, namespace: str = 'default'): prefix="/po", ) - uvicorn.run(app, host="0.0.0.0", port=8000) + uvicorn.run(app, host="0.0.0.0", port=8000, log_config=None) def create_analysis_call(self, body: CreateAnalysis): + """``POST /po/`` — create and start a new analysis deployment. + + Args: + body: Payload describing the analysis to create (image, registry + credentials, Kong token, etc.). + + Returns: + A mapping of ``{analysis_id: status}`` for the newly started run. + + Raises: + HTTPException: 500 on any downstream failure (details in logs). + """ try: return create_analysis(body, self.database) except Exception as e: - raise HTTPException(status_code=500, detail=f"Error creating analysis: {e}") + logger.error(f"Error creating analysis: {repr(e)}") + raise HTTPException(status_code=500, detail=f"Error creating analysis (see po logs).") def retrieve_all_history_call(self): + """``GET /po/history`` — return archived logs for every analysis. + + Returns: + Nested mapping ``{'analysis': {...}, 'nginx': {...}}`` keyed by + analysis id containing the persisted log snapshots. + + Raises: + HTTPException: 500 on any downstream failure (details in logs). + """ try: return retrieve_history('all', self.database) except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving ALL history data: {e}") + logger.error(f"Error retrieving ALL history data: {repr(e)}") + raise HTTPException(status_code=500, detail=f"Error retrieving ALL history data (see po logs).") def retrieve_history_call(self, analysis_id: str): + """``GET /po/history/{analysis_id}`` — return archived logs for a single analysis. + + Args: + analysis_id: UUID of the analysis to query. + + Returns: + Nested mapping ``{'analysis': {...}, 'nginx': {...}}`` containing + the persisted log snapshots for ``analysis_id``. + + Raises: + HTTPException: 500 on any downstream failure (details in logs). + """ try: return retrieve_history(analysis_id, self.database) except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving history data: {e}") + logger.error(f"Error retrieving history data: {repr(e)}") + raise HTTPException(status_code=500, detail=f"Error retrieving history data (see po logs).") def retrieve_all_logs_call(self): + """``GET /po/logs`` — return live pod logs for every executing analysis. + + Returns: + Nested mapping of analysis and nginx logs keyed by analysis id. + + Raises: + HTTPException: 500 on any downstream failure (details in logs). + """ try: return retrieve_logs('all', self.database) except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving ALL logs data: {e}") + logger.error(f"Error retrieving ALL logs data: {repr(e)}") + raise HTTPException(status_code=500, detail=f"Error retrieving ALL logs data (see po logs).") def retrieve_logs_call(self, analysis_id: str): + """``GET /po/logs/{analysis_id}`` — return live pod logs for a single analysis. + + Args: + analysis_id: UUID of the analysis to query. + + Returns: + Nested mapping of analysis and nginx logs for ``analysis_id``. 
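+
+            For example (shape only; ids and log lines are illustrative)::
+
+                {'analysis': {'<analysis_id>': ['<log line>', ...]},
+                 'nginx': {'<analysis_id>': ['<log line>', ...]}}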
+ + Raises: + HTTPException: 500 on any downstream failure (details in logs). + """ try: return retrieve_logs(analysis_id, self.database) except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving logs data: {e}") + logger.error(f"Error retrieving logs data: {repr(e)}") + raise HTTPException(status_code=500, detail=f"Error retrieving logs data (see po logs).") def get_all_status_and_progress_call(self): + """``GET /po/status`` — return status and progress for every analysis. + + Returns: + Mapping ``{analysis_id: {'status': str, 'progress': int}}``. + + Raises: + HTTPException: 500 on any downstream failure (details in logs). + """ try: return get_status_and_progress('all', self.database) except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving ALL status data: {e}") + logger.error(f"Error retrieving ALL status data: {repr(e)}") + raise HTTPException(status_code=500, detail=f"Error retrieving ALL status data (see po logs).") def get_status_and_progress_call(self, analysis_id: str): + """``GET /po/status/{analysis_id}`` — return status and progress for a single analysis. + + Args: + analysis_id: UUID of the analysis to query. + + Returns: + Mapping ``{analysis_id: {'status': str, 'progress': int}}``. + + Raises: + HTTPException: 500 on any downstream failure (details in logs). + """ try: return get_status_and_progress(analysis_id, self.database) except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving status data: {e}") + logger.error(f"Error retrieving status data: {repr(e)}") + raise HTTPException(status_code=500, detail=f"Error retrieving status data (see po logs).") def get_all_pods_call(self): + """``GET /po/pods`` — return the pod ids backing every analysis deployment. + + Returns: + Mapping ``{analysis_id: [pod_id, ...]}``. + + Raises: + HTTPException: 500 on any downstream failure (details in logs). + """ try: return get_pods('all', self.database) except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving ALL pod names: {e}") + logger.error(f"Error retrieving ALL pod names: {repr(e)}") + raise HTTPException(status_code=500, detail=f"Error retrieving ALL pod names (see po logs).") def get_pods_call(self, analysis_id: str): + """``GET /po/pods/{analysis_id}`` — return pod ids for a single analysis. + + Args: + analysis_id: UUID of the analysis to query. + + Returns: + Mapping ``{analysis_id: [pod_id, ...]}``. + + Raises: + HTTPException: 500 on any downstream failure (details in logs). + """ try: return get_pods(analysis_id, self.database) except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving pod name: {e}") + logger.error(f"Error retrieving pod name: {repr(e)}") + raise HTTPException(status_code=500, detail=f"Error retrieving pod name (see po logs).") def stop_all_analysis_call(self): + """``PUT /po/stop`` — stop every analysis and push a stop log to the Hub. + + Returns: + Mapping ``{analysis_id: status}`` reflecting the final status of + each stopped analysis. + + Raises: + HTTPException: 500 on any downstream failure (details in logs). 
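+
+        Example response (shape only; the status value is an
+        ``AnalysisStatus`` member such as ``STOPPED``)::
+
+            {'<analysis_id>': 'STOPPED'}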
+        """
         try:
             response = stop_analysis('all', self.database)
             for analysis_id in self.database.get_analysis_ids():
@@ -206,9 +337,21 @@ def stop_all_analysis_call(self):
                                 self.hub_client)
             return response
         except Exception as e:
-            raise HTTPException(status_code=500, detail=f"Error stopping ALL analyzes: {e}")
+            logger.error(f"Error stopping ALL analyses: {repr(e)}")
+            raise HTTPException(status_code=500, detail=f"Error stopping ALL analyses (see po logs).")
 
     def stop_analysis_call(self, analysis_id: str):
+        """``PUT /po/stop/{analysis_id}`` — stop a single analysis and push a stop log to the Hub.
+
+        Args:
+            analysis_id: UUID of the analysis to stop.
+
+        Returns:
+            Mapping ``{analysis_id: status}`` reflecting the final status.
+
+        Raises:
+            HTTPException: 500 on any downstream failure (details in logs).
+        """
         try:
             response = stop_analysis(analysis_id, self.database)
             stream_logs(AnalysisStoppedLog(analysis_id),
@@ -218,33 +361,92 @@ def stop_analysis_call(self, analysis_id: str):
                         self.hub_client)
             return response
         except Exception as e:
-            raise HTTPException(status_code=500, detail=f"Error stopping analysis: {e}")
+            logger.error(f"Error stopping analysis: {repr(e)}")
+            raise HTTPException(status_code=500, detail=f"Error stopping analysis (see po logs).")
 
     def delete_all_analysis_call(self):
+        """``DELETE /po/delete`` — stop and permanently remove every analysis.
+
+        Removes each analysis from the database and deletes its Keycloak client.
+
+        Returns:
+            Mapping ``{analysis_id: None}`` acknowledging the deletions.
+
+        Raises:
+            HTTPException: 500 on any downstream failure (details in logs).
+        """
         try:
             return delete_analysis('all', self.database)
         except Exception as e:
-            raise HTTPException(status_code=500, detail=f"Error deleting ALL analyzes: {e}")
+            logger.error(f"Error deleting ALL analyses: {repr(e)}")
+            raise HTTPException(status_code=500, detail=f"Error deleting ALL analyses (see po logs).")
 
     def delete_analysis_call(self, analysis_id: str):
+        """``DELETE /po/delete/{analysis_id}`` — stop and permanently remove a single analysis.
+
+        Args:
+            analysis_id: UUID of the analysis to delete.
+
+        Returns:
+            Mapping ``{analysis_id: None}`` acknowledging the deletion.
+
+        Raises:
+            HTTPException: 500 on any downstream failure (details in logs).
+        """
         try:
             return delete_analysis(analysis_id, self.database)
         except Exception as e:
-            raise HTTPException(status_code=500, detail=f"Error deleting analysis: {e}")
+            logger.error(f"Error deleting analysis: {repr(e)}")
+            raise HTTPException(status_code=500, detail=f"Error deleting analysis (see po logs).")
 
     def cleanup_call(self, cleanup_type: str):
+        """``DELETE /po/cleanup/{cleanup_type}`` — run a targeted cleanup pass.
+
+        Args:
+            cleanup_type: Cleanup selector; one or more (comma-separated) of
+                ``all``, ``analyzes``, ``services``, ``mb``, ``rs``,
+                ``keycloak``, or ``zombies``.
+
+        Returns:
+            Mapping ``{cleanup_type: summary_string}`` describing what was done.
+
+        Raises:
+            HTTPException: 500 on any downstream failure (details in logs).
+        """
         try:
             return cleanup(cleanup_type, self.database, self.namespace)
         except Exception as e:
-            raise HTTPException(status_code=500, detail=f"Error cleaning up: {e}")
+            logger.error(f"Error cleaning up: {repr(e)}")
+            raise HTTPException(status_code=500, detail=f"Error cleaning up (see po logs).")
 
     def stream_logs_call(self, body: CreateLogEntity):
+        """``POST /po/stream_logs`` — accept a log line from an analysis pod.
+ + Persists the log to the database, optionally forwards it to the FLAME + Hub, and updates the Hub with the latest status and progress. + + Args: + body: Structured log entry posted by the analysis via the nginx + sidecar. + + Raises: + HTTPException: 500 on any downstream failure (details in logs). + """ try: return stream_logs(body, self.node_id, self.enable_hub_logging, self.database, self.hub_client) except Exception as e: - raise HTTPException(status_code=500, detail=f"Error streaming logs: {e}") + logger.error(f"Error streaming logs: {repr(e)}") + raise HTTPException(status_code=500, detail=f"Error streaming logs (see po logs).") def health_call(self): + """``GET /po/healthz`` — unauthenticated liveness probe. + + Returns: + ``{'status': 'ok'}`` when the main thread is alive. + + Raises: + RuntimeError: If the main thread has died. + """ main_alive = threading.main_thread().is_alive() if not main_alive: raise RuntimeError("Main thread is not alive.") diff --git a/src/api/oauth.py b/src/api/oauth.py index 6adee59..8af0896 100644 --- a/src/api/oauth.py +++ b/src/api/oauth.py @@ -6,21 +6,40 @@ import jwt from typing import Annotated -oauth2_scheme = OAuth2AuthorizationCodeBearer( - tokenUrl=os.getenv('KEYCLOAK_URL') + "/realms/flame/protocol/openid-connect/token", - authorizationUrl=os.getenv('KEYCLOAK_URL') + "/realms/flame/protocol/openid-connect/auth", - refreshUrl=os.getenv('KEYCLOAK_URL') + "/realms/flame/protocol/openid-connect/token", + +_KEYCLOAK_URL = os.getenv("KEYCLOAK_URL") +_REALM = os.getenv("KEYCLOAK_REALM", "flame") +_REALM_BASE = f"{_KEYCLOAK_URL}/realms/{_REALM}/protocol/openid-connect" + + +_oauth2_scheme = OAuth2AuthorizationCodeBearer( + tokenUrl=f"{_REALM_BASE}/token", + authorizationUrl=f"{_REALM_BASE}/auth", + refreshUrl=f"{_REALM_BASE}/token", ) -async def valid_access_token(token: Annotated[str, Depends(oauth2_scheme)]) -> dict: - url = os.getenv('KEYCLOAK_URL') + "/realms/flame/protocol/openid-connect/certs" - jwks_client = PyJWKClient(url) +def valid_access_token(token: Annotated[str, Depends(_oauth2_scheme)]) -> dict: + """FastAPI dependency that validates a Keycloak-issued OAuth2 bearer token. + + Fetches the Keycloak realm's signing keys via JWKS and verifies the token's + signature and expiration. Audience validation is intentionally disabled. + + Args: + token: The bearer token extracted from the ``Authorization`` header by + the OAuth2 scheme. + + Returns: + The decoded JWT claims as a dictionary. + Raises: + HTTPException: 401 if the token is invalid, expired, or cannot be + verified against the realm's signing keys. 
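+
+    Example (illustrative only; the real routes are registered inside
+    ``PodOrchestrationAPI``)::
+
+        from fastapi import APIRouter, Depends
+
+        router = APIRouter(dependencies=[Depends(valid_access_token)])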
+ """ try: - sig_key = jwks_client.get_signing_key_from_jwt(token) + sig_key = PyJWKClient(f"{_REALM_BASE}/certs").get_signing_key_from_jwt(token) return jwt.decode(token, key=sig_key, - options={'verify_signature': True, 'verify_aud': False, 'exp': True}) + options={'verify_signature': True, 'verify_aud': False, 'verify_exp': True}) except jwt.exceptions.InvalidTokenError: raise HTTPException(status_code=401, detail="Not authenticated") diff --git a/src/k8s/kubernetes.py b/src/k8s/kubernetes.py index ce78dab..5aec805 100644 --- a/src/k8s/kubernetes.py +++ b/src/k8s/kubernetes.py @@ -1,3 +1,4 @@ +import os import time import json import base64 @@ -7,9 +8,12 @@ from kubernetes import client from src.resources.database.entity import Database -from src.k8s.utils import find_k8s_resources, delete_k8s_resource +from src.k8s.utils import find_k8s_resources +from src.utils.po_logging import get_logger +logger = get_logger() + PORTS = {'nginx': [80], 'analysis': [8000], 'service': [80]} @@ -20,12 +24,28 @@ def create_harbor_secret(host_address: str, password: str, name: str = 'flame-harbor-credentials', namespace: str = 'default') -> None: + """Create (or recreate) the dockerconfigjson secret used to pull analysis images. + + If a secret with the same name already exists it is deleted and recreated + to ensure the credentials are up to date. + + Args: + host_address: Harbor registry hostname (e.g. ``harbor.example.com``). + user: Registry username. + password: Registry password. + name: Name of the Kubernetes secret to create. + namespace: Namespace in which to create the secret. + + Raises: + Exception: If the conflict cannot be resolved or an unexpected API + error occurs. + """ core_client = client.CoreV1Api() secret_metadata = client.V1ObjectMeta(name=name, namespace=namespace) secret = client.V1Secret(metadata=secret_metadata, type='kubernetes.io/dockerconfigjson', string_data={'docker-server': host_address, - 'docker-username': user.replace('$', '\$'), + 'docker-username': user.replace('$', '\\$'), 'docker-password': password, '.dockerconfigjson': json.dumps({'auths': {host_address: @@ -36,21 +56,40 @@ def create_harbor_secret(host_address: str, try: core_client.create_namespaced_secret(namespace=namespace, body=secret) except client.exceptions.ApiException: + logger.warning(f"Harbor secret already exists in namespace {namespace}, attempting to resolve conflict by " + f"deleting and recreating the secret.") try: core_client.delete_namespaced_secret(name=name, namespace=namespace) core_client.create_namespaced_secret(namespace=namespace, body=secret) except client.exceptions.ApiException as e: if e.reason != 'Conflict': - raise e + logger.error(f"Unknown error during harbor secret creation: {repr(e)}") + raise Exception(f"Unknown error during harbor secret creation (see po logs)") else: - print("Conflict remains unresolved!") - raise e + logger.error(f"Conflict in harbor secret creation remains unresolved: {repr(e)}") + raise Exception(f"Conflict in harbor secret creation remains unresolved (see po logs)") def create_analysis_deployment(name: str, image: str, - env: dict[str, str] = {}, + env: Optional[dict[str, str]] = None, namespace: str = 'default') -> list[str]: + """Deploy an analysis pod along with its nginx sidecar, service, and network policy. + + Creates the analysis ``Deployment`` using the Harbor pull secret, exposes + it via a ``Service``, and then provisions the companion nginx deployment + that reverse-proxies egress to node-local services. 
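+
+    For an analysis deployment named ``analysis-abc-0``, the companion
+    resources created here would be named as follows (names illustrative,
+    derived from the patterns in this module)::
+
+        nginx-analysis-abc-0             # Deployment + Service
+        nginx-analysis-abc-0-config      # ConfigMap
+        nginx-to-analysis-abc-0-policy   # NetworkPolicy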
+
+    Args:
+        name: Deployment name (typically ``analysis-{analysis_id}-{restart_counter}``).
+        image: Fully qualified container image reference.
+        env: Optional environment variables to inject into the analysis
+            container.
+        namespace: Namespace in which to create the resources.
+
+    Returns:
+        List of pod names that belong to the new analysis deployment.
+    """
     app_client = client.AppsV1Api()
 
     containers = []
@@ -58,7 +97,8 @@ def create_analysis_deployment(name: str,
                                    image=image,
                                    image_pull_policy='IfNotPresent',
                                    ports=[client.V1ContainerPort(PORTS['analysis'][0])],
-                                   env=[client.V1EnvVar(name=key, value=val) for key, val in env.items()])
+                                   env=[client.V1EnvVar(name=key, value=val) for key, val in env.items()]
+                                   if env is not None else [])
     containers.append(container)
 
     labels = {'app': name, 'component': "flame-analysis"}
@@ -88,38 +128,60 @@ def create_analysis_deployment(name: str,
 
 def delete_deployment(deployment_name: str, namespace: str = 'default') -> None:
-    print(f"PO ACTION - Deleting deployment {deployment_name} in namespace {namespace} at {time.strftime('%Y-%m-%d %H:%M:%S')}")
+    """Tear down an analysis and its companion nginx resources.
+
+    Deletes both the analysis and ``nginx-{name}`` deployments with their
+    services, as well as the associated network policy and nginx ConfigMap.
+    Missing resources are logged and ignored.
+
+    Args:
+        deployment_name: Name of the analysis deployment to remove.
+        namespace: Namespace the resources live in.
+    """
+    logger.action(f"Deleting deployment {deployment_name} in namespace {namespace} at {time.strftime('%Y-%m-%d %H:%M:%S')}")
     app_client = client.AppsV1Api()
     for name in [deployment_name, f'nginx-{deployment_name}']:
         try:
             app_client.delete_namespaced_deployment(async_req=False, name=name, namespace=namespace)
             _delete_service(name, namespace)
         except client.exceptions.ApiException as e:
-            if e.reason != 'Not Found':
-                print(f"Error: Not Found {name}")
+            if e.reason == 'Not Found':
+                logger.warning(f"Could not find {name} for deletion")
+            else:
+                logger.error(f"Unknown error when attempting to delete {name} (reason={e.reason})")
 
     network_client = client.NetworkingV1Api()
     try:
         network_client.delete_namespaced_network_policy(name=f'nginx-to-{deployment_name}-policy', namespace=namespace)
     except client.exceptions.ApiException as e:
-        if e.reason != 'Not Found':
-            print(f"Error: Not Found nginx-to-{deployment_name}-policy")
+        if e.reason == 'Not Found':
+            logger.warning(f"Could not find nginx-to-{deployment_name}-policy for deletion")
+        else:
+            logger.error(f"Unknown error when attempting to delete nginx-to-{deployment_name}-policy (reason={e.reason})")
 
     core_client = client.CoreV1Api()
     try:
         core_client.delete_namespaced_config_map(name=f"nginx-{deployment_name}-config", namespace=namespace)
     except client.exceptions.ApiException as e:
-        if e.reason != 'Not Found':
-            print(f"Error: Not Found {deployment_name}-config")
+        if e.reason == 'Not Found':
+            logger.warning(f"Could not find nginx-{deployment_name}-config for deletion")
+        else:
+            logger.error(f"Unknown error when attempting to delete nginx-{deployment_name}-config (reason={e.reason})")
 
 
 def get_analysis_logs(deployment_names: dict[str, str],
                       database: Database,
                       namespace: str = 'default') -> dict[str, dict[str, list[str]]]:
-    """
-    get logs for both the analysis and nginx deployment
-    :param deployment_names:
-    :param database:
-    :param namespace:
-    :return:
+    """Collect pod logs for the analysis and nginx deployments.
+
+    Args:
+        deployment_names: Mapping ``{analysis_id: deployment_name}`` to fetch
+            logs for.
+ database: Database wrapper used to look up recorded pod ids so that + only pods belonging to the tracked deployment are read. + namespace: Namespace the deployments live in. + + Returns: + Nested mapping ``{'analysis': {analysis_id: [log, ...]}, + 'nginx': {analysis_id: [log, ...]}}``. """ return {'analysis': {analysis_id: _get_logs(name=deployment_name, pod_ids=database.get_deployment_pod_ids(deployment_name), @@ -132,6 +194,16 @@ def get_analysis_logs(deployment_names: dict[str, str], def get_pod_status(deployment_name: str, namespace: str = 'default') -> Optional[dict[str, dict[str, str]]]: + """Return readiness and (if not ready) failure details for each pod in a deployment. + + Args: + deployment_name: Value of the ``app`` label selecting the deployment. + namespace: Namespace to search in. + + Returns: + Mapping ``{pod_name: {'ready': bool, 'reason': str, 'message': str}}``, + or ``None`` when no pods or no container statuses are available. + """ core_client = client.CoreV1Api() # get pods in deployment @@ -142,9 +214,10 @@ def get_pod_status(deployment_name: str, namespace: str = 'default') -> Optional for pod in pods: if pod is not None: name = pod.metadata.name - status = pod.status.container_statuses[0] + status = pod.status.container_statuses - if status is not None: + if status and status[0]: + status = status[0] pod_status[name] = {} pod_status[name]['ready'] = status.ready if status.ready: @@ -170,8 +243,25 @@ def get_pod_status(deployment_name: str, namespace: str = 'default') -> Optional def _create_analysis_nginx_deployment(analysis_name: str, analysis_service_name: str, - analysis_env: dict[str, str] = {}, + analysis_env: Optional[dict[str, str]] = None, namespace: str = 'default') -> tuple[str, str]: + """Deploy the nginx reverse-proxy sidecar for an analysis. + + Builds the nginx ConfigMap, starts the ``nginx-{analysis_name}`` deployment + with a liveness probe on ``/healthz``, creates its service, and installs + the network policy locking egress/ingress to the analysis pod. + + Args: + analysis_name: Name of the analysis deployment this nginx sidecar fronts. + analysis_service_name: Service name of the analysis deployment used as + the nginx upstream. + analysis_env: Analysis config (must include ``ANALYSIS_ID`` and + ``PROJECT_ID``) used to template the nginx config. + namespace: Namespace in which to create the resources. + + Returns: + Tuple ``(nginx_deployment_name, nginx_service_name)``. + """ app_client = client.AppsV1Api() containers = [] nginx_name = f"nginx-{analysis_name}" @@ -204,9 +294,8 @@ def _create_analysis_nginx_deployment(analysis_name: str, mount_path="/etc/nginx/nginx.conf", sub_path="nginx.conf" ) - container = client.V1Container(name=nginx_name, - image="nginx:1.29.3", # TODO + image=os.getenv('NGINX_IMAGE', 'nginx:1.29.8'), image_pull_policy="IfNotPresent", ports=[client.V1ContainerPort(PORTS['nginx'][0])], liveness_probe=liveness_probe, @@ -242,28 +331,51 @@ def _create_analysis_nginx_deployment(analysis_name: str, def _create_nginx_config_map(analysis_name: str, analysis_service_name: str, nginx_name: str, - analysis_env: dict[str, str] = {}, + analysis_env: Optional[dict[str, str]] = None, namespace: str = 'default') -> str: + """Build and create the nginx ConfigMap scoped to a single analysis. 
+
+    Discovers the message broker, pod orchestration, hub adapter, kong, and
+    storage services at runtime, waits until each of their pods has an IP, and
+    renders an ``nginx.conf`` that whitelists the analysis pod for egress and
+    the message broker / pod orchestrator for ingress.
+
+    Args:
+        analysis_name: Name of the analysis deployment.
+        analysis_service_name: Upstream service for ``/analysis`` ingress.
+        nginx_name: Name of the nginx deployment (used to prefix the config
+            map name).
+        analysis_env: Analysis config containing ``ANALYSIS_ID`` and
+            ``PROJECT_ID`` used in location matches.
+        namespace: Namespace in which to create the ConfigMap.
+
+    Returns:
+        Name of the created ConfigMap (``{nginx_name}-config``).
+
+    Raises:
+        ValueError: If ``analysis_env`` is ``None`` or the pod orchestration
+            pod cannot be found.
+    """
+    if analysis_env is None:
+        logger.error("Error creating nginx config map: no analysis_env containing analysis and project id was provided.")
+        raise ValueError("Error creating nginx config map: no analysis_env containing analysis and project id was provided.")
     core_client = client.CoreV1Api()
 
     # get the service name of the message broker
     message_broker_service_name = find_k8s_resources('service',
                                                      'label',
                                                      'component=flame-message-broker',
-                                                     namespace=namespace)
+                                                     namespace=namespace)[0]
 
     # await and get the pod id and name of the message broker
     message_broker_pod_name = find_k8s_resources('pod',
                                                  'label',
                                                  'component=flame-message-broker',
-                                                 namespace=namespace)
+                                                 namespace=namespace)[0]
     message_broker_pod = None
     while message_broker_pod is None:
-        try:
-            message_broker_pod = core_client.read_namespaced_pod(name=message_broker_pod_name,
-                                                                 namespace=namespace)
-        except:
-            raise ValueError(f"Could not find message broker pod with name {message_broker_pod_name} in namespace {namespace}. ")
+        message_broker_pod = core_client.read_namespaced_pod(name=message_broker_pod_name,
+                                                             namespace=namespace)
         if message_broker_pod is not None:
             message_broker_ip = message_broker_pod.status.pod_ip
         time.sleep(1)
@@ -272,13 +384,13 @@ def _create_nginx_config_map(analysis_name: str,
     po_service_name = find_k8s_resources('service',
                                          'label',
                                          'component=flame-po',
-                                         namespace=namespace)
+                                         namespace=namespace)[0]
 
     # await and get the pod ip and name of the pod orchestrator
     pod_orchestration_name = find_k8s_resources('pod',
                                                 'label',
                                                 'component=flame-po',
-                                                namespace=namespace)
+                                                namespace=namespace)[0]
     pod_orchestration_pod = None
     while pod_orchestration_pod is None:
         try:
@@ -305,16 +417,16 @@ def _create_nginx_config_map(analysis_name: str,
     hub_adapter_service_name = find_k8s_resources('service',
                                                   'label',
                                                   'component=flame-hub-adapter',
-                                                  namespace=namespace)
+                                                  namespace=namespace)[0]
     kong_proxy_name = find_k8s_resources('service',
                                          'label',
                                          'app.kubernetes.io/name=kong',
                                          manual_name_selector='proxy',
-                                         namespace=namespace)
+                                         namespace=namespace)[0]
     storage_service_name = find_k8s_resources('service',
                                               'label',
                                               'component=flame-storage-service',
-                                              namespace=namespace)
+                                              namespace=namespace)[0]
 
     # generate config map
     data = {
@@ -436,6 +548,18 @@ def _create_service(name: str,
                     target_ports: list[int],
                     meta_data_labels: dict[str, str] = None,
                     namespace: str = 'default') -> str:
+    """Create a ClusterIP service selecting pods by the ``app={name}`` label.
+
+    Args:
+        name: Service and selector name.
+        ports: Service-side ports.
+        target_ports: Matching container-side ports (zipped with ``ports``).
+        meta_data_labels: Optional metadata labels; defaults to ``{'app': name}``.
+ namespace: Namespace in which to create the service. + + Returns: + The service name (equal to ``name``). + """ if meta_data_labels is None: meta_data_labels = {'app': name} @@ -452,6 +576,16 @@ def _create_service(name: str, def _create_analysis_network_policy(analysis_name: str, nginx_name: str, namespace: str = 'default') -> None: + """Install the network policy that isolates an analysis pod. + + Allows egress only to the nginx sidecar and kube-dns, and ingress only + from the nginx sidecar. + + Args: + analysis_name: Target analysis deployment (pod selector). + nginx_name: Companion nginx deployment name used in the peer selectors. + namespace: Namespace in which to create the policy. + """ network_client = client.NetworkingV1Api() # egress to nginx and kube-dns pod (kube dns' namespace has to be specified) @@ -491,30 +625,42 @@ def _create_analysis_network_policy(analysis_name: str, nginx_name: str, namespa def _delete_service(name: str, namespace: str = 'default') -> None: + """Delete a Kubernetes service by name. + + Args: + name: Service name. + namespace: Namespace the service lives in. + """ core_client = client.CoreV1Api() core_client.delete_namespaced_service(async_req=False, name=name, namespace=namespace) def _get_logs(name: str, pod_ids: Optional[list[str]] = None, namespace: str = 'default') -> list[str]: + """Retrieve and sanitize logs for the pods matching ``app={name}``. + + Filters out INFO lines and routine health/webhook access lines, and strips + non-printable characters. + + Args: + name: Value of the pods' ``app`` label. + pod_ids: Optional allowlist; pods not in this list are skipped. + namespace: Namespace to search in. + + Returns: + One sanitized log string per matched pod. + """ core_client = client.CoreV1Api() # get pods in deployment pods = core_client.list_namespaced_pod(namespace=namespace, label_selector=f'app={name}') - if pod_ids is not None: - try: - pod_logs = [core_client.read_namespaced_pod_log(pod.metadata.name, namespace) - for pod in pods.items if pod.metadata.name in pod_ids] - except client.exceptions.ApiException as e: - print(f"Error: APIException while trying to retrieve pod logs (pod_ids in list)\n{e}") - return [] - else: - try: - pod_logs = [core_client.read_namespaced_pod_log(pod.metadata.name, namespace) - for pod in pods.items] - except client.exceptions.ApiException as e: - print(f"Error: APIException while trying to retrieve pod logs (pod_ids=None)\n{e}") - return [] - + pod_logs = [] + for pod in pods.items: + if (pod_ids is None) or (pod.metadata.name in pod_ids): + try: + pod_logs.append(core_client.read_namespaced_pod_log(pod.metadata.name, namespace)) + except client.exceptions.ApiException as e: + logger.error(f"APIException while trying to retrieve pod logs for pod_name={pod.metadata.name}: " + f"{repr(e)}") # sanitize pod logs final_logs = [] for log in pod_logs: @@ -528,6 +674,15 @@ def _get_logs(name: str, pod_ids: Optional[list[str]] = None, namespace: str = ' def _get_pods(name: str, namespace: str = 'default') -> list[str]: + """Return pod names matching the ``app={name}`` label selector. + + Args: + name: Value of the pods' ``app`` label. + namespace: Namespace to search in. + + Returns: + List of matching pod names. 
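+
+    Example (hypothetical names; the suffixes come from the owning
+    ReplicaSet)::
+
+        >>> _get_pods('analysis-abc-0')
+        ['analysis-abc-0-5f7d9c4b8-x2x4z']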
+ """ core_client = client.CoreV1Api() return [pod.metadata.name for pod in core_client.list_namespaced_pod(namespace=namespace, label_selector=f'app={name}').items] diff --git a/src/k8s/utils.py b/src/k8s/utils.py index 936b009..5954e47 100644 --- a/src/k8s/utils.py +++ b/src/k8s/utils.py @@ -3,12 +3,26 @@ from kubernetes import config, client +from src.utils.po_logging import get_logger + + +logger = get_logger() + def load_cluster_config(): + """Load the in-cluster Kubernetes configuration for the official client.""" config.load_incluster_config() def get_current_namespace() -> str: + """Return the namespace this pod is running in. + + Reads the namespace from the service account token mount; falls back to + ``'default'`` if the file is missing (e.g. outside the cluster). + + Returns: + The current namespace. + """ namespace_file = '/var/run/secrets/kubernetes.io/serviceaccount/namespace' try: with open(namespace_file, 'r') as file: @@ -23,7 +37,26 @@ def find_k8s_resources(resource_type: str, selector_type: Optional[Literal['label', 'field']] = None, selector_arg: Optional[str] = None, manual_name_selector: Optional[str] = None, - namespace: str = "default") -> Optional[Union[str, list[str]]]: + namespace: str = "default") -> list[Optional[str]]: + """List names of Kubernetes resources of a given type, optionally filtered. + + Args: + resource_type: One of ``deployment``, ``pod``, ``service``, + ``networkpolicy``, ``configmap``, or ``job``. + selector_type: Whether ``selector_arg`` is a ``label`` or ``field`` + selector. + selector_arg: Selector expression (required if ``selector_type`` is set). + manual_name_selector: Optional substring that resource names must + contain to be included. + namespace: Namespace to search in. + + Returns: + List of matching resource names. When no resources are found, returns + ``[None]`` (historical quirk preserved by callers). + + Raises: + ValueError: On an unknown ``resource_type`` or missing ``selector_arg``. + """ if resource_type not in ['deployment', 'pod', 'service', 'networkpolicy', 'configmap', 'job']: raise ValueError("For k8s resource search: resource_type must be one of 'deployment', 'pod', 'service', " "'networkpolicy', 'configmap', or 'job") @@ -33,7 +66,7 @@ def find_k8s_resources(resource_type: str, raise ValueError("For k8s resource search: if given a resource_type, selector_arg must not be None") kwargs = {'namespace': namespace} - if selector_type: + if (selector_type is not None) and isinstance(selector_arg, str): kwargs[f'{selector_type}_selector'] = selector_arg if resource_type == 'deployment': @@ -48,77 +81,76 @@ def find_k8s_resources(resource_type: str, resources = core_client.list_namespaced_service(**kwargs) elif resource_type == 'configmap': resources = core_client.list_namespaced_config_map(**kwargs) + else: + raise RuntimeError("Undefined resource") elif resource_type == 'job': resources = client.BatchV1Api().list_namespaced_job(**kwargs) else: raise ValueError(f"Uncaptured resource type discovered! Message the Devs... 
(found={resource_type})")
-
-    if not resources:
-        return None
-    else:
-        resource_names = [resource.metadata.name for resource in resources.items]
-        if len(resource_names) > 1:
-            if manual_name_selector is not None:
-                resource_names = [name for name in resource_names if manual_name_selector in name]
-            return resource_names if len(resource_names) > 1 else resource_names[0]
-        else:
-            return resource_names
-    else:
-        if len(resource_names) == 1:
-            return resource_names[0]
-        else:
-            return None
+    if not resources.items:
+        return [None]
+    resource_names = [resource.metadata.name for resource in resources.items]
+    if manual_name_selector is not None:
+        resource_names = [name for name in resource_names if manual_name_selector in name]
+    return resource_names
 
 
 def delete_k8s_resource(name: str, resource_type: str, namespace: str = 'default') -> None:
+    """Delete a Kubernetes resource by name and type.
+
+    ``Not Found`` errors are swallowed silently; other API errors are logged.
+
+    Args:
+        name: Name of the resource to delete.
+        resource_type: One of ``deployment``, ``service``, ``pod``,
+            ``configmap``, ``networkpolicy``, or ``job``.
+        namespace: Namespace the resource lives in.
+
+    Raises:
+        ValueError: If ``resource_type`` is not supported.
     """
-    Deletes a Kubernetes resource by name and type.
-    :param name: Name of the resource to delete.
-    :param resource_type: Type of the resource (e.g., 'deployment', 'service', 'pod', 'configmap', 'job').
-    :param namespace: Namespace in which the resource exists.
-    """
-    print(f"PO ACTION - Deleting resource: {name} of type {resource_type} in namespace {namespace} at {time.strftime('%Y-%m-%d %H:%M:%S')}")
+    logger.action(f"Deleting resource: {name} of type {resource_type} in namespace {namespace} at {time.strftime('%Y-%m-%d %H:%M:%S')}")
     if resource_type == 'deployment':
         try:
             app_client = client.AppsV1Api()
             app_client.delete_namespaced_deployment(name=name, namespace=namespace, propagation_policy='Foreground')
         except client.exceptions.ApiException as e:
             if e.reason != 'Not Found':
-                print(f"Error: Not Found {name} deployment")
+                logger.error(f"Unknown error when attempting to delete {name} deployment (reason={e.reason})")
     elif resource_type == 'service':
         try:
             core_client = client.CoreV1Api()
             core_client.delete_namespaced_service(name=name, namespace=namespace)
         except client.exceptions.ApiException as e:
             if e.reason != 'Not Found':
-                print(f"Error: Not Found {name} service")
+                logger.error(f"Unknown error when attempting to delete {name} service (reason={e.reason})")
     elif resource_type == 'pod':
         try:
             core_client = client.CoreV1Api()
             core_client.delete_namespaced_pod(name=name, namespace=namespace)
         except client.exceptions.ApiException as e:
             if e.reason != 'Not Found':
-                print(f"Error: Not Found {name} pod")
+                logger.error(f"Unknown error when attempting to delete {name} pod (reason={e.reason})")
     elif resource_type == 'configmap':
         try:
             core_client = client.CoreV1Api()
             core_client.delete_namespaced_config_map(name=name, namespace=namespace)
         except client.exceptions.ApiException as e:
             if e.reason != 'Not Found':
-                print(f"Error: Not Found {name} configmap")
+                logger.error(f"Unknown error when attempting to delete {name} configmap (reason={e.reason})")
     elif resource_type == 'networkpolicy':
         try:
             network_client = client.NetworkingV1Api()
             network_client.delete_namespaced_network_policy(name=name, namespace=namespace)
         except client.exceptions.ApiException as e:
             if e.reason != 'Not Found':
-                print(f"Error: Not Found {name} networkpolicy")
+                logger.error(f"Unknown error when attempting to delete {name} networkpolicy (reason={e.reason})")
     elif resource_type == 'job':
         try:
             batch_client = client.BatchV1Api()
             batch_client.delete_namespaced_job(name=name, namespace=namespace, propagation_policy='Foreground')
         except client.exceptions.ApiException as e:
             if e.reason != 'Not Found':
-                print(f"Error: Not Found {name} job")
+                logger.error(f"Unknown error when attempting to delete {name} job (reason={e.reason})")
     else:
         raise ValueError(f"Unsupported resource type: {resource_type}")
diff --git a/src/main.py b/src/main.py
index c8331cc..b92746e 100644
--- a/src/main.py
+++ b/src/main.py
@@ -2,20 +2,33 @@
 from threading import Thread
 from dotenv import load_dotenv, find_dotenv
 
+# load env
+load_dotenv(find_dotenv())
+
 from src.resources.database.entity import Database
 from src.api.api import PodOrchestrationAPI
 from src.k8s.utils import get_current_namespace, load_cluster_config
 from src.status.status import status_loop
+from src.utils.po_logging import get_logger
+
+
+logger = get_logger()
 
 
 def main():
-    # load env
-    load_dotenv(find_dotenv())
+    """Entry point for the Pod Orchestration service.
 
+    Loads the in-cluster Kubernetes configuration, initializes the database,
+    spawns the FastAPI server in a background thread, and starts the blocking
+    status monitoring loop on the main thread.
+    """
     # load cluster config
     load_cluster_config()
 
+    if not os.getenv('NGINX_IMAGE'):
+        logger.warning("Environment variable 'NGINX_IMAGE' is not set, defaulting to 'nginx:1.29.8'.")
+
     # init database
     database = Database()
 
@@ -23,15 +36,19 @@
     api_thread.start()
 
     # start status loop
-    if not os.getenv('STATUS_LOOP_INTERVAL'):
-        os.environ['STATUS_LOOP_INTERVAL'] = '10'
-    status_loop(database, int(os.getenv('STATUS_LOOP_INTERVAL')))
+    status_loop(database, int(os.getenv('STATUS_LOOP_INTERVAL', '10')))
 
 
 def start_po_api(database: Database, namespace: str):
+    """Instantiate and run the Pod Orchestration FastAPI server.
+
+    Args:
+        database: Initialized database wrapper used by the API for persistence.
+        namespace: Kubernetes namespace the API will operate within.
+    """
     PodOrchestrationAPI(database, namespace)
 
 
 if __name__ == '__main__':
-    print("Starting server")
+    logger.info("Starting server")
     main()
diff --git a/src/resources/analysis/entity.py b/src/resources/analysis/entity.py
index 28268c0..2c52ba9 100644
--- a/src/resources/analysis/entity.py
+++ b/src/resources/analysis/entity.py
@@ -11,6 +11,13 @@
 
 
 class Analysis(BaseModel):
+    """Runtime model describing a single analysis deployment.
+
+    Combines the user-supplied creation payload with runtime-derived fields
+    (deployment name, Keycloak/Kong tokens, pod ids, current status) and
+    exposes ``start`` / ``stop`` helpers that drive the Kubernetes resources.
+    """
+
     analysis_id: str
     project_id: str
     registry_url: str
@@ -30,6 +37,16 @@ class Analysis(BaseModel):
     pod_ids: Optional[list[str]] = None
 
     def start(self, database: Database, namespace: str = 'default') -> None:
+        """Deploy the analysis on Kubernetes and persist it in the database.
+
+        Generates the deployment name, mints the Kong and Keycloak tokens,
+        assembles the analysis env, creates the Kubernetes resources, and then
+        writes an ``AnalysisDB`` row tracking the new deployment.
+
+        Args:
+            database: Database wrapper used to persist the new deployment.
+            namespace: Namespace the Kubernetes resources are created in.
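+
+        Example (illustrative values; mirrors the ``CreateAnalysis`` payload
+        fields, with the remaining model fields left at their defaults)::
+
+            analysis = Analysis(analysis_id='<uuid>',
+                                project_id='<uuid>',
+                                registry_url='harbor.example.com',
+                                image_url='harbor.example.com/<node_id>/<analysis_id>',
+                                registry_user='<robot_user>',
+                                registry_password='<password>',
+                                kong_token='<token>')
+            analysis.start(database, namespace='default')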
+ """ self.status = AnalysisStatus.STARTED.value self.deployment_name = "analysis-" + self.analysis_id + "-" + str(self.restart_counter) self.tokens = create_analysis_tokens(kong_token=self.kong_token, analysis_id=self.analysis_id) @@ -48,6 +65,7 @@ def start(self, database: Database, namespace: str = 'default') -> None: project_id=self.project_id, pod_ids=self.pod_ids, status=self.status, + log=self.log, registry_url=self.registry_url, image_url=self.image_url, registry_user=self.registry_user, @@ -61,6 +79,13 @@ def stop(self, database: Database, log: Optional[str] = None, status: str = AnalysisStatus.STOPPED.value) -> None: + """Tear down the Kubernetes deployment and update the database row. + + Args: + database: Database wrapper used to persist the final status/log. + log: Optional log snapshot to persist before deletion. + status: Terminal status to record (defaults to ``STOPPED``). + """ if log is not None: self.log = log self.status = status @@ -72,6 +97,10 @@ def stop(self, def read_db_analysis(analysis: AnalysisDB) -> Analysis: + """Convert a persisted :class:`AnalysisDB` row into a runtime :class:`Analysis`. + + Decodes the JSON-encoded ``pod_ids`` column back into a Python list. + """ return Analysis(analysis_id=analysis.analysis_id, deployment_name=analysis.deployment_name, project_id=analysis.project_id, @@ -89,12 +118,14 @@ def read_db_analysis(analysis: AnalysisDB) -> Analysis: class CreateAnalysis(BaseModel): - analysis_id: str = 'analysis_id' - project_id: str = 'project_id' - registry_url: str = 'harbor.privateaim' - image_url: str = 'harbor.privateaim/node_id/analysis_id' - registry_user: str = 'robot_user' - registry_password: str = 'default_pw' - kong_token: str = 'default_kong_token' + """Request body accepted by ``POST /po/`` to create a new analysis.""" + + analysis_id: str + project_id: str + registry_url: str + image_url: str + registry_user: str + registry_password: str + kong_token: str restart_counter: int = 0 progress: int = 0 diff --git a/src/resources/database/db_models.py b/src/resources/database/db_models.py index de17536..3c8747f 100644 --- a/src/resources/database/db_models.py +++ b/src/resources/database/db_models.py @@ -5,16 +5,21 @@ @as_declarative() class Base: + """SQLAlchemy declarative base with an auto-generated ``__tablename__``.""" + id: Any __name__: str # Generate __tablename__ automatically @declared_attr def __tablename__(cls) -> str: + """Derive the SQL table name from the lowercased class name.""" return cls.__name__.lower() class AnalysisDB(Base): + """ORM model tracking the current state of an analysis deployment.""" + __tablename__ = "analysis" id = Column(Integer, primary_key=True, index=True) deployment_name = Column(String, unique=True, index=True) @@ -36,6 +41,8 @@ class AnalysisDB(Base): class ArchiveDB(Base): + """ORM model mirroring :class:`AnalysisDB` for completed analyses kept for history.""" + __tablename__ = "archive" id = Column(Integer, primary_key=True, index=True) deployment_name = Column(String, unique=True, index=True) diff --git a/src/resources/database/entity.py b/src/resources/database/entity.py index cd75a5f..1b509a3 100644 --- a/src/resources/database/entity.py +++ b/src/resources/database/entity.py @@ -6,18 +6,32 @@ from sqlalchemy.orm import sessionmaker from src.status.constants import AnalysisStatus -from .db_models import Base, AnalysisDB +from src.resources.database.db_models import Base, AnalysisDB +from src.utils.po_logging import get_logger + + +logger = get_logger() class Database: + """Thin CRUD 
wrapper around the PostgreSQL-backed analysis database. + + Each method opens a short-lived SQLAlchemy session via the ``SessionLocal`` + factory and commits before returning. ``pool_pre_ping`` and a one-hour + recycle window guard against stale connections. + """ + def __init__(self) -> None: + """Connect to PostgreSQL using ``POSTGRES_*`` env vars and create tables.""" host = os.getenv('POSTGRES_HOST') port = "5432" user = os.getenv('POSTGRES_USER') password = os.getenv('POSTGRES_PASSWORD') database = os.getenv('POSTGRES_DB') conn_uri = f"postgresql+psycopg2://{user}:{password}@{host}:{port}/{database}" - print(conn_uri) + + logger.debug(f"Connecting to database at postgresql+psycopg2://{user}:*******@{host}:{port}/{database}") + self.engine = create_engine(conn_uri, pool_pre_ping=True, pool_recycle=3600) @@ -25,21 +39,26 @@ def __init__(self) -> None: Base.metadata.create_all(bind=self.engine) def reset_db(self) -> None: + """Drop and recreate all tables. Destructive — wipes all analyses.""" Base.metadata.drop_all(bind=self.engine) Base.metadata.create_all(bind=self.engine) def get_deployment(self, deployment_name: str) -> Optional[AnalysisDB]: + """Return the deployment row with the given unique name, or ``None``.""" with self.SessionLocal() as session: return session.query(AnalysisDB).filter_by(**{'deployment_name': deployment_name}).first() def get_latest_deployment(self, analysis_id: str) -> Optional[AnalysisDB]: + """Return the most recently created deployment for an analysis, or ``None``.""" with self.SessionLocal() as session: - deployments = session.query(AnalysisDB).filter_by(**{'analysis_id': analysis_id}).all() - if deployments: - return deployments[-1] - return None + deployment = session.query(AnalysisDB).filter_by(**{'analysis_id': analysis_id}).order_by(AnalysisDB.time_created.desc()).first() + return deployment def analysis_is_running(self, analysis_id: str) -> bool: + """Return True if the latest deployment is not in a terminal status. + + Terminal statuses are ``EXECUTED``, ``STOPPED``, and ``FAILED``. + """ latest_deployment = self.get_latest_deployment(analysis_id) if latest_deployment is not None: return latest_deployment.status not in [AnalysisStatus.EXECUTED.value, @@ -48,6 +67,7 @@ def analysis_is_running(self, analysis_id: str) -> bool: return False def get_deployments(self, analysis_id: str) -> list[AnalysisDB]: + """Return every deployment row recorded for an analysis (all restarts).""" with self.SessionLocal() as session: return session.query(AnalysisDB).filter_by(**{'analysis_id': analysis_id}).all() @@ -55,8 +75,9 @@ def create_analysis(self, analysis_id: str, deployment_name: str, project_id: str, - pod_ids: list[str], + pod_ids: Optional[list[str]], status: str, + log: Optional[str], registry_url: str, image_url: str, registry_user: str, @@ -65,11 +86,17 @@ def create_analysis(self, restart_counter: int, progress: int, namespace: str = 'default') -> AnalysisDB: + """Insert a new analysis deployment row and return the persisted object. + + ``pod_ids`` is stored JSON-encoded and ``time_created`` is stamped with + the current Unix time. 
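+
+        For example (illustrative)::
+
+            >>> json.dumps(['pod-a', 'pod-b'])
+            '["pod-a", "pod-b"]'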
+ """ analysis = AnalysisDB(analysis_id=analysis_id, deployment_name=deployment_name, project_id=project_id, pod_ids=json.dumps(pod_ids), status=status, + log=log, registry_url=registry_url, image_url=image_url, registry_user=registry_user, @@ -86,6 +113,15 @@ def create_analysis(self, return analysis def update_analysis(self, analysis_id: str, **kwargs) -> list[AnalysisDB]: + """Apply ``kwargs`` as column updates to every deployment for an analysis. + + Args: + analysis_id: Analysis whose deployment rows should be updated. + **kwargs: Column/value pairs to ``setattr`` on each row. + + Returns: + The list of updated deployment rows. + """ with self.SessionLocal() as session: analysis = session.query(AnalysisDB).filter_by(**{'analysis_id': analysis_id}).all() for deployment in analysis: @@ -97,6 +133,15 @@ def update_analysis(self, analysis_id: str, **kwargs) -> list[AnalysisDB]: return analysis def update_deployment(self, deployment_name: str, **kwargs) -> AnalysisDB: + """Apply ``kwargs`` as column updates to a single deployment row. + + Args: + deployment_name: Unique deployment name to update. + **kwargs: Column/value pairs to ``setattr`` on the row. + + Returns: + The updated deployment row. + """ with self.SessionLocal() as session: deployment = session.query(AnalysisDB).filter_by(**{'deployment_name': deployment_name}).first() for key, value in kwargs.items(): @@ -105,6 +150,7 @@ def update_deployment(self, deployment_name: str, **kwargs) -> AnalysisDB: return deployment def delete_analysis(self, analysis_id: str) -> None: + """Delete every deployment row belonging to an analysis.""" with self.SessionLocal() as session: analysis = session.query(AnalysisDB).filter_by(**{'analysis_id': analysis_id}).all() for deployment in analysis: @@ -113,6 +159,7 @@ def delete_analysis(self, analysis_id: str) -> None: session.commit() def delete_deployment(self, deployment_name: str) -> None: + """Delete a single deployment row by its unique name.""" with self.SessionLocal() as session: deployment = session.query(AnalysisDB).filter_by(deployment_name=deployment_name).first() if deployment: @@ -120,24 +167,30 @@ def delete_deployment(self, deployment_name: str) -> None: session.commit() def close(self) -> None: + """Open and immediately close a session to flush pooled connections.""" with self.SessionLocal() as session: session.close() def get_analysis_ids(self) -> list[str]: + """Return every analysis id currently tracked in the database.""" with self.SessionLocal() as session: return [analysis.analysis_id for analysis in session.query(AnalysisDB).all() if analysis is not None] def get_deployment_ids(self) -> list[str]: + """Return every deployment name currently tracked in the database.""" with self.SessionLocal() as session: return [analysis.deployment_name for analysis in session.query(AnalysisDB).all() if analysis is not None] def get_deployment_pod_ids(self, deployment_name: str) -> list[str]: + """Return the JSON-encoded pod id list recorded for a single deployment.""" return self.get_deployment(deployment_name).pod_ids def get_analysis_pod_ids(self, analysis_id: str) -> list[str]: + """Return the JSON-encoded pod id list for each deployment of an analysis.""" return [deployment.pod_ids for deployment in self.get_deployments(analysis_id) if deployment is not None] def get_analysis_log(self, analysis_id: str) -> str: + """Return the accumulated log string for the latest deployment, or ``""``.""" deployment = self.get_latest_deployment(analysis_id) if deployment is not None: log = deployment.log 
@@ -146,6 +199,7 @@ def get_analysis_log(self, analysis_id: str) -> str: return "" def get_analysis_progress(self, analysis_id: str) -> Optional[int]: + """Return the latest recorded progress (0-100), or ``None``.""" deployment = self.get_latest_deployment(analysis_id) if deployment is not None: progress = deployment.progress @@ -154,31 +208,45 @@ def get_analysis_progress(self, analysis_id: str) -> Optional[int]: return None def update_analysis_log(self, analysis_id: str, log: str) -> None: + """Append ``log`` to the existing log column for every deployment of an analysis.""" latest = self.get_analysis_log(analysis_id) if latest: log = latest + "\n" + log self.update_analysis(analysis_id, log=log) def progress_valid(self, analysis_id: str, progress: int) -> bool: + """Return True if ``progress`` is strictly greater than stored progress and ``<= 100``.""" latest = self.get_analysis_progress(analysis_id) if (latest is not None) and (latest < progress <= 100): return True return False def update_analysis_progress(self, analysis_id: str, progress: int) -> None: + """Set the progress column for every deployment of an analysis.""" self.update_analysis(analysis_id, progress=progress) def update_analysis_status(self, analysis_id: str, status: str) -> None: + """Set the status column for every deployment of an analysis.""" self.update_analysis(analysis_id, status=status) def update_deployment_status(self, deployment_name: str, status: str) -> None: - print(f"PO ACTION - Updating deployment {deployment_name} to status {status}") + """Set the status column for a single deployment (logged at ACTION level).""" + logger.action(f"Updating deployment {deployment_name} to status {status}") self.update_deployment(deployment_name, status=status) def stop_analysis(self, analysis_id: str) -> None: + """Mark every deployment of an analysis as ``STOPPED`` in the database.""" self.update_analysis_status(analysis_id, status=AnalysisStatus.STOPPED.value) def extract_analysis_body(self, analysis_id: str) -> Optional[dict]: + """Return the subset of fields needed to recreate an analysis from the first stored deployment. + + Used when unstucking an analysis to rebuild a ``CreateAnalysis`` body. + + Returns: + Dict with registry/namespace/token fields and ``progress=0``, or + ``None`` when the analysis is unknown. + """ analysis = self.get_deployments(analysis_id) if analysis: analysis = analysis[0] @@ -195,6 +263,11 @@ def extract_analysis_body(self, analysis_id: str) -> Optional[dict]: return None def delete_old_deployments_from_db(self, analysis_id: str) -> None: + """Keep only the most recent deployment for an analysis; delete the rest. + + Used after a restart/unstuck so history does not accumulate stale + deployment rows. 
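+
+        Example (illustrative): after two restarts of an analysis ``X``, the
+        rows ``analysis-X-0`` and ``analysis-X-1`` are removed and only
+        ``analysis-X-2`` is kept.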
+ """ deployments = self.get_deployments(analysis_id) deployments = sorted(deployments, key=lambda x: x.time_created, reverse=True) for deployment in deployments[1:]: diff --git a/src/resources/log/entity.py b/src/resources/log/entity.py index 3ea2317..5ac3369 100644 --- a/src/resources/log/entity.py +++ b/src/resources/log/entity.py @@ -9,17 +9,22 @@ class LogEntity(BaseModel): + """A persisted log line with an id and ISO-ish timestamp.""" + log: str log_type: Literal['emerg', 'alert', 'crit', 'error', 'warn', 'notice', 'info', 'debug'] - id: str = str(uuid.uuid4()) - created_at: str = str(datetime.now()) + id: str + created_at: str def __str__(self) -> str: + """Render the log entity as the string stored in the database column.""" return f"LogEntity(id={self.id}, log={self.log}, log_type={self.log_type}, created_at={self.created_at})" class CreateLogEntity(BaseModel): + """Request body accepted by ``POST /po/stream_logs`` from analysis pods.""" + log: str log_type: Literal['emerg', 'alert', 'crit', 'error', 'warn', 'notice', 'info', 'debug'] @@ -28,17 +33,40 @@ class CreateLogEntity(BaseModel): progress: int def to_log_entity(self) -> LogEntity: - return LogEntity(log=self.log, - log_type=self.log_type) + """Materialize a :class:`LogEntity` with a fresh uuid and timestamp.""" + return LogEntity( + log=self.log, + log_type=self.log_type, + id=str(uuid.uuid4()), + created_at=str(datetime.now()) + ) class CreateStartUpErrorLog(CreateLogEntity): + """Pre-formatted error log emitted when an analysis fails to start. + + Covers three error categories: ``stuck`` (cannot reach node services), + ``slow`` (exceeded startup budget), and ``k8s`` (Kubernetes deployment + error). The message includes the current restart count and whether the + analysis will be terminated. + """ + def __init__(self, restart_num: int, error_type: Literal['stuck', 'slow', 'k8s'], analysis_id: str, status: str, k8s_error_msg: str = '') -> None: + """Build the error log message. + + Args: + restart_num: Current restart counter (0-indexed attempt number). + error_type: One of ``stuck``, ``slow``, ``k8s``. + analysis_id: Analysis the log belongs to. + status: Current analysis status to forward to the Hub. + k8s_error_msg: Optional Kubernetes error reason appended for the + ``k8s`` error type. + """ term_msg = "" if restart_num < _MAX_RESTARTS else " -> Terminating analysis as failed." 
if error_type == "stuck": log = (f"[flame -- POAPI: ANALYSISSTARTUPERROR -- " @@ -64,7 +92,10 @@ def __init__(self, class AnalysisStoppedLog(CreateLogEntity): + """Pre-formatted info log emitted whenever an analysis is stopped.""" + def __init__(self, analysis_id: str) -> None: + """Build the stop log for ``analysis_id`` with status ``stopped``.""" log = (f"[flame -- POAPI: ANALYSISSTOPPED -- " f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())}] " f"Info: The analysis was stopped either locally, or externally on another node.") diff --git a/src/resources/utils.py b/src/resources/utils.py index 3923744..06723ab 100644 --- a/src/resources/utils.py +++ b/src/resources/utils.py @@ -2,6 +2,7 @@ import time from typing import Union +from fastapi import HTTPException from flame_hub import CoreClient from src.resources.database.entity import Database @@ -16,9 +17,33 @@ update_hub_status, get_node_analysis_id) from src.utils.other import resource_name_to_analysis +from src.utils.po_logging import get_logger +from src.utils.other import is_uuid + + +logger = get_logger() def create_analysis(body: Union[CreateAnalysis, str], database: Database) -> dict[str, str]: + """Create and start a new analysis deployment. + + Validates the UUIDs, provisions the Harbor pull secret, constructs the + :class:`Analysis` model, deploys it, and pushes the ``STARTED`` status to + the FLAME Hub. + + Args: + body: Either a :class:`CreateAnalysis` payload or an analysis id used + to rebuild the payload from the database (restart case). + database: Database wrapper used for persistence. + + Returns: + Mapping ``{analysis_id: status}`` for the newly started deployment. + When the analysis id cannot be resolved from the database, returns + ``{'status': 'Analysis ID not found in database.'}``. + + Raises: + HTTPException: 400 if ``analysis_id`` or ``project_id`` is not a UUID. + """ namespace = get_current_namespace() if isinstance(body, str): @@ -28,6 +53,10 @@ def create_analysis(body: Union[CreateAnalysis, str], database: Database) -> dic else: body = CreateAnalysis(**body) + if not(is_uuid(body.analysis_id) or is_uuid(body.project_id)): + logger.error(f"Received request to create analysis with ID {body.analysis_id} for project {body.project_id}") + raise HTTPException(status_code=400, detail="Analysis ID and Project ID must be valid UUIDs.") + create_harbor_secret(body.registry_url, body.registry_user, body.registry_password, namespace=namespace) analysis = Analysis( @@ -51,11 +80,18 @@ def create_analysis(body: Union[CreateAnalysis, str], database: Database) -> dic def retrieve_history(analysis_id_str: str, database: Database) -> dict[str, dict[str, list[str]]]: - """ - Retrieve the history of logs for a given analysis - :param analysis_id_str: - :param database: - :return: + """Return the persisted analysis and nginx logs for terminated analyses. + + Only deployments in ``STOPPED``, ``EXECUTED``, or ``FAILED`` are included. + The stored log column is parsed back into a dictionary via ``ast.literal_eval``. + + Args: + analysis_id_str: Specific analysis id or the literal string ``"all"``. + database: Database wrapper used for the lookup. + + Returns: + Nested mapping ``{'analysis': {analysis_id: [...]}, + 'nginx': {analysis_id: [...]}}``. 
""" if analysis_id_str == 'all': analysis_ids = database.get_analysis_ids() @@ -82,6 +118,16 @@ def retrieve_history(analysis_id_str: str, database: Database) -> dict[str, dict def retrieve_logs(analysis_id_str: str, database: Database) -> dict[str, dict[str, list[str]]]: + """Return live pod logs for analyses currently in ``EXECUTING``. + + Args: + analysis_id_str: Specific analysis id or the literal string ``"all"``. + database: Database wrapper used to resolve deployment names. + + Returns: + Nested mapping ``{'analysis': {...}, 'nginx': {...}}`` returned by + :func:`get_analysis_logs`. + """ if analysis_id_str == 'all': analysis_ids = database.get_analysis_ids() else: @@ -98,6 +144,15 @@ def retrieve_logs(analysis_id_str: str, database: Database) -> dict[str, dict[st def get_status_and_progress(analysis_id_str: str, database: Database) -> dict[str, dict[str, str]]: + """Return the latest status and progress for one or all analyses. + + Args: + analysis_id_str: Specific analysis id or the literal string ``"all"``. + database: Database wrapper used for the lookup. + + Returns: + Mapping ``{analysis_id: {'status': str, 'progress': int}}``. + """ if analysis_id_str == 'all': analysis_ids = database.get_analysis_ids() else: @@ -114,6 +169,15 @@ def get_status_and_progress(analysis_id_str: str, database: Database) -> dict[st def get_pods(analysis_id_str: str, database: Database) -> dict[str, list[str]]: + """Return the recorded pod ids for one or all analyses. + + Args: + analysis_id_str: Specific analysis id or the literal string ``"all"``. + database: Database wrapper used for the lookup. + + Returns: + Mapping ``{analysis_id: [pod_id, ...]}``. + """ if analysis_id_str == 'all': analysis_ids = database.get_analysis_ids() else: @@ -122,6 +186,24 @@ def get_pods(analysis_id_str: str, database: Database) -> dict[str, list[str]]: def stop_analysis(analysis_id_str: str, database: Database) -> dict[str, str]: + """Stop one or all analyses, persisting logs and forwarding status to the Hub. + + For each analysis: + + * snapshots the current logs into the DB (so they are still retrievable + via ``/po/history``); + * deletes the Kubernetes deployment; + * preserves a terminal status (``FAILED``/``EXECUTED``/``STARTED``) if one + is already recorded, otherwise transitions to ``STOPPED``; + * pushes the final status to the FLAME Hub. + + Args: + analysis_id_str: Specific analysis id or the literal string ``"all"``. + database: Database wrapper used for persistence. + + Returns: + Mapping ``{analysis_id: final_status}``. 
+ """ if analysis_id_str == 'all': analysis_ids = database.get_analysis_ids() else: @@ -133,7 +215,6 @@ def stop_analysis(analysis_id_str: str, database: Database) -> dict[str, str]: if deployment is not None: deployments[analysis_id] = read_db_analysis(deployment) - final_status = None for analysis_id, deployment in deployments.items(): # save logs as string to database (will be read as dict in retrieve_history) log = str(get_analysis_logs({analysis_id: deployment.deployment_name}, database=database)) @@ -141,21 +222,28 @@ def stop_analysis(analysis_id_str: str, database: Database) -> dict[str, str]: AnalysisStatus.EXECUTED.value, AnalysisStatus.STARTED.value]: deployment.stop(database, log=log, status=deployment.status) - final_status = deployment.status else: deployment.stop(database, log=log) - # set final status (finished overwrites any other case) - if final_status is None: - final_status = AnalysisStatus.STOPPED.value - # update hub status - init_hub_client_and_update_hub_status_with_client(analysis_id, final_status) + init_hub_client_and_update_hub_status_with_client(analysis_id, deployment.status) return {analysis_id: deployment.status for analysis_id, deployment in deployments.items()} -def delete_analysis(analysis_id_str: str, database: Database) -> dict[str, str]: +def delete_analysis(analysis_id_str: str, database: Database) -> dict[str, None]: + """Stop and permanently remove one or all analyses. + + In addition to :func:`stop_analysis`, this deletes the matching Keycloak + client and removes the analysis rows from the database. + + Args: + analysis_id_str: Specific analysis id or the literal string ``"all"``. + database: Database wrapper used for persistence. + + Returns: + Mapping ``{analysis_id: None}`` acknowledging the deletions. + """ if analysis_id_str == 'all': analysis_ids = database.get_analysis_ids() else: @@ -168,17 +256,19 @@ def delete_analysis(analysis_id_str: str, database: Database) -> dict[str, str]: deployments[analysis_id] = read_db_analysis(deployment) for analysis_id, deployment in deployments.items(): - if deployment.status != AnalysisStatus.STOPPED.value: - deployment.stop(database, log='') - deployment.status = AnalysisStatus.STOPPED.value - + deployment.stop(database, log='') delete_keycloak_client(analysis_id) database.delete_analysis(analysis_id) - return {analysis_id: deployment.status for analysis_id, deployment in deployments.items()} + return {analysis_id: None for analysis_id, deployment in deployments.items()} def unstuck_analysis_deployments(analysis_id: str, database: Database) -> None: + """Stop and restart an analysis to recover from a stuck/slow state. + + Waits 10 seconds between stop and recreate to let Kubernetes settle, then + prunes historical deployment rows so only the latest one remains. + """ if database.get_latest_deployment(analysis_id) is not None: stop_analysis(analysis_id, database) time.sleep(10) # wait for k8s to update status @@ -189,11 +279,31 @@ def unstuck_analysis_deployments(analysis_id: str, database: Database) -> None: def cleanup(cleanup_type: str, database: Database, namespace: str = 'default') -> dict[str, str]: - cleanup_types = set(cleanup_type.split(',')) if ',' in cleanup_type else [cleanup_type] + """Run one or more targeted cleanup passes. + + Supported selectors (comma-separated allowed): + + * ``all`` — resets the database and reinitializes message broker, storage + service, and Keycloak clients. + * ``analyzes`` — resets the analysis database. 
+ * ``services`` / ``mb`` / ``rs`` — restart FLAME helper pods. + * ``keycloak`` — delete Keycloak clients without a matching analysis. + + :func:`clean_up_the_rest` is always appended under the ``zombies`` key. + + Args: + cleanup_type: Selector or comma-separated selectors. + database: Database wrapper used for persistence. + namespace: Namespace to search in. + + Returns: + Mapping ``{selector: summary_string}``. + """ + cleanup_types = cleanup_type.split(',') if ',' in cleanup_type else [cleanup_type] response_content = {} for cleanup_type in cleanup_types: - if cleanup_type in ['zombies', 'all', 'analyzes', 'services', 'mb', 'rs', 'keycloak']: + if cleanup_type in ['all', 'analyzes', 'services', 'mb', 'rs', 'keycloak']: # Analysis cleanup if cleanup_type in ['all', 'analyzes']: # cleanup all analysis deployments, associated services, policies and configmaps @@ -206,7 +316,7 @@ def cleanup(cleanup_type: str, message_broker_pod_name = find_k8s_resources('pod', 'label', "component=flame-message-broker", - namespace=namespace) + namespace=namespace)[0] delete_k8s_resource(message_broker_pod_name, 'pod', namespace) response_content[cleanup_type] = "Reset message broker" if cleanup_type in ['all', 'services', 'rs']: @@ -214,7 +324,7 @@ def cleanup(cleanup_type: str, storage_service_name = find_k8s_resources('pod', 'label', "component=flame-storage-service", - namespace=namespace) + namespace=namespace)[0] delete_k8s_resource(storage_service_name, 'pod', namespace) response_content[cleanup_type] = "Reset storage service" if cleanup_type in ['all', 'keycloak']: @@ -233,6 +343,20 @@ def cleanup(cleanup_type: str, def clean_up_the_rest(database: Database, namespace: str = 'default') -> str: + """Delete orphaned Kubernetes resources whose analysis is no longer tracked. + + Iterates over deployments, pods, services, network policies, and config + maps labelled as FLAME analysis resources, and removes any whose analysis + id is not present in the database. + + Args: + database: Database wrapper used to look up the known analysis ids. + namespace: Namespace to search in. + + Returns: + A human-readable newline-separated summary counting the zombies + deleted per resource type. 
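+
+    Example (illustrative summary)::
+
+        Deleted 2 zombie deployments
+        Deleted 1 zombie nginx-configmaps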
+ """ known_analysis_ids = database.get_analysis_ids() result_str = "" @@ -243,23 +367,35 @@ def clean_up_the_rest(database: Database, namespace: str = 'default') -> str: 'configmap': (["component=flame-nginx-analysis-config-map"], 2)}.items(): for selector_arg in selector_args: resources = find_k8s_resources(res, 'label', selector_arg, namespace=namespace) - resources = [resources] if type(resources) == str else resources - if resources is not None: - zombie_resources = [r for r in resources - if resource_name_to_analysis(r, max_r_split) not in known_analysis_ids] - for z in zombie_resources: - delete_k8s_resource(z, res, namespace=namespace) - result_str += f"Deleted {len(zombie_resources)} zombie " + \ - f"{'' if '-nginx' not in selector_arg else 'nginx-'}{res}s\n" + zombie_resources = [r for r in resources + if (r is not None) and (resource_name_to_analysis(r, max_r_split) not in known_analysis_ids)] + for z in zombie_resources: + delete_k8s_resource(z, res, namespace=namespace) + result_str += f"Deleted {len(zombie_resources)} zombie " + \ + f"{'' if '-nginx' not in selector_arg else 'nginx-'}{res}s\n" return result_str def stream_logs(log_entity: CreateLogEntity, node_id: str, enable_hub_logging: bool, database: Database, hub_core_client: CoreClient) -> None: + """Persist a log line and mirror status/progress into the FLAME Hub. + + * Appends the serialized log to the analysis row in the database. + * If ``enable_hub_logging`` is set, pushes the log to the Hub. + * If the reported progress is newer than what is stored, updates both the + DB progress and the Hub status+progress; otherwise only the Hub status + is refreshed. + + Args: + log_entity: Structured log body posted by the analysis. + node_id: This node's id in the FLAME Hub. + enable_hub_logging: Whether to forward logs to the Hub. + database: Database wrapper used for persistence. + hub_core_client: Initialized Hub core client. + """ try: database.update_analysis_log(log_entity.analysis_id, str(log_entity.to_log_entity())) - #database.update_analysis_status(log_entity.analysis_id, log_entity.status) # TODO: Implement this? except IndexError as e: - print(f"Error: Failed to update analysis log in database\n{e}") + logger.error(f"Failed to update analysis log in database: {repr(e)}") # log to hub if enable_hub_logging: diff --git a/src/status/constants.py b/src/status/constants.py index f0f9acd..5a389a9 100644 --- a/src/status/constants.py +++ b/src/status/constants.py @@ -8,6 +8,13 @@ class AnalysisStatus(Enum): + """Canonical status values tracked for an analysis. + + Includes both persisted statuses (``STARTING``, ``STARTED``, + ``EXECUTING``, ``EXECUTED``, ``STOPPED``, ``FAILED``) and the transient + ``STUCK`` status that is only observed via the internal health endpoint. + """ + STARTING = 'starting' STARTED = 'started' diff --git a/src/status/status.py b/src/status/status.py index 6755214..9ca4038 100644 --- a/src/status/status.py +++ b/src/status/status.py @@ -23,13 +23,28 @@ from src.utils.other import extract_hub_envs from src.utils.token import get_keycloak_token from src.status.constants import _MAX_RESTARTS, _INTERNAL_STATUS_TIMEOUT +from src.utils.po_logging import get_logger + + +logger = get_logger() def status_loop(database: Database, status_loop_interval: int) -> None: - """ - Send the status of the analysis to the HUB, kill deployment if analysis finished + """Run the blocking background loop that reconciles analyses with the Hub. 
+
+    On each iteration the loop:
-    :return:
+    * (re)initializes the Hub client if needed;
+    * iterates every running analysis, fetches its node-analysis id and the
+      partner node statuses;
+    * queries the internal analysis health endpoint, decides whether the
+      analysis is stuck, newly running, or finishing, and applies the
+      matching transition (restart, status update, or deletion);
+    * submits the final Hub status for the iteration.
+
+    Args:
+        database: Database wrapper used for all persistence.
+        status_loop_interval: Seconds between iterations.
    """
    hub_client = None
    node_id = None
@@ -40,42 +55,57 @@
    # Enter lifecycle loop
    while True:
        if hub_client is None:
-            hub_client = init_hub_client_with_client(client_id,
-                                                     client_secret,
-                                                     hub_url_core,
-                                                     hub_auth,
-                                                     http_proxy,
-                                                     https_proxy)
-            node_id = get_node_id_by_client(hub_client, client_id)
+            node_id = None
+            client_params = (client_id,
+                             client_secret,
+                             hub_url_core,
+                             hub_auth,
+                             http_proxy,
+                             https_proxy)
+            if all(p is not None for p in client_params):
+                hub_client = init_hub_client_with_client(*client_params)
+            else:
+                logger.error(f"One or more hub client initialization parameters are None.\n"
+                             f"Check values file for given parameters:\n"
+                             f"\t* HUB_CLIENT_ID={client_id}{'' if client_id is not None else ' <- review this'}\n"
+                             f"\t* HUB_CLIENT_SECRET={'*******' if client_secret is not None else None}{'' if client_secret is not None else ' <- review this'}\n"
+                             f"\t* HUB_URL_CORE={hub_url_core}{'' if hub_url_core is not None else ' <- review this'}\n"
+                             f"\t* HUB_URL_AUTH={hub_auth}{'' if hub_auth is not None else ' <- review this'}\n"
+                             f"\t* PO_HTTP_PROXY={http_proxy}{'' if http_proxy is not None else ' <- review this'}\n"
+                             f"\t* PO_HTTPS_PROXY={https_proxy}{'' if https_proxy is not None else ' <- review this'}")
+                raise ValueError("One or more hub client initialization parameters are None.")
+            if all(p is not None for p in (hub_client, client_id)):
+                node_id = get_node_id_by_client(hub_client, client_id)
 
        # Catch unresponsive hub client
        if node_id is None:
-            print("PO ACTION - Resetting hub client...")
+            logger.action("Resetting hub client...")
            hub_client = None
+            time.sleep(status_loop_interval)
            continue
        else:
            # If running analyzes exist, enter status loop
            running_analyzes = [analysis_id for analysis_id in database.get_analysis_ids()
                                if database.analysis_is_running(analysis_id)]
-            print(f"PO ACTION - Checking for running analyzes...{running_analyzes}")
+            logger.action(f"Checking for running analyzes...{running_analyzes}")
            if running_analyzes:
                hub_client_issues = 0
                for analysis_id in running_analyzes:
-                    print(f"PO STATUS LOOP - Current analysis id: {analysis_id}")
+                    logger.status_loop(f"Current analysis id: {analysis_id}")
                    # Get node analysis id
                    if analysis_id not in node_analysis_ids.keys():
                        node_analysis_id = get_node_analysis_id(hub_client, analysis_id, node_id)
                        if node_analysis_id is not None:
                            node_analysis_ids[analysis_id] = node_analysis_id
                        else:
-                            print(f"Error: Retrieving node_analysis id for malformed analysis returned None "
-                                  f"(analysis_id={analysis_id})... Skipping")
+                            logger.warning(f"Retrieving node_analysis id for malformed analysis returned None "
+                                           f"(analysis_id={analysis_id})... 
Skipping") hub_client_issues += 1 continue else: node_analysis_id = node_analysis_ids[analysis_id] # If node analysis id found - print(f"\tNode analysis id: {node_analysis_id}") + logger.info(f"\tNode analysis id: {node_analysis_id}") if node_analysis_id is not None: try: # Inform local analysis of partner node statuses @@ -84,18 +114,19 @@ def status_loop(database: Database, status_loop_interval: int) -> None: analysis_id, node_analysis_id) except Exception as e: - print(f"\tPO STATUS LOOP - Error when attempting to access partner_status endpoint of {analysis_id} ({repr(e)})") + logger.status_loop(f"Error when attempting to access partner_status endpoint of " + f"{analysis_id} ({repr(e)})") # Retrieve analysis status (skip iteration if analysis is not deployed) analysis_status = _get_analysis_status(analysis_id, database) if analysis_status is None: continue - print(f"\tDatabase status: {analysis_status['db_status']}") - print(f"\tInternal status: {analysis_status['int_status']}") + logger.debug(f"Database status: {analysis_status['db_status']}") + logger.debug(f"Internal status: {analysis_status['int_status']}") # Fix stuck analyzes if analysis_status['status_action'] == 'unstuck': - print(f"\tUnstuck analysis with internal status: {analysis_status['int_status']}") + logger.info(f"Unstuck analysis with internal status: {analysis_status['int_status']}") _fix_stuck_status(database, analysis_status, node_id, enable_hub_logging, hub_client) # Update analysis status (skip iteration if analysis is not deployed) analysis_status = _get_analysis_status(analysis_id, database) @@ -104,7 +135,7 @@ def status_loop(database: Database, status_loop_interval: int) -> None: # Update created to running status if analysis_status['status_action'] == 'running': - print(f"\tUpdate created-to-running database status: {analysis_status['db_status']}") + logger.info(f"Update created-to-running database status: {analysis_status['db_status']}") _update_running_status(database, analysis_status) # Update analysis status (skip iteration if analysis is not deployed) analysis_status = _get_analysis_status(analysis_id, database) @@ -113,7 +144,7 @@ def status_loop(database: Database, status_loop_interval: int) -> None: # Update running to finished status if analysis_status['status_action'] == 'finishing': - print(f"\tUpdate running-to-finished database status: {analysis_status['db_status']}") + logger.info(f"Update running-to-finished database status: {analysis_status['db_status']}") _update_finished_status(database, analysis_status) # Update analysis status (skip iteration if analysis is not deployed) analysis_status = _get_analysis_status(analysis_id, database) @@ -122,37 +153,64 @@ def status_loop(database: Database, status_loop_interval: int) -> None: # Submit analysis_status to hub analysis_hub_status = _set_analysis_hub_status(hub_client, node_analysis_id, analysis_status) - print(f"\tSet Hub analysis status with node_analysis={node_analysis_id}, " - f"db_status={analysis_status['db_status']}, " - f"internal_status={analysis_status['int_status']} " - f"to {analysis_hub_status}") + logger.info(f"Set Hub analysis status with node_analysis={node_analysis_id}, " + f"db_status={analysis_status['db_status']}, " + f"internal_status={analysis_status['int_status']} " + f"to {analysis_hub_status}") time.sleep(status_loop_interval) - print(f"PO STATUS LOOP - Status loop iteration completed. Sleeping for {status_loop_interval} seconds.") + logger.status_loop(f"Iteration completed. 
Sleeping for {status_loop_interval} seconds.") + def inform_analysis_of_partner_statuses(database: Database, hub_client: flame_hub.CoreClient, analysis_id: str, node_analysis_id: str) -> Optional[dict[str, str]]: + """Push partner-node statuses into the local analysis' ``/partner_status`` endpoint. + + Args: + database: Database wrapper used to look up the deployment name. + hub_client: Initialized Hub core client. + analysis_id: Analysis to update. + node_analysis_id: The local node's analysis id in the Hub. + + Returns: + The analysis response parsed as JSON, or ``None`` when the analysis + API is not (yet) reachable. + """ node_statuses = get_partner_node_statuses(hub_client, analysis_id, node_analysis_id) deployment_name = database.get_latest_deployment(analysis_id).deployment_name + client = Client(base_url=f"http://nginx-{deployment_name}:{PORTS['nginx'][0]}") try: # try except, in case analysis api is not yet ready - response = Client(base_url=f"http://nginx-{deployment_name}:{PORTS['nginx'][0]}").post(url="/analysis/partner_status", - headers=[('Connection', 'close')], - json={'partner_status': node_statuses}) + response = client.post(url="/analysis/partner_status", + headers=[('Connection', 'close')], + json={'partner_status': node_statuses}) response.raise_for_status() + client.close() return response.json() except HTTPStatusError as e: - print(f"\tError whilst trying to access analysis partner_status endpoint: {e}") + logger.warning(f"Error whilst trying to access analysis partner_status endpoint: {repr(e)}") except ConnectError as e: - print(f"\tConnection to http://nginx-{deployment_name}:{PORTS['nginx'][0]} yielded an error: {e}") + logger.warning(f"Connection to http://nginx-{deployment_name}:{PORTS['nginx'][0]} yielded an error: {repr(e)}") except ConnectTimeout as e: - print(f"\tConnection to http://nginx-{deployment_name}:{PORTS['nginx'][0]} timed out: {e}") + logger.warning(f"Connection to http://nginx-{deployment_name}:{PORTS['nginx'][0]} timed out: {repr(e)}") + client.close() return None def _get_analysis_status(analysis_id: str, database: Database) -> Optional[dict[str, str]]: + """Combine DB and internal status for an analysis and pick the next action. + + Args: + analysis_id: Analysis to inspect. + database: Database wrapper used for persistence. + + Returns: + Dict with ``analysis_id``, ``db_status``, ``int_status``, and + ``status_action`` (one of ``unstuck``, ``running``, ``finishing``, or + ``None``). Returns ``None`` when the analysis has no deployment. + """ analysis = database.get_latest_deployment(analysis_id) if analysis is not None: db_status = analysis.status @@ -171,13 +229,18 @@ def _get_analysis_status(analysis_id: str, database: Database) -> Optional[dict[ def _decide_status_action(db_status: str, int_status: str) -> Optional[str]: - is_stuck = int_status == AnalysisStatus.STUCK.value - is_slow = ((db_status in [AnalysisStatus.STARTED.value]) and (int_status in [AnalysisStatus.FAILED.value])) - newly_running = ((db_status in [AnalysisStatus.STARTED.value]) and (int_status in [AnalysisStatus.EXECUTING.value])) - speedy_finished = ((db_status in [AnalysisStatus.STARTED.value]) and (int_status in [AnalysisStatus.EXECUTED.value])) + """Map the (db_status, int_status) pair to a reconciliation action. + + Returns one of ``'unstuck'``, ``'running'``, ``'finishing'``, or ``None`` + when no action is needed. 
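+
+    Example (illustrative, per the mapping described above)::
+
+        >>> _decide_status_action('started', 'failed')    # slow startup
+        'unstuck'
+        >>> _decide_status_action('started', 'executing')
+        'running'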
+ """ + is_stuck = (db_status not in [AnalysisStatus.FAILED.value]) and (int_status in [AnalysisStatus.STUCK.value]) + is_slow = (db_status in [AnalysisStatus.STARTED.value]) and (int_status in [AnalysisStatus.FAILED.value]) + newly_running = (db_status in [AnalysisStatus.STARTED.value]) and (int_status in [AnalysisStatus.EXECUTING.value]) + speedy_finished = (db_status in [AnalysisStatus.STARTED.value]) and (int_status in [AnalysisStatus.EXECUTED.value]) newly_ended = ((db_status in [AnalysisStatus.EXECUTING.value, AnalysisStatus.FAILED.value]) and (int_status in [AnalysisStatus.EXECUTED.value, AnalysisStatus.FAILED.value])) - firmly_stuck = ((db_status in [AnalysisStatus.FAILED.value]) and (int_status in [AnalysisStatus.STUCK.value])) + firmly_stuck = (db_status in [AnalysisStatus.FAILED.value]) and (int_status in [AnalysisStatus.STUCK.value]) was_stopped = int_status == AnalysisStatus.STOPPED.value if is_stuck or is_slow: return 'unstuck' @@ -190,25 +253,41 @@ def _decide_status_action(db_status: str, int_status: str) -> Optional[str]: def _get_internal_deployment_status(deployment_name: str, analysis_id: str) -> str: + """Poll the analysis ``/healthz`` endpoint and derive the internal status. + + Retries on connection errors until ``_INTERNAL_STATUS_TIMEOUT`` is hit, at + which point ``FAILED`` is returned. Also refreshes the Keycloak token + when the analysis reports it is close to expiry. + + Args: + deployment_name: Name of the analysis deployment (used to resolve + the nginx sidecar URL). + analysis_id: Analysis id used to mint a refreshed Keycloak token. + + Returns: + One of ``EXECUTED``, ``EXECUTING``, ``STUCK``, or ``FAILED``. + """ # Attempt to retrieve internal analysis status via health endpoint start_time = time.time() + client = Client(base_url=f"http://nginx-{deployment_name}:{PORTS['nginx'][0]}") while True: try: - response = Client(base_url=f"http://nginx-{deployment_name}:{PORTS['nginx'][0]}").get("/analysis/healthz", - headers=[('Connection', 'close')]) + response = client.get("/analysis/healthz", headers=[('Connection', 'close')]) response.raise_for_status() + client.close() break except HTTPStatusError as e: - print(f"\tError whilst retrieving internal deployment status: {e}") + logger.warning(f"Error whilst retrieving internal deployment status: {repr(e)}") except ConnectError as e: - print(f"\tConnection to http://nginx-{deployment_name}:{PORTS['nginx'][0]} yielded an error: {e}") + logger.warning(f"Connection to http://nginx-{deployment_name}:{PORTS['nginx'][0]} yielded an error: {repr(e)}") except ConnectTimeout as e: - print(f"\tConnection to http://nginx-{deployment_name}:{PORTS['nginx'][0]} timed out: {e}") + logger.warning(f"Connection to http://nginx-{deployment_name}:{PORTS['nginx'][0]} timed out: {repr(e)}") elapsed_time = time.time() - start_time - time.sleep(1) if elapsed_time > _INTERNAL_STATUS_TIMEOUT: - print(f"\tTimeout getting internal deployment status after {elapsed_time} seconds") + logger.error(f"Timeout getting internal deployment status after {elapsed_time:.1f} seconds") + client.close() return AnalysisStatus.FAILED.value + time.sleep(1) # Extract fields from response analysis_status, analysis_token_remaining_time = (response.json()['status'], @@ -231,19 +310,29 @@ def _get_internal_deployment_status(deployment_name: str, analysis_id: str) -> s def _refresh_keycloak_token(deployment_name: str, analysis_id: str, token_remaining_time: int) -> None: + """Push a fresh Keycloak token to the analysis if the current one is near expiry. 
+ + Refresh is triggered when the remaining lifetime is less than two status + loop intervals plus one second. + + Args: + deployment_name: Name of the analysis deployment (used to resolve + the nginx sidecar URL). + analysis_id: Analysis id used to mint a new Keycloak token. + token_remaining_time: Remaining token lifetime in seconds as reported + by the analysis health endpoint. """ - Refresh the keycloak token - :return: - """ - if token_remaining_time < (int(os.getenv('STATUS_LOOP_INTERVAL')) * 2 + 1): + if token_remaining_time < (int(os.getenv('STATUS_LOOP_INTERVAL', '10')) * 2 + 1): keycloak_token = get_keycloak_token(analysis_id) + client = Client(base_url=f"http://nginx-{deployment_name}:{PORTS['nginx'][0]}") try: - response = Client(base_url=f"http://nginx-{deployment_name}:{PORTS['nginx'][0]}").post("/analysis/token_refresh", - json={'token': keycloak_token}, - headers=[('Connection', 'close')]) + response = client.post("/analysis/token_refresh", + json={'token': keycloak_token}, + headers=[('Connection', 'close')]) response.raise_for_status() except HTTPStatusError as e: - print(f"Error: Failed to refresh keycloak token in deployment {deployment_name}.\n{e}") + logger.error(f"Failed to refresh keycloak token in deployment {deployment_name}: {repr(e)}") + client.close() def _fix_stuck_status(database: Database, @@ -251,6 +340,15 @@ def _fix_stuck_status(database: Database, node_id: str, enable_hub_logging: bool, hub_client: flame_hub.CoreClient) -> None: + """Restart a stuck/slow analysis or mark it failed once ``_MAX_RESTARTS`` is hit. + + Args: + database: Database wrapper used for persistence. + analysis_status: Status dict produced by :func:`_get_analysis_status`. + node_id: This node's id in the FLAME Hub. + enable_hub_logging: Whether to forward the error log to the Hub. + hub_client: Initialized Hub core client. + """ analysis = database.get_latest_deployment(analysis_status['analysis_id']) if analysis is not None: is_slow = ((analysis_status['db_status'] in [AnalysisStatus.STARTED.value]) and @@ -271,6 +369,21 @@ def _stream_stuck_logs(analysis: AnalysisDB, database: Database, hub_client: flame_hub.CoreClient, is_slow: bool) -> None: + """Emit a startup-error log matching the observed failure mode. + + When ``is_slow`` is ``True``, the pod status is inspected to distinguish + a ``slow`` deployment from a ``k8s`` error; otherwise a ``stuck`` log is + streamed. + + Args: + analysis: The deployment row being diagnosed. + node_id: This node's id in the FLAME Hub. + enable_hub_logging: Whether to forward the log to the Hub. + database: Database wrapper used for persistence. + hub_client: Initialized Hub core client. + is_slow: Whether the analysis is classified as slow/failed rather + than stuck. + """ # If is_slow=True differentiate between slow, or kubernetes_error state, else assume stuck state is_k8s_related = False if is_slow: @@ -283,8 +396,8 @@ def _stream_stuck_logs(analysis: AnalysisDB, # ready=True implicates slow state, else assume kubernetes_error state if not ready: is_k8s_related = True - print(f"\tDeployment of analysis={analysis.analysis_id} failed (ready={ready}).\n" - f"\t\t{reason}: {message}") + logger.error(f"Deployment of analysis={analysis.analysis_id} failed (ready={ready}). 
" + f"{reason}: {message}") # Create and stream POAPIError logs or either slow, stuck, or kubernetes_error state to Hub stream_logs(CreateStartUpErrorLog(analysis.restart_counter, @@ -299,29 +412,45 @@ def _stream_stuck_logs(analysis: AnalysisDB, def _update_running_status(database: Database, analysis_status: dict[str, str]) -> None: + """Transition the latest deployment from ``STARTED`` to ``EXECUTING`` in the DB.""" analysis = database.get_latest_deployment(analysis_status['analysis_id']) if analysis is not None: database.update_deployment_status(analysis.deployment_name, AnalysisStatus.EXECUTING.value) def _update_finished_status(database: Database, analysis_status: dict[str, str]) -> None: + """Record the final internal status and either delete or stop the analysis. + + ``EXECUTED`` triggers a full delete (removing the analysis row and + Keycloak client); anything else triggers a stop that retains the row for + history. + """ analysis = database.get_latest_deployment(analysis_status['analysis_id']) if analysis is not None: - database.update_deployment_status(analysis.deployment_name, analysis_status['int_status']) + finished_status = analysis_status['int_status'] \ + if analysis_status['int_status'] != AnalysisStatus.STUCK.value else AnalysisStatus.FAILED.value + database.update_deployment_status(analysis.deployment_name, finished_status) if analysis_status['int_status'] == AnalysisStatus.EXECUTED.value: - print("\tDelete deployment") - # TODO: final local log save (minio?) # archive logs - # delete_analysis(analysis_status['analysis_id'], database) # delete analysis from database - stop_analysis(analysis_status['analysis_id'], database) # stop analysis TODO: Change to delete in the future (when archive logs implemented) + logger.info("Delete deployment") + delete_analysis(analysis_status['analysis_id'], database) # delete analysis from database else: - print("\tStop deployment") + logger.info("Stop deployment") stop_analysis(analysis_status['analysis_id'], database) # stop analysis def _set_analysis_hub_status(hub_client: flame_hub.CoreClient, node_analysis_id: str, analysis_status: dict[str, str]) -> str: - if analysis_status['db_status'] in [AnalysisStatus.FAILED.value, + """Push the reconciled status to the Hub and return what was submitted. + + Prefers a terminal DB status, otherwise trusts the internal status when + it is executing/executed/failed, otherwise falls back to the DB status. + + Returns: + The status string that was forwarded to the Hub. + """ + if analysis_status['db_status'] in [AnalysisStatus.STARTED.value, + AnalysisStatus.FAILED.value, AnalysisStatus.EXECUTED.value]: analysis_hub_status = analysis_status['db_status'] elif analysis_status['int_status'] in [AnalysisStatus.FAILED.value, diff --git a/src/utils/hub_client.py b/src/utils/hub_client.py index 9f3805f..12f0d4a 100644 --- a/src/utils/hub_client.py +++ b/src/utils/hub_client.py @@ -15,6 +15,11 @@ import flame_hub from src.status.constants import AnalysisStatus +from src.utils.po_logging import get_logger +from src.utils.other import extract_hub_envs + + +logger = get_logger() def init_hub_client_with_client(client_id: str, @@ -23,13 +28,29 @@ def init_hub_client_with_client(client_id: str, hub_auth: str, http_proxy: str, https_proxy: str) -> Optional[flame_hub.CoreClient]: + """Authenticate and build a :class:`flame_hub.CoreClient` talking to the FLAME Hub. + + Honors the ``PO_HTTP_PROXY`` / ``PO_HTTPS_PROXY`` and ``EXTRA_CA_CERTS`` + environment variables via :func:`get_ssl_context`. 
+ + Args: + client_id: OAuth2 client id for the node. + client_secret: OAuth2 client secret for the node. + hub_url_core: Base URL of the Hub core API. + hub_auth: Base URL of the Hub auth service. + http_proxy: HTTP proxy URL (may be empty/None). + https_proxy: HTTPS proxy URL (may be empty/None). + + Returns: + An initialized Hub core client, or ``None`` on authentication failure. + """ # Attempt to init hub client proxies = None ssl_ctx = get_ssl_context() if http_proxy and https_proxy: proxies = { "http://": HTTPTransport(proxy=http_proxy), - "https://": HTTPTransport(proxy=https_proxy, verify=ssl_ctx) + "https://": HTTPTransport(proxy=https_proxy, verify=ssl_ctx) } try: @@ -40,16 +61,21 @@ def init_hub_client_with_client(client_id: str, client = Client(base_url=hub_url_core, mounts=proxies, auth=hub_client, verify=ssl_ctx) hub_client = flame_hub.CoreClient(client=client) - print("PO ACTION - Hub client init successful") + logger.action("Hub client init successful") except Exception as e: hub_client = None - print(f"Error: Failed to authenticate with hub python client library.\n{e}") + logger.error(f"Failed to authenticate with hub python client library: {repr(e)}") return hub_client @lru_cache def get_ssl_context() -> ssl.SSLContext: - """Check if there are additional certificates present and if so, load them.""" + """Return a cached SSL context that trusts the system store plus ``EXTRA_CA_CERTS``. + + Returns: + A :class:`truststore.SSLContext` loaded with the system certificate + store and, if present, the CA bundle pointed to by ``EXTRA_CA_CERTS``. + """ cert_path = os.getenv('EXTRA_CA_CERTS') ctx = truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT) if cert_path and Path(cert_path).exists(): @@ -58,20 +84,39 @@ def get_ssl_context() -> ssl.SSLContext: def get_node_id_by_client(hub_client: flame_hub.CoreClient, client_id: str) -> Optional[str]: + """Look up the Hub node id associated with an OAuth2 client id. + + Args: + hub_client: Initialized Hub core client. + client_id: OAuth2 client id of this node. + + Returns: + The node's UUID as a string, or ``None`` on failure. + """ try: node_id_object = hub_client.find_nodes(filter={'client_id': client_id})[0] except (HTTPStatusError, JSONDecodeError, ConnectTimeout, flame_hub._exceptions.HubAPIError, AttributeError) as e: - print(f"Error: Failed to retrieve node id object from hub python client\n{e}") + logger.error(f"Failed to retrieve node id object from hub python client {client_id}: {repr(e)}") node_id_object = None return str(node_id_object.id) if node_id_object is not None else None def get_node_analysis_id(hub_client: flame_hub.CoreClient, analysis_id: str, node_id_object_id: str) -> Optional[str]: + """Look up the Hub analysis-node id for a (analysis, node) pair. + + Args: + hub_client: Initialized Hub core client. + analysis_id: Analysis id to filter by. + node_id_object_id: Hub node id (see :func:`get_node_id_by_client`). + + Returns: + The analysis-node UUID as a string, or ``None`` if none exists. 
+ """ try: node_analyzes = hub_client.find_analysis_nodes(filter={'analysis_id': analysis_id, 'node_id': node_id_object_id}) except (HTTPStatusError, flame_hub._exceptions.HubAPIError, AttributeError) as e: - print(f"Error: Failed to retrieve node analyzes from hub python client\n{e}") + logger.error(f"Failed to retrieve node analyzes from hub python client: {repr(e)}") node_analyzes = None if node_analyzes: @@ -86,8 +131,16 @@ def update_hub_status(hub_client: flame_hub.CoreClient, node_analysis_id: str, run_status: str, run_progress: Optional[int] = None) -> None: - """ - Update the status of the analysis in the hub. + """Update the execution status (and optionally progress) of an analysis-node in the Hub. + + ``STUCK`` is normalized to ``FAILED`` since the Hub does not model a + stuck status. + + Args: + hub_client: Initialized Hub core client. + node_analysis_id: Hub analysis-node id to update. + run_status: New execution status string. + run_progress: Optional execution progress (0-100). """ try: if run_status == AnalysisStatus.STUCK.value: @@ -97,14 +150,24 @@ def update_hub_status(hub_client: flame_hub.CoreClient, else: hub_client.update_analysis_node(node_analysis_id, execution_status=run_status, execution_progress=run_progress) except (HTTPStatusError, ConnectError, flame_hub._exceptions.HubAPIError, AttributeError) as e: - print(f"Error: Failed to update hub status for node_analysis_id {node_analysis_id}\n{e}") + logger.error(f"Failed to update hub status for node_analysis_id {node_analysis_id}: {repr(e)}") def get_analysis_node_statuses(hub_client: flame_hub.CoreClient, analysis_id: str) -> Optional[dict[str, str]]: + """Return the execution status of every node participating in an analysis. + + Args: + hub_client: Initialized Hub core client. + analysis_id: Analysis to query. + + Returns: + Mapping ``{node_analysis_id: execution_status}``, or ``None`` on + lookup failure. + """ try: node_analyzes = hub_client.find_analysis_nodes(filter={'analysis_id': analysis_id}) except (HTTPStatusError, flame_hub._exceptions.HubAPIError, AttributeError) as e: - print(f"Error: Failed to retrieve node analyzes from hub python client\n{e}") + logger.error(f"Failed to retrieve node analyzes from hub python client: {repr(e)}") return None analysis_node_statuses = {} for node in node_analyzes: @@ -115,21 +178,35 @@ def get_analysis_node_statuses(hub_client: flame_hub.CoreClient, analysis_id: st def get_partner_node_statuses(hub_client: flame_hub.CoreClient, analysis_id: str, node_analysis_id: str) -> Optional[dict[str, str]]: + """Return :func:`get_analysis_node_statuses` with the local node filtered out. + + Args: + hub_client: Initialized Hub core client. + analysis_id: Analysis to query. + node_analysis_id: Local node's analysis-node id, excluded from the + result. + + Returns: + Mapping ``{partner_node_analysis_id: execution_status}``, or ``None`` + on lookup failure. + """ analysis_node_statuses = get_analysis_node_statuses(hub_client, analysis_id) return {k : v for k, v in analysis_node_statuses.items() if k != node_analysis_id} \ if analysis_node_statuses is not None else None def init_hub_client_and_update_hub_status_with_client(analysis_id: str, status: str) -> None: + """One-shot convenience that (re)builds a Hub client and pushes a status update. + + Used by API endpoints that do not hold a long-lived Hub client. Logs and + returns silently when any lookup in the chain (client, node id, analysis + node id) fails. + + Args: + analysis_id: Analysis whose Hub status should be updated. 
+        status: New execution status string.
+    """
-    """
-    Create a hub client for the analysis and update the current status.
-    """
-    client_id, client_secret, hub_url_core, hub_auth, http_proxy, https_proxy = (os.getenv('HUB_CLIENT_ID'),
-                                                                                 os.getenv('HUB_CLIENT_SECRET'),
-                                                                                 os.getenv('HUB_URL_CORE'),
-                                                                                 os.getenv('HUB_URL_AUTH'),
-                                                                                 os.getenv('PO_HTTP_PROXY'),
-                                                                                 os.getenv('PO_HTTPS_PROXY'))
+    client_id, client_secret, hub_url_core, hub_auth, _, http_proxy, https_proxy = extract_hub_envs()
    hub_client = init_hub_client_with_client(client_id, client_secret, hub_url_core, hub_auth, http_proxy, https_proxy)
    if hub_client is not None:
        node_id = get_node_id_by_client(hub_client, client_id)
@@ -138,8 +215,8 @@
            if node_analysis_id is not None:
                update_hub_status(hub_client, node_analysis_id, run_status=status)
            else:
-                print("Error: Failed to retrieve node_analysis_id from hub client. Cannot update status.")
+                logger.error("Failed to retrieve node_analysis_id from hub client. Cannot update status.")
        else:
-            print("Error: Failed to retrieve node_id from hub client. Cannot update status.")
+            logger.error("Failed to retrieve node_id from hub client. Cannot update status.")
    else:
-        print("Error: Failed to initialize hub client. Cannot update status.")
+        logger.error("Failed to initialize hub client. Cannot update status.")
diff --git a/src/utils/other.py b/src/utils/other.py
index b794d85..3649230 100644
--- a/src/utils/other.py
+++ b/src/utils/other.py
@@ -1,9 +1,21 @@
-from httpx import AsyncClient
-import asyncio
+from typing import Optional, Union
 import os
+import uuid
 
 
-def extract_hub_envs() -> tuple[str, str, str, str, bool, str, str]:
+def extract_hub_envs() -> tuple[Optional[str],
+                                Optional[str],
+                                Optional[str],
+                                Optional[str],
+                                bool,
+                                Optional[str],
+                                Optional[str]]:
+    """Read the FLAME-Hub related environment variables into a tuple.
+
+    Returns:
+        Tuple ``(client_id, client_secret, hub_url_core, hub_url_auth,
+        hub_logging_enabled, http_proxy, https_proxy)``.
+    """
    return (os.getenv('HUB_CLIENT_ID'),
            os.getenv('HUB_CLIENT_SECRET'),
            os.getenv('HUB_URL_CORE'),
@@ -14,26 +26,50 @@
 
 
 def resource_name_to_analysis(deployment_name: str, max_r_split: int = 1) -> str:
-    return deployment_name.split("analysis-")[-1].rsplit('-', max_r_split)[0]
+    """Extract the analysis id from a FLAME analysis resource name.
+
+    Resource names follow the ``analysis-{analysis_id}-{restart_counter}``
+    pattern (with an optional ``nginx-`` prefix and pod hash suffix); this
+    helper strips those and returns the analysis id.
 
-def get_project_data_source(keycloak_token, project_id, hub_adapter_service_name, namespace="default") -> dict:
+    Args:
+        deployment_name: Kubernetes resource name.
+        max_r_split: Number of trailing ``-``-separated segments to drop.
+
+    Returns:
+        The analysis id portion of the name.
    """
-    Get data sources for a project from the node hub adapter service using the keycloak token
+    return deployment_name.split("analysis-")[-1].rsplit('-', max_r_split)[0]
+
 
-    :param keycloak_token:
-    :param project_id:
-    :param hub_adapter_service_name:
-    :param namespace:
-    :return:
+def is_uuid(test_str: Union[str, uuid.UUID], version: int = 4) -> bool:
+    """Return True if ``test_str`` parses as a UUID of the given version.
+
+    Args:
+        test_str: String or UUID to validate.
+        version: UUID version to require (defaults to 4).
+ + Returns: + ``True`` if ``test_str`` is a syntactically valid UUID; otherwise + ``False``. """ - client = AsyncClient(base_url=f"http://{hub_adapter_service_name}:5000", - headers={'Authorization': f"Bearer {keycloak_token}", - 'accept': "application/json"}) - return asyncio.run(call_sources(client, project_id)) + try: + uuid.UUID(str(test_str), version=version) + return len(rreplace(str(test_str), '-', '', 4)) == 32 + except ValueError: + return False + +def rreplace(string: str, replaced_str: str, replacement_str: str, count: int): + """Replace up to ``count`` occurrences of ``replaced_str`` starting from the right. -async def call_sources(client, project_id) -> list[dict[str, str]]: - response = await client.get(f"/kong/datastore?project_id={project_id}") - response.raise_for_status() - return response.json() + Args: + string: The input string. + replaced_str: Substring to replace. + replacement_str: Substring to insert in its place. + count: Maximum number of rightmost occurrences to replace. + + Returns: + The resulting string. + """ + return str(replacement_str).join(str(string).rsplit(str(replaced_str), int(count))) diff --git a/src/utils/po_logging.py b/src/utils/po_logging.py new file mode 100644 index 0000000..0d515b3 --- /dev/null +++ b/src/utils/po_logging.py @@ -0,0 +1,75 @@ +import json +import logging +import sys + + +class JsonFormatter(logging.Formatter): + """Emit each log record as a single JSON line for structured log ingestion.""" + + def format(self, record: logging.LogRecord) -> str: + """Serialize the log record as a single JSON object on one line. + + Always includes ``timestamp``, ``level``, ``logger``, ``module``, and + ``msg`` fields. When the record carries exception info, a formatted + traceback is added under ``error``. + """ + log = { + "timestamp": self.formatTime(record, "%Y-%m-%dT%H:%M:%S"), + "level": record.levelname, + "logger": record.name, + "module": record.module, + "msg": record.getMessage(), + } + + if record.exc_info: + log["error"] = self.formatException(record.exc_info) + + return json.dumps(log, default=str) # for non-serializable msgs + + +def get_logger() -> logging.Logger: + """Return a process-wide logger configured for JSON output. + + Registers the custom ``ACTION`` (21) and ``STATUS_LOOP`` (22) levels, + installs a single :class:`JsonFormatter` handler on the root logger + (idempotent), and returns a child logger named after this module. + + Returns: + A :class:`logging.Logger` ready for use. + """ + _set_custom_log_level(21, 'ACTION') + _set_custom_log_level(22, 'STATUS_LOOP') + + root = logging.getLogger() + if not any(isinstance(h.formatter, JsonFormatter) for h in root.handlers): + handler = logging.StreamHandler(sys.stdout) + handler.setFormatter(JsonFormatter()) + root.addHandler(handler) + root.setLevel(logging.INFO) + + logger = logging.getLogger(__name__) + return logger + + +def _set_custom_log_level(level, level_name): + """Register a new log level and expose it as a method on ``Logger`` and module function. + + After calling ``_set_custom_log_level(21, 'ACTION')`` you can write + ``logger.action("...")`` and ``logging.action("...")``. + + Args: + level: Integer log level (between existing stdlib levels). + level_name: Human-readable name; used uppercase as the level name and + lowercase as the method/function name. 
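+
+    Example (illustrative)::
+
+        _set_custom_log_level(21, 'ACTION')
+        logging.getLogger(__name__).action("Checking for running analyzes...")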
+ """ + def logForLevel(self, message, *args, **kws): + if self.isEnabledFor(level): + self._log(level, message, args, **kws) + + def logToRoot(message, *args, **kwargs): + logging.log(level, message, *args, **kwargs) + + logging.addLevelName(level, level_name.upper()) + setattr(logging, level_name.upper(), level) + setattr(logging.getLoggerClass(), level_name.lower(), logForLevel) + setattr(logging, level_name.lower(), logToRoot) diff --git a/src/utils/token.py b/src/utils/token.py index b906436..5c1b920 100644 --- a/src/utils/token.py +++ b/src/utils/token.py @@ -2,17 +2,42 @@ import requests from typing import Optional +from src.utils.po_logging import get_logger + + +logger = get_logger() + _KEYCLOAK_URL = os.getenv('KEYCLOAK_URL') _KEYCLOAK_REALM = os.getenv('KEYCLOAK_REALM') def create_analysis_tokens(kong_token: str, analysis_id: str) -> dict[str, str]: + """Assemble the token env dict injected into the analysis container. + + Args: + kong_token: Opaque Kong token minted for the analysis by the node. + analysis_id: Analysis id used as the Keycloak client id. + + Returns: + Dict with ``DATA_SOURCE_TOKEN`` (the Kong token) and + ``KEYCLOAK_TOKEN`` (a freshly minted service-account token). + """ tokens = {'DATA_SOURCE_TOKEN': kong_token, 'KEYCLOAK_TOKEN': get_keycloak_token(analysis_id)} return tokens def get_keycloak_token(analysis_id: str) -> Optional[str]: + """Obtain a client-credentials access token for an analysis's Keycloak client. + + Creates the Keycloak client on demand if it does not already exist. + + Args: + analysis_id: Analysis id used as the Keycloak client id. + + Returns: + The access token, or ``None`` on HTTP failure. + """ client_secret = _get_keycloak_client_secret(analysis_id) keycloak_url = f"{_KEYCLOAK_URL}/realms/flame/protocol/openid-connect/token" @@ -27,11 +52,12 @@ def get_keycloak_token(analysis_id: str) -> Optional[str]: return response.json()['access_token'] except requests.exceptions.RequestException as e: - print(f"Error: Failed to retrieve keycloak token\n{e}") + logger.error(f"Failed to retrieve keycloak token: {repr(e)}") return None def _get_keycloak_client_secret(analysis_id: str) -> str: + """Return the client secret for an analysis, creating the client if needed.""" admin_token = _get_keycloak_admin_token() if not _keycloak_client_exists(analysis_id, admin_token): @@ -49,6 +75,7 @@ def _get_keycloak_client_secret(analysis_id: str) -> str: def _get_keycloak_admin_token() -> str: + """Mint an admin access token using the ``RESULT_CLIENT_*`` service account.""" keycloak_admin_client_id = os.getenv('RESULT_CLIENT_ID') keycloak_admin_client_secret = os.getenv('RESULT_CLIENT_SECRET') @@ -66,6 +93,7 @@ def _get_keycloak_admin_token() -> str: def _keycloak_client_exists(analysis_id: str, admin_token: str) -> bool: + """Return True if a Keycloak client with the given ``analysis_id`` exists.""" url_get_client = f"{_KEYCLOAK_URL}/admin/realms/{_KEYCLOAK_REALM}/clients?clientId={analysis_id}" headers = {'Authorization': f"Bearer {admin_token}"} @@ -76,6 +104,7 @@ def _keycloak_client_exists(analysis_id: str, admin_token: str) -> bool: def _create_keycloak_client(admin_token: str, analysis_id: str) -> None: + """Create a service-account Keycloak client named ``flame-{analysis_id}``.""" url_create_client = f"{_KEYCLOAK_URL}/admin/realms/{_KEYCLOAK_REALM}/clients" headers = {'Authorization': f"Bearer {admin_token}", 'Content-Type': "application/json"} @@ -87,6 +116,7 @@ def _create_keycloak_client(admin_token: str, analysis_id: str) -> None: 
response.raise_for_status() def _get_all_keycloak_clients() -> list[dict]: + """Return every Keycloak client in the configured realm as raw JSON dicts.""" admin_token = _get_keycloak_admin_token() url_get_clients = f"{_KEYCLOAK_URL}/admin/realms/{_KEYCLOAK_REALM}/clients" headers = {'Authorization': f"Bearer {admin_token}"} @@ -97,6 +127,10 @@ def _get_all_keycloak_clients() -> list[dict]: return response.json() def delete_keycloak_client(analysis_id: str) -> None: + """Delete the Keycloak client associated with an analysis. + + Logs and returns silently if the client cannot be located. + """ admin_token = _get_keycloak_admin_token() # get client uuid @@ -108,7 +142,7 @@ def delete_keycloak_client(analysis_id: str) -> None: try: uuid = response.json()[0]['id'] except (KeyError, IndexError) as e: - print(f"Error: Keycloak client not found\n{e}") + logger.error(f"Failed to retrieve keycloak client: {repr(e)}") return url_delete_client = f"{_KEYCLOAK_URL}/admin/realms/{_KEYCLOAK_REALM}/clients/{uuid}" diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..bee8283 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,305 @@ +""" +Shared test fixtures for node-pod-orchestration. + +IMPORTANT: mock_env_vars is session-scoped and autouse. It sets all required +environment variables BEFORE any src modules are imported, which is critical +because oauth.py and token.py read env vars at module level. + +All src imports are deferred (inside fixture bodies) to ensure env vars are set first. +""" + +import json +import os +from dataclasses import dataclass +from unittest.mock import MagicMock, patch + +import pytest + +# ─── Test Environment Variables ────────────────────────────────────────────── + +_TEST_ENV_VARS = { + "POSTGRES_HOST": "localhost", + "POSTGRES_USER": "test_user", + "POSTGRES_PASSWORD": "test_password", + "POSTGRES_DB": "test_db", + "KEYCLOAK_URL": "http://localhost:8080", + "KEYCLOAK_REALM": "flame", + "RESULT_CLIENT_ID": "test_result_client", + "RESULT_CLIENT_SECRET": "test_result_secret", + "HUB_CLIENT_ID": "test_hub_client", + "HUB_CLIENT_SECRET": "test_hub_secret", + "HUB_URL_CORE": "http://localhost:3000", + "HUB_URL_AUTH": "http://localhost:3001", + "HARBOR_URL": "http://harbor.test", + "HARBOR_USER": "harbor_user", + "HARBOR_PW": "harbor_password", + "NODE_NAME": "test-node", +} + + +# ─── Fixture 1: mock_env_vars ─────────────────────────────────────────────── + +@pytest.fixture(scope="session", autouse=True) +def mock_env_vars(): + """Set all required env vars for the entire test session. + + Uses os.environ directly because monkeypatch is function-scoped only. + Must run before any src module import triggers oauth.py / token.py + module-level env var reads. + """ + original = {} + for key, value in _TEST_ENV_VARS.items(): + original[key] = os.environ.get(key) + os.environ[key] = value + + yield + + for key, orig_value in original.items(): + if orig_value is None: + os.environ.pop(key, None) + else: + os.environ[key] = orig_value + + +# ─── Fixture 3: sample_analysis_db (defined before mock_database) ─────────── + +@pytest.fixture +def sample_analysis_db(): + """Factory fixture: returns a callable that creates AnalysisDB mocks. 
+ + Usage: + def test_something(sample_analysis_db): + default = sample_analysis_db() + custom = sample_analysis_db(status="failed", restart_counter=5) + """ + from src.resources.database.db_models import AnalysisDB + + def _factory(**kwargs): + defaults = { + "id": 1, + "deployment_name": "analysis-analysis_id-0", + "analysis_id": "analysis_id", + "project_id": "project_id", + "registry_url": "harbor.privateaim", + "image_url": "harbor.privateaim/node_id/analysis_id", + "registry_user": "robot_user", + "registry_password": "default_pw", + "status": "started", + "log": None, + "pod_ids": json.dumps(["pod-1"]), + "namespace": "default", + "kong_token": "default_kong_token", + "restart_counter": 0, + "progress": 0, + "time_created": 1700000000.0, + "time_updated": None, + } + defaults.update(kwargs) + mock = MagicMock(spec=AnalysisDB) + for attr, value in defaults.items(): + setattr(mock, attr, value) + return mock + + return _factory + + +# ─── Fixture 2: mock_database ─────────────────────────────────────────────── + +@pytest.fixture +def mock_database(sample_analysis_db): + """MagicMock(spec=Database) with sensible default return values.""" + from src.resources.database.entity import Database + + mock_db = MagicMock(spec=Database) + default_analysis = sample_analysis_db() + + mock_db.get_deployment.return_value = default_analysis + mock_db.get_latest_deployment.return_value = default_analysis + mock_db.get_deployments.return_value = [default_analysis] + mock_db.create_analysis.return_value = default_analysis + mock_db.update_analysis.return_value = [default_analysis] + mock_db.update_deployment.return_value = default_analysis + mock_db.get_analysis_ids.return_value = ["analysis_id"] + mock_db.get_deployment_ids.return_value = ["analysis-analysis_id-0"] + mock_db.get_deployment_pod_ids.return_value = ["pod-1"] + mock_db.get_analysis_pod_ids.return_value = [["pod-1"]] + mock_db.get_analysis_log.return_value = "" + mock_db.get_analysis_progress.return_value = 0 + mock_db.analysis_is_running.return_value = True + mock_db.progress_valid.return_value = True + mock_db.extract_analysis_body.return_value = { + "analysis_id": "analysis_id", + "project_id": "project_id", + "registry_url": "harbor.privateaim", + "image_url": "harbor.privateaim/node_id/analysis_id", + "registry_user": "robot_user", + "registry_password": "default_pw", + "namespace": "default", + "kong_token": "default_kong_token", + "restart_counter": 0, + "progress": 0, + } + + return mock_db + + +# ─── Fixture 4: sample_create_analysis ────────────────────────────────────── + +@pytest.fixture +def sample_create_analysis(): + """CreateAnalysis Pydantic model with all test defaults.""" + from src.resources.analysis.entity import CreateAnalysis + + return CreateAnalysis() + + +# ─── Fixture 5: mock_hub_client ───────────────────────────────────────────── + +@pytest.fixture +def mock_hub_client(): + """MagicMock for flame_hub.CoreClient with sensible defaults.""" + import flame_hub + + mock_client = MagicMock(spec=flame_hub.CoreClient) + + mock_node = MagicMock() + mock_node.id = "test-node-id" + mock_client.find_nodes.return_value = [mock_node] + + mock_analysis_node = MagicMock() + mock_analysis_node.id = "test-node-analysis-id" + mock_analysis_node.execution_status = "started" + mock_analysis_node.node_id = "test-node-id" + mock_client.find_analysis_nodes.return_value = [mock_analysis_node] + + mock_client.update_analysis_node.return_value = None + + return mock_client + + +# ─── Fixture 6: mock_k8s_clients 
──────────────────────────────────────────── + +@dataclass +class K8sMocks: + """Named container for mocked Kubernetes API clients.""" + + core_v1: MagicMock + apps_v1: MagicMock + networking_v1: MagicMock + batch_v1: MagicMock + load_config: MagicMock + + +@pytest.fixture +def mock_k8s_clients(): + """Patch all 4 K8s API client classes and load_incluster_config. + + Usage: + def test_something(mock_k8s_clients): + mock_k8s_clients.core_v1.list_namespaced_pod.return_value = ... + """ + mock_core = MagicMock() + mock_apps = MagicMock() + mock_net = MagicMock() + mock_batch = MagicMock() + + with ( + patch("kubernetes.client.CoreV1Api", return_value=mock_core), + patch("kubernetes.client.AppsV1Api", return_value=mock_apps), + patch("kubernetes.client.NetworkingV1Api", return_value=mock_net), + patch("kubernetes.client.BatchV1Api", return_value=mock_batch), + patch("kubernetes.config.load_incluster_config") as mock_load, + ): + yield K8sMocks( + core_v1=mock_core, + apps_v1=mock_apps, + networking_v1=mock_net, + batch_v1=mock_batch, + load_config=mock_load, + ) + + +# ─── Fixture 7: api_test_client ───────────────────────────────────────────── + +@pytest.fixture +def api_test_client(mock_database, mock_hub_client, mock_k8s_clients): + """Capture FastAPI app from PodOrchestrationAPI and return TestClient. + + Patches uvicorn.run to intercept the app argument (since + PodOrchestrationAPI calls uvicorn.run at the end of __init__). + Bypasses OAuth via FastAPI dependency_overrides. + """ + from starlette.testclient import TestClient + from src.api.oauth import valid_access_token + + captured_app = None + + def fake_uvicorn_run(app, **kwargs): + nonlocal captured_app + captured_app = app + + with ( + patch("src.api.api.uvicorn.run", side_effect=fake_uvicorn_run), + patch( + "src.api.api.extract_hub_envs", + return_value=( + "test_hub_client", + "test_hub_secret", + "http://localhost:3000", + "http://localhost:3001", + False, + None, + None, + ), + ), + patch( + "src.api.api.init_hub_client_with_client", + return_value=mock_hub_client, + ), + patch( + "src.api.api.get_node_id_by_client", + return_value="test-node-id", + ), + ): + from src.api.api import PodOrchestrationAPI + + PodOrchestrationAPI(database=mock_database, namespace="default") + + assert captured_app is not None, "Failed to capture FastAPI app from uvicorn.run" + + captured_app.dependency_overrides[valid_access_token] = lambda: { + "sub": "test-user", + "preferred_username": "tester", + } + + # starlette 0.36.x / httpx 0.28.x incompatibility: starlette passes `app=` to + # httpx.Client, but httpx 0.28 removed that parameter. ASGITransport is async-only. + # Use a thin sync wrapper that drives AsyncClient via anyio.run(). 
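+    # Illustrative usage (hypothetical calls; the wrapper mirrors only the
+    # small TestClient subset these tests need):
+    #     api_test_client.get("/po/healthz")        -> httpx.Response
+    #     api_test_client.post("/po/", json={...})  -> httpx.Response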
+ import anyio + import httpx + + class SyncASGIClient: + """Sync test client that drives httpx.AsyncClient with ASGITransport via anyio.""" + + def __init__(self, asgi_app, base_url="http://testserver"): + self.app = asgi_app + self._base_url = base_url + + def _request(self, method: str, url: str, **kwargs): + async def _do(): + async with httpx.AsyncClient( + transport=httpx.ASGITransport(app=self.app), + base_url=self._base_url, + ) as client: + return await getattr(client, method)(url, **kwargs) + + return anyio.run(_do) + + def get(self, url, **kw): return self._request("get", url, **kw) + def post(self, url, **kw): return self._request("post", url, **kw) + def put(self, url, **kw): return self._request("put", url, **kw) + def delete(self, url, **kw): return self._request("delete", url, **kw) + + yield SyncASGIClient(captured_app) + + captured_app.dependency_overrides.clear() \ No newline at end of file diff --git a/tests/test_api_api.py b/tests/test_api_api.py new file mode 100644 index 0000000..417d8b3 --- /dev/null +++ b/tests/test_api_api.py @@ -0,0 +1,310 @@ +"""Tests for src/api/api.py — all 16 endpoints via TestClient. + +Uses the api_test_client fixture from conftest.py, which: + - patches uvicorn.run to capture the FastAPI app + - overrides valid_access_token to skip JWT validation + - patches hub client init to avoid real network calls +""" + +from unittest.mock import MagicMock, patch + +import pytest + +from src.status.constants import AnalysisStatus + + +# ─── TestHealthEndpoint ─────────────────────────────────────────────────────── + +class TestHealthEndpoint: + def test_healthz_returns_ok(self, api_test_client): + response = api_test_client.get("/po/healthz") + assert response.status_code == 200 + assert response.json() == {"status": "ok"} + + def test_healthz_no_auth_required(self, api_test_client): + """healthz route has no auth dependency — it works even without a token.""" + import anyio + import httpx + + app = api_test_client.app + overrides_backup = dict(app.dependency_overrides) + app.dependency_overrides.clear() + + async def _get(): + async with httpx.AsyncClient( + transport=httpx.ASGITransport(app=app), base_url="http://testserver" + ) as client: + return await client.get("/po/healthz") + + response = anyio.run(_get) + assert response.status_code == 200 + + app.dependency_overrides.update(overrides_backup) + + +# ─── TestUnauthenticated ────────────────────────────────────────────────────── + +class TestUnauthenticated: + def test_unauthenticated_create_returns_4xx(self, api_test_client): + """Without override, OAuth dependency raises 401/403.""" + import anyio + import httpx + + app = api_test_client.app + overrides_backup = dict(app.dependency_overrides) + app.dependency_overrides.clear() + + async def _post(): + async with httpx.AsyncClient( + transport=httpx.ASGITransport(app=app), + base_url="http://testserver", + follow_redirects=True, + ) as client: + return await client.post("/po/", json={}) + + response = anyio.run(_post) + assert response.status_code in (401, 403, 422) + + app.dependency_overrides.update(overrides_backup) + + +# ─── TestCreateAnalysis ─────────────────────────────────────────────────────── + +class TestCreateAnalysis: + def test_create_returns_starting_status(self, api_test_client): + with patch("src.api.api.create_analysis", return_value={"analysis_id": AnalysisStatus.STARTING.value}) as mock_create: + response = api_test_client.post("/po/", json={ + "analysis_id": "analysis_id", + "project_id": "project_id", + "registry_url": 
"harbor.test", + "image_url": "harbor.test/img", + "registry_user": "user", + "registry_password": "pw", + "kong_token": "token", + }) + + assert response.status_code == 200 + data = response.json() + assert "analysis_id" in data + assert data["analysis_id"] == AnalysisStatus.STARTING.value + + +# ─── TestHistoryEndpoints ───────────────────────────────────────────────────── + +class TestHistoryEndpoints: + def test_retrieve_all_history(self, api_test_client): + fake_result = {"analysis": {"analysis_id": []}, "nginx": {"analysis_id": []}} + with patch("src.api.api.retrieve_history", return_value=fake_result) as mock_fn: + response = api_test_client.get("/po/history") + assert response.status_code == 200 + mock_fn.assert_called_once_with("all", mock_fn.call_args[0][1]) + + def test_retrieve_all_history_500_on_exception(self, api_test_client): + with patch("src.api.api.retrieve_history", side_effect=RuntimeError("db error")): + response = api_test_client.get("/po/history") + assert response.status_code == 500 + + def test_retrieve_history_by_id(self, api_test_client): + fake_result = {"analysis": {"analysis_id": []}, "nginx": {"analysis_id": []}} + with patch("src.api.api.retrieve_history", return_value=fake_result) as mock_fn: + response = api_test_client.get("/po/history/analysis_id") + assert response.status_code == 200 + mock_fn.assert_called_once_with("analysis_id", mock_fn.call_args[0][1]) + + def test_retrieve_history_by_id_500_on_exception(self, api_test_client): + with patch("src.api.api.retrieve_history", side_effect=RuntimeError("db error")): + response = api_test_client.get("/po/history/analysis_id") + assert response.status_code == 500 + + +# ─── TestLogsEndpoints ──────────────────────────────────────────────────────── + +class TestLogsEndpoints: + def test_retrieve_all_logs(self, api_test_client): + fake_result = {"analysis_id": {"analysis": [], "nginx": []}} + with patch("src.api.api.retrieve_logs", return_value=fake_result) as mock_fn: + response = api_test_client.get("/po/logs") + assert response.status_code == 200 + mock_fn.assert_called_once_with("all", mock_fn.call_args[0][1]) + + def test_retrieve_all_logs_500_on_exception(self, api_test_client): + with patch("src.api.api.retrieve_logs", side_effect=RuntimeError("err")): + response = api_test_client.get("/po/logs") + assert response.status_code == 500 + + def test_retrieve_logs_by_id(self, api_test_client): + fake_result = {"analysis_id": {"analysis": [], "nginx": []}} + with patch("src.api.api.retrieve_logs", return_value=fake_result) as mock_fn: + response = api_test_client.get("/po/logs/analysis_id") + assert response.status_code == 200 + mock_fn.assert_called_once_with("analysis_id", mock_fn.call_args[0][1]) + + def test_retrieve_logs_by_id_500_on_exception(self, api_test_client): + with patch("src.api.api.retrieve_logs", side_effect=RuntimeError("err")): + response = api_test_client.get("/po/logs/analysis_id") + assert response.status_code == 500 + + +# ─── TestStatusEndpoints ────────────────────────────────────────────────────── + +class TestStatusEndpoints: + def test_get_all_status(self, api_test_client): + fake_result = {"analysis_id": {"status": "started", "progress": 0}} + with patch("src.api.api.get_status_and_progress", return_value=fake_result) as mock_fn: + response = api_test_client.get("/po/status") + assert response.status_code == 200 + mock_fn.assert_called_once_with("all", mock_fn.call_args[0][1]) + + def test_get_all_status_500_on_exception(self, api_test_client): + with 
patch("src.api.api.get_status_and_progress", side_effect=RuntimeError("err")): + response = api_test_client.get("/po/status") + assert response.status_code == 500 + + def test_get_status_by_id(self, api_test_client): + fake_result = {"analysis_id": {"status": "started", "progress": 0}} + with patch("src.api.api.get_status_and_progress", return_value=fake_result) as mock_fn: + response = api_test_client.get("/po/status/analysis_id") + assert response.status_code == 200 + mock_fn.assert_called_once_with("analysis_id", mock_fn.call_args[0][1]) + + def test_get_status_by_id_500_on_exception(self, api_test_client): + with patch("src.api.api.get_status_and_progress", side_effect=RuntimeError("err")): + response = api_test_client.get("/po/status/analysis_id") + assert response.status_code == 500 + + +# ─── TestPodsEndpoints ──────────────────────────────────────────────────────── + +class TestPodsEndpoints: + def test_get_all_pods(self, api_test_client): + fake_result = {"analysis_id": ["pod-1"]} + with patch("src.api.api.get_pods", return_value=fake_result) as mock_fn: + response = api_test_client.get("/po/pods") + assert response.status_code == 200 + mock_fn.assert_called_once_with("all", mock_fn.call_args[0][1]) + + def test_get_all_pods_500_on_exception(self, api_test_client): + with patch("src.api.api.get_pods", side_effect=RuntimeError("err")): + response = api_test_client.get("/po/pods") + assert response.status_code == 500 + + def test_get_pods_by_id(self, api_test_client): + fake_result = {"analysis_id": ["pod-1"]} + with patch("src.api.api.get_pods", return_value=fake_result) as mock_fn: + response = api_test_client.get("/po/pods/analysis_id") + assert response.status_code == 200 + mock_fn.assert_called_once_with("analysis_id", mock_fn.call_args[0][1]) + + def test_get_pods_by_id_500_on_exception(self, api_test_client): + with patch("src.api.api.get_pods", side_effect=RuntimeError("err")): + response = api_test_client.get("/po/pods/analysis_id") + assert response.status_code == 500 + + +# ─── TestStopEndpoints ──────────────────────────────────────────────────────── + +class TestStopEndpoints: + def test_stop_all(self, api_test_client, mock_database): + fake_stop_result = {"analysis_id": "stopped"} + with ( + patch("src.api.api.stop_analysis", return_value=fake_stop_result) as mock_stop, + patch("src.api.api.stream_logs") as mock_stream, + ): + response = api_test_client.put("/po/stop") + assert response.status_code == 200 + mock_stop.assert_called_once_with("all", mock_database) + # stream_logs called once per analysis_id returned by get_analysis_ids + assert mock_stream.call_count == len(mock_database.get_analysis_ids()) + + def test_stop_all_500_on_exception(self, api_test_client): + with patch("src.api.api.stop_analysis", side_effect=RuntimeError("err")): + response = api_test_client.put("/po/stop") + assert response.status_code == 500 + + def test_stop_by_id(self, api_test_client, mock_database): + fake_stop_result = {"analysis_id": "stopped"} + with ( + patch("src.api.api.stop_analysis", return_value=fake_stop_result) as mock_stop, + patch("src.api.api.stream_logs") as mock_stream, + ): + response = api_test_client.put("/po/stop/analysis_id") + assert response.status_code == 200 + mock_stop.assert_called_once_with("analysis_id", mock_database) + mock_stream.assert_called_once() + + def test_stop_by_id_500_on_exception(self, api_test_client): + with patch("src.api.api.stop_analysis", side_effect=RuntimeError("err")): + response = api_test_client.put("/po/stop/analysis_id") + assert 
response.status_code == 500 + + +# ─── TestDeleteEndpoints ────────────────────────────────────────────────────── + +class TestDeleteEndpoints: + def test_delete_all(self, api_test_client, mock_database): + fake_result = {"analysis_id": "stopped"} + with patch("src.api.api.delete_analysis", return_value=fake_result) as mock_fn: + response = api_test_client.delete("/po/delete") + assert response.status_code == 200 + mock_fn.assert_called_once_with("all", mock_database) + + def test_delete_all_500_on_exception(self, api_test_client): + with patch("src.api.api.delete_analysis", side_effect=RuntimeError("err")): + response = api_test_client.delete("/po/delete") + assert response.status_code == 500 + + def test_delete_by_id(self, api_test_client, mock_database): + fake_result = {"analysis_id": "stopped"} + with patch("src.api.api.delete_analysis", return_value=fake_result) as mock_fn: + response = api_test_client.delete("/po/delete/analysis_id") + assert response.status_code == 200 + mock_fn.assert_called_once_with("analysis_id", mock_database) + + def test_delete_by_id_500_on_exception(self, api_test_client): + with patch("src.api.api.delete_analysis", side_effect=RuntimeError("err")): + response = api_test_client.delete("/po/delete/analysis_id") + assert response.status_code == 500 + + +# ─── TestCleanupEndpoint ────────────────────────────────────────────────────── + +class TestCleanupEndpoint: + def test_cleanup(self, api_test_client, mock_database): + fake_result = {"analyzes": "Deleted 1 analysis deployments"} + with patch("src.api.api.cleanup", return_value=fake_result) as mock_fn: + response = api_test_client.delete("/po/cleanup/analyzes") + assert response.status_code == 200 + mock_fn.assert_called_once_with("analyzes", mock_database, "default") + + def test_cleanup_500_on_exception(self, api_test_client): + with patch("src.api.api.cleanup", side_effect=RuntimeError("err")): + response = api_test_client.delete("/po/cleanup/analyzes") + assert response.status_code == 500 + + +# ─── TestStreamLogsEndpoint ─────────────────────────────────────────────────── + +class TestStreamLogsEndpoint: + def test_stream_logs(self, api_test_client): + fake_result = {"status": "ok"} + with patch("src.api.api.stream_logs", return_value=fake_result) as mock_fn: + response = api_test_client.post("/po/stream_logs", json={ + "analysis_id": "analysis_id", + "log": "some log line", + "log_type": "info", + "status": "executing", + "progress": 50, + }) + assert response.status_code == 200 + mock_fn.assert_called_once() + + def test_stream_logs_500_on_exception(self, api_test_client): + with patch("src.api.api.stream_logs", side_effect=RuntimeError("err")): + response = api_test_client.post("/po/stream_logs", json={ + "analysis_id": "analysis_id", + "log": "log", + "log_type": "info", + "status": "executing", + "progress": 0, + }) + assert response.status_code == 500 \ No newline at end of file diff --git a/tests/test_api_oauth.py b/tests/test_api_oauth.py new file mode 100644 index 0000000..8303fe7 --- /dev/null +++ b/tests/test_api_oauth.py @@ -0,0 +1,48 @@ +"""Tests for src/api/oauth.py. + +Drives async functions via anyio.run() (no pytest-asyncio required). +Patches module-level env reads (oauth.py reads KEYCLOAK_URL at import time). 
+""" + +import anyio +import pytest +from unittest.mock import MagicMock, patch +from fastapi import HTTPException + + +# ─── TestValidAccessToken ───────────────────────────────────────────────────── + +class TestValidAccessToken: + def test_valid_token_returns_decoded_payload(self): + from src.api.oauth import valid_access_token + + fake_token = "valid.jwt.token" + fake_payload = {"sub": "user-id", "preferred_username": "testuser"} + + mock_signing_key = MagicMock() + mock_signing_key.key = "fake-key" + + mock_jwks_client = MagicMock() + mock_jwks_client.get_signing_key_from_jwt.return_value = mock_signing_key + + with ( + patch("src.api.oauth.PyJWKClient", return_value=mock_jwks_client), + patch("src.api.oauth.jwt.decode", return_value=fake_payload), + ): + result = valid_access_token(fake_token) + + assert result == fake_payload + + def test_invalid_token_raises_401(self): + import jwt as jwt_lib + from src.api.oauth import valid_access_token + + mock_jwks_client = MagicMock() + mock_jwks_client.get_signing_key_from_jwt.side_effect = jwt_lib.exceptions.InvalidTokenError("bad token") + + with patch("src.api.oauth.PyJWKClient", return_value=mock_jwks_client): + with pytest.raises(HTTPException) as exc_info: + valid_access_token("bad.token.here") + + assert exc_info.value.status_code == 401 + assert "Not authenticated" in exc_info.value.detail \ No newline at end of file diff --git a/tests/test_database_entity.py b/tests/test_database_entity.py new file mode 100644 index 0000000..e722eef --- /dev/null +++ b/tests/test_database_entity.py @@ -0,0 +1,355 @@ +"""Tests for src/resources/database/entity.py — SQLite in-memory backend.""" + +import json +import time +from unittest.mock import patch + +import pytest + +from src.resources.database.db_models import Base + + +# ─── Fixture ───────────────────────────────────────────────────────────────── + + +@pytest.fixture +def db(): + """Database instance backed by SQLite in-memory.""" + from sqlalchemy import create_engine as real_create_engine + + sqlite_engine = real_create_engine( + "sqlite:///:memory:", connect_args={"check_same_thread": False} + ) + + with patch("src.resources.database.entity.create_engine", return_value=sqlite_engine): + from src.resources.database.entity import Database + + database = Database() + + yield database + + Base.metadata.drop_all(bind=sqlite_engine) + + +# ─── Helpers ───────────────────────────────────────────────────────────────── + + +def _insert(db, analysis_id="a1", deployment_name="analysis-a1-0", **kwargs): + """Insert a record with sensible defaults.""" + defaults = dict( + analysis_id=analysis_id, + deployment_name=deployment_name, + project_id="proj1", + pod_ids=["pod-1"], + status="started", + log=None, + registry_url="harbor.test", + image_url="harbor.test/img", + registry_user="user", + registry_password="pw", + kong_token="token", + restart_counter=0, + progress=0, + ) + defaults.update(kwargs) + return db.create_analysis(**defaults) + + +# ─── create_analysis ───────────────────────────────────────────────────────── + + +class TestCreateAnalysis: + def test_returns_record_with_correct_fields(self, db): + record = _insert(db) + assert record.analysis_id == "a1" + assert record.deployment_name == "analysis-a1-0" + assert record.project_id == "proj1" + + def test_sets_time_created(self, db): + before = time.time() + record = _insert(db) + after = time.time() + assert before <= record.time_created <= after + + def test_serializes_pod_ids_as_json_string(self, db): + record = _insert(db, 
pod_ids=["pod-1", "pod-2"]) + assert record.pod_ids == json.dumps(["pod-1", "pod-2"]) + + def test_respects_custom_namespace(self, db): + record = _insert(db, namespace="custom-ns") + assert record.namespace == "custom-ns" + + def test_default_namespace_is_default(self, db): + record = _insert(db) + assert record.namespace == "default" + + +# ─── get_deployment / get_latest_deployment / get_deployments ──────────────── + + +class TestGetDeployment: + def test_get_deployment_found(self, db): + _insert(db) + record = db.get_deployment("analysis-a1-0") + assert record is not None + assert record.deployment_name == "analysis-a1-0" + + def test_get_deployment_not_found(self, db): + assert db.get_deployment("nonexistent") is None + + def test_get_latest_deployment_found(self, db): + _insert(db) + record = db.get_latest_deployment("a1") + assert record is not None + assert record.analysis_id == "a1" + + def test_get_latest_deployment_not_found(self, db): + assert db.get_latest_deployment("nonexistent") is None + + def test_get_latest_deployment_returns_last(self, db): + _insert(db, deployment_name="analysis-a1-0") + _insert(db, deployment_name="analysis-a1-1") + record = db.get_latest_deployment("a1") + assert record.deployment_name == "analysis-a1-1" + + def test_get_deployments_returns_all(self, db): + _insert(db, deployment_name="analysis-a1-0") + _insert(db, deployment_name="analysis-a1-1") + records = db.get_deployments("a1") + assert len(records) == 2 + + def test_get_deployments_empty(self, db): + assert db.get_deployments("nonexistent") == [] + + +# ─── analysis_is_running ───────────────────────────────────────────────────── + + +class TestAnalysisIsRunning: + def test_started_is_running(self, db): + _insert(db, status="started") + assert db.analysis_is_running("a1") is True + + def test_executed_is_not_running(self, db): + _insert(db, status="executed") + assert db.analysis_is_running("a1") is False + + def test_stopped_is_not_running(self, db): + _insert(db, status="stopped") + assert db.analysis_is_running("a1") is False + + def test_failed_is_not_running(self, db): + _insert(db, status="failed") + assert db.analysis_is_running("a1") is False + + def test_no_deployment_is_not_running(self, db): + assert db.analysis_is_running("nonexistent") is False + + +# ─── update_analysis / update_deployment ───────────────────────────────────── + + +class TestUpdate: + def test_update_analysis_updates_all_deployments(self, db): + _insert(db, deployment_name="analysis-a1-0") + _insert(db, deployment_name="analysis-a1-1") + db.update_analysis("a1", status="executing") + for d in db.get_deployments("a1"): + assert d.status == "executing" + + def test_update_deployment_updates_only_one(self, db): + _insert(db, deployment_name="analysis-a1-0") + _insert(db, deployment_name="analysis-a1-1") + db.update_deployment("analysis-a1-0", status="executing") + assert db.get_deployment("analysis-a1-0").status == "executing" + assert db.get_deployment("analysis-a1-1").status == "started" + + def test_update_analysis_status(self, db): + _insert(db) + db.update_analysis_status("a1", "executing") + assert db.get_latest_deployment("a1").status == "executing" + + def test_update_deployment_status(self, db): + _insert(db) + db.update_deployment_status("analysis-a1-0", "executing") + assert db.get_deployment("analysis-a1-0").status == "executing" + + def test_update_analysis_progress(self, db): + _insert(db) + db.update_analysis_progress("a1", 50) + assert db.get_latest_deployment("a1").progress == 50 + + +# ─── 
delete_analysis / delete_deployment ───────────────────────────────────── + + +class TestDelete: + def test_delete_analysis_removes_all(self, db): + _insert(db, deployment_name="analysis-a1-0") + _insert(db, deployment_name="analysis-a1-1") + db.delete_analysis("a1") + assert db.get_deployments("a1") == [] + + def test_delete_deployment_removes_one(self, db): + _insert(db, deployment_name="analysis-a1-0") + _insert(db, deployment_name="analysis-a1-1") + db.delete_deployment("analysis-a1-0") + assert db.get_deployment("analysis-a1-0") is None + assert db.get_deployment("analysis-a1-1") is not None + + def test_delete_deployment_not_found_no_error(self, db): + db.delete_deployment("nonexistent") # must not raise + + +# ─── get_analysis_ids / get_deployment_ids / pod_ids ───────────────────────── + + +class TestIds: + def test_get_analysis_ids(self, db): + _insert(db, analysis_id="a1", deployment_name="analysis-a1-0") + _insert(db, analysis_id="a2", deployment_name="analysis-a2-0") + assert set(db.get_analysis_ids()) == {"a1", "a2"} + + def test_get_deployment_ids(self, db): + _insert(db, deployment_name="analysis-a1-0") + _insert(db, deployment_name="analysis-a1-1") + assert set(db.get_deployment_ids()) == {"analysis-a1-0", "analysis-a1-1"} + + def test_get_deployment_pod_ids(self, db): + _insert(db, pod_ids=["pod-1", "pod-2"]) + result = db.get_deployment_pod_ids("analysis-a1-0") + assert result == json.dumps(["pod-1", "pod-2"]) + + def test_get_analysis_pod_ids_returns_list_of_pod_lists(self, db): + _insert(db, deployment_name="analysis-a1-0", pod_ids=["pod-1"]) + _insert(db, deployment_name="analysis-a1-1", pod_ids=["pod-2"]) + result = db.get_analysis_pod_ids("a1") + assert len(result) == 2 + + +# ─── get_analysis_log / update_analysis_log ────────────────────────────────── + + +class TestLog: + def test_get_log_returns_empty_string_when_null(self, db): + _insert(db) + assert db.get_analysis_log("a1") == "" + + def test_get_log_returns_content(self, db): + _insert(db) + db.update_analysis("a1", log="hello") + assert db.get_analysis_log("a1") == "hello" + + def test_get_log_not_found_returns_empty_string(self, db): + assert db.get_analysis_log("nonexistent") == "" + + def test_update_log_sets_first_entry(self, db): + _insert(db) + db.update_analysis_log("a1", "first message") + assert db.get_analysis_log("a1") == "first message" + + def test_update_log_appends_with_newline(self, db): + _insert(db) + db.update_analysis_log("a1", "first") + db.update_analysis_log("a1", "second") + assert db.get_analysis_log("a1") == "first\nsecond" + + +# ─── get_analysis_progress / progress_valid ────────────────────────────────── + + +class TestProgress: + def test_get_progress_returns_value(self, db): + _insert(db, progress=25) + assert db.get_analysis_progress("a1") == 25 + + def test_get_progress_not_found_returns_none(self, db): + assert db.get_analysis_progress("nonexistent") is None + + def test_progress_valid_within_range(self, db): + _insert(db, progress=10) + assert db.progress_valid("a1", 50) is True + + def test_progress_valid_equal_to_current_is_false(self, db): + _insert(db, progress=50) + assert db.progress_valid("a1", 50) is False + + def test_progress_valid_below_current_is_false(self, db): + _insert(db, progress=50) + assert db.progress_valid("a1", 30) is False + + def test_progress_valid_over_100_is_false(self, db): + _insert(db, progress=50) + assert db.progress_valid("a1", 101) is False + + def test_progress_valid_exactly_100_is_valid(self, db): + _insert(db, progress=50) + assert 
db.progress_valid("a1", 100) is True + + def test_progress_valid_no_deployment_is_false(self, db): + assert db.progress_valid("nonexistent", 50) is False + + +# ─── stop_analysis / extract_analysis_body ─────────────────────────────────── + + +class TestStopAndExtract: + def test_stop_analysis_sets_stopped_status(self, db): + _insert(db) + db.stop_analysis("a1") + assert db.get_latest_deployment("a1").status == "stopped" + + def test_extract_analysis_body_returns_expected_keys(self, db): + _insert(db) + body = db.extract_analysis_body("a1") + assert body is not None + for key in ( + "analysis_id", "project_id", "registry_url", "image_url", + "registry_user", "registry_password", "namespace", + "kong_token", "restart_counter", "progress", + ): + assert key in body + + def test_extract_analysis_body_resets_progress_to_zero(self, db): + _insert(db, progress=99) + body = db.extract_analysis_body("a1") + assert body["progress"] == 0 + + def test_extract_analysis_body_not_found_returns_none(self, db): + assert db.extract_analysis_body("nonexistent") is None + + +# ─── delete_old_deployments_from_db ────────────────────────────────────────── + + +class TestDeleteOldDeployments: + def test_keeps_only_latest_by_time_created(self, db): + _insert(db, deployment_name="analysis-a1-0") + db.update_deployment("analysis-a1-0", time_created=1000.0) + _insert(db, deployment_name="analysis-a1-1") + db.update_deployment("analysis-a1-1", time_created=2000.0) + + db.delete_old_deployments_from_db("a1") + + remaining = db.get_deployments("a1") + assert len(remaining) == 1 + assert remaining[0].deployment_name == "analysis-a1-1" + + def test_single_deployment_unchanged(self, db): + _insert(db) + db.delete_old_deployments_from_db("a1") + assert len(db.get_deployments("a1")) == 1 + + def test_no_deployments_no_error(self, db): + db.delete_old_deployments_from_db("nonexistent") # must not raise + + def test_three_deployments_keeps_newest(self, db): + for i, t in enumerate([1000.0, 2000.0, 3000.0]): + _insert(db, deployment_name=f"analysis-a1-{i}") + db.update_deployment(f"analysis-a1-{i}", time_created=t) + + db.delete_old_deployments_from_db("a1") + + remaining = db.get_deployments("a1") + assert len(remaining) == 1 + assert remaining[0].deployment_name == "analysis-a1-2" \ No newline at end of file diff --git a/tests/test_database_models.py b/tests/test_database_models.py new file mode 100644 index 0000000..731a139 --- /dev/null +++ b/tests/test_database_models.py @@ -0,0 +1,169 @@ +"""Tests for src/resources/database/db_models.py""" + +from sqlalchemy import JSON, Float, Integer, String + +from src.resources.database.db_models import AnalysisDB, ArchiveDB + +_SHARED_COLUMNS = [ + "id", + "deployment_name", + "analysis_id", + "project_id", + "registry_url", + "image_url", + "registry_user", + "registry_password", + "status", + "log", + "pod_ids", + "namespace", + "kong_token", + "restart_counter", + "progress", + "time_created", + "time_updated", +] + + +class TestAnalysisDB: + def test_table_name(self): + assert AnalysisDB.__tablename__ == "analysis" + + def test_all_columns_exist(self): + cols = {c.name for c in AnalysisDB.__table__.columns} + assert set(_SHARED_COLUMNS) == cols + + def test_id_is_primary_key(self): + col = AnalysisDB.__table__.c["id"] + assert col.primary_key + assert isinstance(col.type, Integer) + + def test_deployment_name_is_unique(self): + col = AnalysisDB.__table__.c["deployment_name"] + assert col.unique + assert isinstance(col.type, String) + + def test_analysis_id_is_indexed(self): + 
col = AnalysisDB.__table__.c["analysis_id"] + assert col.index + assert isinstance(col.type, String) + + def test_project_id_is_indexed(self): + col = AnalysisDB.__table__.c["project_id"] + assert col.index + assert isinstance(col.type, String) + + def test_pod_ids_is_json(self): + col = AnalysisDB.__table__.c["pod_ids"] + assert isinstance(col.type, JSON) + assert col.nullable + + def test_restart_counter_default(self): + col = AnalysisDB.__table__.c["restart_counter"] + assert isinstance(col.type, Integer) + assert col.default.arg == 0 + + def test_progress_default(self): + col = AnalysisDB.__table__.c["progress"] + assert isinstance(col.type, Integer) + assert col.default.arg == 0 + + def test_time_created_is_float(self): + col = AnalysisDB.__table__.c["time_created"] + assert isinstance(col.type, Float) + assert col.nullable + + def test_time_updated_is_float(self): + col = AnalysisDB.__table__.c["time_updated"] + assert isinstance(col.type, Float) + assert col.nullable + + def test_nullable_string_columns(self): + nullable_cols = [ + "registry_url", "image_url", "registry_user", "registry_password", + "status", "log", "namespace", "kong_token", + ] + for name in nullable_cols: + col = AnalysisDB.__table__.c[name] + assert col.nullable, f"{name} should be nullable" + assert isinstance(col.type, String), f"{name} should be String" + + +class TestArchiveDB: + def test_table_name(self): + assert ArchiveDB.__tablename__ == "archive" + + def test_all_columns_exist(self): + cols = {c.name for c in ArchiveDB.__table__.columns} + assert set(_SHARED_COLUMNS) == cols + + def test_id_is_primary_key(self): + col = ArchiveDB.__table__.c["id"] + assert col.primary_key + assert isinstance(col.type, Integer) + + def test_deployment_name_is_unique(self): + col = ArchiveDB.__table__.c["deployment_name"] + assert col.unique + assert isinstance(col.type, String) + + def test_analysis_id_is_indexed(self): + col = ArchiveDB.__table__.c["analysis_id"] + assert col.index + assert isinstance(col.type, String) + + def test_project_id_is_indexed(self): + col = ArchiveDB.__table__.c["project_id"] + assert col.index + assert isinstance(col.type, String) + + def test_pod_ids_is_json(self): + col = ArchiveDB.__table__.c["pod_ids"] + assert isinstance(col.type, JSON) + assert col.nullable + + def test_restart_counter_default(self): + col = ArchiveDB.__table__.c["restart_counter"] + assert isinstance(col.type, Integer) + assert col.default.arg == 0 + + def test_progress_default(self): + col = ArchiveDB.__table__.c["progress"] + assert isinstance(col.type, Integer) + assert col.default.arg == 0 + + def test_time_created_is_float(self): + col = ArchiveDB.__table__.c["time_created"] + assert isinstance(col.type, Float) + assert col.nullable + + def test_time_updated_is_float(self): + col = ArchiveDB.__table__.c["time_updated"] + assert isinstance(col.type, Float) + assert col.nullable + + def test_nullable_string_columns(self): + nullable_cols = [ + "registry_url", "image_url", "registry_user", "registry_password", + "status", "log", "namespace", "kong_token", + ] + for name in nullable_cols: + col = ArchiveDB.__table__.c[name] + assert col.nullable, f"{name} should be nullable" + assert isinstance(col.type, String), f"{name} should be String" + + +class TestSharedSchema: + def test_analysis_and_archive_have_same_columns(self): + analysis_cols = {c.name for c in AnalysisDB.__table__.columns} + archive_cols = {c.name for c in ArchiveDB.__table__.columns} + assert analysis_cols == archive_cols + + def 
test_analysis_and_archive_have_different_table_names(self): + assert AnalysisDB.__tablename__ != ArchiveDB.__tablename__ + + def test_both_inherit_from_base(self): + from src.resources.database.db_models import Base + + assert issubclass(AnalysisDB, Base) + assert issubclass(ArchiveDB, Base) \ No newline at end of file diff --git a/tests/test_k8s_kubernetes.py b/tests/test_k8s_kubernetes.py new file mode 100644 index 0000000..212087a --- /dev/null +++ b/tests/test_k8s_kubernetes.py @@ -0,0 +1,420 @@ +""" +Tests for src/k8s/kubernetes.py + +Covers: + - create_harbor_secret: success, first-call failure -> delete+retry, Conflict re-raises + - create_analysis_deployment: full chain (deployment + service + nginx + network policy) + - delete_deployment: all resources cleaned up, Not-Found exceptions handled silently + - get_analysis_logs: structure, pod_id filtering, ApiException returns [] + - get_pod_status: ready/waiting/terminated/no pods +""" + +import pytest +from unittest.mock import MagicMock, patch, call +from kubernetes.client.exceptions import ApiException + +from src.k8s.kubernetes import ( + create_harbor_secret, + create_analysis_deployment, + delete_deployment, + get_analysis_logs, + get_pod_status, +) + + +# ─── Helpers ───────────────────────────────────────────────────────────────── + +def _make_pod_item(name: str, pod_ip: str = "10.0.0.1") -> MagicMock: + """Build a minimal mock K8s pod item with metadata.name and status.pod_ip.""" + pod = MagicMock() + pod.metadata.name = name + pod.status.pod_ip = pod_ip + return pod + + +def _make_pod_list(*names: str, pod_ip: str = "10.0.0.1") -> MagicMock: + """Build a mock K8s pod list response.""" + result = MagicMock() + result.items = [_make_pod_item(n, pod_ip) for n in names] + return result + + +def _not_found() -> ApiException: + return ApiException(status=404, reason="Not Found") + + +# ─── create_harbor_secret ──────────────────────────────────────────────────── + +class TestCreateHarborSecret: + def test_success_creates_secret(self, mock_k8s_clients): + create_harbor_secret("harbor.test", "user", "password") + + mock_k8s_clients.core_v1.create_namespaced_secret.assert_called_once() + mock_k8s_clients.core_v1.delete_namespaced_secret.assert_not_called() + + def test_first_call_fails_triggers_delete_and_retry(self, mock_k8s_clients): + mock_k8s_clients.core_v1.create_namespaced_secret.side_effect = [ + ApiException(status=409, reason="AlreadyExists"), + None, # retry succeeds + ] + + create_harbor_secret("harbor.test", "user", "password", name="my-secret") + + mock_k8s_clients.core_v1.delete_namespaced_secret.assert_called_once_with( + name="my-secret", namespace="default" + ) + assert mock_k8s_clients.core_v1.create_namespaced_secret.call_count == 2 + + def test_conflict_on_retry_raises(self, mock_k8s_clients): + conflict_exc = ApiException(status=409, reason="Conflict") + mock_k8s_clients.core_v1.create_namespaced_secret.side_effect = [ + ApiException(status=409, reason="AlreadyExists"), + conflict_exc, + ] + + with pytest.raises(Exception, match="Conflict in harbor secret creation remains unresolved"): + create_harbor_secret("harbor.test", "user", "password") + + def test_non_conflict_on_retry_raises(self, mock_k8s_clients): + other_exc = ApiException(status=500, reason="InternalError") + mock_k8s_clients.core_v1.create_namespaced_secret.side_effect = [ + ApiException(status=409, reason="AlreadyExists"), + other_exc, + ] + + with pytest.raises(Exception, match="Unknown error during harbor secret creation"): + 
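+            # A non-409 failure on the retry is re-raised wrapped in a generic
+            # "Unknown error during harbor secret creation" Exception.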
create_harbor_secret("harbor.test", "user", "password") + + def test_custom_name_and_namespace(self, mock_k8s_clients): + create_harbor_secret("harbor.test", "user", "password", name="custom-secret", namespace="flame-ns") + + _, call_kwargs = mock_k8s_clients.core_v1.create_namespaced_secret.call_args + assert call_kwargs["namespace"] == "flame-ns" + + +# ─── create_analysis_deployment ───────────────────────────────────────────── + +class TestCreateAnalysisDeployment: + """Tests for the full deployment chain. + + _create_nginx_config_map contains while-loops waiting for pod IPs and calls + find_k8s_resources 7 times, so we patch both aggressively. + """ + + _ENV = { + "PROJECT_ID": "project-123", + "ANALYSIS_ID": "analysis-456", + } + + @pytest.fixture(autouse=True) + def _patch_sleep(self): + with patch("src.k8s.kubernetes.time.sleep"): + yield + + @pytest.fixture + def _setup_pod_reads(self, mock_k8s_clients): + """Return mocks configured so all while-loops exit on first iteration.""" + mb_pod = MagicMock() + mb_pod.status.pod_ip = "10.0.0.1" + po_pod = MagicMock() + po_pod.status.pod_ip = "10.0.0.2" + # read_namespaced_pod: first call = message-broker, second = po + mock_k8s_clients.core_v1.read_namespaced_pod.side_effect = [mb_pod, po_pod] + + # list_namespaced_pod: used for analysis IP (config map) and _get_pods at end + analysis_pod = _make_pod_item("analysis-my-dep-0-pod", pod_ip="10.0.0.3") + pod_list = MagicMock() + pod_list.items = [analysis_pod] + mock_k8s_clients.core_v1.list_namespaced_pod.return_value = pod_list + + return mock_k8s_clients + + def _find_side_effects(self): + return [ + "message-broker-svc", # service/label/component=flame-message-broker + "message-broker-pod", # pod/label/component=flame-message-broker + "po-svc", # service/label/component=flame-po + "po-pod", # pod/label/component=flame-po + "hub-adapter-svc", # service/label/component=flame-hub-adapter + "kong-proxy", # service/label/app.kubernetes.io/name=kong + "storage-svc", # service/label/component=flame-storage-service + ] + + def test_creates_analysis_deployment(self, mock_k8s_clients, _setup_pod_reads): + with patch("src.k8s.kubernetes.find_k8s_resources", side_effect=self._find_side_effects()): + create_analysis_deployment("my-dep", "harbor.test/image:latest", env=self._ENV) + + mock_k8s_clients.apps_v1.create_namespaced_deployment.assert_called() + first_call_kwargs = mock_k8s_clients.apps_v1.create_namespaced_deployment.call_args_list[0][1] + assert first_call_kwargs["namespace"] == "default" + + def test_creates_nginx_deployment(self, mock_k8s_clients, _setup_pod_reads): + with patch("src.k8s.kubernetes.find_k8s_resources", side_effect=self._find_side_effects()): + create_analysis_deployment("my-dep", "harbor.test/image:latest", env=self._ENV) + + deployment_names = [ + c[1]["body"].metadata.name + for c in mock_k8s_clients.apps_v1.create_namespaced_deployment.call_args_list + ] + assert any("nginx" in name for name in deployment_names) + + def test_creates_services(self, mock_k8s_clients, _setup_pod_reads): + with patch("src.k8s.kubernetes.find_k8s_resources", side_effect=self._find_side_effects()): + create_analysis_deployment("my-dep", "harbor.test/image:latest", env=self._ENV) + + mock_k8s_clients.core_v1.create_namespaced_service.assert_called() + + def test_creates_config_map(self, mock_k8s_clients, _setup_pod_reads): + with patch("src.k8s.kubernetes.find_k8s_resources", side_effect=self._find_side_effects()): + create_analysis_deployment("my-dep", "harbor.test/image:latest", 
env=self._ENV) + + mock_k8s_clients.core_v1.create_namespaced_config_map.assert_called_once() + + def test_creates_network_policy(self, mock_k8s_clients, _setup_pod_reads): + with patch("src.k8s.kubernetes.find_k8s_resources", side_effect=self._find_side_effects()): + create_analysis_deployment("my-dep", "harbor.test/image:latest", env=self._ENV) + + mock_k8s_clients.networking_v1.create_namespaced_network_policy.assert_called_once() + + def test_returns_pod_names(self, mock_k8s_clients, _setup_pod_reads): + with patch("src.k8s.kubernetes.find_k8s_resources", side_effect=self._find_side_effects()): + result = create_analysis_deployment("my-dep", "harbor.test/image:latest", env=self._ENV) + + assert isinstance(result, list) + assert result == ["analysis-my-dep-0-pod"] + + def test_custom_namespace_propagated(self, mock_k8s_clients, _setup_pod_reads): + with patch("src.k8s.kubernetes.find_k8s_resources", side_effect=self._find_side_effects()): + create_analysis_deployment( + "my-dep", "harbor.test/image:latest", env=self._ENV, namespace="flame-ns" + ) + + all_namespaces = [ + c[1].get("namespace") or c[0][0] if c[0] else c[1].get("namespace") + for c in mock_k8s_clients.apps_v1.create_namespaced_deployment.call_args_list + ] + for ns in all_namespaces: + assert ns == "flame-ns" + + +# ─── delete_deployment ─────────────────────────────────────────────────────── + +class TestDeleteDeployment: + def test_deletes_analysis_and_nginx_deployments(self, mock_k8s_clients): + delete_deployment("analysis-123-0") + + calls = mock_k8s_clients.apps_v1.delete_namespaced_deployment.call_args_list + names = [c[1]["name"] for c in calls] + assert "analysis-123-0" in names + assert "nginx-analysis-123-0" in names + + def test_deletes_analysis_and_nginx_services(self, mock_k8s_clients): + delete_deployment("analysis-123-0") + + calls = mock_k8s_clients.core_v1.delete_namespaced_service.call_args_list + names = [c[1]["name"] for c in calls] + assert "analysis-123-0" in names + assert "nginx-analysis-123-0" in names + + def test_deletes_network_policy(self, mock_k8s_clients): + delete_deployment("analysis-123-0") + + mock_k8s_clients.networking_v1.delete_namespaced_network_policy.assert_called_once_with( + name="nginx-to-analysis-123-0-policy", namespace="default" + ) + + def test_deletes_config_map(self, mock_k8s_clients): + delete_deployment("analysis-123-0") + + mock_k8s_clients.core_v1.delete_namespaced_config_map.assert_called_once_with( + name="nginx-analysis-123-0-config", namespace="default" + ) + + def test_not_found_deployment_exception_is_silenced(self, mock_k8s_clients): + mock_k8s_clients.apps_v1.delete_namespaced_deployment.side_effect = _not_found() + delete_deployment("analysis-123-0") # must not raise + + def test_not_found_network_policy_exception_is_silenced(self, mock_k8s_clients): + mock_k8s_clients.networking_v1.delete_namespaced_network_policy.side_effect = _not_found() + delete_deployment("analysis-123-0") # must not raise + + def test_not_found_config_map_exception_is_silenced(self, mock_k8s_clients): + mock_k8s_clients.core_v1.delete_namespaced_config_map.side_effect = _not_found() + delete_deployment("analysis-123-0") # must not raise + + def test_custom_namespace_forwarded(self, mock_k8s_clients): + delete_deployment("analysis-123-0", namespace="flame-ns") + + calls = mock_k8s_clients.apps_v1.delete_namespaced_deployment.call_args_list + assert all(c[1]["namespace"] == "flame-ns" for c in calls) + mock_k8s_clients.networking_v1.delete_namespaced_network_policy.assert_called_once_with( 
+ name="nginx-to-analysis-123-0-policy", namespace="flame-ns" + ) + + +# ─── get_analysis_logs ─────────────────────────────────────────────────────── + +class TestGetAnalysisLogs: + def test_returns_analysis_and_nginx_structure(self, mock_k8s_clients, mock_database): + mock_database.get_deployment_pod_ids.return_value = ["pod-1"] + mock_k8s_clients.core_v1.list_namespaced_pod.return_value = _make_pod_list("pod-1") + mock_k8s_clients.core_v1.read_namespaced_pod_log.return_value = "some log line\n" + + result = get_analysis_logs({"analysis-123": "analysis-123-0"}, mock_database) + + assert "analysis" in result + assert "nginx" in result + assert "analysis-123" in result["analysis"] + assert "analysis-123" in result["nginx"] + + def test_pod_id_filter_limits_analysis_logs_to_known_pods(self, mock_k8s_clients, mock_database): + """In the analysis path, only pods in pod_ids have logs fetched. + The nginx path has no pod_ids filter and fetches all pods, so we use an + empty nginx pod list to isolate the assertion to the analysis path. + """ + mock_database.get_deployment_pod_ids.return_value = ["pod-1"] + analysis_pod_list = _make_pod_list("pod-1", "pod-2") + nginx_pod_list = _make_pod_list() # nginx returns nothing -> no extra reads + mock_k8s_clients.core_v1.list_namespaced_pod.side_effect = [ + analysis_pod_list, # analysis deployment lookup + nginx_pod_list, # nginx deployment lookup + ] + mock_k8s_clients.core_v1.read_namespaced_pod_log.return_value = "log\n" + + get_analysis_logs({"analysis-123": "analysis-123-0"}, mock_database) + + log_call_args = mock_k8s_clients.core_v1.read_namespaced_pod_log.call_args_list + pod_names_fetched = [c[0][0] for c in log_call_args] + assert "pod-1" in pod_names_fetched + assert "pod-2" not in pod_names_fetched + + def test_api_exception_returns_empty_list(self, mock_k8s_clients, mock_database): + mock_database.get_deployment_pod_ids.return_value = ["pod-1"] + mock_k8s_clients.core_v1.list_namespaced_pod.return_value = _make_pod_list("pod-1") + mock_k8s_clients.core_v1.read_namespaced_pod_log.side_effect = ApiException(status=500) + + result = get_analysis_logs({"analysis-123": "analysis-123-0"}, mock_database) + + assert result["analysis"]["analysis-123"] == [] + + def test_multiple_analyses(self, mock_k8s_clients, mock_database): + mock_database.get_deployment_pod_ids.return_value = [] + mock_k8s_clients.core_v1.list_namespaced_pod.return_value = _make_pod_list() + + result = get_analysis_logs( + {"analysis-1": "dep-1", "analysis-2": "dep-2"}, + mock_database, + ) + + assert set(result["analysis"].keys()) == {"analysis-1", "analysis-2"} + assert set(result["nginx"].keys()) == {"analysis-1", "analysis-2"} + + def test_logs_sanitised_removes_info_lines(self, mock_k8s_clients, mock_database): + """_get_logs strips lines starting with 'INFO:' and healthz GET lines.""" + mock_database.get_deployment_pod_ids.return_value = None + mock_k8s_clients.core_v1.list_namespaced_pod.return_value = _make_pod_list("pod-1") + mock_k8s_clients.core_v1.read_namespaced_pod_log.return_value = ( + "INFO: should be removed\n" + 'useful log line\n' + '"GET /healthz HTTP/1.0" 200 OK\n' + ) + + result = get_analysis_logs({"analysis-123": "analysis-123-0"}, mock_database) + + combined = "\n".join(result["analysis"]["analysis-123"]) + assert "INFO:" not in combined + assert "useful log line" in combined + assert "healthz" not in combined + + +# ─── get_pod_status ────────────────────────────────────────────────────────── + +class TestGetPodStatus: + def _make_container_status(self, 
ready: bool, waiting=None, terminated=None) -> MagicMock: + cs = MagicMock() + cs.ready = ready + cs.state.waiting = waiting + cs.state.terminated = terminated + return cs + + def _make_full_pod(self, name: str, container_status: MagicMock) -> MagicMock: + pod = MagicMock() + pod.metadata.name = name + pod.status.container_statuses = [container_status] + return pod + + def test_ready_pod_returns_empty_reason_and_message(self, mock_k8s_clients): + cs = self._make_container_status(ready=True) + pod = self._make_full_pod("pod-1", cs) + mock_k8s_clients.core_v1.list_namespaced_pod.return_value.items = [pod] + + result = get_pod_status("analysis-123-0") + + assert result == { + "pod-1": {"ready": True, "reason": "", "message": ""} + } + + def test_waiting_pod_captures_reason_and_message(self, mock_k8s_clients): + waiting = MagicMock() + waiting.reason = "ImagePullBackOff" + waiting.message = "Back-off pulling image" + cs = self._make_container_status(ready=False, waiting=waiting) + pod = self._make_full_pod("pod-1", cs) + mock_k8s_clients.core_v1.list_namespaced_pod.return_value.items = [pod] + + result = get_pod_status("analysis-123-0") + + assert result["pod-1"]["ready"] is False + assert result["pod-1"]["reason"] == "ImagePullBackOff" + assert result["pod-1"]["message"] == "Back-off pulling image" + + def test_terminated_pod_captures_reason_and_message(self, mock_k8s_clients): + terminated = MagicMock() + terminated.reason = "OOMKilled" + terminated.message = "Container ran out of memory" + cs = self._make_container_status(ready=False, waiting=None, terminated=terminated) + pod = self._make_full_pod("pod-1", cs) + mock_k8s_clients.core_v1.list_namespaced_pod.return_value.items = [pod] + + result = get_pod_status("analysis-123-0") + + assert result["pod-1"]["ready"] is False + assert result["pod-1"]["reason"] == "OOMKilled" + assert result["pod-1"]["message"] == "Container ran out of memory" + + def test_unknown_error_state_returns_unknown_error(self, mock_k8s_clients): + cs = self._make_container_status(ready=False, waiting=None, terminated=None) + pod = self._make_full_pod("pod-1", cs) + mock_k8s_clients.core_v1.list_namespaced_pod.return_value.items = [pod] + + result = get_pod_status("analysis-123-0") + + assert result["pod-1"]["reason"] == "UnknownError" + assert "unknown error state" in result["pod-1"]["message"] + + def test_no_pods_returns_none(self, mock_k8s_clients): + mock_k8s_clients.core_v1.list_namespaced_pod.return_value.items = [] + + result = get_pod_status("analysis-123-0") + + assert result is None + + def test_multiple_pods_all_in_result(self, mock_k8s_clients): + pods = [ + self._make_full_pod("pod-1", self._make_container_status(ready=True)), + self._make_full_pod("pod-2", self._make_container_status(ready=True)), + ] + mock_k8s_clients.core_v1.list_namespaced_pod.return_value.items = pods + + result = get_pod_status("analysis-123-0") + + assert set(result.keys()) == {"pod-1", "pod-2"} + + def test_custom_namespace_forwarded(self, mock_k8s_clients): + mock_k8s_clients.core_v1.list_namespaced_pod.return_value.items = [] + + get_pod_status("analysis-123-0", namespace="flame-ns") + + mock_k8s_clients.core_v1.list_namespaced_pod.assert_called_once_with( + namespace="flame-ns", label_selector="app=analysis-123-0" + ) \ No newline at end of file diff --git a/tests/test_k8s_utils.py b/tests/test_k8s_utils.py new file mode 100644 index 0000000..75a3cde --- /dev/null +++ b/tests/test_k8s_utils.py @@ -0,0 +1,287 @@ +""" +Tests for src/k8s/utils.py + +Covers: + - 
load_cluster_config
+  - get_current_namespace (file found / not found)
+  - find_k8s_resources: all resource types, selectors, empty/single/multiple results
+  - delete_k8s_resource: all types, 404 handled gracefully, unsupported type error
+"""
+
+import pytest
+from unittest.mock import MagicMock, patch, mock_open
+
+from kubernetes.client.exceptions import ApiException
+
+from src.k8s.utils import (
+    load_cluster_config,
+    get_current_namespace,
+    find_k8s_resources,
+    delete_k8s_resource,
+)
+
+
+# ─── Helpers ─────────────────────────────────────────────────────────────────
+
+def _make_resource_list(names: list[str]) -> MagicMock:
+    """Build a mock K8s list response with .items containing named resources."""
+    result = MagicMock()
+    items = []
+    for name in names:
+        item = MagicMock()
+        item.metadata.name = name
+        items.append(item)
+    result.items = items
+    return result
+
+
+# ─── load_cluster_config ─────────────────────────────────────────────────────
+
+class TestLoadClusterConfig:
+    def test_delegates_to_incluster_config(self):
+        with patch("src.k8s.utils.config.load_incluster_config") as mock_load:
+            load_cluster_config()
+            mock_load.assert_called_once_with()
+
+
+# ─── get_current_namespace ───────────────────────────────────────────────────
+
+class TestGetCurrentNamespace:
+    def test_reads_namespace_from_file(self):
+        with patch("builtins.open", mock_open(read_data="flame-namespace\n")):
+            result = get_current_namespace()
+            assert result == "flame-namespace"
+
+    def test_returns_default_when_file_not_found(self):
+        with patch("builtins.open", side_effect=FileNotFoundError):
+            result = get_current_namespace()
+            assert result == "default"
+
+    def test_strips_whitespace_from_file_content(self):
+        with patch("builtins.open", mock_open(read_data=" my-ns \n")):
+            result = get_current_namespace()
+            assert result == "my-ns"
+
+
+# ─── find_k8s_resources ──────────────────────────────────────────────────────
+
+class TestFindK8sResourcesValidation:
+    def test_invalid_resource_type_raises(self):
+        with pytest.raises(ValueError, match="resource_type must be one of"):
+            find_k8s_resources("unknown")
+
+    def test_invalid_selector_type_raises(self):
+        with pytest.raises(ValueError, match="selector_type must be either"):
+            find_k8s_resources("pod", selector_type="invalid")
+
+    def test_selector_type_without_arg_raises(self):
+        with pytest.raises(ValueError, match="selector_arg must not be None"):
+            find_k8s_resources("pod", selector_type="label")
+
+
+class TestFindK8sResourcesResults:
+    """Tests for each resource type and result-count behaviours."""
+
+    def test_deployment_single_result(self, mock_k8s_clients):
+        mock_k8s_clients.apps_v1.list_namespaced_deployment.return_value = (
+            _make_resource_list(["analysis-123-0"])
+        )
+        result = find_k8s_resources("deployment")[0]
+        assert result == "analysis-123-0"
+
+    def test_deployment_multiple_results(self, mock_k8s_clients):
+        mock_k8s_clients.apps_v1.list_namespaced_deployment.return_value = (
+            _make_resource_list(["analysis-123-0", "analysis-456-0"])
+        )
+        result = find_k8s_resources("deployment")
+        assert result == ["analysis-123-0", "analysis-456-0"]
+
+    def test_deployment_empty_returns_list_with_none(self, mock_k8s_clients):
+        mock_k8s_clients.apps_v1.list_namespaced_deployment.return_value = (
+            _make_resource_list([])
+        )
+        result = find_k8s_resources("deployment")
+        assert result == [None]
+
+    def test_networkpolicy_resource(self, mock_k8s_clients):
+        
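+        # The three deployment tests above pin down the return convention of
+        # find_k8s_resources: always a list of names, with an empty listing
+        # yielding [None] rather than []. A minimal sketch of that convention
+        # (variable names here are assumptions, not taken from the source):
+        #
+        #     names = [item.metadata.name for item in response.items]
+        #     return names if names else [None]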
mock_k8s_clients.networking_v1.list_namespaced_network_policy.return_value = ( + _make_resource_list(["np-analysis-123"]) + ) + result = find_k8s_resources("networkpolicy")[0] + assert result == "np-analysis-123" + + def test_pod_resource(self, mock_k8s_clients): + mock_k8s_clients.core_v1.list_namespaced_pod.return_value = ( + _make_resource_list(["pod-abc"]) + ) + result = find_k8s_resources("pod")[0] + assert result == "pod-abc" + + def test_service_resource(self, mock_k8s_clients): + mock_k8s_clients.core_v1.list_namespaced_service.return_value = ( + _make_resource_list(["svc-analysis-123"]) + ) + result = find_k8s_resources("service")[0] + assert result == "svc-analysis-123" + + def test_configmap_resource(self, mock_k8s_clients): + mock_k8s_clients.core_v1.list_namespaced_config_map.return_value = ( + _make_resource_list(["cm-nginx-123"]) + ) + result = find_k8s_resources("configmap")[0] + assert result == "cm-nginx-123" + + def test_job_resource(self, mock_k8s_clients): + mock_k8s_clients.batch_v1.list_namespaced_job.return_value = ( + _make_resource_list(["job-analysis-123"]) + ) + result = find_k8s_resources("job")[0] + assert result == "job-analysis-123" + + +class TestFindK8sResourcesSelectors: + def test_label_selector_forwarded_to_api(self, mock_k8s_clients): + mock_k8s_clients.apps_v1.list_namespaced_deployment.return_value = ( + _make_resource_list(["dep-1"]) + ) + find_k8s_resources("deployment", selector_type="label", selector_arg="app=my-app") + mock_k8s_clients.apps_v1.list_namespaced_deployment.assert_called_once_with( + namespace="default", label_selector="app=my-app" + ) + + def test_field_selector_forwarded_to_api(self, mock_k8s_clients): + mock_k8s_clients.core_v1.list_namespaced_pod.return_value = ( + _make_resource_list(["pod-1"]) + ) + find_k8s_resources("pod", selector_type="field", selector_arg="status.phase=Running") + mock_k8s_clients.core_v1.list_namespaced_pod.assert_called_once_with( + namespace="default", field_selector="status.phase=Running" + ) + + def test_custom_namespace_forwarded_to_api(self, mock_k8s_clients): + mock_k8s_clients.apps_v1.list_namespaced_deployment.return_value = ( + _make_resource_list(["dep-1"]) + ) + find_k8s_resources("deployment", namespace="flame-ns") + mock_k8s_clients.apps_v1.list_namespaced_deployment.assert_called_once_with( + namespace="flame-ns" + ) + + +class TestFindK8sResourcesManualNameSelector: + def test_filters_to_single_match(self, mock_k8s_clients): + mock_k8s_clients.apps_v1.list_namespaced_deployment.return_value = ( + _make_resource_list(["analysis-123-dep", "analysis-456-dep", "other-dep"]) + ) + result = find_k8s_resources("deployment", manual_name_selector="analysis-123") + assert result == ["analysis-123-dep"] + + def test_returns_list_when_multiple_match(self, mock_k8s_clients): + mock_k8s_clients.apps_v1.list_namespaced_deployment.return_value = ( + _make_resource_list(["analysis-123-dep-0", "analysis-123-dep-1", "other-dep"]) + ) + result = find_k8s_resources("deployment", manual_name_selector="analysis-123") + assert result == ["analysis-123-dep-0", "analysis-123-dep-1"] + + def test_no_match_returns_empty_list(self, mock_k8s_clients): + mock_k8s_clients.apps_v1.list_namespaced_deployment.return_value = ( + _make_resource_list(["analysis-456-dep", "other-dep"]) + ) + result = find_k8s_resources("deployment", manual_name_selector="analysis-123") + assert result == [] + + +# ─── delete_k8s_resource ───────────────────────────────────────────────────── + +class TestDeleteK8sResourceTypes: + """Verify that 
each resource type calls the correct K8s API method.""" + + def test_delete_deployment(self, mock_k8s_clients): + delete_k8s_resource("my-dep", "deployment") + mock_k8s_clients.apps_v1.delete_namespaced_deployment.assert_called_once_with( + name="my-dep", namespace="default", propagation_policy="Foreground" + ) + + def test_delete_service(self, mock_k8s_clients): + delete_k8s_resource("my-svc", "service") + mock_k8s_clients.core_v1.delete_namespaced_service.assert_called_once_with( + name="my-svc", namespace="default" + ) + + def test_delete_pod(self, mock_k8s_clients): + delete_k8s_resource("my-pod", "pod") + mock_k8s_clients.core_v1.delete_namespaced_pod.assert_called_once_with( + name="my-pod", namespace="default" + ) + + def test_delete_configmap(self, mock_k8s_clients): + delete_k8s_resource("my-cm", "configmap") + mock_k8s_clients.core_v1.delete_namespaced_config_map.assert_called_once_with( + name="my-cm", namespace="default" + ) + + def test_delete_networkpolicy(self, mock_k8s_clients): + delete_k8s_resource("my-policy", "networkpolicy") + mock_k8s_clients.networking_v1.delete_namespaced_network_policy.assert_called_once_with( + name="my-policy", namespace="default" + ) + + def test_delete_job(self, mock_k8s_clients): + delete_k8s_resource("my-job", "job") + mock_k8s_clients.batch_v1.delete_namespaced_job.assert_called_once_with( + name="my-job", namespace="default", propagation_policy="Foreground" + ) + + def test_custom_namespace_forwarded(self, mock_k8s_clients): + delete_k8s_resource("my-dep", "deployment", namespace="flame-ns") + mock_k8s_clients.apps_v1.delete_namespaced_deployment.assert_called_once_with( + name="my-dep", namespace="flame-ns", propagation_policy="Foreground" + ) + + def test_unsupported_type_raises_value_error(self, mock_k8s_clients): + with pytest.raises(ValueError, match="Unsupported resource type"): + delete_k8s_resource("my-thing", "unknown") + + +class TestDeleteK8sResource404Handling: + """404 ApiExceptions must be swallowed for every resource type.""" + + def _not_found_exc(self): + return ApiException(status=404, reason="Not Found") + + def test_deployment_404_is_silent(self, mock_k8s_clients): + mock_k8s_clients.apps_v1.delete_namespaced_deployment.side_effect = ( + self._not_found_exc() + ) + delete_k8s_resource("gone", "deployment") # must not raise + + def test_service_404_is_silent(self, mock_k8s_clients): + mock_k8s_clients.core_v1.delete_namespaced_service.side_effect = ( + self._not_found_exc() + ) + delete_k8s_resource("gone", "service") + + def test_pod_404_is_silent(self, mock_k8s_clients): + mock_k8s_clients.core_v1.delete_namespaced_pod.side_effect = ( + self._not_found_exc() + ) + delete_k8s_resource("gone", "pod") + + def test_configmap_404_is_silent(self, mock_k8s_clients): + mock_k8s_clients.core_v1.delete_namespaced_config_map.side_effect = ( + self._not_found_exc() + ) + delete_k8s_resource("gone", "configmap") + + def test_networkpolicy_404_is_silent(self, mock_k8s_clients): + mock_k8s_clients.networking_v1.delete_namespaced_network_policy.side_effect = ( + self._not_found_exc() + ) + delete_k8s_resource("gone", "networkpolicy") + + def test_job_404_is_silent(self, mock_k8s_clients): + mock_k8s_clients.batch_v1.delete_namespaced_job.side_effect = ( + self._not_found_exc() + ) + delete_k8s_resource("gone", "job") \ No newline at end of file diff --git a/tests/test_main.py b/tests/test_main.py new file mode 100644 index 0000000..5e1e94c --- /dev/null +++ b/tests/test_main.py @@ -0,0 +1,85 @@ +"""Tests for src/main.py — entry point 
thread startup.""" + +from unittest.mock import MagicMock, patch, call + + +class TestMain: + def test_main_starts_api_thread_and_status_loop(self): + """main() starts an API thread and calls status_loop in the main thread.""" + mock_db = MagicMock() + mock_thread = MagicMock() + + with ( + patch("src.main.load_dotenv"), + patch("src.main.find_dotenv", return_value=".env"), + patch("src.main.load_cluster_config"), + patch("src.main.Database", return_value=mock_db), + patch("src.main.get_current_namespace", return_value="default"), + patch("src.main.Thread", return_value=mock_thread) as mock_thread_cls, + patch("src.main.status_loop") as mock_status_loop, + ): + from src.main import main + main() + + # Thread was created targeting start_po_api with correct kwargs + mock_thread_cls.assert_called_once() + call_kwargs = mock_thread_cls.call_args + assert call_kwargs.kwargs["kwargs"] == {"database": mock_db, "namespace": "default"} + + # Thread was started + mock_thread.start.assert_called_once() + + # status_loop was called with the database and default interval + mock_status_loop.assert_called_once_with(mock_db, 10) + + def test_main_uses_custom_status_loop_interval(self, monkeypatch): + """When STATUS_LOOP_INTERVAL is set, main() uses that value.""" + monkeypatch.setenv("STATUS_LOOP_INTERVAL", "30") + + mock_db = MagicMock() + mock_thread = MagicMock() + + with ( + patch("src.main.load_dotenv"), + patch("src.main.find_dotenv", return_value=".env"), + patch("src.main.load_cluster_config"), + patch("src.main.Database", return_value=mock_db), + patch("src.main.get_current_namespace", return_value="default"), + patch("src.main.Thread", return_value=mock_thread), + patch("src.main.status_loop") as mock_status_loop, + ): + from src.main import main + main() + + mock_status_loop.assert_called_once_with(mock_db, 30) + + def test_main_sets_default_status_loop_interval_when_missing(self, monkeypatch): + """When STATUS_LOOP_INTERVAL is not set, main() defaults to 10.""" + monkeypatch.delenv("STATUS_LOOP_INTERVAL", raising=False) + + mock_db = MagicMock() + mock_thread = MagicMock() + + with ( + patch("src.main.load_dotenv"), + patch("src.main.find_dotenv", return_value=".env"), + patch("src.main.load_cluster_config"), + patch("src.main.Database", return_value=mock_db), + patch("src.main.get_current_namespace", return_value="default"), + patch("src.main.Thread", return_value=mock_thread), + patch("src.main.status_loop") as mock_status_loop, + ): + from src.main import main + main() + + mock_status_loop.assert_called_once_with(mock_db, 10) + + def test_start_po_api_instantiates_pod_orchestration_api(self): + """start_po_api creates a PodOrchestrationAPI with the given args.""" + mock_db = MagicMock() + + with patch("src.main.PodOrchestrationAPI") as mock_api_cls: + from src.main import start_po_api + start_po_api(database=mock_db, namespace="test-ns") + + mock_api_cls.assert_called_once_with(mock_db, "test-ns") \ No newline at end of file diff --git a/tests/test_resources_analysis_entity.py b/tests/test_resources_analysis_entity.py new file mode 100644 index 0000000..1e9ecc1 --- /dev/null +++ b/tests/test_resources_analysis_entity.py @@ -0,0 +1,321 @@ +"""Tests for src/resources/analysis/entity.py""" + +import json +import pytest +from unittest.mock import MagicMock, patch + +from src.resources.analysis.entity import Analysis, CreateAnalysis, read_db_analysis +from src.status.constants import AnalysisStatus + + +# ─── Analysis model ─────────────────────────────────────────────────────────── + +class 
TestAnalysisModel: + def test_required_fields(self): + a = Analysis( + analysis_id="a1", + project_id="p1", + registry_url="harbor.test", + image_url="harbor.test/img", + registry_user="user", + registry_password="pw", + kong_token="tok", + ) + assert a.analysis_id == "a1" + assert a.project_id == "p1" + assert a.registry_url == "harbor.test" + assert a.image_url == "harbor.test/img" + assert a.registry_user == "user" + assert a.registry_password == "pw" + assert a.kong_token == "tok" + + def test_default_values(self): + a = Analysis( + analysis_id="a1", + project_id="p1", + registry_url="harbor.test", + image_url="harbor.test/img", + registry_user="user", + registry_password="pw", + kong_token="tok", + ) + assert a.namespace == "default" + assert a.restart_counter == 0 + assert a.progress == 0 + assert a.deployment_name == "" + assert a.tokens is None + assert a.analysis_config is None + assert a.status == AnalysisStatus.STARTING.value + assert a.log is None + assert a.pod_ids is None + + def test_status_defaults_to_starting(self): + a = Analysis( + analysis_id="a1", + project_id="p1", + registry_url="harbor.test", + image_url="harbor.test/img", + registry_user="user", + registry_password="pw", + kong_token="tok", + ) + assert a.status == "starting" + + +# ─── Analysis.start() ───────────────────────────────────────────────────────── + +class TestAnalysisStart: + @pytest.fixture + def analysis(self): + return Analysis( + analysis_id="test-analysis", + project_id="test-project", + registry_url="harbor.test", + image_url="harbor.test/test-project/test-analysis", + registry_user="robot_user", + registry_password="secret", + kong_token="kong-tok", + ) + + def test_start_sets_status_to_started(self, analysis, mock_database): + mock_tokens = {"RESULT_TOKEN": "result-tok", "ANALYSIS_TOKEN": "analysis-tok"} + with ( + patch("src.resources.analysis.entity.create_analysis_tokens", return_value=mock_tokens), + patch("src.resources.analysis.entity.create_analysis_deployment", return_value=["pod-1"]), + ): + analysis.start(database=mock_database) + assert analysis.status == AnalysisStatus.STARTED.value + + def test_start_sets_deployment_name(self, analysis, mock_database): + mock_tokens = {"RESULT_TOKEN": "r", "ANALYSIS_TOKEN": "a"} + with ( + patch("src.resources.analysis.entity.create_analysis_tokens", return_value=mock_tokens), + patch("src.resources.analysis.entity.create_analysis_deployment", return_value=["pod-1"]), + ): + analysis.start(database=mock_database) + assert analysis.deployment_name == "analysis-test-analysis-0" + + def test_start_deployment_name_uses_restart_counter(self, mock_database): + analysis = Analysis( + analysis_id="test-analysis", + project_id="test-project", + registry_url="harbor.test", + image_url="harbor.test/img", + registry_user="user", + registry_password="pw", + kong_token="tok", + restart_counter=3, + ) + mock_tokens = {"RESULT_TOKEN": "r", "ANALYSIS_TOKEN": "a"} + with ( + patch("src.resources.analysis.entity.create_analysis_tokens", return_value=mock_tokens), + patch("src.resources.analysis.entity.create_analysis_deployment", return_value=["pod-1"]), + ): + analysis.start(database=mock_database) + assert analysis.deployment_name == "analysis-test-analysis-3" + + def test_start_sets_analysis_config_with_ids(self, analysis, mock_database): + mock_tokens = {"RESULT_TOKEN": "result-tok", "ANALYSIS_TOKEN": "analysis-tok"} + with ( + patch("src.resources.analysis.entity.create_analysis_tokens", return_value=mock_tokens), + 
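+            # Both collaborators of Analysis.start() are patched throughout
+            # this class: token minting and the k8s deployment call. The tests
+            # then pin down the pure state transitions: status STARTED, a
+            # deployment_name of the form analysis-{id}-{restart_counter},
+            # analysis_config populated, pod_ids stored, and the row persisted
+            # via database.create_analysis().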
patch("src.resources.analysis.entity.create_analysis_deployment", return_value=["pod-1"]), + ): + analysis.start(database=mock_database) + assert analysis.analysis_config["ANALYSIS_ID"] == "test-analysis" + assert analysis.analysis_config["PROJECT_ID"] == "test-project" + assert analysis.analysis_config["DEPLOYMENT_NAME"] == "analysis-test-analysis-0" + + def test_start_stores_pod_ids(self, analysis, mock_database): + mock_tokens = {"RESULT_TOKEN": "r", "ANALYSIS_TOKEN": "a"} + with ( + patch("src.resources.analysis.entity.create_analysis_tokens", return_value=mock_tokens), + patch( + "src.resources.analysis.entity.create_analysis_deployment", + return_value=["pod-1", "pod-2"], + ), + ): + analysis.start(database=mock_database) + assert analysis.pod_ids == ["pod-1", "pod-2"] + + def test_start_calls_database_create_analysis(self, analysis, mock_database): + mock_tokens = {"RESULT_TOKEN": "r", "ANALYSIS_TOKEN": "a"} + with ( + patch("src.resources.analysis.entity.create_analysis_tokens", return_value=mock_tokens), + patch("src.resources.analysis.entity.create_analysis_deployment", return_value=["pod-1"]), + ): + analysis.start(database=mock_database) + mock_database.create_analysis.assert_called_once() + call_kwargs = mock_database.create_analysis.call_args.kwargs + assert call_kwargs["analysis_id"] == "test-analysis" + assert call_kwargs["status"] == AnalysisStatus.STARTED.value + + def test_start_uses_provided_namespace(self, analysis, mock_database): + mock_tokens = {"RESULT_TOKEN": "r", "ANALYSIS_TOKEN": "a"} + with ( + patch("src.resources.analysis.entity.create_analysis_tokens", return_value=mock_tokens), + patch("src.resources.analysis.entity.create_analysis_deployment", return_value=["pod-1"]), + ): + analysis.start(database=mock_database, namespace="flame-ns") + assert analysis.namespace == "flame-ns" + call_kwargs = mock_database.create_analysis.call_args.kwargs + assert call_kwargs["namespace"] == "flame-ns" + + +# ─── Analysis.stop() ────────────────────────────────────────────────────────── + +class TestAnalysisStop: + @pytest.fixture + def started_analysis(self): + return Analysis( + analysis_id="test-analysis", + project_id="test-project", + registry_url="harbor.test", + image_url="harbor.test/img", + registry_user="user", + registry_password="pw", + kong_token="tok", + deployment_name="analysis-test-analysis-0", + status=AnalysisStatus.STARTED.value, + pod_ids=["pod-1"], + ) + + def test_stop_sets_status_to_stopped(self, started_analysis, mock_database): + with patch("src.resources.analysis.entity.delete_deployment"): + started_analysis.stop(database=mock_database) + assert started_analysis.status == AnalysisStatus.STOPPED.value + + def test_stop_with_custom_status(self, started_analysis, mock_database): + with patch("src.resources.analysis.entity.delete_deployment"): + started_analysis.stop(database=mock_database, status=AnalysisStatus.FAILED.value) + assert started_analysis.status == AnalysisStatus.FAILED.value + + def test_stop_sets_log_when_provided(self, started_analysis, mock_database): + with patch("src.resources.analysis.entity.delete_deployment"): + started_analysis.stop(database=mock_database, log="something went wrong") + assert started_analysis.log == "something went wrong" + + def test_stop_preserves_existing_log_when_none_provided(self, started_analysis, mock_database): + started_analysis.log = "original log" + with patch("src.resources.analysis.entity.delete_deployment"): + started_analysis.stop(database=mock_database) + assert started_analysis.log == 
"original log" + + def test_stop_calls_delete_deployment(self, started_analysis, mock_database): + with patch("src.resources.analysis.entity.delete_deployment") as mock_del: + started_analysis.stop(database=mock_database) + mock_del.assert_called_once_with("analysis-test-analysis-0", namespace="default") + + def test_stop_updates_database_deployment_status(self, started_analysis, mock_database): + with patch("src.resources.analysis.entity.delete_deployment"): + started_analysis.stop(database=mock_database) + calls = mock_database.update_deployment.call_args_list + assert any( + c.args == ("analysis-test-analysis-0",) and c.kwargs.get("status") == AnalysisStatus.STOPPED.value + for c in calls + ) + + def test_stop_updates_database_deployment_log(self, started_analysis, mock_database): + with patch("src.resources.analysis.entity.delete_deployment"): + started_analysis.stop(database=mock_database, log="bye") + calls = mock_database.update_deployment.call_args_list + assert any( + c.args == ("analysis-test-analysis-0",) and c.kwargs.get("log") == "bye" + for c in calls + ) + + +# ─── read_db_analysis() ─────────────────────────────────────────────────────── + +class TestReadDbAnalysis: + def test_returns_analysis_instance(self, sample_analysis_db): + db_row = sample_analysis_db() + result = read_db_analysis(db_row) + assert isinstance(result, Analysis) + + def test_maps_all_fields(self, sample_analysis_db): + db_row = sample_analysis_db( + analysis_id="a99", + project_id="p99", + registry_url="harbor.custom", + image_url="harbor.custom/img", + registry_user="ruser", + registry_password="rpw", + status="executing", + pod_ids=json.dumps(["pod-a", "pod-b"]), + log="some log", + namespace="flame-ns", + kong_token="kong123", + restart_counter=2, + progress=50, + deployment_name="analysis-a99-2", + ) + result = read_db_analysis(db_row) + assert result.analysis_id == "a99" + assert result.project_id == "p99" + assert result.registry_url == "harbor.custom" + assert result.image_url == "harbor.custom/img" + assert result.registry_user == "ruser" + assert result.registry_password == "rpw" + assert result.status == "executing" + assert result.pod_ids == ["pod-a", "pod-b"] + assert result.log == "some log" + assert result.namespace == "flame-ns" + assert result.kong_token == "kong123" + assert result.restart_counter == 2 + assert result.progress == 50 + assert result.deployment_name == "analysis-a99-2" + + def test_deserializes_pod_ids_from_json(self, sample_analysis_db): + db_row = sample_analysis_db(pod_ids=json.dumps(["pod-1", "pod-2", "pod-3"])) + result = read_db_analysis(db_row) + assert result.pod_ids == ["pod-1", "pod-2", "pod-3"] + + def test_null_log_preserved(self, sample_analysis_db): + db_row = sample_analysis_db(log=None) + result = read_db_analysis(db_row) + assert result.log is None + + +# ─── CreateAnalysis ─────────────────────────────────────────────────────────── + +class TestCreateAnalysis: + def test_default_values(self): + ca = CreateAnalysis( + analysis_id="analysis_id", + project_id="project_id", + registry_url="harbor.privateaim", + image_url="harbor.privateaim/node_id/analysis_id", + registry_user="robot_user", + registry_password="default_pw", + kong_token="default_kong_token", + ) + assert ca.analysis_id == "analysis_id" + assert ca.project_id == "project_id" + assert ca.registry_url == "harbor.privateaim" + assert ca.image_url == "harbor.privateaim/node_id/analysis_id" + assert ca.registry_user == "robot_user" + assert ca.registry_password == "default_pw" + assert 
ca.kong_token == "default_kong_token" + assert ca.restart_counter == 0 + assert ca.progress == 0 + + def test_custom_values(self): + ca = CreateAnalysis( + analysis_id="custom-id", + project_id="custom-proj", + registry_url="harbor.custom", + image_url="harbor.custom/img", + registry_user="cuser", + registry_password="cpw", + kong_token="ctok", + restart_counter=5, + progress=42, + ) + assert ca.analysis_id == "custom-id" + assert ca.project_id == "custom-proj" + assert ca.restart_counter == 5 + assert ca.progress == 42 + + def test_is_pydantic_model(self): + from pydantic import BaseModel + assert issubclass(CreateAnalysis, BaseModel) \ No newline at end of file diff --git a/tests/test_resources_log_entity.py b/tests/test_resources_log_entity.py new file mode 100644 index 0000000..0807216 --- /dev/null +++ b/tests/test_resources_log_entity.py @@ -0,0 +1,224 @@ +"""Tests for src/resources/log/entity.py""" + +import pytest + +from src.resources.log.entity import ( + LogEntity, + CreateLogEntity, + CreateStartUpErrorLog, + AnalysisStoppedLog, +) +from src.status.constants import _MAX_RESTARTS + + +# ─── LogEntity ──────────────────────────────────────────────────────────────── + +class TestLogEntity: + def test_creation_with_required_fields(self): + entity = LogEntity(log="test message", log_type="info", id="id-1", created_at="2026-01-01") + assert entity.log == "test message" + assert entity.log_type == "info" + + def test_id_and_created_at_are_strings(self): + entity = LogEntity(log="msg", log_type="debug", id="id-1", created_at="2026-01-01") + assert isinstance(entity.id, str) + assert isinstance(entity.created_at, str) + + @pytest.mark.parametrize("log_type", [ + "emerg", "alert", "crit", "error", "warn", "notice", "info", "debug" + ]) + def test_all_valid_log_types(self, log_type): + entity = LogEntity(log="msg", log_type=log_type, id="id-1", created_at="2026-01-01") + assert entity.log_type == log_type + + def test_invalid_log_type_raises(self): + with pytest.raises(Exception): + LogEntity(log="msg", log_type="invalid", id="id-1", created_at="2026-01-01") + + def test_str_representation(self): + entity = LogEntity(log="hello", log_type="warn", id="id-1", created_at="2026-01-01") + s = str(entity) + assert "LogEntity" in s + assert entity.id in s + assert "hello" in s + assert "warn" in s + + +# ─── CreateLogEntity ────────────────────────────────────────────────────────── + +class TestCreateLogEntity: + def test_creation(self): + entity = CreateLogEntity( + log="test", + log_type="info", + analysis_id="analysis-1", + status="started", + progress=50, + ) + assert entity.log == "test" + assert entity.analysis_id == "analysis-1" + assert entity.status == "started" + assert entity.progress == 50 + + def test_to_log_entity_returns_log_entity(self): + entity = CreateLogEntity( + log="my log", + log_type="error", + analysis_id="analysis-1", + status="failed", + progress=10, + ) + result = entity.to_log_entity() + assert isinstance(result, LogEntity) + + def test_to_log_entity_copies_log_and_type(self): + entity = CreateLogEntity( + log="the message", + log_type="warn", + analysis_id="analysis-1", + status="started", + progress=0, + ) + result = entity.to_log_entity() + assert result.log == "the message" + assert result.log_type == "warn" + + def test_to_log_entity_drops_analysis_fields(self): + entity = CreateLogEntity( + log="msg", + log_type="debug", + analysis_id="analysis-42", + status="executing", + progress=99, + ) + result = entity.to_log_entity() + assert not hasattr(result, 
"analysis_id") + assert not hasattr(result, "status") + assert not hasattr(result, "progress") + + +# ─── CreateStartUpErrorLog ──────────────────────────────────────────────────── + +class TestCreateStartUpErrorLog: + def test_stuck_type_log_content(self): + log = CreateStartUpErrorLog( + restart_num=1, + error_type="stuck", + analysis_id="analysis-1", + status="stuck", + ) + assert "ANALYSISSTARTUPERROR" in log.log + assert "failed to connect" in log.log + assert f"restart 1 of {_MAX_RESTARTS}" in log.log + assert log.log_type == "error" + assert log.analysis_id == "analysis-1" + assert log.status == "stuck" + assert log.progress == 0 + + def test_slow_type_log_content(self): + log = CreateStartUpErrorLog( + restart_num=3, + error_type="slow", + analysis_id="analysis-2", + status="stuck", + ) + assert "took to long during startup" in log.log + assert f"restart 3 of {_MAX_RESTARTS}" in log.log + assert log.log_type == "error" + + def test_k8s_type_log_content_no_k8s_msg(self): + log = CreateStartUpErrorLog( + restart_num=2, + error_type="k8s", + analysis_id="analysis-3", + status="stuck", + ) + assert "failed to deploy in kubernetes" in log.log + assert f"restart 2 of {_MAX_RESTARTS}" in log.log + assert "KubernetesApiError" not in log.log + + def test_k8s_type_log_content_with_k8s_msg(self): + log = CreateStartUpErrorLog( + restart_num=2, + error_type="k8s", + analysis_id="analysis-3", + status="stuck", + k8s_error_msg="ImagePullBackOff", + ) + assert "KubernetesApiError: ImagePullBackOff" in log.log + + def test_termination_message_at_max_restarts(self): + log = CreateStartUpErrorLog( + restart_num=_MAX_RESTARTS, + error_type="stuck", + analysis_id="analysis-4", + status="stuck", + ) + assert "Terminating analysis as failed" in log.log + + def test_no_termination_message_below_max_restarts(self): + log = CreateStartUpErrorLog( + restart_num=_MAX_RESTARTS - 1, + error_type="stuck", + analysis_id="analysis-4", + status="stuck", + ) + assert "Terminating" not in log.log + + def test_unknown_error_type_produces_empty_log(self): + log = CreateStartUpErrorLog( + restart_num=1, + error_type="unknown", # type: ignore[arg-type] + analysis_id="analysis-5", + status="stuck", + ) + assert log.log == "" + + def test_restart_num_reflected_in_log(self): + for num in [1, 5, _MAX_RESTARTS]: + log = CreateStartUpErrorLog( + restart_num=num, + error_type="slow", + analysis_id="analysis-1", + status="stuck", + ) + assert f"restart {num} of {_MAX_RESTARTS}" in log.log + + def test_is_create_log_entity_subclass(self): + log = CreateStartUpErrorLog( + restart_num=1, + error_type="stuck", + analysis_id="analysis-1", + status="stuck", + ) + assert isinstance(log, CreateLogEntity) + + def test_to_log_entity(self): + log = CreateStartUpErrorLog( + restart_num=1, + error_type="k8s", + analysis_id="analysis-1", + status="stuck", + ) + result = log.to_log_entity() + assert isinstance(result, LogEntity) + assert result.log_type == "error" + + +# ─── AnalysisStoppedLog ─────────────────────────────────────────────────────── + +class TestAnalysisStoppedLog: + def test_instantiation(self): + """AnalysisStoppedLog can be created with just an analysis_id.""" + log = AnalysisStoppedLog(analysis_id="analysis-1") + assert log.analysis_id == "analysis-1" + assert log.progress == 0 + assert log.log_type == "info" + assert log.status == "stopped" + + def test_log_content(self): + """Verify log message content.""" + log = AnalysisStoppedLog(analysis_id="analysis-99") + assert "ANALYSISSTOPPED" in log.log + assert log.analysis_id 
== "analysis-99" + assert log.log_type == "info" \ No newline at end of file diff --git a/tests/test_resources_utils.py b/tests/test_resources_utils.py new file mode 100644 index 0000000..c159ee8 --- /dev/null +++ b/tests/test_resources_utils.py @@ -0,0 +1,696 @@ +""" +Tests for src/resources/utils.py — business logic layer. + +All external dependencies are mocked: + - get_current_namespace / create_harbor_secret + - Analysis class / read_db_analysis + - get_analysis_logs + - init_hub_client_and_update_hub_status_with_client + - find_k8s_resources / delete_k8s_resource + - _get_all_keycloak_clients / delete_keycloak_client + - update_hub_status / get_node_analysis_id + - time.sleep / resource_name_to_analysis +""" + +from unittest.mock import MagicMock, patch, call + +import pytest + +from src.resources.analysis.entity import CreateAnalysis +from src.resources.log.entity import CreateLogEntity +from src.status.constants import AnalysisStatus + +# Sample log string: a valid Python literal representing the log dict stored in the DB. +# retrieve_history calls ast.literal_eval() on this, then reads ['analysis'][id] and ['nginx'][id]. +_ANALYSIS_ID = "analysis_id" +_SAMPLE_LOG = str({ + "analysis": {_ANALYSIS_ID: ["analysis log line"]}, + "nginx": {_ANALYSIS_ID: ["nginx log line"]}, +}) + + +# ─── Helpers ────────────────────────────────────────────────────────────────── + +def _analysis_mock( + analysis_id=_ANALYSIS_ID, + status="started", + deployment_name="analysis-analysis_id-0", + namespace="default", + log=None, + pod_ids=None, + progress=0, +): + """Create a mock Analysis object with configurable attributes.""" + m = MagicMock() + m.analysis_id = analysis_id + m.status = status + m.deployment_name = deployment_name + m.namespace = namespace + m.log = log + m.pod_ids = pod_ids if pod_ids is not None else ["pod-1"] + m.progress = progress + return m + + +# ─── create_analysis ────────────────────────────────────────────────────────── + +class TestCreateAnalysis: + _VALID_UUID = "123e4567-e89b-42d3-a456-426614174000" + + def _valid_body_kwargs(self): + return { + "analysis_id": self._VALID_UUID, + "project_id": self._VALID_UUID, + "registry_url": "harbor.privateaim", + "image_url": "harbor.privateaim/node_id/analysis_id", + "registry_user": "robot_user", + "registry_password": "default_pw", + "kong_token": "default_kong_token", + } + + @patch("src.resources.utils.init_hub_client_and_update_hub_status_with_client") + @patch("src.resources.utils.Analysis") + @patch("src.resources.utils.create_harbor_secret") + @patch("src.resources.utils.get_current_namespace", return_value="default") + def test_from_create_analysis_body( + self, mock_ns, mock_harbor, mock_analysis_cls, mock_hub, mock_database + ): + from src.resources.utils import create_analysis + + mock_inst = _analysis_mock( + analysis_id=self._VALID_UUID, status=AnalysisStatus.STARTED.value + ) + mock_analysis_cls.return_value = mock_inst + + body = CreateAnalysis(**self._valid_body_kwargs()) + result = create_analysis(body, mock_database) + + mock_harbor.assert_called_once() + mock_inst.start.assert_called_once_with(database=mock_database, namespace="default") + mock_hub.assert_called_once_with(self._VALID_UUID, AnalysisStatus.STARTED.value) + assert result == {self._VALID_UUID: AnalysisStatus.STARTED.value} + + @patch("src.resources.utils.init_hub_client_and_update_hub_status_with_client") + @patch("src.resources.utils.Analysis") + @patch("src.resources.utils.create_harbor_secret") + @patch("src.resources.utils.get_current_namespace", 
return_value="default") + def test_from_string_extracts_body_and_restarts( + self, mock_ns, mock_harbor, mock_analysis_cls, mock_hub, mock_database + ): + """When body is a string, extract_analysis_body is called and the analysis is restarted.""" + from src.resources.utils import create_analysis + + mock_inst = _analysis_mock( + analysis_id=self._VALID_UUID, status=AnalysisStatus.STARTED.value + ) + mock_analysis_cls.return_value = mock_inst + mock_database.extract_analysis_body.return_value = self._valid_body_kwargs() + + result = create_analysis(self._VALID_UUID, mock_database) + + mock_database.extract_analysis_body.assert_called_once_with(self._VALID_UUID) + mock_harbor.assert_called_once() + mock_hub.assert_called_once() + assert self._VALID_UUID in result + + def test_from_string_not_found_returns_status_message(self, mock_database): + """When extract_analysis_body returns None, return a status error dict.""" + from src.resources.utils import create_analysis + + mock_database.extract_analysis_body.return_value = None + + with patch("src.resources.utils.get_current_namespace", return_value="default"): + result = create_analysis("nonexistent_id", mock_database) + + assert result == {"status": "Analysis ID not found in database."} + + +# ─── retrieve_history ───────────────────────────────────────────────────────── + +class TestRetrieveHistory: + def test_single_stopped_analysis(self, mock_database, sample_analysis_db): + from src.resources.utils import retrieve_history + + db_row = sample_analysis_db(status=AnalysisStatus.STOPPED.value, log=_SAMPLE_LOG) + mock_database.get_latest_deployment.return_value = db_row + + result = retrieve_history(_ANALYSIS_ID, mock_database) + + assert result["analysis"][_ANALYSIS_ID] == ["analysis log line"] + assert result["nginx"][_ANALYSIS_ID] == ["nginx log line"] + + def test_single_executed_analysis(self, mock_database, sample_analysis_db): + from src.resources.utils import retrieve_history + + db_row = sample_analysis_db(status=AnalysisStatus.EXECUTED.value, log=_SAMPLE_LOG) + mock_database.get_latest_deployment.return_value = db_row + + result = retrieve_history(_ANALYSIS_ID, mock_database) + + assert _ANALYSIS_ID in result["analysis"] + + def test_running_analysis_excluded(self, mock_database, sample_analysis_db): + """A running (started) analysis is not included in history.""" + from src.resources.utils import retrieve_history + + db_row = sample_analysis_db(status=AnalysisStatus.STARTED.value) + mock_database.get_latest_deployment.return_value = db_row + + result = retrieve_history(_ANALYSIS_ID, mock_database) + + assert result == {"analysis": {}, "nginx": {}} + + def test_all_analyses_queries_all_ids(self, mock_database, sample_analysis_db): + from src.resources.utils import retrieve_history + + db_row = sample_analysis_db(status=AnalysisStatus.STOPPED.value, log=_SAMPLE_LOG) + mock_database.get_analysis_ids.return_value = [_ANALYSIS_ID] + mock_database.get_latest_deployment.return_value = db_row + + result = retrieve_history("all", mock_database) + + mock_database.get_analysis_ids.assert_called_once() + assert _ANALYSIS_ID in result["analysis"] + + def test_not_found_excluded(self, mock_database): + from src.resources.utils import retrieve_history + + mock_database.get_latest_deployment.return_value = None + + result = retrieve_history(_ANALYSIS_ID, mock_database) + + assert result == {"analysis": {}, "nginx": {}} + + +# ─── retrieve_logs ──────────────────────────────────────────────────────────── + +class TestRetrieveLogs: + 
@patch("src.resources.utils.get_analysis_logs", return_value={"analysis": {}, "nginx": {}}) + def test_single_executing_deployment(self, mock_get_logs, mock_database, sample_analysis_db): + from src.resources.utils import retrieve_logs + + db_row = sample_analysis_db( + status=AnalysisStatus.EXECUTING.value, + deployment_name="analysis-analysis_id-0", + ) + mock_database.get_latest_deployment.return_value = db_row + + retrieve_logs(_ANALYSIS_ID, mock_database) + + mock_get_logs.assert_called_once_with( + {_ANALYSIS_ID: "analysis-analysis_id-0"}, database=mock_database + ) + + @patch("src.resources.utils.get_analysis_logs", return_value={"analysis": {}, "nginx": {}}) + def test_non_executing_excluded(self, mock_get_logs, mock_database, sample_analysis_db): + """Non-executing deployments are not passed to get_analysis_logs.""" + from src.resources.utils import retrieve_logs + + db_row = sample_analysis_db(status=AnalysisStatus.STARTED.value) + mock_database.get_latest_deployment.return_value = db_row + + retrieve_logs(_ANALYSIS_ID, mock_database) + + mock_get_logs.assert_called_once_with({}, database=mock_database) + + @patch("src.resources.utils.get_analysis_logs", return_value={"analysis": {}, "nginx": {}}) + def test_all_analyses(self, mock_get_logs, mock_database, sample_analysis_db): + from src.resources.utils import retrieve_logs + + db_row = sample_analysis_db(status=AnalysisStatus.EXECUTING.value) + mock_database.get_analysis_ids.return_value = [_ANALYSIS_ID] + mock_database.get_latest_deployment.return_value = db_row + + retrieve_logs("all", mock_database) + + mock_database.get_analysis_ids.assert_called_once() + + +# ─── get_status_and_progress ────────────────────────────────────────────────── + +class TestGetStatusAndProgress: + def test_single_analysis(self, mock_database, sample_analysis_db): + from src.resources.utils import get_status_and_progress + + db_row = sample_analysis_db(status="executing", progress=50) + mock_database.get_latest_deployment.return_value = db_row + + result = get_status_and_progress(_ANALYSIS_ID, mock_database) + + assert result[_ANALYSIS_ID]["status"] == "executing" + assert result[_ANALYSIS_ID]["progress"] == 50 + + def test_all_analyses(self, mock_database, sample_analysis_db): + from src.resources.utils import get_status_and_progress + + db_row = sample_analysis_db(status="started") + mock_database.get_analysis_ids.return_value = [_ANALYSIS_ID] + mock_database.get_latest_deployment.return_value = db_row + + result = get_status_and_progress("all", mock_database) + + mock_database.get_analysis_ids.assert_called_once() + assert _ANALYSIS_ID in result + + def test_not_found_excluded(self, mock_database): + from src.resources.utils import get_status_and_progress + + mock_database.get_latest_deployment.return_value = None + + result = get_status_and_progress(_ANALYSIS_ID, mock_database) + + assert result == {} + + +# ─── get_pods ───────────────────────────────────────────────────────────────── + +class TestGetPods: + def test_single_analysis(self, mock_database): + from src.resources.utils import get_pods + + mock_database.get_analysis_pod_ids.return_value = ["pod-1", "pod-2"] + + result = get_pods(_ANALYSIS_ID, mock_database) + + assert result == {_ANALYSIS_ID: ["pod-1", "pod-2"]} + + def test_all_analyses(self, mock_database): + from src.resources.utils import get_pods + + mock_database.get_analysis_ids.return_value = [_ANALYSIS_ID] + mock_database.get_analysis_pod_ids.return_value = ["pod-1"] + + result = get_pods("all", mock_database) + + 
mock_database.get_analysis_ids.assert_called_once() + assert _ANALYSIS_ID in result + + +# ─── stop_analysis ──────────────────────────────────────────────────────────── + +class TestStopAnalysis: + @patch("src.resources.utils.init_hub_client_and_update_hub_status_with_client") + @patch("src.resources.utils.get_analysis_logs", return_value={"analysis": {}, "nginx": {}}) + @patch("src.resources.utils.read_db_analysis") + def test_running_analysis_stopped(self, mock_read, mock_logs, mock_hub, mock_database): + from src.resources.utils import stop_analysis + + mock_deployment = _analysis_mock(status=AnalysisStatus.STARTED.value) + mock_read.return_value = mock_deployment + + stop_analysis(_ANALYSIS_ID, mock_database) + + # STARTED status is preserved to avoid signaling failure to partner nodes + mock_deployment.stop.assert_called_once() + assert mock_deployment.stop.call_args.kwargs["status"] == AnalysisStatus.STARTED.value + mock_hub.assert_called_once_with(_ANALYSIS_ID, AnalysisStatus.STARTED.value) + + @patch("src.resources.utils.init_hub_client_and_update_hub_status_with_client") + @patch("src.resources.utils.get_analysis_logs", return_value={"analysis": {}, "nginx": {}}) + @patch("src.resources.utils.read_db_analysis") + def test_executed_analysis_keeps_executed_status(self, mock_read, mock_logs, mock_hub, mock_database): + from src.resources.utils import stop_analysis + + mock_deployment = _analysis_mock(status=AnalysisStatus.EXECUTED.value) + mock_read.return_value = mock_deployment + + stop_analysis(_ANALYSIS_ID, mock_database) + + call_kwargs = mock_deployment.stop.call_args.kwargs + assert call_kwargs["status"] == AnalysisStatus.EXECUTED.value + mock_hub.assert_called_once_with(_ANALYSIS_ID, AnalysisStatus.EXECUTED.value) + + @patch("src.resources.utils.init_hub_client_and_update_hub_status_with_client") + @patch("src.resources.utils.get_analysis_logs", return_value={"analysis": {}, "nginx": {}}) + @patch("src.resources.utils.read_db_analysis") + def test_failed_analysis_keeps_failed_status(self, mock_read, mock_logs, mock_hub, mock_database): + from src.resources.utils import stop_analysis + + mock_deployment = _analysis_mock(status=AnalysisStatus.FAILED.value) + mock_read.return_value = mock_deployment + + stop_analysis(_ANALYSIS_ID, mock_database) + + call_kwargs = mock_deployment.stop.call_args.kwargs + assert call_kwargs["status"] == AnalysisStatus.FAILED.value + mock_hub.assert_called_once_with(_ANALYSIS_ID, AnalysisStatus.FAILED.value) + + @patch("src.resources.utils.init_hub_client_and_update_hub_status_with_client") + @patch("src.resources.utils.get_analysis_logs", return_value={"analysis": {}, "nginx": {}}) + @patch("src.resources.utils.read_db_analysis") + def test_all_analyses(self, mock_read, mock_logs, mock_hub, mock_database): + from src.resources.utils import stop_analysis + + mock_deployment = _analysis_mock(status=AnalysisStatus.STARTED.value) + mock_read.return_value = mock_deployment + mock_database.get_analysis_ids.return_value = [_ANALYSIS_ID] + + result = stop_analysis("all", mock_database) + + mock_database.get_analysis_ids.assert_called_once() + assert _ANALYSIS_ID in result + + def test_not_found_returns_empty(self, mock_database): + from src.resources.utils import stop_analysis + + mock_database.get_latest_deployment.return_value = None + + result = stop_analysis(_ANALYSIS_ID, mock_database) + + assert result == {} + + +# ─── delete_analysis ────────────────────────────────────────────────────────── + +class TestDeleteAnalysis: + 
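+    # Teardown order pinned down by the tests below: stop() the deployment
+    # unconditionally, delete the per-analysis Keycloak client, then drop the
+    # rows via database.delete_analysis().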
@patch("src.resources.utils.delete_keycloak_client") + @patch("src.resources.utils.read_db_analysis") + def test_stopped_analysis_also_stopped(self, mock_read, mock_keycloak, mock_database): + """New behavior: delete_analysis unconditionally calls stop() on the deployment.""" + from src.resources.utils import delete_analysis + + mock_deployment = _analysis_mock(status=AnalysisStatus.STOPPED.value) + mock_read.return_value = mock_deployment + + delete_analysis(_ANALYSIS_ID, mock_database) + + mock_deployment.stop.assert_called_once_with(mock_database, log="") + mock_keycloak.assert_called_once_with(_ANALYSIS_ID) + mock_database.delete_analysis.assert_called_once_with(_ANALYSIS_ID) + + @patch("src.resources.utils.delete_keycloak_client") + @patch("src.resources.utils.read_db_analysis") + def test_running_analysis_stopped_then_deleted(self, mock_read, mock_keycloak, mock_database): + from src.resources.utils import delete_analysis + + mock_deployment = _analysis_mock(status=AnalysisStatus.STARTED.value) + mock_read.return_value = mock_deployment + + delete_analysis(_ANALYSIS_ID, mock_database) + + mock_deployment.stop.assert_called_once_with(mock_database, log="") + mock_keycloak.assert_called_once_with(_ANALYSIS_ID) + mock_database.delete_analysis.assert_called_once_with(_ANALYSIS_ID) + + def test_not_found_returns_empty(self, mock_database): + from src.resources.utils import delete_analysis + + mock_database.get_latest_deployment.return_value = None + + result = delete_analysis(_ANALYSIS_ID, mock_database) + + assert result == {} + + @patch("src.resources.utils.delete_keycloak_client") + @patch("src.resources.utils.read_db_analysis") + def test_all_analyses(self, mock_read, mock_keycloak, mock_database): + from src.resources.utils import delete_analysis + + mock_deployment = _analysis_mock(status=AnalysisStatus.STOPPED.value) + mock_read.return_value = mock_deployment + mock_database.get_analysis_ids.return_value = [_ANALYSIS_ID] + + delete_analysis("all", mock_database) + + mock_database.get_analysis_ids.assert_called_once() + mock_database.delete_analysis.assert_called_once_with(_ANALYSIS_ID) + + +# ─── unstuck_analysis_deployments ───────────────────────────────────────────── + +class TestUnstuckAnalysisDeployments: + @patch("src.resources.utils.create_analysis") + @patch("src.resources.utils.stop_analysis") + @patch("src.resources.utils.time.sleep") + def test_restartable_analysis(self, mock_sleep, mock_stop, mock_create, mock_database): + from src.resources.utils import unstuck_analysis_deployments + + unstuck_analysis_deployments(_ANALYSIS_ID, mock_database) + + mock_stop.assert_called_once_with(_ANALYSIS_ID, mock_database) + mock_sleep.assert_called_once_with(10) + mock_create.assert_called_once_with(_ANALYSIS_ID, mock_database) + mock_database.delete_old_deployments_from_db.assert_called_once_with(_ANALYSIS_ID) + + def test_not_found_does_nothing(self, mock_database): + from src.resources.utils import unstuck_analysis_deployments + + mock_database.get_latest_deployment.return_value = None + + with patch("src.resources.utils.stop_analysis") as mock_stop: + unstuck_analysis_deployments("nonexistent_id", mock_database) + mock_stop.assert_not_called() + + +# ─── cleanup ────────────────────────────────────────────────────────────────── + +class TestCleanup: + @patch("src.resources.utils.clean_up_the_rest", return_value="") + def test_analyzes_resets_db(self, mock_cztr, mock_database): + from src.resources.utils import cleanup + + mock_database.get_analysis_ids.return_value = ["id1", 
"id2"] + result = cleanup("analyzes", mock_database) + + mock_database.reset_db.assert_called_once() + assert "analyzes" in result + + @patch("src.resources.utils.clean_up_the_rest", return_value="") + @patch("src.resources.utils.delete_k8s_resource") + @patch( + "src.resources.utils.find_k8s_resources", + return_value=["flame-message-broker-pod"], + ) + def test_mb_reinitializes_message_broker(self, mock_find, mock_delete, mock_cztr, mock_database): + from src.resources.utils import cleanup + + result = cleanup("mb", mock_database) + + mock_find.assert_called_once_with( + "pod", "label", "component=flame-message-broker", namespace="default" + ) + mock_delete.assert_called_once_with("flame-message-broker-pod", "pod", "default") + assert result["mb"] == "Reset message broker" + + @patch("src.resources.utils.clean_up_the_rest", return_value="") + @patch("src.resources.utils.delete_k8s_resource") + @patch( + "src.resources.utils.find_k8s_resources", + return_value=["flame-storage-service-pod"], + ) + def test_rs_reinitializes_storage_service(self, mock_find, mock_delete, mock_cztr, mock_database): + from src.resources.utils import cleanup + + result = cleanup("rs", mock_database) + + mock_find.assert_called_once_with( + "pod", "label", "component=flame-storage-service", namespace="default" + ) + mock_delete.assert_called_once_with("flame-storage-service-pod", "pod", "default") + assert result["rs"] == "Reset storage service" + + @patch("src.resources.utils.clean_up_the_rest", return_value="") + @patch("src.resources.utils.delete_keycloak_client") + @patch("src.resources.utils._get_all_keycloak_clients") + def test_keycloak_deletes_orphaned_clients(self, mock_get_clients, mock_delete, mock_cztr, mock_database): + from src.resources.utils import cleanup + + mock_database.get_analysis_ids.return_value = ["existing_analysis"] + mock_get_clients.return_value = [ + {"clientId": "orphaned_analysis", "name": "flame-orphaned_analysis"}, + {"clientId": "existing_analysis", "name": "flame-existing_analysis"}, + {"clientId": "non_flame_client", "name": "other-client"}, + ] + + cleanup("keycloak", mock_database) + + # Only the orphaned flame client should be deleted; existing and non-flame skipped. 
+ mock_delete.assert_called_once_with("orphaned_analysis") + + @patch("src.resources.utils.clean_up_the_rest", return_value="") + def test_unknown_type_returns_error_message(self, mock_cztr, mock_database): + from src.resources.utils import cleanup + + result = cleanup("unknown_type", mock_database) + + assert "unknown_type" in result["unknown_type"] + assert "Unknown cleanup type" in result["unknown_type"] + + @patch("src.resources.utils.clean_up_the_rest", return_value="") + @patch("src.resources.utils.delete_k8s_resource") + @patch("src.resources.utils.find_k8s_resources", return_value=["pod-name"]) + def test_comma_separated_processes_both_types(self, mock_find, mock_delete, mock_cztr, mock_database): + from src.resources.utils import cleanup + + result = cleanup("mb,rs", mock_database) + + # mb calls find for message-broker, rs calls find for storage-service + assert mock_find.call_count == 2 + + @patch("src.resources.utils.clean_up_the_rest", return_value="zombie cleanup done") + def test_always_calls_clean_up_the_rest(self, mock_cztr, mock_database): + from src.resources.utils import cleanup + + result = cleanup("unknown_type", mock_database) + + mock_cztr.assert_called_once_with(mock_database, "default") + assert result["zombies"] == "zombie cleanup done" + + +# ─── clean_up_the_rest ──────────────────────────────────────────────────────── + +class TestCleanUpTheRest: + @patch("src.resources.utils.delete_k8s_resource") + @patch("src.resources.utils.resource_name_to_analysis", return_value="zombie_id") + @patch( + "src.resources.utils.find_k8s_resources", + return_value=["analysis-zombie_id-0"], + ) + def test_deletes_zombie_resources(self, mock_find, mock_name_to_analysis, mock_delete, mock_database): + from src.resources.utils import clean_up_the_rest + + mock_database.get_analysis_ids.return_value = ["known_id"] + + result = clean_up_the_rest(mock_database) + + # Zombie resources (not in known_analysis_ids) should be deleted. 
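+        # Roughly, under the names from the mocks above (not necessarily the
+        # real signature):
+        #
+        #     for name in found_resources:
+        #         if resource_name_to_analysis(name) not in known_analysis_ids:
+        #             delete_k8s_resource(name, resource_type, namespace)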
+ assert mock_delete.call_count > 0 + assert "Deleted" in result + + @patch("src.resources.utils.delete_k8s_resource") + @patch("src.resources.utils.resource_name_to_analysis", return_value="known_id") + @patch( + "src.resources.utils.find_k8s_resources", + return_value=["analysis-known_id-0"], + ) + def test_skips_known_resources(self, mock_find, mock_name_to_analysis, mock_delete, mock_database): + from src.resources.utils import clean_up_the_rest + + mock_database.get_analysis_ids.return_value = ["known_id"] + + clean_up_the_rest(mock_database) + + mock_delete.assert_not_called() + + @patch("src.resources.utils.delete_k8s_resource") + @patch("src.resources.utils.find_k8s_resources", return_value=[None]) + def test_handles_none_resources(self, mock_find, mock_delete, mock_database): + from src.resources.utils import clean_up_the_rest + + mock_database.get_analysis_ids.return_value = [] + + result = clean_up_the_rest(mock_database) + + mock_delete.assert_not_called() + assert isinstance(result, str) + + @patch("src.resources.utils.delete_k8s_resource") + @patch("src.resources.utils.resource_name_to_analysis", return_value="zombie_id") + @patch( + "src.resources.utils.find_k8s_resources", + return_value="analysis-zombie_id-0", # str, not list + ) + def test_wraps_string_result_in_list(self, mock_find, mock_name_to_analysis, mock_delete, mock_database): + """find_k8s_resources returning a str (single result) is wrapped in a list.""" + from src.resources.utils import clean_up_the_rest + + mock_database.get_analysis_ids.return_value = ["known_id"] + + result = clean_up_the_rest(mock_database) + + # The string is treated as a single resource and identified as a zombie. + assert mock_delete.call_count > 0 + + +# ─── stream_logs ────────────────────────────────────────────────────────────── + +class TestStreamLogs: + def _make_log_entity(self, progress=50, status="executing"): + return CreateLogEntity( + analysis_id=_ANALYSIS_ID, + log="test log message", + log_type="info", + status=status, + progress=progress, + ) + + def test_always_updates_database_log(self, mock_database, mock_hub_client): + from src.resources.utils import stream_logs + + log_entity = self._make_log_entity() + mock_database.progress_valid.return_value = False + + with patch("src.resources.utils.get_node_analysis_id", return_value="node_analysis_id"): + with patch("src.resources.utils.update_hub_status"): + stream_logs(log_entity, "node-id", False, mock_database, mock_hub_client) + + mock_database.update_analysis_log.assert_called_once() + args, _ = mock_database.update_analysis_log.call_args + assert args[0] == _ANALYSIS_ID + assert "test log message" in args[1] + assert "log_type=info" in args[1] + + def test_hub_logging_disabled_skips_hub_log(self, mock_database, mock_hub_client): + from src.resources.utils import stream_logs + + log_entity = self._make_log_entity() + mock_database.progress_valid.return_value = False + + with patch("src.resources.utils.get_node_analysis_id", return_value="na"): + with patch("src.resources.utils.update_hub_status"): + stream_logs(log_entity, "node-id", False, mock_database, mock_hub_client) + + mock_hub_client.create_analysis_node_log.assert_not_called() + + def test_hub_logging_enabled_calls_hub(self, mock_database, mock_hub_client): + from src.resources.utils import stream_logs + + log_entity = self._make_log_entity() + mock_database.progress_valid.return_value = False + + with patch("src.resources.utils.get_node_analysis_id", return_value="na"): + with 
patch("src.resources.utils.update_hub_status"): + stream_logs(log_entity, "node-id", True, mock_database, mock_hub_client) + + mock_hub_client.create_analysis_node_log.assert_called_once_with( + analysis_id=_ANALYSIS_ID, + node_id="node-id", + status="executing", + level="info", + message="test log message", + ) + + def test_valid_progress_updates_progress_and_hub(self, mock_database, mock_hub_client): + from src.resources.utils import stream_logs + + log_entity = self._make_log_entity(progress=75) + mock_database.progress_valid.return_value = True + + with patch("src.resources.utils.get_node_analysis_id", return_value="node_analysis_id"): + with patch("src.resources.utils.update_hub_status") as mock_hub_update: + stream_logs(log_entity, "node-id", False, mock_database, mock_hub_client) + + mock_database.update_analysis_progress.assert_called_once_with(_ANALYSIS_ID, 75) + mock_hub_update.assert_called_once_with( + mock_hub_client, + "node_analysis_id", + run_status="executing", + run_progress=75, + ) + + def test_invalid_progress_skips_progress_update(self, mock_database, mock_hub_client): + from src.resources.utils import stream_logs + + log_entity = self._make_log_entity(progress=50) + mock_database.progress_valid.return_value = False + + with patch("src.resources.utils.get_node_analysis_id", return_value="node_analysis_id"): + with patch("src.resources.utils.update_hub_status") as mock_hub_update: + stream_logs(log_entity, "node-id", False, mock_database, mock_hub_client) + + mock_database.update_analysis_progress.assert_not_called() + mock_hub_update.assert_called_once_with( + mock_hub_client, + "node_analysis_id", + run_status="executing", + ) \ No newline at end of file diff --git a/tests/test_status_constants.py b/tests/test_status_constants.py new file mode 100644 index 0000000..5fe44ed --- /dev/null +++ b/tests/test_status_constants.py @@ -0,0 +1,67 @@ +"""Tests for src/status/constants.py""" + +from src.status.constants import AnalysisStatus, _MAX_RESTARTS, _INTERNAL_STATUS_TIMEOUT + + +class TestAnalysisStatusEnum: + def test_all_values_present(self): + values = {s.value for s in AnalysisStatus} + assert values == { + "starting", + "started", + "stuck", + "stopping", + "stopped", + "executing", + "executed", + "failed", + } + + def test_starting(self): + assert AnalysisStatus.STARTING.value == "starting" + + def test_started(self): + assert AnalysisStatus.STARTED.value == "started" + + def test_stuck(self): + assert AnalysisStatus.STUCK.value == "stuck" + + def test_stopping(self): + assert AnalysisStatus.STOPPING.value == "stopping" + + def test_stopped(self): + assert AnalysisStatus.STOPPED.value == "stopped" + + def test_executing(self): + assert AnalysisStatus.EXECUTING.value == "executing" + + def test_executed(self): + assert AnalysisStatus.EXECUTED.value == "executed" + + def test_failed(self): + assert AnalysisStatus.FAILED.value == "failed" + + def test_member_count(self): + assert len(AnalysisStatus) == 8 + + def test_lookup_by_value(self): + assert AnalysisStatus("executing") is AnalysisStatus.EXECUTING + + def test_invalid_value_raises(self): + import pytest + with pytest.raises(ValueError): + AnalysisStatus("nonexistent") + + +class TestConstants: + def test_max_restarts_value(self): + assert _MAX_RESTARTS == 10 + + def test_internal_status_timeout_value(self): + assert _INTERNAL_STATUS_TIMEOUT == 10 + + def test_max_restarts_is_int(self): + assert isinstance(_MAX_RESTARTS, int) + + def test_internal_status_timeout_is_int(self): + assert 
isinstance(_INTERNAL_STATUS_TIMEOUT, int) \ No newline at end of file diff --git a/tests/test_status_status.py b/tests/test_status_status.py new file mode 100644 index 0000000..b767d29 --- /dev/null +++ b/tests/test_status_status.py @@ -0,0 +1,360 @@ +"""Tests for src/status/status.py. + +Does NOT test status_loop itself (infinite loop — untestable without mocking time). +Tests all helper functions: _decide_status_action, _get_analysis_status, +_get_internal_deployment_status, _refresh_keycloak_token, +inform_analysis_of_partner_statuses, _fix_stuck_status, +_update_running_status, _update_finished_status, _set_analysis_hub_status. +""" + +from unittest.mock import MagicMock, patch + +import pytest +from httpx import ConnectError, ConnectTimeout + +from src.status.constants import AnalysisStatus, _MAX_RESTARTS +from src.status.status import ( + _decide_status_action, + _fix_stuck_status, + _get_analysis_status, + _get_internal_deployment_status, + _refresh_keycloak_token, + _set_analysis_hub_status, + _update_finished_status, + _update_running_status, + inform_analysis_of_partner_statuses, +) + + +# ─── TestDecideStatusAction ─────────────────────────────────────────────────── + +class TestDecideStatusAction: + """All 9 combinations of db_status × int_status.""" + + def test_stuck_any_db_status_returns_unstuck(self): + # is_stuck: int_status=STUCK regardless of db_status + assert _decide_status_action(AnalysisStatus.EXECUTING.value, AnalysisStatus.STUCK.value) == "unstuck" + + def test_slow_started_failed_returns_unstuck(self): + # is_slow: db=STARTED + int=FAILED + assert _decide_status_action(AnalysisStatus.STARTED.value, AnalysisStatus.FAILED.value) == "unstuck" + + def test_newly_running_returns_running(self): + # db=STARTED + int=EXECUTING + assert _decide_status_action(AnalysisStatus.STARTED.value, AnalysisStatus.EXECUTING.value) == "running" + + def test_speedy_finished_returns_finishing(self): + # db=STARTED + int=EXECUTED + assert _decide_status_action(AnalysisStatus.STARTED.value, AnalysisStatus.EXECUTED.value) == "finishing" + + def test_newly_ended_executing_to_executed_returns_finishing(self): + # db=EXECUTING + int=EXECUTED + assert _decide_status_action(AnalysisStatus.EXECUTING.value, AnalysisStatus.EXECUTED.value) == "finishing" + + def test_newly_ended_executing_to_failed_returns_finishing(self): + # db=EXECUTING + int=FAILED (newly_ended) + assert _decide_status_action(AnalysisStatus.EXECUTING.value, AnalysisStatus.FAILED.value) == "finishing" + + def test_firmly_stuck_failed_db_stuck_int_returns_unstuck(self): + # db=FAILED + int=STUCK: is_stuck fires before firmly_stuck branch + # Note: firmly_stuck (db=FAILED, int=STUCK) overlaps with is_stuck, + # so this returns 'unstuck', not 'finishing'. 
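+ # Branch order inferred from the nine cases in this class (a sketch, not
+ # the verified body of _decide_status_action):
+ # int == STUCK -> "unstuck" (checked first, hence this test)
+ # db == STARTED and int == FAILED -> "unstuck" (is_slow)
+ # db == STARTED and int == EXECUTING -> "running"
+ # int in (EXECUTED, STOPPED), or db == EXECUTING and int == FAILED -> "finishing"
+ # otherwise -> None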
+ assert _decide_status_action(AnalysisStatus.FAILED.value, AnalysisStatus.STUCK.value) == "unstuck" + + def test_was_stopped_returns_finishing(self): + # int_status=STOPPED + assert _decide_status_action(AnalysisStatus.EXECUTING.value, AnalysisStatus.STOPPED.value) == "finishing" + + def test_no_matching_condition_returns_none(self): + # db=EXECUTING + int=EXECUTING: no condition matches + assert _decide_status_action(AnalysisStatus.EXECUTING.value, AnalysisStatus.EXECUTING.value) is None + + +# ─── TestGetAnalysisStatus ──────────────────────────────────────────────────── + +class TestGetAnalysisStatus: + def test_not_found_returns_none(self, mock_database): + mock_database.get_latest_deployment.return_value = None + assert _get_analysis_status("analysis_id", mock_database) is None + + def test_already_executed_skips_internal_check(self, mock_database, sample_analysis_db): + analysis = sample_analysis_db(status=AnalysisStatus.EXECUTED.value) + mock_database.get_latest_deployment.return_value = analysis + + result = _get_analysis_status("analysis_id", mock_database) + + assert result["db_status"] == AnalysisStatus.EXECUTED.value + assert result["int_status"] == AnalysisStatus.EXECUTED.value + + @patch("src.status.status._get_internal_deployment_status") + def test_found_non_executed_calls_internal_check(self, mock_internal, mock_database, sample_analysis_db): + analysis = sample_analysis_db(status=AnalysisStatus.EXECUTING.value, deployment_name="dep-name") + mock_database.get_latest_deployment.return_value = analysis + mock_internal.return_value = AnalysisStatus.EXECUTING.value + + result = _get_analysis_status("analysis_id", mock_database) + + mock_internal.assert_called_once_with("dep-name", "analysis_id") + assert result["analysis_id"] == "analysis_id" + assert result["db_status"] == AnalysisStatus.EXECUTING.value + assert result["int_status"] == AnalysisStatus.EXECUTING.value + assert "status_action" in result + + +# ─── TestGetInternalDeploymentStatus ───────────────────────────────────────── + +class TestGetInternalDeploymentStatus: + @patch("src.status.status._refresh_keycloak_token") + @patch("src.status.status.Client") + def test_executing_status_returned(self, mock_client_cls, mock_refresh): + mock_response = MagicMock() + mock_response.json.return_value = {"status": "executing", "token_remaining_time": 9999} + mock_client_cls.return_value.get.return_value = mock_response + + result = _get_internal_deployment_status("dep-name", "analysis_id") + + assert result == AnalysisStatus.EXECUTING.value + + @patch("src.status.status._refresh_keycloak_token") + @patch("src.status.status.Client") + def test_executed_status_returned(self, mock_client_cls, mock_refresh): + mock_response = MagicMock() + mock_response.json.return_value = {"status": "executed", "token_remaining_time": 9999} + mock_client_cls.return_value.get.return_value = mock_response + + result = _get_internal_deployment_status("dep-name", "analysis_id") + + assert result == AnalysisStatus.EXECUTED.value + + @patch("src.status.status.time") + @patch("src.status.status.Client") + def test_timeout_returns_failed(self, mock_client_cls, mock_time): + # start_time=0, then elapsed_time=11 > _INTERNAL_STATUS_TIMEOUT=10 + mock_time.time.side_effect = [0, 11] + mock_time.sleep = MagicMock() + mock_client_cls.return_value.get.side_effect = ConnectError("connection refused") + + result = _get_internal_deployment_status("dep-name", "analysis_id") + + assert result == AnalysisStatus.FAILED.value + + +# ─── TestRefreshKeycloakToken 
───────────────────────────────────────────────── + +class TestRefreshKeycloakToken: + @patch("src.status.status.get_keycloak_token") + @patch("src.status.status.Client") + def test_no_refresh_when_token_valid(self, mock_client_cls, mock_get_token, monkeypatch): + monkeypatch.setenv("STATUS_LOOP_INTERVAL", "30") + # threshold = 30*2+1 = 61; 9999 >= 61 → no refresh + _refresh_keycloak_token("dep-name", "analysis_id", 9999) + mock_get_token.assert_not_called() + mock_client_cls.assert_not_called() + + @patch("src.status.status.get_keycloak_token", return_value="new-token") + @patch("src.status.status.Client") + def test_refresh_when_token_expiring(self, mock_client_cls, mock_get_token, monkeypatch): + monkeypatch.setenv("STATUS_LOOP_INTERVAL", "30") + # threshold = 30*2+1 = 61; 10 < 61 → refresh + mock_client_cls.return_value.post.return_value = MagicMock() + + _refresh_keycloak_token("dep-name", "analysis_id", 10) + + mock_get_token.assert_called_once_with("analysis_id") + mock_client_cls.return_value.post.assert_called_once() + + +# ─── TestInformAnalysisOfPartnerStatuses ───────────────────────────────────── + +class TestInformAnalysisOfPartnerStatuses: + @patch("src.status.status.get_partner_node_statuses") + @patch("src.status.status.Client") + def test_success_returns_response_json( + self, mock_client_cls, mock_get_partners, mock_database, mock_hub_client, sample_analysis_db + ): + mock_database.get_latest_deployment.return_value = sample_analysis_db(deployment_name="analysis-id-0") + mock_get_partners.return_value = {"node-1": "running"} + mock_response = MagicMock() + mock_response.json.return_value = {"ok": True} + mock_client_cls.return_value.post.return_value = mock_response + + result = inform_analysis_of_partner_statuses( + mock_database, mock_hub_client, "analysis_id", "node-analysis-id" + ) + + assert result == {"ok": True} + + @patch("src.status.status.get_partner_node_statuses") + @patch("src.status.status.Client") + def test_connect_error_returns_none( + self, mock_client_cls, mock_get_partners, mock_database, mock_hub_client, sample_analysis_db + ): + mock_database.get_latest_deployment.return_value = sample_analysis_db(deployment_name="analysis-id-0") + mock_get_partners.return_value = {} + mock_client_cls.return_value.post.side_effect = ConnectError("refused") + + result = inform_analysis_of_partner_statuses( + mock_database, mock_hub_client, "analysis_id", "node-analysis-id" + ) + + assert result is None + + @patch("src.status.status.get_partner_node_statuses") + @patch("src.status.status.Client") + def test_connect_timeout_returns_none( + self, mock_client_cls, mock_get_partners, mock_database, mock_hub_client, sample_analysis_db + ): + mock_database.get_latest_deployment.return_value = sample_analysis_db(deployment_name="analysis-id-0") + mock_get_partners.return_value = {} + mock_client_cls.return_value.post.side_effect = ConnectTimeout("timed out") + + result = inform_analysis_of_partner_statuses( + mock_database, mock_hub_client, "analysis_id", "node-analysis-id" + ) + + assert result is None + + +# ─── TestFixStuckStatus ─────────────────────────────────────────────────────── + +class TestFixStuckStatus: + @patch("src.status.status.unstuck_analysis_deployments") + @patch("src.status.status._stream_stuck_logs") + def test_restartable_calls_unstuck( + self, mock_stream, mock_unstuck, mock_database, mock_hub_client, sample_analysis_db + ): + analysis = sample_analysis_db(restart_counter=0) + mock_database.get_latest_deployment.return_value = analysis + 
analysis_status = { + "analysis_id": "analysis_id", + "db_status": AnalysisStatus.EXECUTING.value, + "int_status": AnalysisStatus.STUCK.value, + "status_action": "unstuck", + } + + _fix_stuck_status(mock_database, analysis_status, "node-id", False, mock_hub_client) + + mock_database.update_deployment_status.assert_not_called() + mock_unstuck.assert_called_once_with("analysis_id", mock_database) + mock_stream.assert_called_once() + + @patch("src.status.status.unstuck_analysis_deployments") + @patch("src.status.status._stream_stuck_logs") + def test_max_restarts_skips_unstuck( + self, mock_stream, mock_unstuck, mock_database, mock_hub_client, sample_analysis_db + ): + analysis = sample_analysis_db(restart_counter=_MAX_RESTARTS) + mock_database.get_latest_deployment.return_value = analysis + analysis_status = { + "analysis_id": "analysis_id", + "db_status": AnalysisStatus.EXECUTING.value, + "int_status": AnalysisStatus.STUCK.value, + "status_action": "unstuck", + } + + _fix_stuck_status(mock_database, analysis_status, "node-id", False, mock_hub_client) + + mock_unstuck.assert_not_called() + mock_stream.assert_called_once() + + +# ─── TestUpdateRunningStatus ────────────────────────────────────────────────── + +class TestUpdateRunningStatus: + def test_updates_deployment_to_executing(self, mock_database, sample_analysis_db): + analysis = sample_analysis_db(deployment_name="dep-name", status=AnalysisStatus.STARTED.value) + mock_database.get_latest_deployment.return_value = analysis + + _update_running_status( + mock_database, + {"analysis_id": "analysis_id", "db_status": AnalysisStatus.STARTED.value, "int_status": AnalysisStatus.EXECUTING.value}, + ) + + mock_database.update_deployment_status.assert_called_once_with("dep-name", AnalysisStatus.EXECUTING.value) + + def test_no_update_when_deployment_not_found(self, mock_database): + mock_database.get_latest_deployment.return_value = None + _update_running_status(mock_database, {"analysis_id": "analysis_id"}) + mock_database.update_deployment_status.assert_not_called() + + +# ─── TestUpdateFinishedStatus ───────────────────────────────────────────────── + +class TestUpdateFinishedStatus: + @patch("src.status.status.delete_analysis") + def test_executed_deletes_analysis(self, mock_delete, mock_database, sample_analysis_db): + analysis = sample_analysis_db(deployment_name="dep-name") + mock_database.get_latest_deployment.return_value = analysis + + _update_finished_status( + mock_database, + {"analysis_id": "analysis_id", "db_status": AnalysisStatus.EXECUTING.value, "int_status": AnalysisStatus.EXECUTED.value}, + ) + + mock_database.update_deployment_status.assert_called_once_with("dep-name", AnalysisStatus.EXECUTED.value) + mock_delete.assert_called_once_with("analysis_id", mock_database) + + @patch("src.status.status.stop_analysis") + def test_failed_stops_analysis(self, mock_stop, mock_database, sample_analysis_db): + analysis = sample_analysis_db(deployment_name="dep-name") + mock_database.get_latest_deployment.return_value = analysis + + _update_finished_status( + mock_database, + {"analysis_id": "analysis_id", "db_status": AnalysisStatus.EXECUTING.value, "int_status": AnalysisStatus.FAILED.value}, + ) + + mock_database.update_deployment_status.assert_called_once_with("dep-name", AnalysisStatus.FAILED.value) + mock_stop.assert_called_once_with("analysis_id", mock_database) + + +# ─── TestSetAnalysisHubStatus ───────────────────────────────────────────────── + +class TestSetAnalysisHubStatus: + """Priority: db_status (if failed/executed) > 
int_status (if failed/executed/executing) > db_status (default).""" + + @patch("src.status.status.update_hub_status") + def test_db_failed_takes_priority(self, mock_update, mock_hub_client): + result = _set_analysis_hub_status( + mock_hub_client, + "node-analysis-id", + {"db_status": AnalysisStatus.FAILED.value, "int_status": AnalysisStatus.EXECUTING.value}, + ) + assert result == AnalysisStatus.FAILED.value + mock_update.assert_called_once_with(mock_hub_client, "node-analysis-id", AnalysisStatus.FAILED.value) + + @patch("src.status.status.update_hub_status") + def test_db_executed_takes_priority(self, mock_update, mock_hub_client): + result = _set_analysis_hub_status( + mock_hub_client, + "node-analysis-id", + {"db_status": AnalysisStatus.EXECUTED.value, "int_status": AnalysisStatus.EXECUTING.value}, + ) + assert result == AnalysisStatus.EXECUTED.value + + @patch("src.status.status.update_hub_status") + def test_int_executing_used_when_db_not_terminal(self, mock_update, mock_hub_client): + result = _set_analysis_hub_status( + mock_hub_client, + "node-analysis-id", + {"db_status": AnalysisStatus.STARTED.value, "int_status": AnalysisStatus.EXECUTING.value}, + ) + assert result == AnalysisStatus.EXECUTING.value + + @patch("src.status.status.update_hub_status") + def test_int_failed_used_when_db_not_terminal(self, mock_update, mock_hub_client): + result = _set_analysis_hub_status( + mock_hub_client, + "node-analysis-id", + {"db_status": AnalysisStatus.STARTED.value, "int_status": AnalysisStatus.FAILED.value}, + ) + assert result == AnalysisStatus.FAILED.value + + @patch("src.status.status.update_hub_status") + def test_default_falls_back_to_db_status(self, mock_update, mock_hub_client): + result = _set_analysis_hub_status( + mock_hub_client, + "node-analysis-id", + {"db_status": AnalysisStatus.STARTED.value, "int_status": AnalysisStatus.STARTED.value}, + ) + assert result == AnalysisStatus.STARTED.value \ No newline at end of file diff --git a/tests/test_utils_hub_client.py b/tests/test_utils_hub_client.py new file mode 100644 index 0000000..63f01fb --- /dev/null +++ b/tests/test_utils_hub_client.py @@ -0,0 +1,370 @@ +import ssl +from json import JSONDecodeError +from unittest.mock import MagicMock, patch, call + +import pytest +from httpx import HTTPStatusError, ConnectError, ConnectTimeout + + +# ─── TestInitHubClientWithClient ───────────────────────────────────────────── + +class TestInitHubClientWithClient: + def test_success_returns_core_client(self): + mock_core_client = MagicMock() + mock_ssl_ctx = MagicMock() + + with ( + patch("src.utils.hub_client.get_ssl_context", return_value=mock_ssl_ctx), + patch("src.utils.hub_client.Client") as mock_httpx_client, + patch("src.utils.hub_client.flame_hub.auth.ClientAuth") as mock_auth, + patch("src.utils.hub_client.flame_hub.CoreClient", return_value=mock_core_client), + ): + from src.utils.hub_client import init_hub_client_with_client + result = init_hub_client_with_client( + client_id="cid", + client_secret="csec", + hub_url_core="http://core:3000", + hub_auth="http://auth:3001", + http_proxy="", + https_proxy="", + ) + + assert result is mock_core_client + + def test_exception_returns_none(self): + with ( + patch("src.utils.hub_client.get_ssl_context", return_value=MagicMock()), + patch("src.utils.hub_client.Client", side_effect=Exception("conn failed")), + ): + from src.utils.hub_client import init_hub_client_with_client + result = init_hub_client_with_client( + client_id="cid", + client_secret="csec", + hub_url_core="http://core:3000", + 
hub_auth="http://auth:3001", + http_proxy="", + https_proxy="", + ) + + assert result is None + + def test_with_proxies_creates_http_transports(self): + mock_ssl_ctx = MagicMock() + mock_transport = MagicMock() + + with ( + patch("src.utils.hub_client.get_ssl_context", return_value=mock_ssl_ctx), + patch("src.utils.hub_client.HTTPTransport", return_value=mock_transport) as mock_transport_cls, + patch("src.utils.hub_client.Client") as mock_httpx_client, + patch("src.utils.hub_client.flame_hub.auth.ClientAuth"), + patch("src.utils.hub_client.flame_hub.CoreClient"), + ): + from src.utils.hub_client import init_hub_client_with_client + init_hub_client_with_client( + client_id="cid", + client_secret="csec", + hub_url_core="http://core:3000", + hub_auth="http://auth:3001", + http_proxy="http://proxy:8080", + https_proxy="https://proxy:8443", + ) + + assert mock_transport_cls.call_count == 2 + # HTTP transport gets just proxy + mock_transport_cls.assert_any_call(proxy="http://proxy:8080") + # HTTPS transport gets proxy + verify + mock_transport_cls.assert_any_call(proxy="https://proxy:8443", verify=mock_ssl_ctx) + + def test_without_proxies_passes_none_mounts(self): + """When proxy strings are empty, mounts=None is passed to Client.""" + mock_ssl_ctx = MagicMock() + + with ( + patch("src.utils.hub_client.get_ssl_context", return_value=mock_ssl_ctx), + patch("src.utils.hub_client.Client") as mock_httpx_client, + patch("src.utils.hub_client.flame_hub.auth.ClientAuth"), + patch("src.utils.hub_client.flame_hub.CoreClient"), + ): + from src.utils.hub_client import init_hub_client_with_client + init_hub_client_with_client( + client_id="cid", + client_secret="csec", + hub_url_core="http://core:3000", + hub_auth="http://auth:3001", + http_proxy="", + https_proxy="", + ) + + first_call_kwargs = mock_httpx_client.call_args_list[0][1] + assert first_call_kwargs["mounts"] is None + + +# ─── TestGetSslContext ──────────────────────────────────────────────────────── + +class TestGetSslContext: + def setup_method(self): + from src.utils.hub_client import get_ssl_context + get_ssl_context.cache_clear() + + def teardown_method(self): + from src.utils.hub_client import get_ssl_context + get_ssl_context.cache_clear() + + def test_without_extra_certs_does_not_load_verify_locations(self, monkeypatch): + monkeypatch.delenv("EXTRA_CA_CERTS", raising=False) + mock_ctx = MagicMock(spec=ssl.SSLContext) + + with patch("src.utils.hub_client.truststore.SSLContext", return_value=mock_ctx): + from src.utils.hub_client import get_ssl_context + result = get_ssl_context() + + assert result is mock_ctx + mock_ctx.load_verify_locations.assert_not_called() + + def test_with_existing_cert_path_loads_verify_locations(self, monkeypatch, tmp_path): + cert_file = tmp_path / "ca.crt" + cert_file.write_text("FAKE CERT") + monkeypatch.setenv("EXTRA_CA_CERTS", str(cert_file)) + mock_ctx = MagicMock(spec=ssl.SSLContext) + + with patch("src.utils.hub_client.truststore.SSLContext", return_value=mock_ctx): + from src.utils.hub_client import get_ssl_context + result = get_ssl_context() + + assert result is mock_ctx + mock_ctx.load_verify_locations.assert_called_once_with(cafile=str(cert_file)) + + def test_with_nonexistent_cert_path_does_not_load(self, monkeypatch): + monkeypatch.setenv("EXTRA_CA_CERTS", "/nonexistent/path/ca.crt") + mock_ctx = MagicMock(spec=ssl.SSLContext) + + with patch("src.utils.hub_client.truststore.SSLContext", return_value=mock_ctx): + from src.utils.hub_client import get_ssl_context + result = get_ssl_context() + + 
assert result is mock_ctx + mock_ctx.load_verify_locations.assert_not_called() + + +# ─── TestGetNodeIdByClient ──────────────────────────────────────────────────── + +class TestGetNodeIdByClient: + def test_success_returns_string_id(self, mock_hub_client): + mock_node = MagicMock() + mock_node.id = "node-uuid-123" + mock_hub_client.find_nodes.return_value = [mock_node] + + from src.utils.hub_client import get_node_id_by_client + result = get_node_id_by_client(mock_hub_client, "my-client-id") + + assert result == "node-uuid-123" + mock_hub_client.find_nodes.assert_called_once_with(filter={"client_id": "my-client-id"}) + + def test_http_status_error_returns_none(self, mock_hub_client): + mock_hub_client.find_nodes.side_effect = HTTPStatusError( + "404", request=MagicMock(), response=MagicMock() + ) + + from src.utils.hub_client import get_node_id_by_client + result = get_node_id_by_client(mock_hub_client, "cid") + + assert result is None + + def test_json_decode_error_returns_none(self, mock_hub_client): + mock_hub_client.find_nodes.side_effect = JSONDecodeError("err", "", 0) + + from src.utils.hub_client import get_node_id_by_client + result = get_node_id_by_client(mock_hub_client, "cid") + + assert result is None + + def test_connect_timeout_returns_none(self, mock_hub_client): + mock_hub_client.find_nodes.side_effect = ConnectTimeout("timeout") + + from src.utils.hub_client import get_node_id_by_client + result = get_node_id_by_client(mock_hub_client, "cid") + + assert result is None + + def test_attribute_error_returns_none(self, mock_hub_client): + mock_hub_client.find_nodes.side_effect = AttributeError("no attr") + + from src.utils.hub_client import get_node_id_by_client + result = get_node_id_by_client(mock_hub_client, "cid") + + assert result is None + + def test_hub_api_error_returns_none(self, mock_hub_client): + import flame_hub + mock_hub_client.find_nodes.side_effect = flame_hub._exceptions.HubAPIError( + "hub error", request=MagicMock() + ) + + from src.utils.hub_client import get_node_id_by_client + result = get_node_id_by_client(mock_hub_client, "cid") + + assert result is None + + +# ─── TestGetNodeAnalysisId ──────────────────────────────────────────────────── + +class TestGetNodeAnalysisId: + def test_success_returns_string_id(self, mock_hub_client): + mock_node_analysis = MagicMock() + mock_node_analysis.id = "na-uuid-456" + mock_hub_client.find_analysis_nodes.return_value = [mock_node_analysis] + + from src.utils.hub_client import get_node_analysis_id + result = get_node_analysis_id(mock_hub_client, "analysis-1", "node-obj-id") + + assert result == "na-uuid-456" + mock_hub_client.find_analysis_nodes.assert_called_once_with( + filter={"analysis_id": "analysis-1", "node_id": "node-obj-id"} + ) + + def test_empty_list_returns_none(self, mock_hub_client): + mock_hub_client.find_analysis_nodes.return_value = [] + + from src.utils.hub_client import get_node_analysis_id + result = get_node_analysis_id(mock_hub_client, "analysis-1", "node-obj-id") + + assert result is None + + def test_http_status_error_returns_none(self, mock_hub_client): + mock_hub_client.find_analysis_nodes.side_effect = HTTPStatusError( + "500", request=MagicMock(), response=MagicMock() + ) + + from src.utils.hub_client import get_node_analysis_id + result = get_node_analysis_id(mock_hub_client, "analysis-1", "node-obj-id") + + assert result is None + + def test_hub_api_error_returns_none(self, mock_hub_client): + import flame_hub + mock_hub_client.find_analysis_nodes.side_effect = 
flame_hub._exceptions.HubAPIError( + "err", request=MagicMock() + ) + + from src.utils.hub_client import get_node_analysis_id + result = get_node_analysis_id(mock_hub_client, "analysis-1", "node-obj-id") + + assert result is None + + def test_attribute_error_returns_none(self, mock_hub_client): + mock_hub_client.find_analysis_nodes.side_effect = AttributeError("no attr") + + from src.utils.hub_client import get_node_analysis_id + result = get_node_analysis_id(mock_hub_client, "analysis-1", "node-obj-id") + + assert result is None + + +# ─── TestUpdateHubStatus ────────────────────────────────────────────────────── + +class TestUpdateHubStatus: + def test_success_without_progress(self, mock_hub_client): + from src.utils.hub_client import update_hub_status + update_hub_status(mock_hub_client, "na-id", "started") + + mock_hub_client.update_analysis_node.assert_called_once_with( + "na-id", execution_status="started" + ) + + def test_success_with_progress(self, mock_hub_client): + from src.utils.hub_client import update_hub_status + update_hub_status(mock_hub_client, "na-id", "executing", run_progress=42) + + mock_hub_client.update_analysis_node.assert_called_once_with( + "na-id", execution_status="executing", execution_progress=42 + ) + + def test_stuck_status_mapped_to_failed(self, mock_hub_client): + from src.utils.hub_client import update_hub_status + from src.status.constants import AnalysisStatus + update_hub_status(mock_hub_client, "na-id", AnalysisStatus.STUCK.value) + + mock_hub_client.update_analysis_node.assert_called_once_with( + "na-id", execution_status=AnalysisStatus.FAILED.value + ) + + def test_http_status_error_does_not_raise(self, mock_hub_client): + mock_hub_client.update_analysis_node.side_effect = HTTPStatusError( + "500", request=MagicMock(), response=MagicMock() + ) + + from src.utils.hub_client import update_hub_status + # Should not raise + update_hub_status(mock_hub_client, "na-id", "started") + + def test_connect_error_does_not_raise(self, mock_hub_client): + mock_hub_client.update_analysis_node.side_effect = ConnectError("conn refused") + + from src.utils.hub_client import update_hub_status + update_hub_status(mock_hub_client, "na-id", "started") + + def test_attribute_error_does_not_raise(self, mock_hub_client): + mock_hub_client.update_analysis_node.side_effect = AttributeError("no attr") + + from src.utils.hub_client import update_hub_status + update_hub_status(mock_hub_client, "na-id", "started") + + def test_hub_api_error_does_not_raise(self, mock_hub_client): + import flame_hub + mock_hub_client.update_analysis_node.side_effect = flame_hub._exceptions.HubAPIError( + "err", request=MagicMock() + ) + + from src.utils.hub_client import update_hub_status + update_hub_status(mock_hub_client, "na-id", "started") + + +# ─── TestGetPartnerNodeStatuses ─────────────────────────────────────────────── + +class TestGetPartnerNodeStatuses: + def test_self_filtered_out(self, mock_hub_client): + node_a = MagicMock() + node_a.id = "self-id" + node_a.execution_status = "started" + node_b = MagicMock() + node_b.id = "partner-id" + node_b.execution_status = "executing" + mock_hub_client.find_analysis_nodes.return_value = [node_a, node_b] + + from src.utils.hub_client import get_partner_node_statuses + result = get_partner_node_statuses(mock_hub_client, "analysis-1", "self-id") + + assert result == {"partner-id": "executing"} + assert "self-id" not in result + + def test_all_partners_returned_when_no_self(self, mock_hub_client): + node_a = MagicMock() + node_a.id = "partner-a" + 
node_a.execution_status = "started" + node_b = MagicMock() + node_b.id = "partner-b" + node_b.execution_status = "finished" + mock_hub_client.find_analysis_nodes.return_value = [node_a, node_b] + + from src.utils.hub_client import get_partner_node_statuses + result = get_partner_node_statuses(mock_hub_client, "analysis-1", "self-id") + + assert result == {"partner-a": "started", "partner-b": "finished"} + + def test_returns_none_when_hub_call_fails(self, mock_hub_client): + mock_hub_client.find_analysis_nodes.side_effect = HTTPStatusError( + "500", request=MagicMock(), response=MagicMock() + ) + + from src.utils.hub_client import get_partner_node_statuses + result = get_partner_node_statuses(mock_hub_client, "analysis-1", "self-id") + + assert result is None + + def test_empty_analysis_nodes_returns_empty_dict(self, mock_hub_client): + mock_hub_client.find_analysis_nodes.return_value = [] + + from src.utils.hub_client import get_partner_node_statuses + result = get_partner_node_statuses(mock_hub_client, "analysis-1", "self-id") + + assert result == {} \ No newline at end of file diff --git a/tests/test_utils_other.py b/tests/test_utils_other.py new file mode 100644 index 0000000..5ab5389 --- /dev/null +++ b/tests/test_utils_other.py @@ -0,0 +1,110 @@ +import pytest +from unittest.mock import patch, MagicMock, AsyncMock + + +class TestExtractHubEnvs: + def test_all_set(self, monkeypatch): + monkeypatch.setenv("HUB_CLIENT_ID", "client-id") + monkeypatch.setenv("HUB_CLIENT_SECRET", "secret") + monkeypatch.setenv("HUB_URL_CORE", "http://hub-core") + monkeypatch.setenv("HUB_URL_AUTH", "http://hub-auth") + monkeypatch.setenv("HUB_LOGGING", "true") + monkeypatch.setenv("PO_HTTP_PROXY", "http://proxy") + monkeypatch.setenv("PO_HTTPS_PROXY", "https://proxy") + + from src.utils.other import extract_hub_envs + result = extract_hub_envs() + + assert result == ( + "client-id", + "secret", + "http://hub-core", + "http://hub-auth", + True, + "http://proxy", + "https://proxy", + ) + + def test_missing_optional(self, monkeypatch): + monkeypatch.setenv("HUB_CLIENT_ID", "client-id") + monkeypatch.setenv("HUB_CLIENT_SECRET", "secret") + monkeypatch.setenv("HUB_URL_CORE", "http://hub-core") + monkeypatch.setenv("HUB_URL_AUTH", "http://hub-auth") + monkeypatch.delenv("HUB_LOGGING", raising=False) + monkeypatch.delenv("PO_HTTP_PROXY", raising=False) + monkeypatch.delenv("PO_HTTPS_PROXY", raising=False) + + from src.utils.other import extract_hub_envs + result = extract_hub_envs() + + assert result[4] is False # HUB_LOGGING defaults to False + assert result[5] is None # PO_HTTP_PROXY + assert result[6] is None # PO_HTTPS_PROXY + + def test_missing_required(self, monkeypatch): + monkeypatch.delenv("HUB_CLIENT_ID", raising=False) + monkeypatch.delenv("HUB_CLIENT_SECRET", raising=False) + monkeypatch.delenv("HUB_URL_CORE", raising=False) + monkeypatch.delenv("HUB_URL_AUTH", raising=False) + monkeypatch.delenv("HUB_LOGGING", raising=False) + monkeypatch.delenv("PO_HTTP_PROXY", raising=False) + monkeypatch.delenv("PO_HTTPS_PROXY", raising=False) + + from src.utils.other import extract_hub_envs + result = extract_hub_envs() + + assert result[0] is None + assert result[1] is None + assert result[2] is None + assert result[3] is None + + @pytest.mark.parametrize("value,expected", [ + ("True", True), + ("true", True), + ("1", True), + ("t", True), + ("False", False), + ("false", False), + ("0", False), + ("yes", False), + ("", False), + ]) + def test_hub_logging_variants(self, monkeypatch, value, expected): + 
monkeypatch.setenv("HUB_LOGGING", value) + + from src.utils.other import extract_hub_envs + result = extract_hub_envs() + + assert result[4] is expected + + +class TestResourceNameToAnalysis: + def test_single_split(self): + from src.utils.other import resource_name_to_analysis + # deployment_name = "analysis-{analysis_id}-{restart_counter}" + result = resource_name_to_analysis("analysis-abc123-0") + assert result == "abc123" + + def test_double_split(self): + from src.utils.other import resource_name_to_analysis + # analysis_id itself contains a hyphen + result = resource_name_to_analysis("analysis-abc-123-0", max_r_split=1) + assert result == "abc-123" + + def test_nginx_prefix(self): + from src.utils.other import resource_name_to_analysis + # nginx sidecar container name includes extra prefix before "analysis-" + result = resource_name_to_analysis("nginx-analysis-abc123-0") + assert result == "abc123" + + def test_max_r_split_two(self): + from src.utils.other import resource_name_to_analysis + # with max_r_split=2 strips two trailing segments + result = resource_name_to_analysis("analysis-abc123-pod-0", max_r_split=2) + assert result == "abc123" + + def test_no_restart_counter(self): + from src.utils.other import resource_name_to_analysis + # edge case: no trailing hyphen — rsplit finds nothing to strip, returns full segment + result = resource_name_to_analysis("analysis-abc123") + assert result == "abc123" diff --git a/tests/test_utils_po_logging.py b/tests/test_utils_po_logging.py new file mode 100644 index 0000000..113f367 --- /dev/null +++ b/tests/test_utils_po_logging.py @@ -0,0 +1,225 @@ +import json +import logging + +import pytest + + +@pytest.fixture(autouse=True) +def reset_root_logger(): + """Snapshot and restore the root logger around each test. + + po_logging.get_logger() mutates the root logger's handlers and level, + and is idempotent based on whether a JsonFormatter is already attached. + Each test needs a clean slate. 
+ """ + root = logging.getLogger() + original_handlers = root.handlers[:] + original_level = root.level + + root.handlers = [] + + yield + + root.handlers = original_handlers + root.level = original_level + + +class TestJsonFormatter: + def _make_record(self, **overrides): + defaults = { + "name": "test.logger", + "level": logging.INFO, + "pathname": "/some/path/mod.py", + "lineno": 10, + "msg": "hello", + "args": (), + "exc_info": None, + } + defaults.update(overrides) + return logging.LogRecord(**defaults) + + def test_emits_required_fields(self): + from src.utils.po_logging import JsonFormatter + + record = self._make_record() + output = JsonFormatter().format(record) + parsed = json.loads(output) + + assert "timestamp" in parsed + assert parsed["level"] == "INFO" + assert parsed["msg"] == "hello" + + def test_emits_extra_fields(self): + from src.utils.po_logging import JsonFormatter + + record = self._make_record(name="my.logger", pathname="/x/foo.py") + parsed = json.loads(JsonFormatter().format(record)) + + assert parsed["logger"] == "my.logger" + assert parsed["module"] == "foo" + + def test_output_is_single_line(self): + from src.utils.po_logging import JsonFormatter + + record = self._make_record(msg="line1\nline2") + output = JsonFormatter().format(record) + + # json.dumps escapes \n → no raw newlines in output + assert "\n" not in output + assert json.loads(output)["msg"] == "line1\nline2" + + def test_timestamp_format(self): + from src.utils.po_logging import JsonFormatter + + record = self._make_record() + parsed = json.loads(JsonFormatter().format(record)) + + # ISO-ish: YYYY-MM-DDTHH:MM:SS + import re + assert re.match(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}$", parsed["timestamp"]) + + def test_message_with_args_is_formatted(self): + from src.utils.po_logging import JsonFormatter + + record = self._make_record(msg="user %s did %d things", args=("alice", 3)) + parsed = json.loads(JsonFormatter().format(record)) + + assert parsed["msg"] == "user alice did 3 things" + + def test_exc_info_adds_error_field(self): + from src.utils.po_logging import JsonFormatter + + try: + raise ValueError("boom") + except ValueError: + import sys + exc_info = sys.exc_info() + + record = self._make_record(level=logging.ERROR, msg="failed", exc_info=exc_info) + parsed = json.loads(JsonFormatter().format(record)) + + assert "error" in parsed + assert "ValueError" in parsed["error"] + assert "boom" in parsed["error"] + + def test_no_error_field_when_no_exc_info(self): + from src.utils.po_logging import JsonFormatter + + record = self._make_record() + parsed = json.loads(JsonFormatter().format(record)) + + assert "error" not in parsed + + def test_non_serializable_msg_falls_back_to_str(self): + from src.utils.po_logging import JsonFormatter + + class Weird: + def __str__(self): + return "weird-repr" + + record = self._make_record(msg=Weird()) + output = JsonFormatter().format(record) + + # getMessage() calls str() on non-string msg → JSON encodes as string + parsed = json.loads(output) + assert parsed["msg"] == "weird-repr" + + def test_custom_level_name_in_output(self): + from src.utils.po_logging import JsonFormatter, get_logger + + get_logger() # registers ACTION / STATUS_LOOP levels + + record = self._make_record(level=21) # ACTION + parsed = json.loads(JsonFormatter().format(record)) + assert parsed["level"] == "ACTION" + + +class TestGetLogger: + def test_returns_logger(self): + from src.utils.po_logging import get_logger + + logger = get_logger() + assert isinstance(logger, logging.Logger) + + 
def test_attaches_json_formatter_to_root(self): + from src.utils.po_logging import JsonFormatter, get_logger + + get_logger() + + root = logging.getLogger() + json_handlers = [h for h in root.handlers if isinstance(h.formatter, JsonFormatter)] + assert len(json_handlers) == 1 + + def test_sets_root_level_to_info(self): + from src.utils.po_logging import get_logger + + get_logger() + assert logging.getLogger().level == logging.INFO + + def test_is_idempotent(self): + from src.utils.po_logging import JsonFormatter, get_logger + + get_logger() + get_logger() + get_logger() + + root = logging.getLogger() + json_handlers = [h for h in root.handlers if isinstance(h.formatter, JsonFormatter)] + # multiple calls must NOT stack duplicate handlers + assert len(json_handlers) == 1 + + def test_registers_custom_levels(self): + from src.utils.po_logging import get_logger + + get_logger() + + assert logging.getLevelName(21) == "ACTION" + assert logging.getLevelName(22) == "STATUS_LOOP" + assert hasattr(logging.getLoggerClass(), "action") + assert hasattr(logging.getLoggerClass(), "status_loop") + + def test_end_to_end_emits_json_to_stream(self, capsys): + from src.utils.po_logging import get_logger + + logger = get_logger() + logger.info("end-to-end message") + + captured = capsys.readouterr() + # StreamHandler(sys.stdout) — message lands on stdout + line = captured.out.strip().splitlines()[-1] + parsed = json.loads(line) + + assert parsed["level"] == "INFO" + assert parsed["msg"] == "end-to-end message" + assert "timestamp" in parsed + + def test_end_to_end_custom_level(self, capsys): + from src.utils.po_logging import get_logger + + logger = get_logger() + logger.action("custom-level message") # type: ignore[attr-defined] + + captured = capsys.readouterr() + line = captured.out.strip().splitlines()[-1] + parsed = json.loads(line) + + assert parsed["level"] == "ACTION" + assert parsed["msg"] == "custom-level message" + + def test_end_to_end_exception_populates_error_field(self, capsys): + from src.utils.po_logging import get_logger + + logger = get_logger() + try: + raise RuntimeError("kaboom") + except RuntimeError: + logger.exception("caught it") + + captured = capsys.readouterr() + line = captured.out.strip().splitlines()[-1] + parsed = json.loads(line) + + assert parsed["msg"] == "caught it" + assert "error" in parsed + assert "RuntimeError" in parsed["error"] + assert "kaboom" in parsed["error"] diff --git a/tests/test_utils_token.py b/tests/test_utils_token.py new file mode 100644 index 0000000..3276a85 --- /dev/null +++ b/tests/test_utils_token.py @@ -0,0 +1,303 @@ +import pytest +from unittest.mock import patch, MagicMock +import requests + + +class TestCreateAnalysisTokens: + def test_returns_both_keys(self): + with patch("src.utils.token.get_keycloak_token", return_value="kc-token"): + from src.utils.token import create_analysis_tokens + result = create_analysis_tokens("kong-tok", "analysis-1") + assert result == {"DATA_SOURCE_TOKEN": "kong-tok", "KEYCLOAK_TOKEN": "kc-token"} + + def test_data_source_token_is_kong_token(self): + with patch("src.utils.token.get_keycloak_token", return_value="kc"): + from src.utils.token import create_analysis_tokens + result = create_analysis_tokens("my-kong-token", "aid") + assert result["DATA_SOURCE_TOKEN"] == "my-kong-token" + + def test_keycloak_token_from_getter(self): + with patch("src.utils.token.get_keycloak_token", return_value="kc-abc") as mock_get: + from src.utils.token import create_analysis_tokens + result = create_analysis_tokens("tok", "aid") + 
mock_get.assert_called_once_with("aid") + assert result["KEYCLOAK_TOKEN"] == "kc-abc" + + +class TestGetKeycloakToken: + def test_success_returns_access_token(self): + mock_response = MagicMock() + mock_response.json.return_value = {"access_token": "bearer-xyz"} + + with ( + patch("src.utils.token._get_keycloak_client_secret", return_value="secret-123"), + patch("src.utils.token.requests.post", return_value=mock_response), + patch("src.utils.token._KEYCLOAK_URL", "http://kc:8080"), + ): + from src.utils.token import get_keycloak_token + result = get_keycloak_token("analysis-1") + + assert result == "bearer-xyz" + + def test_request_exception_returns_none(self): + with ( + patch("src.utils.token._get_keycloak_client_secret", return_value="sec"), + patch( + "src.utils.token.requests.post", + side_effect=requests.exceptions.RequestException("conn refused"), + ), + patch("src.utils.token._KEYCLOAK_URL", "http://kc:8080"), + ): + from src.utils.token import get_keycloak_token + result = get_keycloak_token("analysis-1") + + assert result is None + + def test_http_error_returns_none(self): + mock_response = MagicMock() + mock_response.raise_for_status.side_effect = requests.exceptions.HTTPError("404") + + with ( + patch("src.utils.token._get_keycloak_client_secret", return_value="sec"), + patch("src.utils.token.requests.post", return_value=mock_response), + patch("src.utils.token._KEYCLOAK_URL", "http://kc:8080"), + ): + from src.utils.token import get_keycloak_token + result = get_keycloak_token("analysis-1") + + assert result is None + + def test_url_hardcodes_flame_realm(self): + # Note: get_keycloak_token uses _KEYCLOAK_URL but hardcodes "flame" in the + # path rather than reading _KEYCLOAK_REALM. Verify expected URL is used. + mock_response = MagicMock() + mock_response.json.return_value = {"access_token": "tok"} + + with ( + patch("src.utils.token._get_keycloak_client_secret", return_value="sec"), + patch("src.utils.token.requests.post", return_value=mock_response) as mock_post, + patch("src.utils.token._KEYCLOAK_URL", "http://kc:8080"), + ): + from src.utils.token import get_keycloak_token + get_keycloak_token("analysis-1") + + call_url = mock_post.call_args[0][0] + assert call_url == "http://kc:8080/realms/flame/protocol/openid-connect/token" + + +class TestGetKeycloakAdminToken: + def test_success_returns_access_token(self): + mock_response = MagicMock() + mock_response.json.return_value = {"access_token": "admin-bearer"} + + with ( + patch("src.utils.token.requests.post", return_value=mock_response), + patch("src.utils.token._KEYCLOAK_URL", "http://kc:8080"), + patch("src.utils.token._KEYCLOAK_REALM", "flame"), + ): + from src.utils.token import _get_keycloak_admin_token + result = _get_keycloak_admin_token() + + assert result == "admin-bearer" + + def test_uses_result_client_credentials(self, monkeypatch): + monkeypatch.setenv("RESULT_CLIENT_ID", "my-result-client") + monkeypatch.setenv("RESULT_CLIENT_SECRET", "my-result-secret") + + mock_response = MagicMock() + mock_response.json.return_value = {"access_token": "tok"} + + with ( + patch("src.utils.token.requests.post", return_value=mock_response) as mock_post, + patch("src.utils.token._KEYCLOAK_URL", "http://kc:8080"), + patch("src.utils.token._KEYCLOAK_REALM", "flame"), + ): + from src.utils.token import _get_keycloak_admin_token + _get_keycloak_admin_token() + + posted_data = mock_post.call_args[1]["data"] + assert posted_data["client_id"] == "my-result-client" + assert posted_data["client_secret"] == "my-result-secret" + assert 
posted_data["grant_type"] == "client_credentials" + + +class TestKeycloakClientExists: + def test_exists_returns_true(self): + mock_response = MagicMock() + mock_response.json.return_value = [{"clientId": "analysis-1", "id": "uuid-1"}] + + with ( + patch("src.utils.token.requests.get", return_value=mock_response), + patch("src.utils.token._KEYCLOAK_URL", "http://kc:8080"), + patch("src.utils.token._KEYCLOAK_REALM", "flame"), + ): + from src.utils.token import _keycloak_client_exists + result = _keycloak_client_exists("analysis-1", "admin-token") + + assert result is True + + def test_not_exists_returns_false(self): + mock_response = MagicMock() + mock_response.json.return_value = [] + + with ( + patch("src.utils.token.requests.get", return_value=mock_response), + patch("src.utils.token._KEYCLOAK_URL", "http://kc:8080"), + patch("src.utils.token._KEYCLOAK_REALM", "flame"), + ): + from src.utils.token import _keycloak_client_exists + result = _keycloak_client_exists("analysis-1", "admin-token") + + assert result is False + + def test_uses_correct_url_and_auth_header(self): + mock_response = MagicMock() + mock_response.json.return_value = [] + + with ( + patch("src.utils.token.requests.get", return_value=mock_response) as mock_get, + patch("src.utils.token._KEYCLOAK_URL", "http://kc:8080"), + patch("src.utils.token._KEYCLOAK_REALM", "testrealm"), + ): + from src.utils.token import _keycloak_client_exists + _keycloak_client_exists("my-analysis", "my-admin-token") + + mock_get.assert_called_once_with( + "http://kc:8080/admin/realms/testrealm/clients?clientId=my-analysis", + headers={"Authorization": "Bearer my-admin-token"}, + ) + + +class TestCreateKeycloakClient: + def test_posts_correct_payload(self): + mock_response = MagicMock() + + with ( + patch("src.utils.token.requests.post", return_value=mock_response) as mock_post, + patch("src.utils.token._KEYCLOAK_URL", "http://kc:8080"), + patch("src.utils.token._KEYCLOAK_REALM", "flame"), + ): + from src.utils.token import _create_keycloak_client + _create_keycloak_client("admin-tok", "analysis-abc") + + mock_post.assert_called_once_with( + "http://kc:8080/admin/realms/flame/clients", + headers={ + "Authorization": "Bearer admin-tok", + "Content-Type": "application/json", + }, + json={ + "clientId": "analysis-abc", + "name": "flame-analysis-abc", + "serviceAccountsEnabled": "true", + }, + ) + + def test_raises_on_http_error(self): + mock_response = MagicMock() + mock_response.raise_for_status.side_effect = requests.exceptions.HTTPError("409") + + with ( + patch("src.utils.token.requests.post", return_value=mock_response), + patch("src.utils.token._KEYCLOAK_URL", "http://kc:8080"), + patch("src.utils.token._KEYCLOAK_REALM", "flame"), + ): + from src.utils.token import _create_keycloak_client + with pytest.raises(requests.exceptions.HTTPError): + _create_keycloak_client("admin-tok", "analysis-abc") + + +class TestGetKeycloakClientSecret: + def test_client_exists_skips_creation(self): + mock_get_response = MagicMock() + mock_get_response.json.return_value = [{"secret": "my-secret"}] + + with ( + patch("src.utils.token._get_keycloak_admin_token", return_value="admin-tok"), + patch("src.utils.token._keycloak_client_exists", return_value=True), + patch("src.utils.token._create_keycloak_client") as mock_create, + patch("src.utils.token.requests.get", return_value=mock_get_response), + patch("src.utils.token._KEYCLOAK_URL", "http://kc:8080"), + patch("src.utils.token._KEYCLOAK_REALM", "flame"), + ): + from src.utils.token import 
_get_keycloak_client_secret + result = _get_keycloak_client_secret("analysis-1") + + mock_create.assert_not_called() + assert result == "my-secret" + + def test_client_not_exists_creates_then_gets_secret(self): + mock_get_response = MagicMock() + mock_get_response.json.return_value = [{"secret": "new-secret"}] + + with ( + patch("src.utils.token._get_keycloak_admin_token", return_value="admin-tok"), + patch("src.utils.token._keycloak_client_exists", return_value=False), + patch("src.utils.token._create_keycloak_client") as mock_create, + patch("src.utils.token.requests.get", return_value=mock_get_response), + patch("src.utils.token._KEYCLOAK_URL", "http://kc:8080"), + patch("src.utils.token._KEYCLOAK_REALM", "flame"), + ): + from src.utils.token import _get_keycloak_client_secret + result = _get_keycloak_client_secret("analysis-1") + + mock_create.assert_called_once_with("admin-tok", "analysis-1") + assert result == "new-secret" + + +class TestDeleteKeycloakClient: + def test_success_deletes_by_uuid(self): + mock_get_response = MagicMock() + mock_get_response.json.return_value = [{"id": "uuid-abc", "clientId": "analysis-1"}] + mock_delete_response = MagicMock() + + with ( + patch("src.utils.token._get_keycloak_admin_token", return_value="admin-tok"), + patch("src.utils.token.requests.get", return_value=mock_get_response), + patch( + "src.utils.token.requests.delete", return_value=mock_delete_response + ) as mock_delete, + patch("src.utils.token._KEYCLOAK_URL", "http://kc:8080"), + patch("src.utils.token._KEYCLOAK_REALM", "flame"), + ): + from src.utils.token import delete_keycloak_client + delete_keycloak_client("analysis-1") + + mock_delete.assert_called_once_with( + "http://kc:8080/admin/realms/flame/clients/uuid-abc", + headers={"Authorization": "Bearer admin-tok"}, + ) + + def test_client_not_found_skips_delete(self): + mock_get_response = MagicMock() + mock_get_response.json.return_value = [] + + with ( + patch("src.utils.token._get_keycloak_admin_token", return_value="admin-tok"), + patch("src.utils.token.requests.get", return_value=mock_get_response), + patch("src.utils.token.requests.delete") as mock_delete, + patch("src.utils.token._KEYCLOAK_URL", "http://kc:8080"), + patch("src.utils.token._KEYCLOAK_REALM", "flame"), + ): + from src.utils.token import delete_keycloak_client + result = delete_keycloak_client("analysis-1") + + mock_delete.assert_not_called() + assert result is None + + def test_missing_id_key_returns_gracefully(self): + # Response has entry but no 'id' key — KeyError caught, returns None + mock_get_response = MagicMock() + mock_get_response.json.return_value = [{"clientId": "analysis-1"}] + + with ( + patch("src.utils.token._get_keycloak_admin_token", return_value="admin-tok"), + patch("src.utils.token.requests.get", return_value=mock_get_response), + patch("src.utils.token.requests.delete") as mock_delete, + patch("src.utils.token._KEYCLOAK_URL", "http://kc:8080"), + patch("src.utils.token._KEYCLOAK_REALM", "flame"), + ): + from src.utils.token import delete_keycloak_client + delete_keycloak_client("analysis-1") + + mock_delete.assert_not_called() \ No newline at end of file From 5c89ae31c66d68ee3df5be00b91d0d40a5c8b22a Mon Sep 17 00:00:00 2001 From: Nightknight3000 Date: Wed, 22 Apr 2026 14:00:47 +0200 Subject: [PATCH 2/4] fix: reattempt unstuck on failure Co-authored-by: antidodo --- poetry.lock | 176 +++++++++++++++++++----------------- pyproject.toml | 2 +- src/resources/log/entity.py | 2 +- src/resources/utils.py | 24 ++++- 4 files changed, 114 insertions(+), 
90 deletions(-) diff --git a/poetry.lock b/poetry.lock index bd4743c..b5eb6d2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,12 +1,12 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 2.3.4 and should not be changed by hand. [[package]] name = "annotated-doc" version = "0.0.4" description = "Document parameters, class attributes, return types, and variables inline, with Annotated." -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320"}, {file = "annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4"}, @@ -16,9 +16,9 @@ files = [ name = "annotated-types" version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -28,9 +28,9 @@ files = [ name = "anyio" version = "4.13.0" description = "High-level concurrency and networking framework on top of asyncio or Trio" -category = "main" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "anyio-4.13.0-py3-none-any.whl", hash = "sha256:08b310f9e24a9594186fd75b4f73f4a4152069e3853f1ed8bfbf58369f4ad708"}, {file = "anyio-4.13.0.tar.gz", hash = "sha256:334b70e641fd2221c1505b3890c69882fe4a2df910cba14d97019b90b24439dc"}, @@ -46,23 +46,24 @@ trio = ["trio (>=0.32.0)"] [[package]] name = "certifi" -version = "2026.2.25" +version = "2026.4.22" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa"}, - {file = "certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7"}, + {file = "certifi-2026.4.22-py3-none-any.whl", hash = "sha256:3cb2210c8f88ba2318d29b0388d1023c8492ff72ecdde4ebdaddbb13a31b1c4a"}, + {file = "certifi-2026.4.22.tar.gz", hash = "sha256:8d455352a37b71bf76a79caa83a3d6c25afee4a385d632127b6afb3963f1c580"}, ] [[package]] name = "cffi" version = "2.0.0" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, @@ -157,9 +158,9 @@ pycparser = {version = "*", markers = "implementation_name != \"PyPy\""} name = "cfgv" version = "3.5.0" description = "Validate configuration and produce human readable error messages." 
-category = "dev" optional = false python-versions = ">=3.10" +groups = ["dev"] files = [ {file = "cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0"}, {file = "cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132"}, @@ -169,9 +170,9 @@ files = [ name = "charset-normalizer" version = "3.4.7" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "charset_normalizer-3.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cdd68a1fb318e290a2077696b7eb7a21a49163c455979c639bf5a5dcdc46617d"}, {file = "charset_normalizer-3.4.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e17b8d5d6a8c47c85e68ca8379def1303fd360c3e22093a807cd34a71cd082b8"}, @@ -308,9 +309,9 @@ files = [ name = "click" version = "8.3.2" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "click-8.3.2-py3-none-any.whl", hash = "sha256:1924d2c27c5653561cd2cae4548d1406039cb79b858b747cfea24924bbc1616d"}, {file = "click-8.3.2.tar.gz", hash = "sha256:14162b8b3b3550a7d479eafa77dfd3c38d9dc8951f6f69c78913a8f9a7540fd5"}, @@ -323,21 +324,22 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "platform_system == \"Windows\"", dev = "sys_platform == \"win32\""} [[package]] name = "coverage" version = "7.13.5" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.10" +groups = ["dev"] files = [ {file = "coverage-7.13.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0723d2c96324561b9aa76fb982406e11d93cdb388a7a7da2b16e04719cf7ca5"}, {file = "coverage-7.13.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52f444e86475992506b32d4e5ca55c24fc88d73bcbda0e9745095b28ef4dc0cf"}, @@ -448,15 +450,15 @@ files = [ ] [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cryptography" version = "46.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-category = "main" optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.8" +groups = ["main"] files = [ {file = "cryptography-46.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:ea42cbe97209df307fdc3b155f1b6fa2577c0defa8f1f7d3be7d31d189108ad4"}, {file = "cryptography-46.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b36a4695e29fe69215d75960b22577197aca3f7a25b9cf9d165dcfe9d80bc325"}, @@ -510,8 +512,8 @@ files = [ ] [package.dependencies] -cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9\" and platform_python_implementation != \"PyPy\""} -typing-extensions = {version = ">=4.13.2", markers = "python_full_version < \"3.11\""} +cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""} +typing-extensions = {version = ">=4.13.2", markers = "python_full_version < \"3.11.0\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"] @@ -527,9 +529,9 @@ test-randomorder = ["pytest-randomly"] name = "distlib" version = "0.4.0" description = "Distribution utilities" -category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16"}, {file = "distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d"}, @@ -539,9 +541,9 @@ files = [ name = "dnspython" version = "2.8.0" description = "DNS toolkit" -category = "main" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af"}, {file = "dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f"}, @@ -554,15 +556,15 @@ doh = ["h2 (>=4.2.0)", "httpcore (>=1.0.0)", "httpx (>=0.28.0)"] doq = ["aioquic (>=1.2.0)"] idna = ["idna (>=3.10)"] trio = ["trio (>=0.30)"] -wmi = ["wmi (>=1.5.1)"] +wmi = ["wmi (>=1.5.1) ; platform_system == \"Windows\""] [[package]] name = "docker" version = "7.1.0" description = "A Python library for the Docker Engine API." -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, @@ -583,9 +585,9 @@ websockets = ["websocket-client (>=1.3.0)"] name = "durationpy" version = "0.10" description = "Module for converting between datetime.timedelta and Go's Duration strings." -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286"}, {file = "durationpy-0.10.tar.gz", hash = "sha256:1fa6893409a6e739c9c72334fc65cca1f355dbdd93405d30f726deb5bde42fba"}, @@ -595,9 +597,9 @@ files = [ name = "email-validator" version = "2.3.0" description = "A robust email address syntax and deliverability validation library." 
-category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4"}, {file = "email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426"}, @@ -611,9 +613,10 @@ idna = ">=2.0.0" name = "exceptiongroup" version = "1.3.1" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version == \"3.10\"" files = [ {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"}, {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"}, @@ -629,9 +632,9 @@ test = ["pytest (>=6)"] name = "fastapi" version = "0.135.4" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" -category = "main" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "fastapi-0.135.4-py3-none-any.whl", hash = "sha256:539d3531f8aba9b286ab44658344553f4a4adc218529137501e5d97be071a78b"}, {file = "fastapi-0.135.4.tar.gz", hash = "sha256:d87c41b0a7bcaa6f14629d73fe48e360821605c7b6d518caacbc00dcf8fa5e0e"}, @@ -653,9 +656,9 @@ standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[stand name = "filelock" version = "3.29.0" description = "A platform independent file lock." -category = "dev" optional = false python-versions = ">=3.10" +groups = ["dev"] files = [ {file = "filelock-3.29.0-py3-none-any.whl", hash = "sha256:96f5f6344709aa1572bbf631c640e4ebeeb519e08da902c39a001882f30ac258"}, {file = "filelock-3.29.0.tar.gz", hash = "sha256:69974355e960702e789734cb4871f884ea6fe50bd8404051a3530bc07809cf90"}, @@ -665,9 +668,9 @@ files = [ name = "flame-hub-client" version = "0.2.15" description = "HTTP client for interacting with FLAME Hub services." 
-category = "main" optional = false python-versions = "<4,>=3.10" +groups = ["main"] files = [ {file = "flame_hub_client-0.2.15-py3-none-any.whl", hash = "sha256:a31f04dd0e55cbdf0f12984613b3834da0cf294fa0e6ae5988a1335133e88734"}, {file = "flame_hub_client-0.2.15.tar.gz", hash = "sha256:dd296472c05a92bf546e6ec2b9c68a68bc00848607539e77ba12dc756a912a7c"}, @@ -682,9 +685,10 @@ pydantic = {version = ">=2.12.5,<3.0.0", extras = ["email"]} name = "greenlet" version = "3.4.0" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = ">=3.10" +groups = ["main"] +markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"" files = [ {file = "greenlet-3.4.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d18eae9a7fb0f499efcd146b8c9750a2e1f6e0e93b5a382b3481875354a430e6"}, {file = "greenlet-3.4.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:636d2f95c309e35f650e421c23297d5011716be15d966e6328b367c9fc513a82"}, @@ -755,9 +759,9 @@ test = ["objgraph", "psutil", "setuptools"] name = "h11" version = "0.16.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, @@ -767,9 +771,9 @@ files = [ name = "httpcore" version = "1.0.9" description = "A minimal low-level HTTP client." -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, @@ -782,16 +786,16 @@ h11 = ">=0.16" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" version = "0.28.1" description = "The next generation HTTP client." 
-category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -800,23 +804,23 @@ files = [ [package.dependencies] anyio = "*" certifi = "*" -httpcore = ">=1.0.0,<2.0.0" +httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "identify" version = "2.6.19" description = "File identification library for Python" -category = "dev" optional = false python-versions = ">=3.10" +groups = ["dev"] files = [ {file = "identify-2.6.19-py2.py3-none-any.whl", hash = "sha256:20e6a87f786f768c092a721ad107fc9df0eb89347be9396cadf3f4abbd1fb78a"}, {file = "identify-2.6.19.tar.gz", hash = "sha256:6be5020c38fcb07da56c53733538a3081ea5aa70d36a156f83044bfbf9173842"}, @@ -829,9 +833,9 @@ license = ["ukkonen"] name = "idna" version = "3.12" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "idna-3.12-py3-none-any.whl", hash = "sha256:60ffaa1858fac94c9c124728c24fcde8160f3fb4a7f79aa8cdd33a9d1af60a67"}, {file = "idna-3.12.tar.gz", hash = "sha256:724e9952cc9e2bd7550ea784adb098d837ab5267ef67a1ab9cf7846bdbdd8254"}, @@ -844,9 +848,9 @@ all = ["mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] name = "iniconfig" version = "2.3.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.10" +groups = ["dev"] files = [ {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, @@ -856,9 +860,9 @@ files = [ name = "kong-admin-client" version = "3.5.0" description = "Kong Admin API" -category = "main" optional = false python-versions = "^3.7" +groups = ["main"] files = [] develop = false @@ -878,16 +882,16 @@ resolved_reference = "0524678992903864694c706ed50c0d8cad80f45a" name = "kubernetes" version = "35.0.0" description = "Kubernetes python client" -category = "main" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "kubernetes-35.0.0-py2.py3-none-any.whl", hash = "sha256:39e2b33b46e5834ef6c3985ebfe2047ab39135d41de51ce7641a7ca5b372a13d"}, {file = "kubernetes-35.0.0.tar.gz", hash = "sha256:3d00d344944239821458b9efd484d6df9f011da367ecb155dadf9513f05f09ee"}, ] [package.dependencies] -certifi = ">=14.05.14" +certifi = ">=14.5.14" durationpy = ">=0.7" python-dateutil = ">=2.5.3" pyyaml = ">=5.4.1" @@ -895,7 +899,7 @@ requests = "*" requests-oauthlib = "*" six = ">=1.9.0" urllib3 = ">=1.24.2,<2.6.0 || >2.6.0" -websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.0 || >=0.43.0" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] @@ -905,9 +909,9 @@ google-auth = ["google-auth 
(>=1.0.1)"] name = "nodeenv" version = "1.10.0" description = "Node.js virtual environment builder" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] files = [ {file = "nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827"}, {file = "nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb"}, @@ -917,9 +921,9 @@ files = [ name = "oauthlib" version = "3.3.1" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1"}, {file = "oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9"}, @@ -934,9 +938,9 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "packaging" version = "26.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "packaging-26.1-py3-none-any.whl", hash = "sha256:5d9c0669c6285e491e0ced2eee587eaf67b670d94a19e94e3984a481aba6802f"}, {file = "packaging-26.1.tar.gz", hash = "sha256:f042152b681c4bfac5cae2742a55e103d27ab2ec0f3d88037136b6bfe7c9c5de"}, @@ -946,9 +950,9 @@ files = [ name = "platformdirs" version = "4.9.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -category = "dev" optional = false python-versions = ">=3.10" +groups = ["dev"] files = [ {file = "platformdirs-4.9.6-py3-none-any.whl", hash = "sha256:e61adb1d5e5cb3441b4b7710bea7e4c12250ca49439228cc1021c00dcfac0917"}, {file = "platformdirs-4.9.6.tar.gz", hash = "sha256:3bfa75b0ad0db84096ae777218481852c0ebc6c727b3168c1b9e0118e458cf0a"}, @@ -958,9 +962,9 @@ files = [ name = "pluggy" version = "1.6.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, @@ -972,14 +976,14 @@ testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "4.5.1" +version = "4.6.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
-category = "dev" optional = false python-versions = ">=3.10" +groups = ["dev"] files = [ - {file = "pre_commit-4.5.1-py2.py3-none-any.whl", hash = "sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77"}, - {file = "pre_commit-4.5.1.tar.gz", hash = "sha256:eb545fcff725875197837263e977ea257a402056661f09dae08e4b149b030a61"}, + {file = "pre_commit-4.6.0-py2.py3-none-any.whl", hash = "sha256:e2cf246f7299edcabcf15f9b0571fdce06058527f0a06535068a86d38089f29b"}, + {file = "pre_commit-4.6.0.tar.gz", hash = "sha256:718d2208cef53fdc38206e40524a6d4d9576d103eb16f0fec11c875e7716e9d9"}, ] [package.dependencies] @@ -993,9 +997,9 @@ virtualenv = ">=20.10.0" name = "psycopg2-binary" version = "2.9.12" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "psycopg2_binary-2.9.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b818ceff717f98851a64bffd4c5eb5b3059ae280276dcecc52ac658dcf006a4"}, {file = "psycopg2_binary-2.9.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fa0d7caca8635c56e373055094eeda3208d901d55dd0ff5abc1d4e47f82b56"}, @@ -1070,9 +1074,10 @@ files = [ name = "pycparser" version = "3.0" description = "C parser in Python" -category = "main" optional = false python-versions = ">=3.10" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\"" files = [ {file = "pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992"}, {file = "pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29"}, @@ -1082,9 +1087,9 @@ files = [ name = "pydantic" version = "2.13.3" description = "Data validation using Python type hints" -category = "main" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "pydantic-2.13.3-py3-none-any.whl", hash = "sha256:6db14ac8dfc9a1e57f87ea2c0de670c251240f43cb0c30a5130e9720dc612927"}, {file = "pydantic-2.13.3.tar.gz", hash = "sha256:af09e9d1d09f4e7fe37145c1f577e1d61ceb9a41924bf0094a36506285d0a84d"}, @@ -1099,15 +1104,15 @@ typing-inspection = ">=0.4.2" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" version = "2.46.3" description = "Core functionality for Pydantic validation and serialization" -category = "main" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "pydantic_core-2.46.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1da3786b8018e60349680720158cc19161cc3b4bdd815beb0a321cd5ce1ad5b1"}, {file = "pydantic_core-2.46.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc0988cb29d21bf4a9d5cf2ef970b5c0e38d8d8e107a493278c05dc6c1dda69f"}, @@ -1238,9 +1243,9 @@ typing-extensions = ">=4.14.1" name = "pygments" version = "2.20.0" description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176"}, {file = "pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f"}, @@ -1253,9 +1258,9 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pyjwt" version = "2.12.1" description = "JSON Web Token implementation in Python" -category = "main" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "pyjwt-2.12.1-py3-none-any.whl", hash = "sha256:28ca37c070cad8ba8cd9790cd940535d40274d22f80ab87f3ac6a713e6e8454c"}, {file = "pyjwt-2.12.1.tar.gz", hash = "sha256:c74a7a2adf861c04d002db713dd85f84beb242228e671280bf709d765b03672b"}, @@ -1274,9 +1279,9 @@ tests = ["coverage[toml] (==7.10.7)", "pytest (>=8.4.2,<9.0.0)"] name = "pytest" version = "9.0.3" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.10" +groups = ["dev"] files = [ {file = "pytest-9.0.3-py3-none-any.whl", hash = "sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9"}, {file = "pytest-9.0.3.tar.gz", hash = "sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c"}, @@ -1298,9 +1303,9 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests name = "python-dateutil" version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -1313,9 +1318,9 @@ six = ">=1.5" name = "python-discovery" version = "1.2.2" description = "Python interpreter discovery" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "python_discovery-1.2.2-py3-none-any.whl", hash = "sha256:e1ae95d9af875e78f15e19aed0c6137ab1bb49c200f21f5061786490c9585c7a"}, {file = "python_discovery-1.2.2.tar.gz", hash = "sha256:876e9c57139eb757cb5878cbdd9ae5379e5d96266c99ef731119e04fffe533bb"}, @@ -1333,9 +1338,9 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.5.4)", "pytest (>=8.3.5)", "pyt name = "python-dotenv" version = "1.2.2" description = "Read key-value pairs from a .env file and set them as environment variables" -category = "main" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a"}, {file = "python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3"}, @@ -1348,9 +1353,10 @@ cli = ["click (>=5.0)"] name = "pywin32" version = "311" description = "Python for Window Extensions" -category = "main" optional = false python-versions = "*" +groups = ["main"] +markers = "sys_platform == \"win32\"" files = [ {file = "pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3"}, {file = "pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b"}, @@ -1378,9 +1384,9 @@ files = [ name = "pyyaml" version = 
"6.0.3" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, @@ -1461,9 +1467,9 @@ files = [ name = "requests" version = "2.33.1" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a"}, {file = "requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517"}, @@ -1483,9 +1489,9 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<8)"] name = "requests-oauthlib" version = "2.0.0" description = "OAuthlib authentication support for Requests." -category = "main" optional = false python-versions = ">=3.4" +groups = ["main"] files = [ {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, @@ -1502,9 +1508,9 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "ruff" version = "0.15.11" description = "An extremely fast Python linter and code formatter, written in Rust." -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ruff-0.15.11-py3-none-linux_armv6l.whl", hash = "sha256:e927cfff503135c558eb581a0c9792264aae9507904eb27809cdcff2f2c847b7"}, {file = "ruff-0.15.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7a1b5b2938d8f890b76084d4fa843604d787a912541eae85fd7e233398bbb73e"}, @@ -1530,9 +1536,9 @@ files = [ name = "six" version = "1.17.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -1542,9 +1548,9 @@ files = [ name = "sqlalchemy" version = "2.0.49" description = "Database Abstraction Library" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "sqlalchemy-2.0.49-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:42e8804962f9e6f4be2cbaedc0c3718f08f60a16910fa3d86da5a1e3b1bfe60f"}, {file = "sqlalchemy-2.0.49-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc992c6ed024c8c3c592c5fc9846a03dd68a425674900c70122c77ea16c5fb0b"}, @@ -1644,9 +1650,9 @@ sqlcipher = ["sqlcipher3_binary"] name = "starlette" version = "1.0.0" description = "The little ASGI library that shines." 
-category = "main" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "starlette-1.0.0-py3-none-any.whl", hash = "sha256:d3ec55e0bb321692d275455ddfd3df75fff145d009685eb40dc91fc66b03d38b"}, {file = "starlette-1.0.0.tar.gz", hash = "sha256:6a4beaf1f81bb472fd19ea9b918b50dc3a77a6f2e190a12954b25e6ed5eea149"}, @@ -1663,9 +1669,10 @@ full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart name = "tomli" version = "2.4.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version == \"3.10\"" files = [ {file = "tomli-2.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f8f0fc26ec2cc2b965b7a3b87cd19c5c6b8c5e5f436b984e85f486d652285c30"}, {file = "tomli-2.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ab97e64ccda8756376892c53a72bd1f964e519c77236368527f758fbc36a53a"}, @@ -1720,9 +1727,9 @@ files = [ name = "truststore" version = "0.10.4" description = "Verify certificates using native system trust stores" -category = "main" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "truststore-0.10.4-py3-none-any.whl", hash = "sha256:adaeaecf1cbb5f4de3b1959b42d41f6fab57b2b1666adb59e89cb0b53361d981"}, {file = "truststore-0.10.4.tar.gz", hash = "sha256:9d91bd436463ad5e4ee4aba766628dd6cd7010cf3e2461756b3303710eebc301"}, @@ -1732,21 +1739,22 @@ files = [ name = "typing-extensions" version = "4.15.0" description = "Backported and Experimental Type Hints for Python 3.9+" -category = "main" optional = false python-versions = ">=3.9" +groups = ["main", "dev"] files = [ {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] +markers = {dev = "python_version == \"3.10\""} [[package]] name = "typing-inspection" version = "0.4.2" description = "Runtime typing introspection tools" -category = "main" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, @@ -1759,27 +1767,27 @@ typing-extensions = ">=4.12.0" name = "urllib3" version = "2.6.3" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"}, {file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"}, ] [package.extras] -brotli = ["brotli (>=1.2.0)", "brotlicffi (>=1.2.0.0)"] +brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["backports-zstd (>=1.0.0)"] +zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""] [[package]] name = "uvicorn" version = "0.44.0" description = "The lightning-fast ASGI server." 
-category = "main" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "uvicorn-0.44.0-py3-none-any.whl", hash = "sha256:ce937c99a2cc70279556967274414c087888e8cec9f9c94644dfca11bd3ced89"}, {file = "uvicorn-0.44.0.tar.gz", hash = "sha256:6c942071b68f07e178264b9152f1f16dfac5da85880c4ce06366a96d70d4f31e"}, @@ -1791,15 +1799,15 @@ h11 = ">=0.8" typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} [package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1)", "watchfiles (>=0.20)", "websockets (>=10.4)"] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.20)", "websockets (>=10.4)"] [[package]] name = "virtualenv" version = "21.2.4" description = "Virtual Python Environment builder" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "virtualenv-21.2.4-py3-none-any.whl", hash = "sha256:29d21e941795206138d0f22f4e45ff7050e5da6c6472299fb7103318763861ac"}, {file = "virtualenv-21.2.4.tar.gz", hash = "sha256:b294ef68192638004d72524ce7ef303e9d0cf5a44c95ce2e54a7500a6381cada"}, @@ -1816,9 +1824,9 @@ typing-extensions = {version = ">=4.13.2", markers = "python_version < \"3.11\"" name = "websocket-client" version = "1.9.0" description = "WebSocket client for Python with low level API options" -category = "main" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef"}, {file = "websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98"}, @@ -1830,6 +1838,6 @@ optional = ["python-socks", "wsaccel"] test = ["pytest", "websockets"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = ">=3.10,<4.0" content-hash = "89ba097b2444a09843cabd2693b3ee180bc527e3cfe81430f493978ffa495792" diff --git a/pyproject.toml b/pyproject.toml index f4ff622..5e01e04 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "Node-Pod-Orchestration" -version = "0.5.0" +version = "0.5.1" description = "" authors = ["Alexander Röhl ", "David Hieber "] license = "Apache 2.0" diff --git a/src/resources/log/entity.py b/src/resources/log/entity.py index 5ac3369..1de489a 100644 --- a/src/resources/log/entity.py +++ b/src/resources/log/entity.py @@ -76,7 +76,7 @@ def __init__(self, elif error_type == "slow": log = (f"[flame -- POAPI: ANALYSISSTARTUPERROR -- " f"{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())}] " - f"Error: The analysis took to long during startup and was restarted " + f"Error: The analysis took too long during startup and was restarted " f"[restart {restart_num} of {_MAX_RESTARTS}].{term_msg}") elif error_type == "k8s": log = (f"[flame -- POAPI: ANALYSISSTARTUPERROR -- " diff --git a/src/resources/utils.py b/src/resources/utils.py index 06723ab..77f49c1 100644 --- a/src/resources/utils.py +++ b/src/resources/utils.py @@ -24,6 +24,9 @@ logger = get_logger() +_MAX_UNSTUCK_REATTEMPTS = 10 + + def create_analysis(body: Union[CreateAnalysis, str], database: Database) -> dict[str, str]: """Create and start a new analysis deployment. 
@@ -269,11 +272,24 @@ def unstuck_analysis_deployments(analysis_id: str, database: Database) -> None:
     Waits 10 seconds between stop and recreate to let Kubernetes settle,
     then prunes historical deployment rows so only the latest one remains.
     """
-    if database.get_latest_deployment(analysis_id) is not None:
+    deployment = database.get_latest_deployment(analysis_id)
+    if deployment is not None:
         stop_analysis(analysis_id, database)
-        time.sleep(10)  # wait for k8s to update status
-        create_analysis(analysis_id, database)
-        database.delete_old_deployments_from_db(analysis_id)
+        success = False
+        for i in range(_MAX_UNSTUCK_REATTEMPTS):
+            try:
+                time.sleep(10)  # wait for k8s to update status
+                create_analysis(analysis_id, database)
+                database.delete_old_deployments_from_db(analysis_id)
+                success = True
+                break
+            except Exception as e:
+                logger.warning(f"Failed to restart analysis {analysis_id} ({repr(e)}) "
+                               f"-> Reattempting unstuck ({i + 1} of {_MAX_UNSTUCK_REATTEMPTS})")
+        if not success:
+            logger.error(f"Failed to unstuck analysis {analysis_id} after max reattempts.")
+            database.update_deployment_status(deployment.deployment_name, AnalysisStatus.FAILED.value)
+            stop_analysis(analysis_id, database)
 
 
 def cleanup(cleanup_type: str,

From 1e55133ad8944f8767e3545e685ad6da9c942b0f Mon Sep 17 00:00:00 2001
From: Bruce Schultz
Date: Mon, 27 Apr 2026 19:09:07 +0200
Subject: [PATCH 3/4] chore(netstat): begin netstat implementation

---
 src/k8s/kubernetes.py | 41 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 41 insertions(+)

diff --git a/src/k8s/kubernetes.py b/src/k8s/kubernetes.py
index 5aec805..4051193 100644
--- a/src/k8s/kubernetes.py
+++ b/src/k8s/kubernetes.py
@@ -241,6 +241,43 @@ def get_pod_status(deployment_name: str, namespace: str = 'default') -> Optional
     return None
 
 
+def _build_net_stats_container() -> Optional[client.V1Container]:
+    """Build the net-stats sidecar container spec, or return None if disabled.
+
+    Controlled by the ``NET_STATS_ENABLED`` env var. Image and polling interval
+    are read from ``NET_STATS_IMAGE`` and ``NET_STATS_INTERVAL_SECONDS``.
+ """ + if os.getenv('NET_STATS_ENABLED', '').lower() not in ('1', 'true'): + return None + + _NET_STATS_SCRIPT = """\ + prev_rx=0; prev_tx=0 + while true; do + iface=$(grep -v -e lo -e 'Inter' -e 'face' /proc/net/dev | awk -F: '{print $1}' | tr -d ' ' | head -1) + if [ -n "$iface" ]; then + line=$(grep "${iface}:" /proc/net/dev | tr -s ' ') + rx=$(echo $line | cut -d' ' -f2) + tx=$(echo $line | cut -d' ' -f10) + if [ "$prev_rx" -gt 0 ]; then + delta_rx=$((rx - prev_rx)) + delta_tx=$((tx - prev_tx)) + printf '{"level":"info","message":"network_stats","bytes_in":%d,"bytes_out":%d,"interval_seconds":%d,"interface":"%s"}\\n' $delta_rx $delta_tx $INTERVAL "$iface" + fi + prev_rx=$rx; prev_tx=$tx + fi + sleep $INTERVAL + done + """ + + return client.V1Container( + name='net-stats', + image=os.getenv('NET_STATS_IMAGE', 'busybox:1.37'), + image_pull_policy='IfNotPresent', + command=['/bin/sh', '-c', _NET_STATS_SCRIPT], + env=[client.V1EnvVar(name='INTERVAL', value=os.getenv('NET_STATS_INTERVAL_SECONDS', '10'))], + ) + + def _create_analysis_nginx_deployment(analysis_name: str, analysis_service_name: str, analysis_env: Optional[dict[str, str]] = None, @@ -302,6 +339,10 @@ def _create_analysis_nginx_deployment(analysis_name: str, volume_mounts=[vol_mount]) containers.append(container) + net_stats_container = _build_net_stats_container() + if net_stats_container is not None: + containers.append(net_stats_container) + depl_metadata = client.V1ObjectMeta(name=nginx_name, namespace=namespace, labels={'app': nginx_name, 'component': 'flame-analysis-nginx'}) From 8e305f1a9e6ec717c6f61f109503d05959caba6b Mon Sep 17 00:00:00 2001 From: Bruce Schultz Date: Tue, 28 Apr 2026 14:37:24 +0200 Subject: [PATCH 4/4] fix(netstat): bind sidecar to analysis pod --- src/k8s/kubernetes.py | 49 +++++++++++++++++++++---------------------- 1 file changed, 24 insertions(+), 25 deletions(-) diff --git a/src/k8s/kubernetes.py b/src/k8s/kubernetes.py index 4051193..259185a 100644 --- a/src/k8s/kubernetes.py +++ b/src/k8s/kubernetes.py @@ -101,6 +101,10 @@ def create_analysis_deployment(name: str, if env is not None else []) containers.append(container) + net_stats_container = _build_net_stats_container(name) + if net_stats_container is not None: + containers.append(net_stats_container) + labels = {'app': name, 'component': "flame-analysis"} depl_metadata = client.V1ObjectMeta(name=name, namespace=namespace, labels=labels) depl_pod_metadata = client.V1ObjectMeta(labels=labels) @@ -241,40 +245,39 @@ def get_pod_status(deployment_name: str, namespace: str = 'default') -> Optional return None -def _build_net_stats_container() -> Optional[client.V1Container]: +def _build_net_stats_container(analysis_name: str) -> Optional[client.V1Container]: """Build the net-stats sidecar container spec, or return None if disabled. - Controlled by the ``NET_STATS_ENABLED`` env var. Image and polling interval - are read from ``NET_STATS_IMAGE`` and ``NET_STATS_INTERVAL_SECONDS``. + Controlled by the ``NET_STATS_ENABLED`` env var. Image is read from + ``NET_STATS_IMAGE``. Emits a single cumulative log on SIGTERM. 
""" if os.getenv('NET_STATS_ENABLED', '').lower() not in ('1', 'true'): return None _NET_STATS_SCRIPT = """\ - prev_rx=0; prev_tx=0 - while true; do - iface=$(grep -v -e lo -e 'Inter' -e 'face' /proc/net/dev | awk -F: '{print $1}' | tr -d ' ' | head -1) - if [ -n "$iface" ]; then - line=$(grep "${iface}:" /proc/net/dev | tr -s ' ') - rx=$(echo $line | cut -d' ' -f2) - tx=$(echo $line | cut -d' ' -f10) - if [ "$prev_rx" -gt 0 ]; then - delta_rx=$((rx - prev_rx)) - delta_tx=$((tx - prev_tx)) - printf '{"level":"info","message":"network_stats","bytes_in":%d,"bytes_out":%d,"interval_seconds":%d,"interface":"%s"}\\n' $delta_rx $delta_tx $INTERVAL "$iface" - fi - prev_rx=$rx; prev_tx=$tx - fi - sleep $INTERVAL - done + iface=$(grep -v -e lo -e 'Inter' -e 'face' /proc/net/dev | awk -F: '{print $1}' | tr -d ' ' | head -1) + line=$(grep "${iface}:" /proc/net/dev | tr -s ' ') + start_rx=$(echo $line | cut -d' ' -f2) + start_tx=$(echo $line | cut -d' ' -f10) + + handle_term() { + line=$(grep "${iface}:" /proc/net/dev | tr -s ' ') + rx=$(echo $line | cut -d' ' -f2) + tx=$(echo $line | cut -d' ' -f10) + printf '{"level":"info","message":"network_stats","bytes_in":%d,"bytes_out":%d,"interface":"%s","event_name":"netstats.analysis.traffic"}\\n' $((rx - start_rx)) $((tx - start_tx)) "$iface" + sleep 5 + exit 0 + } + trap handle_term TERM INT + + while true; do sleep 3600 & wait $!; done """ return client.V1Container( - name='net-stats', + name=f'net-stats-{analysis_name}', image=os.getenv('NET_STATS_IMAGE', 'busybox:1.37'), image_pull_policy='IfNotPresent', command=['/bin/sh', '-c', _NET_STATS_SCRIPT], - env=[client.V1EnvVar(name='INTERVAL', value=os.getenv('NET_STATS_INTERVAL_SECONDS', '10'))], ) @@ -339,10 +342,6 @@ def _create_analysis_nginx_deployment(analysis_name: str, volume_mounts=[vol_mount]) containers.append(container) - net_stats_container = _build_net_stats_container() - if net_stats_container is not None: - containers.append(net_stats_container) - depl_metadata = client.V1ObjectMeta(name=nginx_name, namespace=namespace, labels={'app': nginx_name, 'component': 'flame-analysis-nginx'})