From accb603458a6eb8d1ae664bee911625942ed4656 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maty=C3=A1=C5=A1=20Jir=C3=A1t?= Date: Tue, 17 Feb 2026 18:27:26 +0100 Subject: [PATCH 1/7] Add VCR auto-recording in platform debug mode execute_action() now detects KBC_COMPONENT_RUN_MODE=debug and automatically records HTTP interactions via keboola.vcr. Components can define a module-level VCR_SANITIZERS list for custom sanitizers. Add keboola.vcr as a dependency. Co-Authored-By: Claude Opus 4.6 --- pyproject.toml | 1 + src/keboola/component/base.py | 26 ++++++++++++++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 06052a0..caba169 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,6 +5,7 @@ dependencies = [ "pygelf", "pytz<2021.0", "deprecated", + "keboola.vcr", ] requires-python = ">=3.8" diff --git a/src/keboola/component/base.py b/src/keboola/component/base.py index af2f5a7..468d3a2 100644 --- a/src/keboola/component/base.py +++ b/src/keboola/component/base.py @@ -236,7 +236,17 @@ def execute_action(self): """ Executes action defined in the configuration. The default action is 'run'. See base._SYNC_ACTION_MAPPING + + When ``KBC_COMPONENT_RUN_MODE=debug`` is set (platform debug mode), + the action execution is automatically wrapped with VCR recording + so that HTTP interactions are captured for later replay in tests. 
""" + if self._should_vcr_record(): + return self._execute_with_vcr_recording() + return self._do_execute_action() + + def _do_execute_action(self): + """Internal: runs the actual action dispatch.""" action = self.configuration.action if not action: logging.warning("No action defined in the configuration, using the default run action.") @@ -249,6 +259,22 @@ def execute_action(self): raise AttributeError(f"The defined action {action} is not implemented!") from e return action_method() + @staticmethod + def _should_vcr_record(): + """Check if running in platform debug mode.""" + return os.environ.get("KBC_COMPONENT_RUN_MODE", "").lower() == "debug" + + def _execute_with_vcr_recording(self): + """Wrap action execution with VCR recording for debug runs.""" + import inspect + from keboola.vcr import VCRRecorder + + module = inspect.getmodule(type(self)) + VCRRecorder.record_debug_run( + self._do_execute_action, + sanitizers=getattr(module, 'VCR_SANITIZERS', None), + ) + def _generate_table_metadata_legacy(self, table_schema: ts.TableSchema) -> dao.TableMetadata: """ Generates a TableMetadata object for the table definition using a TableSchema object. 
From 4357bc44a40d80c1042deb7464398348971af0dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maty=C3=A1=C5=A1=20Jir=C3=A1t?= Date: Tue, 24 Feb 2026 16:53:56 +0100 Subject: [PATCH 2/7] Project updates --- .flake8 | 9 - pyproject.toml | 11 +- uv.lock | 863 +++++++++++++++---------------------------------- 3 files changed, 266 insertions(+), 617 deletions(-) delete mode 100644 .flake8 diff --git a/.flake8 b/.flake8 deleted file mode 100644 index f36c003..0000000 --- a/.flake8 +++ /dev/null @@ -1,9 +0,0 @@ -[flake8] -exclude = - __pycache__, - .git, - .venv, - venv, - docs -ignore = E203,W503 -max-line-length = 120 diff --git a/pyproject.toml b/pyproject.toml index caba169..abfbe03 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ dependencies = [ "deprecated", "keboola.vcr", ] -requires-python = ">=3.8" +requires-python = ">=3.10" authors = [ { name = "Keboola KDS Team", email = "support@keboola.com" } @@ -18,8 +18,6 @@ license = "MIT" license-files = [ "LICENSE" ] classifiers = [ "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", @@ -41,7 +39,6 @@ Repository = "https://github.com/keboola/python-component" [dependency-groups] dev = [ - "flake8>=5.0.4", "pytest>=8.3.5", "ruff>=0.13.2", "pdoc3", @@ -52,3 +49,9 @@ name = "testpypi" url = "https://test.pypi.org/simple/" publish-url = "https://test.pypi.org/legacy/" explicit = true + +[tool.ruff] +line-length = 120 + +[tool.ruff.lint] +extend-select = ["I"] \ No newline at end of file diff --git a/uv.lock b/uv.lock index 81c5a3f..8746ca3 100644 --- a/uv.lock +++ b/uv.lock @@ -1,12 +1,6 @@ version = 1 revision = 3 -requires-python = ">=3.8" -resolution-markers = [ - "python_full_version >= '3.10'", - "python_full_version == '3.9.*'", - "python_full_version >= '3.8.1' and python_full_version < '3.9'", - 
"python_full_version < '3.8.1'", -] +requires-python = ">=3.10" [[package]] name = "colorama" @@ -34,8 +28,7 @@ name = "exceptiongroup" version = "1.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", version = "4.13.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, - { name = "typing-extensions", version = "4.15.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9' and python_full_version < '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } wheels = [ @@ -43,109 +36,21 @@ wheels = [ ] [[package]] -name = "flake8" -version = "5.0.4" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.8.1'", -] -dependencies = [ - { name = "mccabe", marker = "python_full_version < '3.8.1'" }, - { name = "pycodestyle", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.8.1'" }, - { name = "pyflakes", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.8.1'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ad/00/9808c62b2d529cefc69ce4e4a1ea42c0f855effa55817b7327ec5b75e60a/flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db", size = 145862, upload-time = "2022-08-03T23:21:27.108Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/a0/b881b63a17a59d9d07f5c0cc91a29182c8e8a9aa2bde5b3b2b16519c02f4/flake8-5.0.4-py2.py3-none-any.whl", hash = 
"sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248", size = 61897, upload-time = "2022-08-03T23:21:25.027Z" }, -] - -[[package]] -name = "flake8" -version = "7.1.2" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.8.1' and python_full_version < '3.9'", -] -dependencies = [ - { name = "mccabe", marker = "python_full_version >= '3.8.1' and python_full_version < '3.9'" }, - { name = "pycodestyle", version = "2.12.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.8.1' and python_full_version < '3.9'" }, - { name = "pyflakes", version = "3.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.8.1' and python_full_version < '3.9'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/58/16/3f2a0bb700ad65ac9663262905a025917c020a3f92f014d2ba8964b4602c/flake8-7.1.2.tar.gz", hash = "sha256:c586ffd0b41540951ae41af572e6790dbd49fc12b3aa2541685d253d9bd504bd", size = 48119, upload-time = "2025-02-16T18:45:44.296Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/35/f8/08d37b2cd89da306e3520bd27f8a85692122b42b56c0c2c3784ff09c022f/flake8-7.1.2-py2.py3-none-any.whl", hash = "sha256:1cbc62e65536f65e6d754dfe6f1bada7f5cf392d6f5db3c2b85892466c3e7c1a", size = 57745, upload-time = "2025-02-16T18:45:42.351Z" }, -] - -[[package]] -name = "flake8" -version = "7.3.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.10'", - "python_full_version == '3.9.*'", -] -dependencies = [ - { name = "mccabe", marker = "python_full_version >= '3.9'" }, - { name = "pycodestyle", version = "2.14.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, - { name = "pyflakes", version = "3.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/9b/af/fbfe3c4b5a657d79e5c47a2827a362f9e1b763336a52f926126aa6dc7123/flake8-7.3.0.tar.gz", hash = "sha256:fe044858146b9fc69b551a4b490d69cf960fcb78ad1edcb84e7fbb1b4a8e3872", size = 48326, upload-time = "2025-06-20T19:31:35.838Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/56/13ab06b4f93ca7cac71078fbe37fcea175d3216f31f85c3168a6bbd0bb9a/flake8-7.3.0-py2.py3-none-any.whl", hash = "sha256:b9696257b9ce8beb888cdbe31cf885c90d31928fe202be0889a7cdafad32f01e", size = 57922, upload-time = "2025-06-20T19:31:34.425Z" }, -] - -[[package]] -name = "importlib-metadata" -version = "8.5.0" +name = "freezegun" +version = "1.5.5" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.8.1' and python_full_version < '3.9'", - "python_full_version < '3.8.1'", -] dependencies = [ - { name = "zipp", version = "3.20.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "python-dateutil" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cd/12/33e59336dca5be0c398a7482335911a33aa0e20776128f038019f1a95f1b/importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7", size = 55304, upload-time = "2024-09-11T14:56:08.937Z" } +sdist = { url = "https://files.pythonhosted.org/packages/95/dd/23e2f4e357f8fd3bdff613c1fe4466d21bfb00a6177f238079b17f7b1c84/freezegun-1.5.5.tar.gz", hash = "sha256:ac7742a6cc6c25a2c35e9292dfd554b897b517d2dec26891a2e8debf205cb94a", size = 35914, upload-time = "2025-08-09T10:39:08.338Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/d9/a1e041c5e7caa9a05c925f4bdbdfb7f006d1f74996af53467bc394c97be7/importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b", size = 26514, upload-time = "2024-09-11T14:56:07.019Z" }, -] - -[[package]] -name = "importlib-metadata" -version 
= "8.7.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.9.*'", -] -dependencies = [ - { name = "zipp", version = "3.23.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.9.*'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, -] - -[[package]] -name = "iniconfig" -version = "2.1.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.9.*'", - "python_full_version >= '3.8.1' and python_full_version < '3.9'", - "python_full_version < '3.8.1'", -] -sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, + { url = "https://files.pythonhosted.org/packages/5e/2e/b41d8a1a917d6581fc27a35d05561037b048e47df50f27f8ac9c7e27a710/freezegun-1.5.5-py3-none-any.whl", hash = "sha256:cd557f4a75cf074e84bc374249b9dd491eaeacd61376b9eb3c423282211619d2", size = 
19266, upload-time = "2025-08-09T10:39:06.636Z" }, ] [[package]] name = "iniconfig" version = "2.3.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.10'", -] sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, @@ -157,164 +62,71 @@ version = "0.0.0" source = { virtual = "." } dependencies = [ { name = "deprecated" }, + { name = "keboola-vcr" }, { name = "pygelf" }, { name = "pytz" }, ] [package.dev-dependencies] dev = [ - { name = "flake8", version = "5.0.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.8.1'" }, - { name = "flake8", version = "7.1.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.8.1' and python_full_version < '3.9'" }, - { name = "flake8", version = "7.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, - { name = "pdoc3", version = "0.11.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, - { name = "pdoc3", version = "0.11.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, - { name = "pytest", version = "8.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, - { name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.9.*'" }, - { name 
= "pytest", version = "9.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pdoc3" }, + { name = "pytest" }, { name = "ruff" }, ] [package.metadata] requires-dist = [ { name = "deprecated" }, + { name = "keboola-vcr" }, { name = "pygelf" }, { name = "pytz", specifier = "<2021.0" }, ] [package.metadata.requires-dev] dev = [ - { name = "flake8", specifier = ">=5.0.4" }, { name = "pdoc3" }, { name = "pytest", specifier = ">=8.3.5" }, { name = "ruff", specifier = ">=0.13.2" }, ] [[package]] -name = "mako" -version = "1.3.10" +name = "keboola-vcr" +version = "0.1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "markupsafe", version = "2.1.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, - { name = "markupsafe", version = "3.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "freezegun" }, + { name = "vcrpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/a5/97bf02f746f285b9260c81dbabdbea11ff56a688f952f35def24dfb4b54c/keboola_vcr-0.1.1.tar.gz", hash = "sha256:f7163c8359553fcedb185bdb2d651afd6b47bbb732a5ec49debd5c749edfd288", size = 23678, upload-time = "2026-02-24T15:33:20.185Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/c7/1bd606a733bcb8be8eac1c096ba4c577ebd57651a020cd1f175836cdba4f/keboola_vcr-0.1.1-py3-none-any.whl", hash = "sha256:98a371cba4bb40a192156341cf7b55bbd6f3975acb095c7531ba503de230133f", size = 26454, upload-time = "2026-02-24T15:33:16.769Z" }, ] [[package]] -name = "markdown" -version = "3.7" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.8.1' and python_full_version < '3.9'", - "python_full_version < '3.8.1'", -] -dependencies = [ - { name = "importlib-metadata", version = "8.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/54/28/3af612670f82f4c056911fbbbb42760255801b3068c48de792d354ff4472/markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2", size = 357086, upload-time = "2024-08-16T15:55:17.812Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/08/83871f3c50fc983b88547c196d11cf8c3340e37c32d2e9d6152abe2c61f7/Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803", size = 106349, upload-time = "2024-08-16T15:55:16.176Z" }, -] - -[[package]] -name = "markdown" -version = "3.9" +name = "mako" +version = "1.3.10" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.9.*'", -] dependencies = [ - { name = "importlib-metadata", version = "8.7.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.9.*'" }, + { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8d/37/02347f6d6d8279247a5837082ebc26fc0d5aaeaf75aa013fcbb433c777ab/markdown-3.9.tar.gz", hash = "sha256:d2900fe1782bd33bdbbd56859defef70c2e78fc46668f8eb9df3128138f2cb6a", size = 364585, upload-time = "2025-09-04T20:25:22.885Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/ae/44c4a6a4cbb496d93c6257954260fe3a6e91b7bed2240e5dad2a717f5111/markdown-3.9-py3-none-any.whl", hash = "sha256:9f4d91ed810864ea88a6f32c07ba8bee1346c0cc1f6b1f9f6c822f2a9667d280", size = 107441, upload-time = "2025-09-04T20:25:21.784Z" }, + { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, ] [[package]] name = "markdown" -version = "3.10" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.10'", -] -sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/7dd27d9d863b3376fcf23a5a13cb5d024aed1db46f963f1b5735ae43b3be/markdown-3.10.tar.gz", hash = "sha256:37062d4f2aa4b2b6b32aefb80faa300f82cc790cb949a35b8caede34f2b68c0e", size = 364931, upload-time = "2025-11-03T19:51:15.007Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/70/81/54e3ce63502cd085a0c556652a4e1b919c45a446bd1e5300e10c44c8c521/markdown-3.10-py3-none-any.whl", hash = "sha256:b5b99d6951e2e4948d939255596523444c0e677c669700b1d17aa4a8a464cb7c", size = 107678, upload-time = "2025-11-03T19:51:13.887Z" }, -] - -[[package]] -name = "markupsafe" -version = "2.1.5" +version = "3.10.2" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.8.1' and python_full_version < '3.9'", - "python_full_version < '3.8.1'", -] -sdist = { url = 
"https://files.pythonhosted.org/packages/87/5b/aae44c6655f3801e81aa3eef09dbbf012431987ba564d7231722f68df02d/MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b", size = 19384, upload-time = "2024-02-02T16:31:22.863Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2b/f4/69fa6ed85ae003c2378ffa8f6d2e3234662abd02c10d216c0ba96081a238/markdown-3.10.2.tar.gz", hash = "sha256:994d51325d25ad8aa7ce4ebaec003febcce822c3f8c911e3b17c52f7f589f950", size = 368805, upload-time = "2026-02-09T14:57:26.942Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e4/54/ad5eb37bf9d51800010a74e4665425831a9db4e7c4e0fde4352e391e808e/MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc", size = 18206, upload-time = "2024-02-02T16:30:04.105Z" }, - { url = "https://files.pythonhosted.org/packages/6a/4a/a4d49415e600bacae038c67f9fecc1d5433b9d3c71a4de6f33537b89654c/MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5", size = 14079, upload-time = "2024-02-02T16:30:06.5Z" }, - { url = "https://files.pythonhosted.org/packages/0a/7b/85681ae3c33c385b10ac0f8dd025c30af83c78cec1c37a6aa3b55e67f5ec/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46", size = 26620, upload-time = "2024-02-02T16:30:08.31Z" }, - { url = "https://files.pythonhosted.org/packages/7c/52/2b1b570f6b8b803cef5ac28fdf78c0da318916c7d2fe9402a84d591b394c/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f", size = 25818, upload-time = "2024-02-02T16:30:09.577Z" }, - { url = 
"https://files.pythonhosted.org/packages/29/fe/a36ba8c7ca55621620b2d7c585313efd10729e63ef81e4e61f52330da781/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900", size = 25493, upload-time = "2024-02-02T16:30:11.488Z" }, - { url = "https://files.pythonhosted.org/packages/60/ae/9c60231cdfda003434e8bd27282b1f4e197ad5a710c14bee8bea8a9ca4f0/MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff", size = 30630, upload-time = "2024-02-02T16:30:13.144Z" }, - { url = "https://files.pythonhosted.org/packages/65/dc/1510be4d179869f5dafe071aecb3f1f41b45d37c02329dfba01ff59e5ac5/MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad", size = 29745, upload-time = "2024-02-02T16:30:14.222Z" }, - { url = "https://files.pythonhosted.org/packages/30/39/8d845dd7d0b0613d86e0ef89549bfb5f61ed781f59af45fc96496e897f3a/MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd", size = 30021, upload-time = "2024-02-02T16:30:16.032Z" }, - { url = "https://files.pythonhosted.org/packages/c7/5c/356a6f62e4f3c5fbf2602b4771376af22a3b16efa74eb8716fb4e328e01e/MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4", size = 16659, upload-time = "2024-02-02T16:30:17.079Z" }, - { url = "https://files.pythonhosted.org/packages/69/48/acbf292615c65f0604a0c6fc402ce6d8c991276e16c80c46a8f758fbd30c/MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5", size = 17213, upload-time = "2024-02-02T16:30:18.251Z" }, - { url = 
"https://files.pythonhosted.org/packages/11/e7/291e55127bb2ae67c64d66cef01432b5933859dfb7d6949daa721b89d0b3/MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f", size = 18219, upload-time = "2024-02-02T16:30:19.988Z" }, - { url = "https://files.pythonhosted.org/packages/6b/cb/aed7a284c00dfa7c0682d14df85ad4955a350a21d2e3b06d8240497359bf/MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2", size = 14098, upload-time = "2024-02-02T16:30:21.063Z" }, - { url = "https://files.pythonhosted.org/packages/1c/cf/35fe557e53709e93feb65575c93927942087e9b97213eabc3fe9d5b25a55/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced", size = 29014, upload-time = "2024-02-02T16:30:22.926Z" }, - { url = "https://files.pythonhosted.org/packages/97/18/c30da5e7a0e7f4603abfc6780574131221d9148f323752c2755d48abad30/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5", size = 28220, upload-time = "2024-02-02T16:30:24.76Z" }, - { url = "https://files.pythonhosted.org/packages/0c/40/2e73e7d532d030b1e41180807a80d564eda53babaf04d65e15c1cf897e40/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c", size = 27756, upload-time = "2024-02-02T16:30:25.877Z" }, - { url = "https://files.pythonhosted.org/packages/18/46/5dca760547e8c59c5311b332f70605d24c99d1303dd9a6e1fc3ed0d73561/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f", size = 33988, upload-time = "2024-02-02T16:30:26.935Z" }, - { url = 
"https://files.pythonhosted.org/packages/6d/c5/27febe918ac36397919cd4a67d5579cbbfa8da027fa1238af6285bb368ea/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a", size = 32718, upload-time = "2024-02-02T16:30:28.111Z" }, - { url = "https://files.pythonhosted.org/packages/f8/81/56e567126a2c2bc2684d6391332e357589a96a76cb9f8e5052d85cb0ead8/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f", size = 33317, upload-time = "2024-02-02T16:30:29.214Z" }, - { url = "https://files.pythonhosted.org/packages/00/0b/23f4b2470accb53285c613a3ab9ec19dc944eaf53592cb6d9e2af8aa24cc/MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906", size = 16670, upload-time = "2024-02-02T16:30:30.915Z" }, - { url = "https://files.pythonhosted.org/packages/b7/a2/c78a06a9ec6d04b3445a949615c4c7ed86a0b2eb68e44e7541b9d57067cc/MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617", size = 17224, upload-time = "2024-02-02T16:30:32.09Z" }, - { url = "https://files.pythonhosted.org/packages/53/bd/583bf3e4c8d6a321938c13f49d44024dbe5ed63e0a7ba127e454a66da974/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1", size = 18215, upload-time = "2024-02-02T16:30:33.081Z" }, - { url = "https://files.pythonhosted.org/packages/48/d6/e7cd795fc710292c3af3a06d80868ce4b02bfbbf370b7cee11d282815a2a/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4", size = 14069, upload-time = "2024-02-02T16:30:34.148Z" }, - { url = 
"https://files.pythonhosted.org/packages/51/b5/5d8ec796e2a08fc814a2c7d2584b55f889a55cf17dd1a90f2beb70744e5c/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee", size = 29452, upload-time = "2024-02-02T16:30:35.149Z" }, - { url = "https://files.pythonhosted.org/packages/0a/0d/2454f072fae3b5a137c119abf15465d1771319dfe9e4acbb31722a0fff91/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5", size = 28462, upload-time = "2024-02-02T16:30:36.166Z" }, - { url = "https://files.pythonhosted.org/packages/2d/75/fd6cb2e68780f72d47e6671840ca517bda5ef663d30ada7616b0462ad1e3/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b", size = 27869, upload-time = "2024-02-02T16:30:37.834Z" }, - { url = "https://files.pythonhosted.org/packages/b0/81/147c477391c2750e8fc7705829f7351cf1cd3be64406edcf900dc633feb2/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a", size = 33906, upload-time = "2024-02-02T16:30:39.366Z" }, - { url = "https://files.pythonhosted.org/packages/8b/ff/9a52b71839d7a256b563e85d11050e307121000dcebc97df120176b3ad93/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f", size = 32296, upload-time = "2024-02-02T16:30:40.413Z" }, - { url = "https://files.pythonhosted.org/packages/88/07/2dc76aa51b481eb96a4c3198894f38b480490e834479611a4053fbf08623/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169", size = 33038, upload-time = "2024-02-02T16:30:42.243Z" }, - { url = 
"https://files.pythonhosted.org/packages/96/0c/620c1fb3661858c0e37eb3cbffd8c6f732a67cd97296f725789679801b31/MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad", size = 16572, upload-time = "2024-02-02T16:30:43.326Z" }, - { url = "https://files.pythonhosted.org/packages/3f/14/c3554d512d5f9100a95e737502f4a2323a1959f6d0d01e0d0997b35f7b10/MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb", size = 17127, upload-time = "2024-02-02T16:30:44.418Z" }, - { url = "https://files.pythonhosted.org/packages/f8/ff/2c942a82c35a49df5de3a630ce0a8456ac2969691b230e530ac12314364c/MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a", size = 18192, upload-time = "2024-02-02T16:30:57.715Z" }, - { url = "https://files.pythonhosted.org/packages/4f/14/6f294b9c4f969d0c801a4615e221c1e084722ea6114ab2114189c5b8cbe0/MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46", size = 14072, upload-time = "2024-02-02T16:30:58.844Z" }, - { url = "https://files.pythonhosted.org/packages/81/d4/fd74714ed30a1dedd0b82427c02fa4deec64f173831ec716da11c51a50aa/MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532", size = 26928, upload-time = "2024-02-02T16:30:59.922Z" }, - { url = "https://files.pythonhosted.org/packages/c7/bd/50319665ce81bb10e90d1cf76f9e1aa269ea6f7fa30ab4521f14d122a3df/MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab", size = 26106, upload-time = "2024-02-02T16:31:01.582Z" }, - { url = 
"https://files.pythonhosted.org/packages/4c/6f/f2b0f675635b05f6afd5ea03c094557bdb8622fa8e673387444fe8d8e787/MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68", size = 25781, upload-time = "2024-02-02T16:31:02.71Z" }, - { url = "https://files.pythonhosted.org/packages/51/e0/393467cf899b34a9d3678e78961c2c8cdf49fb902a959ba54ece01273fb1/MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0", size = 30518, upload-time = "2024-02-02T16:31:04.392Z" }, - { url = "https://files.pythonhosted.org/packages/f6/02/5437e2ad33047290dafced9df741d9efc3e716b75583bbd73a9984f1b6f7/MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4", size = 29669, upload-time = "2024-02-02T16:31:05.53Z" }, - { url = "https://files.pythonhosted.org/packages/0e/7d/968284145ffd9d726183ed6237c77938c021abacde4e073020f920e060b2/MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3", size = 29933, upload-time = "2024-02-02T16:31:06.636Z" }, - { url = "https://files.pythonhosted.org/packages/bf/f3/ecb00fc8ab02b7beae8699f34db9357ae49d9f21d4d3de6f305f34fa949e/MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff", size = 16656, upload-time = "2024-02-02T16:31:07.767Z" }, - { url = "https://files.pythonhosted.org/packages/92/21/357205f03514a49b293e214ac39de01fadd0970a6e05e4bf1ddd0ffd0881/MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029", size = 17206, upload-time = "2024-02-02T16:31:08.843Z" }, - { url = 
"https://files.pythonhosted.org/packages/0f/31/780bb297db036ba7b7bbede5e1d7f1e14d704ad4beb3ce53fb495d22bc62/MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf", size = 18193, upload-time = "2024-02-02T16:31:10.155Z" }, - { url = "https://files.pythonhosted.org/packages/6c/77/d77701bbef72892affe060cdacb7a2ed7fd68dae3b477a8642f15ad3b132/MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2", size = 14073, upload-time = "2024-02-02T16:31:11.442Z" }, - { url = "https://files.pythonhosted.org/packages/d9/a7/1e558b4f78454c8a3a0199292d96159eb4d091f983bc35ef258314fe7269/MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8", size = 26486, upload-time = "2024-02-02T16:31:12.488Z" }, - { url = "https://files.pythonhosted.org/packages/5f/5a/360da85076688755ea0cceb92472923086993e86b5613bbae9fbc14136b0/MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3", size = 25685, upload-time = "2024-02-02T16:31:13.726Z" }, - { url = "https://files.pythonhosted.org/packages/6a/18/ae5a258e3401f9b8312f92b028c54d7026a97ec3ab20bfaddbdfa7d8cce8/MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465", size = 25338, upload-time = "2024-02-02T16:31:14.812Z" }, - { url = "https://files.pythonhosted.org/packages/0b/cc/48206bd61c5b9d0129f4d75243b156929b04c94c09041321456fd06a876d/MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e", size = 30439, upload-time = "2024-02-02T16:31:15.946Z" }, - { url = 
"https://files.pythonhosted.org/packages/d1/06/a41c112ab9ffdeeb5f77bc3e331fdadf97fa65e52e44ba31880f4e7f983c/MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea", size = 29531, upload-time = "2024-02-02T16:31:17.13Z" }, - { url = "https://files.pythonhosted.org/packages/02/8c/ab9a463301a50dab04d5472e998acbd4080597abc048166ded5c7aa768c8/MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6", size = 29823, upload-time = "2024-02-02T16:31:18.247Z" }, - { url = "https://files.pythonhosted.org/packages/bc/29/9bc18da763496b055d8e98ce476c8e718dcfd78157e17f555ce6dd7d0895/MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf", size = 16658, upload-time = "2024-02-02T16:31:19.583Z" }, - { url = "https://files.pythonhosted.org/packages/f6/f8/4da07de16f10551ca1f640c92b5f316f9394088b183c6a57183df6de5ae4/MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5", size = 17211, upload-time = "2024-02-02T16:31:20.96Z" }, + { url = "https://files.pythonhosted.org/packages/de/1f/77fa3081e4f66ca3576c896ae5d31c3002ac6607f9747d2e3aa49227e464/markdown-3.10.2-py3-none-any.whl", hash = "sha256:e91464b71ae3ee7afd3017d9f358ef0baf158fd9a298db92f1d4761133824c36", size = 108180, upload-time = "2026-02-09T14:57:25.787Z" }, ] [[package]] name = "markupsafe" version = "3.0.3" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.10'", - "python_full_version == '3.9.*'", -] sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } wheels = 
[ { url = "https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559", size = 11631, upload-time = "2025-09-27T18:36:05.558Z" }, @@ -394,169 +206,39 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, - { url = "https://files.pythonhosted.org/packages/56/23/0d8c13a44bde9154821586520840643467aee574d8ce79a17da539ee7fed/markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26", size = 11623, upload-time = "2025-09-27T18:37:29.296Z" }, - { url = "https://files.pythonhosted.org/packages/fd/23/07a2cb9a8045d5f3f0890a8c3bc0859d7a47bfd9a560b563899bec7b72ed/markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc", size = 12049, upload-time = "2025-09-27T18:37:30.234Z" }, - { url = 
"https://files.pythonhosted.org/packages/bc/e4/6be85eb81503f8e11b61c0b6369b6e077dcf0a74adbd9ebf6b349937b4e9/markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c", size = 21923, upload-time = "2025-09-27T18:37:31.177Z" }, - { url = "https://files.pythonhosted.org/packages/6f/bc/4dc914ead3fe6ddaef035341fee0fc956949bbd27335b611829292b89ee2/markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42", size = 20543, upload-time = "2025-09-27T18:37:32.168Z" }, - { url = "https://files.pythonhosted.org/packages/89/6e/5fe81fbcfba4aef4093d5f856e5c774ec2057946052d18d168219b7bd9f9/markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b", size = 20585, upload-time = "2025-09-27T18:37:33.166Z" }, - { url = "https://files.pythonhosted.org/packages/f6/f6/e0e5a3d3ae9c4020f696cd055f940ef86b64fe88de26f3a0308b9d3d048c/markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758", size = 21387, upload-time = "2025-09-27T18:37:34.185Z" }, - { url = "https://files.pythonhosted.org/packages/c8/25/651753ef4dea08ea790f4fbb65146a9a44a014986996ca40102e237aa49a/markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2", size = 20133, upload-time = "2025-09-27T18:37:35.138Z" }, - { url = "https://files.pythonhosted.org/packages/dc/0a/c3cf2b4fef5f0426e8a6d7fce3cb966a17817c568ce59d76b92a233fdbec/markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d", size = 20588, upload-time = "2025-09-27T18:37:36.096Z" }, - { url = 
"https://files.pythonhosted.org/packages/cd/1b/a7782984844bd519ad4ffdbebbba2671ec5d0ebbeac34736c15fb86399e8/markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7", size = 14566, upload-time = "2025-09-27T18:37:37.09Z" }, - { url = "https://files.pythonhosted.org/packages/18/1f/8d9c20e1c9440e215a44be5ab64359e207fcb4f675543f1cf9a2a7f648d0/markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e", size = 15053, upload-time = "2025-09-27T18:37:38.054Z" }, - { url = "https://files.pythonhosted.org/packages/4e/d3/fe08482b5cd995033556d45041a4f4e76e7f0521112a9c9991d40d39825f/markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8", size = 13928, upload-time = "2025-09-27T18:37:39.037Z" }, -] - -[[package]] -name = "mccabe" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/ff/0ffefdcac38932a54d2b5eed4e0ba8a408f215002cd178ad1df0f2806ff8/mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", size = 9658, upload-time = "2022-01-24T01:14:51.113Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350, upload-time = "2022-01-24T01:14:49.62Z" }, ] [[package]] name = "packaging" -version = "25.0" +version = "26.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +sdist = 
{ url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, -] - -[[package]] -name = "pdoc3" -version = "0.11.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.8.1' and python_full_version < '3.9'", - "python_full_version < '3.8.1'", + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, ] -dependencies = [ - { name = "mako", marker = "python_full_version < '3.9'" }, - { name = "markdown", version = "3.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e1/4e/741d6dbd64934c4769c055dd6952e1c6d117a1a25236f633e68ea2d375c8/pdoc3-0.11.0.tar.gz", hash = "sha256:12f28c6ee045ca8ad6a624b86d1982c51de20e83c0a721cd7b0933f44ae0a655", size = 97667, upload-time = "2024-06-22T01:25:30.594Z" } [[package]] name = "pdoc3" version = "0.11.6" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.10'", - "python_full_version == '3.9.*'", -] dependencies = [ - { name = "mako", marker = "python_full_version >= '3.9'" }, - { name = "markdown", version = "3.9", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version == '3.9.*'" }, - { name = "markdown", version = "3.10", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "mako" }, + { name = "markdown" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ed/f0/07d8b771b99c16a06741cd7b2639494a15357df819ecf899c33b87db6257/pdoc3-0.11.6.tar.gz", hash = "sha256:1ea5e84b87a754d191fb64bf5e517ca6c50d0d84a614c1efecf6b46d290ae387", size = 177107, upload-time = "2025-03-20T22:53:53.099Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/df/98/629f269c2bd91bdcac147aad5cf51ceb645c0196e23a41ee3c051125190f/pdoc3-0.11.6-py3-none-any.whl", hash = "sha256:8b72723767bd48d899812d2aec8375fc1c3476e179455db0b4575e6dccb44b93", size = 255188, upload-time = "2025-03-20T22:53:51.671Z" }, ] -[[package]] -name = "pluggy" -version = "1.5.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.8.1' and python_full_version < '3.9'", - "python_full_version < '3.8.1'", -] -sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955, upload-time = "2024-04-20T21:34:42.531Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556, upload-time = "2024-04-20T21:34:40.434Z" }, -] - [[package]] name = "pluggy" version = "1.6.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.10'", - "python_full_version == '3.9.*'", -] sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = 
"sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] -[[package]] -name = "pycodestyle" -version = "2.9.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.8.1'", -] -sdist = { url = "https://files.pythonhosted.org/packages/b6/83/5bcaedba1f47200f0665ceb07bcb00e2be123192742ee0edfb66b600e5fd/pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785", size = 102127, upload-time = "2022-08-03T23:13:29.715Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/67/e4/fc77f1039c34b3612c4867b69cbb2b8a4e569720b1f19b0637002ee03aff/pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b", size = 41493, upload-time = "2022-08-03T23:13:27.416Z" }, -] - -[[package]] -name = "pycodestyle" -version = "2.12.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.8.1' and python_full_version < '3.9'", -] -sdist = { url = "https://files.pythonhosted.org/packages/43/aa/210b2c9aedd8c1cbeea31a50e42050ad56187754b34eb214c46709445801/pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521", size = 39232, upload-time = "2024-08-04T20:26:54.576Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/d8/a211b3f85e99a0daa2ddec96c949cac6824bd305b040571b82a03dd62636/pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3", size = 31284, upload-time = 
"2024-08-04T20:26:53.173Z" }, -] - -[[package]] -name = "pycodestyle" -version = "2.14.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.10'", - "python_full_version == '3.9.*'", -] -sdist = { url = "https://files.pythonhosted.org/packages/11/e0/abfd2a0d2efe47670df87f3e3a0e2edda42f055053c85361f19c0e2c1ca8/pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783", size = 39472, upload-time = "2025-06-20T18:49:48.75Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/27/a58ddaf8c588a3ef080db9d0b7e0b97215cee3a45df74f3a94dbbf5c893a/pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d", size = 31594, upload-time = "2025-06-20T18:49:47.491Z" }, -] - -[[package]] -name = "pyflakes" -version = "2.5.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.8.1'", -] -sdist = { url = "https://files.pythonhosted.org/packages/07/92/f0cb5381f752e89a598dd2850941e7f570ac3cb8ea4a344854de486db152/pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3", size = 66388, upload-time = "2022-07-30T17:29:05.816Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/13/63178f59f74e53acc2165aee4b002619a3cfa7eeaeac989a9eb41edf364e/pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2", size = 66116, upload-time = "2022-07-30T17:29:04.179Z" }, -] - -[[package]] -name = "pyflakes" -version = "3.2.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.8.1' and python_full_version < '3.9'", -] -sdist = { url = "https://files.pythonhosted.org/packages/57/f9/669d8c9c86613c9d568757c7f5824bd3197d7b1c6c27553bc5618a27cce2/pyflakes-3.2.0.tar.gz", hash = 
"sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f", size = 63788, upload-time = "2024-01-05T00:28:47.703Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d4/d7/f1b7db88d8e4417c5d47adad627a93547f44bdc9028372dbd2313f34a855/pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a", size = 62725, upload-time = "2024-01-05T00:28:45.903Z" }, -] - -[[package]] -name = "pyflakes" -version = "3.4.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.10'", - "python_full_version == '3.9.*'", -] -sdist = { url = "https://files.pythonhosted.org/packages/45/dc/fd034dc20b4b264b3d015808458391acbf9df40b1e54750ef175d39180b1/pyflakes-3.4.0.tar.gz", hash = "sha256:b24f96fafb7d2ab0ec5075b7350b3d2d2218eab42003821c06344973d3ea2f58", size = 64669, upload-time = "2025-06-20T18:45:27.834Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/2f/81d580a0fb83baeb066698975cb14a618bdbed7720678566f1b046a95fe8/pyflakes-3.4.0-py2.py3-none-any.whl", hash = "sha256:f742a7dbd0d9cb9ea41e9a24a918996e8170c799fa528688d40dd582c8265f4f", size = 63551, upload-time = "2025-06-20T18:45:26.937Z" }, -] - [[package]] name = "pygelf" version = "0.4.3" @@ -577,65 +259,32 @@ wheels = [ [[package]] name = "pytest" -version = "8.3.5" +version = "9.0.2" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.8.1' and python_full_version < '3.9'", - "python_full_version < '3.8.1'", -] dependencies = [ - { name = "colorama", marker = "python_full_version < '3.9' and sys_platform == 'win32'" }, - { name = "exceptiongroup", marker = "python_full_version < '3.9'" }, - { name = "iniconfig", version = "2.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, - { name = "packaging", marker = "python_full_version < '3.9'" }, - { name = "pluggy", version = "1.5.0", 
source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, - { name = "tomli", marker = "python_full_version < '3.9'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891, upload-time = "2025-03-02T12:54:54.503Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634, upload-time = "2025-03-02T12:54:52.069Z" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, ] - -[[package]] -name = "pytest" -version = "8.4.2" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.9.*'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version == '3.9.*' and sys_platform == 'win32'" }, - { name = "exceptiongroup", marker = "python_full_version == '3.9.*'" }, - { name = "iniconfig", version = "2.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.9.*'" }, - { name = "packaging", marker = "python_full_version == '3.9.*'" }, - { name = "pluggy", version = "1.6.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.9.*'" }, - { name = "pygments", marker = "python_full_version == '3.9.*'" }, - { name = "tomli", marker = "python_full_version == '3.9.*'" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, ] [[package]] -name = "pytest" -version = "9.0.2" +name = "python-dateutil" +version = "2.9.0.post0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.10'", -] dependencies = [ - { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, - { name = "exceptiongroup", marker = "python_full_version == '3.10.*'" }, - { name = "iniconfig", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "packaging", marker = "python_full_version >= '3.10'" }, - { name = "pluggy", version = "1.6.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "pygments", marker = "python_full_version >= '3.10'" }, - { name = "tomli", marker = 
"python_full_version == '3.10.*'" }, + { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] [[package]] @@ -647,242 +296,248 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/89/06/2c2d3034b4d6bf22f2a4ae546d16925898658a33b4400cfb7e2c1e2871a3/pytz-2020.5-py2.py3-none-any.whl", hash = "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4", size = 510773, upload-time = "2020-12-24T20:58:04.098Z" }, ] +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = 
[ + { url = "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", size = 184227, upload-time = "2025-09-25T21:31:46.04Z" }, + { url = "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", size = 174019, upload-time = "2025-09-25T21:31:47.706Z" }, + { url = "https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", size = 740646, upload-time = "2025-09-25T21:31:49.21Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", size = 840793, upload-time = "2025-09-25T21:31:50.735Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", size = 770293, upload-time = "2025-09-25T21:31:51.828Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", size = 732872, upload-time = "2025-09-25T21:31:53.282Z" }, + { 
url = "https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", size = 758828, upload-time = "2025-09-25T21:31:54.807Z" }, + { url = "https://files.pythonhosted.org/packages/a3/00/531e92e88c00f4333ce359e50c19b8d1de9fe8d581b1534e35ccfbc5f393/pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e", size = 142415, upload-time = "2025-09-25T21:31:55.885Z" }, + { url = "https://files.pythonhosted.org/packages/2a/fa/926c003379b19fca39dd4634818b00dec6c62d87faf628d1394e137354d4/pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c", size = 158561, upload-time = "2025-09-25T21:31:57.406Z" }, + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url 
= "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + [[package]] name = "ruff" -version = "0.14.10" +version = "0.15.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/57/08/52232a877978dd8f9cf2aeddce3e611b40a63287dfca29b6b8da791f5e8d/ruff-0.14.10.tar.gz", hash = "sha256:9a2e830f075d1a42cd28420d7809ace390832a490ed0966fe373ba288e77aaf4", size = 5859763, upload-time = "2025-12-18T19:28:57.98Z" } +sdist = { url = "https://files.pythonhosted.org/packages/06/04/eab13a954e763b0606f460443fcbf6bb5a0faf06890ea3754ff16523dce5/ruff-0.15.2.tar.gz", hash = "sha256:14b965afee0969e68bb871eba625343b8673375f457af4abe98553e8bbb98342", size = 4558148, upload-time = "2026-02-19T22:32:20.271Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/60/01/933704d69f3f05ee16ef11406b78881733c186fe14b6a46b05cfcaf6d3b2/ruff-0.14.10-py3-none-linux_armv6l.whl", hash = "sha256:7a3ce585f2ade3e1f29ec1b92df13e3da262178df8c8bdf876f48fa0e8316c49", size = 13527080, upload-time = "2025-12-18T19:29:25.642Z" }, - { url = "https://files.pythonhosted.org/packages/df/58/a0349197a7dfa603ffb7f5b0470391efa79ddc327c1e29c4851e85b09cc5/ruff-0.14.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:674f9be9372907f7257c51f1d4fc902cb7cf014b9980152b802794317941f08f", size = 13797320, upload-time = "2025-12-18T19:29:02.571Z" }, - { url = 
"https://files.pythonhosted.org/packages/7b/82/36be59f00a6082e38c23536df4e71cdbc6af8d7c707eade97fcad5c98235/ruff-0.14.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d85713d522348837ef9df8efca33ccb8bd6fcfc86a2cde3ccb4bc9d28a18003d", size = 12918434, upload-time = "2025-12-18T19:28:51.202Z" }, - { url = "https://files.pythonhosted.org/packages/a6/00/45c62a7f7e34da92a25804f813ebe05c88aa9e0c25e5cb5a7d23dd7450e3/ruff-0.14.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6987ebe0501ae4f4308d7d24e2d0fe3d7a98430f5adfd0f1fead050a740a3a77", size = 13371961, upload-time = "2025-12-18T19:29:04.991Z" }, - { url = "https://files.pythonhosted.org/packages/40/31/a5906d60f0405f7e57045a70f2d57084a93ca7425f22e1d66904769d1628/ruff-0.14.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16a01dfb7b9e4eee556fbfd5392806b1b8550c9b4a9f6acd3dbe6812b193c70a", size = 13275629, upload-time = "2025-12-18T19:29:21.381Z" }, - { url = "https://files.pythonhosted.org/packages/3e/60/61c0087df21894cf9d928dc04bcd4fb10e8b2e8dca7b1a276ba2155b2002/ruff-0.14.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7165d31a925b7a294465fa81be8c12a0e9b60fb02bf177e79067c867e71f8b1f", size = 14029234, upload-time = "2025-12-18T19:29:00.132Z" }, - { url = "https://files.pythonhosted.org/packages/44/84/77d911bee3b92348b6e5dab5a0c898d87084ea03ac5dc708f46d88407def/ruff-0.14.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c561695675b972effb0c0a45db233f2c816ff3da8dcfbe7dfc7eed625f218935", size = 15449890, upload-time = "2025-12-18T19:28:53.573Z" }, - { url = "https://files.pythonhosted.org/packages/e9/36/480206eaefa24a7ec321582dda580443a8f0671fdbf6b1c80e9c3e93a16a/ruff-0.14.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bb98fcbbc61725968893682fd4df8966a34611239c9fd07a1f6a07e7103d08e", size = 15123172, upload-time = "2025-12-18T19:29:23.453Z" }, - { url = 
"https://files.pythonhosted.org/packages/5c/38/68e414156015ba80cef5473d57919d27dfb62ec804b96180bafdeaf0e090/ruff-0.14.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f24b47993a9d8cb858429e97bdf8544c78029f09b520af615c1d261bf827001d", size = 14460260, upload-time = "2025-12-18T19:29:27.808Z" }, - { url = "https://files.pythonhosted.org/packages/b3/19/9e050c0dca8aba824d67cc0db69fb459c28d8cd3f6855b1405b3f29cc91d/ruff-0.14.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59aabd2e2c4fd614d2862e7939c34a532c04f1084476d6833dddef4afab87e9f", size = 14229978, upload-time = "2025-12-18T19:29:11.32Z" }, - { url = "https://files.pythonhosted.org/packages/51/eb/e8dd1dd6e05b9e695aa9dd420f4577debdd0f87a5ff2fedda33c09e9be8c/ruff-0.14.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:213db2b2e44be8625002dbea33bb9c60c66ea2c07c084a00d55732689d697a7f", size = 14338036, upload-time = "2025-12-18T19:29:09.184Z" }, - { url = "https://files.pythonhosted.org/packages/6a/12/f3e3a505db7c19303b70af370d137795fcfec136d670d5de5391e295c134/ruff-0.14.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b914c40ab64865a17a9a5b67911d14df72346a634527240039eb3bd650e5979d", size = 13264051, upload-time = "2025-12-18T19:29:13.431Z" }, - { url = "https://files.pythonhosted.org/packages/08/64/8c3a47eaccfef8ac20e0484e68e0772013eb85802f8a9f7603ca751eb166/ruff-0.14.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1484983559f026788e3a5c07c81ef7d1e97c1c78ed03041a18f75df104c45405", size = 13283998, upload-time = "2025-12-18T19:29:06.994Z" }, - { url = "https://files.pythonhosted.org/packages/12/84/534a5506f4074e5cc0529e5cd96cfc01bb480e460c7edf5af70d2bcae55e/ruff-0.14.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c70427132db492d25f982fffc8d6c7535cc2fd2c83fc8888f05caaa248521e60", size = 13601891, upload-time = "2025-12-18T19:28:55.811Z" }, - { url = 
"https://files.pythonhosted.org/packages/0d/1e/14c916087d8598917dbad9b2921d340f7884824ad6e9c55de948a93b106d/ruff-0.14.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5bcf45b681e9f1ee6445d317ce1fa9d6cba9a6049542d1c3d5b5958986be8830", size = 14336660, upload-time = "2025-12-18T19:29:16.531Z" }, - { url = "https://files.pythonhosted.org/packages/f2/1c/d7b67ab43f30013b47c12b42d1acd354c195351a3f7a1d67f59e54227ede/ruff-0.14.10-py3-none-win32.whl", hash = "sha256:104c49fc7ab73f3f3a758039adea978869a918f31b73280db175b43a2d9b51d6", size = 13196187, upload-time = "2025-12-18T19:29:19.006Z" }, - { url = "https://files.pythonhosted.org/packages/fb/9c/896c862e13886fae2af961bef3e6312db9ebc6adc2b156fe95e615dee8c1/ruff-0.14.10-py3-none-win_amd64.whl", hash = "sha256:466297bd73638c6bdf06485683e812db1c00c7ac96d4ddd0294a338c62fdc154", size = 14661283, upload-time = "2025-12-18T19:29:30.16Z" }, - { url = "https://files.pythonhosted.org/packages/74/31/b0e29d572670dca3674eeee78e418f20bdf97fa8aa9ea71380885e175ca0/ruff-0.14.10-py3-none-win_arm64.whl", hash = "sha256:e51d046cf6dda98a4633b8a8a771451107413b0f07183b2bef03f075599e44e6", size = 13729839, upload-time = "2025-12-18T19:28:48.636Z" }, + { url = "https://files.pythonhosted.org/packages/2f/70/3a4dc6d09b13cb3e695f28307e5d889b2e1a66b7af9c5e257e796695b0e6/ruff-0.15.2-py3-none-linux_armv6l.whl", hash = "sha256:120691a6fdae2f16d65435648160f5b81a9625288f75544dc40637436b5d3c0d", size = 10430565, upload-time = "2026-02-19T22:32:41.824Z" }, + { url = "https://files.pythonhosted.org/packages/71/0b/bb8457b56185ece1305c666dc895832946d24055be90692381c31d57466d/ruff-0.15.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a89056d831256099658b6bba4037ac6dd06f49d194199215befe2bb10457ea5e", size = 10820354, upload-time = "2026-02-19T22:32:07.366Z" }, + { url = "https://files.pythonhosted.org/packages/2d/c1/e0532d7f9c9e0b14c46f61b14afd563298b8b83f337b6789ddd987e46121/ruff-0.15.2-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:e36dee3a64be0ebd23c86ffa3aa3fd3ac9a712ff295e192243f814a830b6bd87", size = 10170767, upload-time = "2026-02-19T22:32:13.188Z" }, + { url = "https://files.pythonhosted.org/packages/47/e8/da1aa341d3af017a21c7a62fb5ec31d4e7ad0a93ab80e3a508316efbcb23/ruff-0.15.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9fb47b6d9764677f8c0a193c0943ce9a05d6763523f132325af8a858eadc2b9", size = 10529591, upload-time = "2026-02-19T22:32:02.547Z" }, + { url = "https://files.pythonhosted.org/packages/93/74/184fbf38e9f3510231fbc5e437e808f0b48c42d1df9434b208821efcd8d6/ruff-0.15.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f376990f9d0d6442ea9014b19621d8f2aaf2b8e39fdbfc79220b7f0c596c9b80", size = 10260771, upload-time = "2026-02-19T22:32:36.938Z" }, + { url = "https://files.pythonhosted.org/packages/05/ac/605c20b8e059a0bc4b42360414baa4892ff278cec1c91fff4be0dceedefd/ruff-0.15.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dcc987551952d73cbf5c88d9fdee815618d497e4df86cd4c4824cc59d5dd75f", size = 11045791, upload-time = "2026-02-19T22:32:31.642Z" }, + { url = "https://files.pythonhosted.org/packages/fd/52/db6e419908f45a894924d410ac77d64bdd98ff86901d833364251bd08e22/ruff-0.15.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42a47fd785cbe8c01b9ff45031af875d101b040ad8f4de7bbb716487c74c9a77", size = 11879271, upload-time = "2026-02-19T22:32:29.305Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d8/7992b18f2008bdc9231d0f10b16df7dda964dbf639e2b8b4c1b4e91b83af/ruff-0.15.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbe9f49354866e575b4c6943856989f966421870e85cd2ac94dccb0a9dcb2fea", size = 11303707, upload-time = "2026-02-19T22:32:22.492Z" }, + { url = "https://files.pythonhosted.org/packages/d7/02/849b46184bcfdd4b64cde61752cc9a146c54759ed036edd11857e9b8443b/ruff-0.15.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b7a672c82b5f9887576087d97be5ce439f04bbaf548ee987b92d3a7dede41d3a", size = 11149151, upload-time = "2026-02-19T22:32:44.234Z" }, + { url = "https://files.pythonhosted.org/packages/70/04/f5284e388bab60d1d3b99614a5a9aeb03e0f333847e2429bebd2aaa1feec/ruff-0.15.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:72ecc64f46f7019e2bcc3cdc05d4a7da958b629a5ab7033195e11a438403d956", size = 11091132, upload-time = "2026-02-19T22:32:24.691Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ae/88d844a21110e14d92cf73d57363fab59b727ebeabe78009b9ccb23500af/ruff-0.15.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:8dcf243b15b561c655c1ef2f2b0050e5d50db37fe90115507f6ff37d865dc8b4", size = 10504717, upload-time = "2026-02-19T22:32:26.75Z" }, + { url = "https://files.pythonhosted.org/packages/64/27/867076a6ada7f2b9c8292884ab44d08fd2ba71bd2b5364d4136f3cd537e1/ruff-0.15.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dab6941c862c05739774677c6273166d2510d254dac0695c0e3f5efa1b5585de", size = 10263122, upload-time = "2026-02-19T22:32:10.036Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ef/faf9321d550f8ebf0c6373696e70d1758e20ccdc3951ad7af00c0956be7c/ruff-0.15.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1b9164f57fc36058e9a6806eb92af185b0697c9fe4c7c52caa431c6554521e5c", size = 10735295, upload-time = "2026-02-19T22:32:39.227Z" }, + { url = "https://files.pythonhosted.org/packages/2f/55/e8089fec62e050ba84d71b70e7834b97709ca9b7aba10c1a0b196e493f97/ruff-0.15.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:80d24fcae24d42659db7e335b9e1531697a7102c19185b8dc4a028b952865fd8", size = 11241641, upload-time = "2026-02-19T22:32:34.617Z" }, + { url = "https://files.pythonhosted.org/packages/23/01/1c30526460f4d23222d0fabd5888868262fd0e2b71a00570ca26483cd993/ruff-0.15.2-py3-none-win32.whl", hash = "sha256:fd5ff9e5f519a7e1bd99cbe8daa324010a74f5e2ebc97c6242c08f26f3714f6f", size = 10507885, upload-time = "2026-02-19T22:32:15.635Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/10/3d18e3bbdf8fc50bbb4ac3cc45970aa5a9753c5cb51bf9ed9a3cd8b79fa3/ruff-0.15.2-py3-none-win_amd64.whl", hash = "sha256:d20014e3dfa400f3ff84830dfb5755ece2de45ab62ecea4af6b7262d0fb4f7c5", size = 11623725, upload-time = "2026-02-19T22:32:04.947Z" }, + { url = "https://files.pythonhosted.org/packages/6d/78/097c0798b1dab9f8affe73da9642bb4500e098cb27fd8dc9724816ac747b/ruff-0.15.2-py3-none-win_arm64.whl", hash = "sha256:cabddc5822acdc8f7b5527b36ceac55cc51eec7b1946e60181de8fe83ca8876e", size = 10941649, upload-time = "2026-02-19T22:32:18.108Z" }, ] [[package]] -name = "tomli" -version = "2.3.0" +name = "six" +version = "1.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, - { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, - { url = 
"https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, - { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, - { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, - { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, - { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, - { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, - { url = 
"https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, - { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, - { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, - { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, - { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, - { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, - { url = 
"https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, - { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, - { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, - { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, - { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, - { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, - { url = 
"https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, - { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, - { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, - { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, - { url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" }, - { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" }, - { url = 
"https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" }, - { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" }, - { url = "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" }, - { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" }, - { url = "https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" }, - { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" }, - { url = 
"https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" }, - { url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" }, - { url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" }, - { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" }, - { url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" }, - { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" }, - { url = 
"https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" }, - { url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" }, - { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, ] [[package]] -name = "typing-extensions" -version = "4.13.2" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.8.1' and python_full_version < '3.9'", - "python_full_version < '3.8.1'", -] -sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967, upload-time = "2025-04-10T14:19:05.416Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806, upload-time = 
"2025-04-10T14:19:03.967Z" }, +name = "tomli" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663, upload-time = "2026-01-11T11:21:45.27Z" }, + { url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469, upload-time = "2026-01-11T11:21:46.873Z" }, + { url = "https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039, upload-time = "2026-01-11T11:21:48.503Z" }, + { url = "https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", size = 243007, upload-time = "2026-01-11T11:21:49.456Z" }, + { url = "https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 
240875, upload-time = "2026-01-11T11:21:50.755Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271, upload-time = "2026-01-11T11:21:51.81Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770, upload-time = "2026-01-11T11:21:52.647Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", size = 107626, upload-time = "2026-01-11T11:21:53.459Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842, upload-time = "2026-01-11T11:21:54.831Z" }, + { url = "https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" }, + { url = "https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" }, + { url = "https://files.pythonhosted.org/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1", size = 251720, upload-time = "2026-01-11T11:22:00.178Z" }, + { url = "https://files.pythonhosted.org/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b", size = 247014, upload-time = "2026-01-11T11:22:01.238Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51", size = 251820, upload-time = "2026-01-11T11:22:02.727Z" }, + { url = "https://files.pythonhosted.org/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729", size = 97712, upload-time = "2026-01-11T11:22:03.777Z" }, + { url = "https://files.pythonhosted.org/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da", size = 108296, upload-time = "2026-01-11T11:22:04.86Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3", size = 94553, upload-time = "2026-01-11T11:22:05.854Z" }, + { url = "https://files.pythonhosted.org/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0", size = 154915, upload-time = "2026-01-11T11:22:06.703Z" }, + { url = "https://files.pythonhosted.org/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e", size = 149038, upload-time = "2026-01-11T11:22:07.56Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4", size = 242245, upload-time = "2026-01-11T11:22:08.344Z" }, + { url = "https://files.pythonhosted.org/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e", size = 250335, upload-time = "2026-01-11T11:22:09.951Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c", size = 245962, upload-time = "2026-01-11T11:22:11.27Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f", size = 250396, upload-time = "2026-01-11T11:22:12.325Z" }, + { url = "https://files.pythonhosted.org/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86", size = 97530, upload-time = "2026-01-11T11:22:13.865Z" }, + { url = "https://files.pythonhosted.org/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87", size = 108227, upload-time = "2026-01-11T11:22:15.224Z" }, + { url = "https://files.pythonhosted.org/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132", size = 94748, upload-time = "2026-01-11T11:22:16.009Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c4/84047a97eb1004418bc10bdbcfebda209fca6338002eba2dc27cc6d13563/tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6", size = 154725, upload-time = "2026-01-11T11:22:17.269Z" }, + { url = "https://files.pythonhosted.org/packages/a8/5d/d39038e646060b9d76274078cddf146ced86dc2b9e8bbf737ad5983609a0/tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc", size = 148901, upload-time = "2026-01-11T11:22:18.287Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/e5/383be1724cb30f4ce44983d249645684a48c435e1cd4f8b5cded8a816d3c/tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66", size = 243375, upload-time = "2026-01-11T11:22:19.154Z" }, + { url = "https://files.pythonhosted.org/packages/31/f0/bea80c17971c8d16d3cc109dc3585b0f2ce1036b5f4a8a183789023574f2/tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d", size = 250639, upload-time = "2026-01-11T11:22:20.168Z" }, + { url = "https://files.pythonhosted.org/packages/2c/8f/2853c36abbb7608e3f945d8a74e32ed3a74ee3a1f468f1ffc7d1cb3abba6/tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702", size = 246897, upload-time = "2026-01-11T11:22:21.544Z" }, + { url = "https://files.pythonhosted.org/packages/49/f0/6c05e3196ed5337b9fe7ea003e95fd3819a840b7a0f2bf5a408ef1dad8ed/tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8", size = 254697, upload-time = "2026-01-11T11:22:23.058Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f5/2922ef29c9f2951883525def7429967fc4d8208494e5ab524234f06b688b/tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776", size = 98567, upload-time = "2026-01-11T11:22:24.033Z" }, + { url = "https://files.pythonhosted.org/packages/7b/31/22b52e2e06dd2a5fdbc3ee73226d763b184ff21fc24e20316a44ccc4d96b/tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475", size = 108556, upload-time = "2026-01-11T11:22:25.378Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/3d/5058dff3255a3d01b705413f64f4306a141a8fd7a251e5a495e3f192a998/tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2", size = 96014, upload-time = "2026-01-11T11:22:26.138Z" }, + { url = "https://files.pythonhosted.org/packages/b8/4e/75dab8586e268424202d3a1997ef6014919c941b50642a1682df43204c22/tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9", size = 163339, upload-time = "2026-01-11T11:22:27.143Z" }, + { url = "https://files.pythonhosted.org/packages/06/e3/b904d9ab1016829a776d97f163f183a48be6a4deb87304d1e0116a349519/tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0", size = 159490, upload-time = "2026-01-11T11:22:28.399Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5a/fc3622c8b1ad823e8ea98a35e3c632ee316d48f66f80f9708ceb4f2a0322/tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df", size = 269398, upload-time = "2026-01-11T11:22:29.345Z" }, + { url = "https://files.pythonhosted.org/packages/fd/33/62bd6152c8bdd4c305ad9faca48f51d3acb2df1f8791b1477d46ff86e7f8/tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d", size = 276515, upload-time = "2026-01-11T11:22:30.327Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ff/ae53619499f5235ee4211e62a8d7982ba9e439a0fb4f2f351a93d67c1dd2/tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f", size = 273806, upload-time = "2026-01-11T11:22:32.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/71/cbca7787fa68d4d0a9f7072821980b39fbb1b6faeb5f5cf02f4a5559fa28/tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b", size = 281340, upload-time = "2026-01-11T11:22:33.505Z" }, + { url = "https://files.pythonhosted.org/packages/f5/00/d595c120963ad42474cf6ee7771ad0d0e8a49d0f01e29576ee9195d9ecdf/tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087", size = 108106, upload-time = "2026-01-11T11:22:34.451Z" }, + { url = "https://files.pythonhosted.org/packages/de/69/9aa0c6a505c2f80e519b43764f8b4ba93b5a0bbd2d9a9de6e2b24271b9a5/tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd", size = 120504, upload-time = "2026-01-11T11:22:35.764Z" }, + { url = "https://files.pythonhosted.org/packages/b3/9f/f1668c281c58cfae01482f7114a4b88d345e4c140386241a1a24dcc9e7bc/tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4", size = 99561, upload-time = "2026-01-11T11:22:36.624Z" }, + { url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" }, ] [[package]] name = "typing-extensions" version = "4.15.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.10'", - "python_full_version == '3.9.*'", -] sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } wheels = 
[ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, ] [[package]] -name = "wrapt" -version = "2.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/49/2a/6de8a50cb435b7f42c46126cf1a54b2aab81784e74c8595c8e025e8f36d3/wrapt-2.0.1.tar.gz", hash = "sha256:9c9c635e78497cacb81e84f8b11b23e0aacac7a136e73b8e5b2109a1d9fc468f", size = 82040, upload-time = "2025-11-07T00:45:33.312Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/61/0d/12d8c803ed2ce4e5e7d5b9f5f602721f9dfef82c95959f3ce97fa584bb5c/wrapt-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:64b103acdaa53b7caf409e8d45d39a8442fe6dcfec6ba3f3d141e0cc2b5b4dbd", size = 77481, upload-time = "2025-11-07T00:43:11.103Z" }, - { url = "https://files.pythonhosted.org/packages/05/3e/4364ebe221ebf2a44d9fc8695a19324692f7dd2795e64bd59090856ebf12/wrapt-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91bcc576260a274b169c3098e9a3519fb01f2989f6d3d386ef9cbf8653de1374", size = 60692, upload-time = "2025-11-07T00:43:13.697Z" }, - { url = "https://files.pythonhosted.org/packages/1f/ff/ae2a210022b521f86a8ddcdd6058d137c051003812b0388a5e9a03d3fe10/wrapt-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ab594f346517010050126fcd822697b25a7031d815bb4fbc238ccbe568216489", size = 61574, upload-time = "2025-11-07T00:43:14.967Z" }, - { url = "https://files.pythonhosted.org/packages/c6/93/5cf92edd99617095592af919cb81d4bff61c5dbbb70d3c92099425a8ec34/wrapt-2.0.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:36982b26f190f4d737f04a492a68accbfc6fa042c3f42326fdfbb6c5b7a20a31", size = 113688, upload-time = "2025-11-07T00:43:18.275Z" }, - { url = 
"https://files.pythonhosted.org/packages/a0/0a/e38fc0cee1f146c9fb266d8ef96ca39fb14a9eef165383004019aa53f88a/wrapt-2.0.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23097ed8bc4c93b7bf36fa2113c6c733c976316ce0ee2c816f64ca06102034ef", size = 115698, upload-time = "2025-11-07T00:43:19.407Z" }, - { url = "https://files.pythonhosted.org/packages/b0/85/bef44ea018b3925fb0bcbe9112715f665e4d5309bd945191da814c314fd1/wrapt-2.0.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8bacfe6e001749a3b64db47bcf0341da757c95959f592823a93931a422395013", size = 112096, upload-time = "2025-11-07T00:43:16.5Z" }, - { url = "https://files.pythonhosted.org/packages/7c/0b/733a2376e413117e497aa1a5b1b78e8f3a28c0e9537d26569f67d724c7c5/wrapt-2.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8ec3303e8a81932171f455f792f8df500fc1a09f20069e5c16bd7049ab4e8e38", size = 114878, upload-time = "2025-11-07T00:43:20.81Z" }, - { url = "https://files.pythonhosted.org/packages/da/03/d81dcb21bbf678fcda656495792b059f9d56677d119ca022169a12542bd0/wrapt-2.0.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:3f373a4ab5dbc528a94334f9fe444395b23c2f5332adab9ff4ea82f5a9e33bc1", size = 111298, upload-time = "2025-11-07T00:43:22.229Z" }, - { url = "https://files.pythonhosted.org/packages/c9/d5/5e623040e8056e1108b787020d56b9be93dbbf083bf2324d42cde80f3a19/wrapt-2.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f49027b0b9503bf6c8cdc297ca55006b80c2f5dd36cecc72c6835ab6e10e8a25", size = 113361, upload-time = "2025-11-07T00:43:24.301Z" }, - { url = "https://files.pythonhosted.org/packages/a1/f3/de535ccecede6960e28c7b722e5744846258111d6c9f071aa7578ea37ad3/wrapt-2.0.1-cp310-cp310-win32.whl", hash = "sha256:8330b42d769965e96e01fa14034b28a2a7600fbf7e8f0cc90ebb36d492c993e4", size = 58035, upload-time = "2025-11-07T00:43:28.96Z" }, - { url = 
"https://files.pythonhosted.org/packages/21/15/39d3ca5428a70032c2ec8b1f1c9d24c32e497e7ed81aed887a4998905fcc/wrapt-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:1218573502a8235bb8a7ecaed12736213b22dcde9feab115fa2989d42b5ded45", size = 60383, upload-time = "2025-11-07T00:43:25.804Z" }, - { url = "https://files.pythonhosted.org/packages/43/c2/dfd23754b7f7a4dce07e08f4309c4e10a40046a83e9ae1800f2e6b18d7c1/wrapt-2.0.1-cp310-cp310-win_arm64.whl", hash = "sha256:eda8e4ecd662d48c28bb86be9e837c13e45c58b8300e43ba3c9b4fa9900302f7", size = 58894, upload-time = "2025-11-07T00:43:27.074Z" }, - { url = "https://files.pythonhosted.org/packages/98/60/553997acf3939079dab022e37b67b1904b5b0cc235503226898ba573b10c/wrapt-2.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0e17283f533a0d24d6e5429a7d11f250a58d28b4ae5186f8f47853e3e70d2590", size = 77480, upload-time = "2025-11-07T00:43:30.573Z" }, - { url = "https://files.pythonhosted.org/packages/2d/50/e5b3d30895d77c52105c6d5cbf94d5b38e2a3dd4a53d22d246670da98f7c/wrapt-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85df8d92158cb8f3965aecc27cf821461bb5f40b450b03facc5d9f0d4d6ddec6", size = 60690, upload-time = "2025-11-07T00:43:31.594Z" }, - { url = "https://files.pythonhosted.org/packages/f0/40/660b2898703e5cbbb43db10cdefcc294274458c3ca4c68637c2b99371507/wrapt-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1be685ac7700c966b8610ccc63c3187a72e33cab53526a27b2a285a662cd4f7", size = 61578, upload-time = "2025-11-07T00:43:32.918Z" }, - { url = "https://files.pythonhosted.org/packages/5b/36/825b44c8a10556957bc0c1d84c7b29a40e05fcf1873b6c40aa9dbe0bd972/wrapt-2.0.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:df0b6d3b95932809c5b3fecc18fda0f1e07452d05e2662a0b35548985f256e28", size = 114115, upload-time = "2025-11-07T00:43:35.605Z" }, - { url = 
"https://files.pythonhosted.org/packages/83/73/0a5d14bb1599677304d3c613a55457d34c344e9b60eda8a737c2ead7619e/wrapt-2.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da7384b0e5d4cae05c97cd6f94faaf78cc8b0f791fc63af43436d98c4ab37bb", size = 116157, upload-time = "2025-11-07T00:43:37.058Z" }, - { url = "https://files.pythonhosted.org/packages/01/22/1c158fe763dbf0a119f985d945711d288994fe5514c0646ebe0eb18b016d/wrapt-2.0.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ec65a78fbd9d6f083a15d7613b2800d5663dbb6bb96003899c834beaa68b242c", size = 112535, upload-time = "2025-11-07T00:43:34.138Z" }, - { url = "https://files.pythonhosted.org/packages/5c/28/4f16861af67d6de4eae9927799b559c20ebdd4fe432e89ea7fe6fcd9d709/wrapt-2.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7de3cc939be0e1174969f943f3b44e0d79b6f9a82198133a5b7fc6cc92882f16", size = 115404, upload-time = "2025-11-07T00:43:39.214Z" }, - { url = "https://files.pythonhosted.org/packages/a0/8b/7960122e625fad908f189b59c4aae2d50916eb4098b0fb2819c5a177414f/wrapt-2.0.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:fb1a5b72cbd751813adc02ef01ada0b0d05d3dcbc32976ce189a1279d80ad4a2", size = 111802, upload-time = "2025-11-07T00:43:40.476Z" }, - { url = "https://files.pythonhosted.org/packages/3e/73/7881eee5ac31132a713ab19a22c9e5f1f7365c8b1df50abba5d45b781312/wrapt-2.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3fa272ca34332581e00bf7773e993d4f632594eb2d1b0b162a9038df0fd971dd", size = 113837, upload-time = "2025-11-07T00:43:42.921Z" }, - { url = "https://files.pythonhosted.org/packages/45/00/9499a3d14e636d1f7089339f96c4409bbc7544d0889f12264efa25502ae8/wrapt-2.0.1-cp311-cp311-win32.whl", hash = "sha256:fc007fdf480c77301ab1afdbb6ab22a5deee8885f3b1ed7afcb7e5e84a0e27be", size = 58028, upload-time = "2025-11-07T00:43:47.369Z" }, - { url = 
"https://files.pythonhosted.org/packages/70/5d/8f3d7eea52f22638748f74b102e38fdf88cb57d08ddeb7827c476a20b01b/wrapt-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:47434236c396d04875180171ee1f3815ca1eada05e24a1ee99546320d54d1d1b", size = 60385, upload-time = "2025-11-07T00:43:44.34Z" }, - { url = "https://files.pythonhosted.org/packages/14/e2/32195e57a8209003587bbbad44d5922f13e0ced2a493bb46ca882c5b123d/wrapt-2.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:837e31620e06b16030b1d126ed78e9383815cbac914693f54926d816d35d8edf", size = 58893, upload-time = "2025-11-07T00:43:46.161Z" }, - { url = "https://files.pythonhosted.org/packages/cb/73/8cb252858dc8254baa0ce58ce382858e3a1cf616acebc497cb13374c95c6/wrapt-2.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1fdbb34da15450f2b1d735a0e969c24bdb8d8924892380126e2a293d9902078c", size = 78129, upload-time = "2025-11-07T00:43:48.852Z" }, - { url = "https://files.pythonhosted.org/packages/19/42/44a0db2108526ee6e17a5ab72478061158f34b08b793df251d9fbb9a7eb4/wrapt-2.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3d32794fe940b7000f0519904e247f902f0149edbe6316c710a8562fb6738841", size = 61205, upload-time = "2025-11-07T00:43:50.402Z" }, - { url = "https://files.pythonhosted.org/packages/4d/8a/5b4b1e44b791c22046e90d9b175f9a7581a8cc7a0debbb930f81e6ae8e25/wrapt-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:386fb54d9cd903ee0012c09291336469eb7b244f7183d40dc3e86a16a4bace62", size = 61692, upload-time = "2025-11-07T00:43:51.678Z" }, - { url = "https://files.pythonhosted.org/packages/11/53/3e794346c39f462bcf1f58ac0487ff9bdad02f9b6d5ee2dc84c72e0243b2/wrapt-2.0.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7b219cb2182f230676308cdcacd428fa837987b89e4b7c5c9025088b8a6c9faf", size = 121492, upload-time = "2025-11-07T00:43:55.017Z" }, - { url = 
"https://files.pythonhosted.org/packages/c6/7e/10b7b0e8841e684c8ca76b462a9091c45d62e8f2de9c4b1390b690eadf16/wrapt-2.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:641e94e789b5f6b4822bb8d8ebbdfc10f4e4eae7756d648b717d980f657a9eb9", size = 123064, upload-time = "2025-11-07T00:43:56.323Z" }, - { url = "https://files.pythonhosted.org/packages/0e/d1/3c1e4321fc2f5ee7fd866b2d822aa89b84495f28676fd976c47327c5b6aa/wrapt-2.0.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe21b118b9f58859b5ebaa4b130dee18669df4bd111daad082b7beb8799ad16b", size = 117403, upload-time = "2025-11-07T00:43:53.258Z" }, - { url = "https://files.pythonhosted.org/packages/a4/b0/d2f0a413cf201c8c2466de08414a15420a25aa83f53e647b7255cc2fab5d/wrapt-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:17fb85fa4abc26a5184d93b3efd2dcc14deb4b09edcdb3535a536ad34f0b4dba", size = 121500, upload-time = "2025-11-07T00:43:57.468Z" }, - { url = "https://files.pythonhosted.org/packages/bd/45/bddb11d28ca39970a41ed48a26d210505120f925918592283369219f83cc/wrapt-2.0.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:b89ef9223d665ab255ae42cc282d27d69704d94be0deffc8b9d919179a609684", size = 116299, upload-time = "2025-11-07T00:43:58.877Z" }, - { url = "https://files.pythonhosted.org/packages/81/af/34ba6dd570ef7a534e7eec0c25e2615c355602c52aba59413411c025a0cb/wrapt-2.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a453257f19c31b31ba593c30d997d6e5be39e3b5ad9148c2af5a7314061c63eb", size = 120622, upload-time = "2025-11-07T00:43:59.962Z" }, - { url = "https://files.pythonhosted.org/packages/e2/3e/693a13b4146646fb03254636f8bafd20c621955d27d65b15de07ab886187/wrapt-2.0.1-cp312-cp312-win32.whl", hash = "sha256:3e271346f01e9c8b1130a6a3b0e11908049fe5be2d365a5f402778049147e7e9", size = 58246, upload-time = "2025-11-07T00:44:03.169Z" }, - { url = 
"https://files.pythonhosted.org/packages/a7/36/715ec5076f925a6be95f37917b66ebbeaa1372d1862c2ccd7a751574b068/wrapt-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:2da620b31a90cdefa9cd0c2b661882329e2e19d1d7b9b920189956b76c564d75", size = 60492, upload-time = "2025-11-07T00:44:01.027Z" }, - { url = "https://files.pythonhosted.org/packages/ef/3e/62451cd7d80f65cc125f2b426b25fbb6c514bf6f7011a0c3904fc8c8df90/wrapt-2.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:aea9c7224c302bc8bfc892b908537f56c430802560e827b75ecbde81b604598b", size = 58987, upload-time = "2025-11-07T00:44:02.095Z" }, - { url = "https://files.pythonhosted.org/packages/ad/fe/41af4c46b5e498c90fc87981ab2972fbd9f0bccda597adb99d3d3441b94b/wrapt-2.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:47b0f8bafe90f7736151f61482c583c86b0693d80f075a58701dd1549b0010a9", size = 78132, upload-time = "2025-11-07T00:44:04.628Z" }, - { url = "https://files.pythonhosted.org/packages/1c/92/d68895a984a5ebbbfb175512b0c0aad872354a4a2484fbd5552e9f275316/wrapt-2.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cbeb0971e13b4bd81d34169ed57a6dda017328d1a22b62fda45e1d21dd06148f", size = 61211, upload-time = "2025-11-07T00:44:05.626Z" }, - { url = "https://files.pythonhosted.org/packages/e8/26/ba83dc5ae7cf5aa2b02364a3d9cf74374b86169906a1f3ade9a2d03cf21c/wrapt-2.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:eb7cffe572ad0a141a7886a1d2efa5bef0bf7fe021deeea76b3ab334d2c38218", size = 61689, upload-time = "2025-11-07T00:44:06.719Z" }, - { url = "https://files.pythonhosted.org/packages/cf/67/d7a7c276d874e5d26738c22444d466a3a64ed541f6ef35f740dbd865bab4/wrapt-2.0.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c8d60527d1ecfc131426b10d93ab5d53e08a09c5fa0175f6b21b3252080c70a9", size = 121502, upload-time = "2025-11-07T00:44:09.557Z" }, - { url = 
"https://files.pythonhosted.org/packages/0f/6b/806dbf6dd9579556aab22fc92908a876636e250f063f71548a8660382184/wrapt-2.0.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c654eafb01afac55246053d67a4b9a984a3567c3808bb7df2f8de1c1caba2e1c", size = 123110, upload-time = "2025-11-07T00:44:10.64Z" }, - { url = "https://files.pythonhosted.org/packages/e5/08/cdbb965fbe4c02c5233d185d070cabed2ecc1f1e47662854f95d77613f57/wrapt-2.0.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:98d873ed6c8b4ee2418f7afce666751854d6d03e3c0ec2a399bb039cd2ae89db", size = 117434, upload-time = "2025-11-07T00:44:08.138Z" }, - { url = "https://files.pythonhosted.org/packages/2d/d1/6aae2ce39db4cb5216302fa2e9577ad74424dfbe315bd6669725569e048c/wrapt-2.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9e850f5b7fc67af856ff054c71690d54fa940c3ef74209ad9f935b4f66a0233", size = 121533, upload-time = "2025-11-07T00:44:12.142Z" }, - { url = "https://files.pythonhosted.org/packages/79/35/565abf57559fbe0a9155c29879ff43ce8bd28d2ca61033a3a3dd67b70794/wrapt-2.0.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e505629359cb5f751e16e30cf3f91a1d3ddb4552480c205947da415d597f7ac2", size = 116324, upload-time = "2025-11-07T00:44:13.28Z" }, - { url = "https://files.pythonhosted.org/packages/e1/e0/53ff5e76587822ee33e560ad55876d858e384158272cd9947abdd4ad42ca/wrapt-2.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2879af909312d0baf35f08edeea918ee3af7ab57c37fe47cb6a373c9f2749c7b", size = 120627, upload-time = "2025-11-07T00:44:14.431Z" }, - { url = "https://files.pythonhosted.org/packages/7c/7b/38df30fd629fbd7612c407643c63e80e1c60bcc982e30ceeae163a9800e7/wrapt-2.0.1-cp313-cp313-win32.whl", hash = "sha256:d67956c676be5a24102c7407a71f4126d30de2a569a1c7871c9f3cabc94225d7", size = 58252, upload-time = "2025-11-07T00:44:17.814Z" }, - { url = 
"https://files.pythonhosted.org/packages/85/64/d3954e836ea67c4d3ad5285e5c8fd9d362fd0a189a2db622df457b0f4f6a/wrapt-2.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:9ca66b38dd642bf90c59b6738af8070747b610115a39af2498535f62b5cdc1c3", size = 60500, upload-time = "2025-11-07T00:44:15.561Z" }, - { url = "https://files.pythonhosted.org/packages/89/4e/3c8b99ac93527cfab7f116089db120fef16aac96e5f6cdb724ddf286086d/wrapt-2.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:5a4939eae35db6b6cec8e7aa0e833dcca0acad8231672c26c2a9ab7a0f8ac9c8", size = 58993, upload-time = "2025-11-07T00:44:16.65Z" }, - { url = "https://files.pythonhosted.org/packages/f9/f4/eff2b7d711cae20d220780b9300faa05558660afb93f2ff5db61fe725b9a/wrapt-2.0.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a52f93d95c8d38fed0669da2ebdb0b0376e895d84596a976c15a9eb45e3eccb3", size = 82028, upload-time = "2025-11-07T00:44:18.944Z" }, - { url = "https://files.pythonhosted.org/packages/0c/67/cb945563f66fd0f61a999339460d950f4735c69f18f0a87ca586319b1778/wrapt-2.0.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e54bbf554ee29fcceee24fa41c4d091398b911da6e7f5d7bffda963c9aed2e1", size = 62949, upload-time = "2025-11-07T00:44:20.074Z" }, - { url = "https://files.pythonhosted.org/packages/ec/ca/f63e177f0bbe1e5cf5e8d9b74a286537cd709724384ff20860f8f6065904/wrapt-2.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:908f8c6c71557f4deaa280f55d0728c3bca0960e8c3dd5ceeeafb3c19942719d", size = 63681, upload-time = "2025-11-07T00:44:21.345Z" }, - { url = "https://files.pythonhosted.org/packages/39/a1/1b88fcd21fd835dca48b556daef750952e917a2794fa20c025489e2e1f0f/wrapt-2.0.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e2f84e9af2060e3904a32cea9bb6db23ce3f91cfd90c6b426757cf7cc01c45c7", size = 152696, upload-time = "2025-11-07T00:44:24.318Z" }, - { url = 
"https://files.pythonhosted.org/packages/62/1c/d9185500c1960d9f5f77b9c0b890b7fc62282b53af7ad1b6bd779157f714/wrapt-2.0.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e3612dc06b436968dfb9142c62e5dfa9eb5924f91120b3c8ff501ad878f90eb3", size = 158859, upload-time = "2025-11-07T00:44:25.494Z" }, - { url = "https://files.pythonhosted.org/packages/91/60/5d796ed0f481ec003220c7878a1d6894652efe089853a208ea0838c13086/wrapt-2.0.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6d2d947d266d99a1477cd005b23cbd09465276e302515e122df56bb9511aca1b", size = 146068, upload-time = "2025-11-07T00:44:22.81Z" }, - { url = "https://files.pythonhosted.org/packages/04/f8/75282dd72f102ddbfba137e1e15ecba47b40acff32c08ae97edbf53f469e/wrapt-2.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7d539241e87b650cbc4c3ac9f32c8d1ac8a54e510f6dca3f6ab60dcfd48c9b10", size = 155724, upload-time = "2025-11-07T00:44:26.634Z" }, - { url = "https://files.pythonhosted.org/packages/5a/27/fe39c51d1b344caebb4a6a9372157bdb8d25b194b3561b52c8ffc40ac7d1/wrapt-2.0.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:4811e15d88ee62dbf5c77f2c3ff3932b1e3ac92323ba3912f51fc4016ce81ecf", size = 144413, upload-time = "2025-11-07T00:44:27.939Z" }, - { url = "https://files.pythonhosted.org/packages/83/2b/9f6b643fe39d4505c7bf926d7c2595b7cb4b607c8c6b500e56c6b36ac238/wrapt-2.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c1c91405fcf1d501fa5d55df21e58ea49e6b879ae829f1039faaf7e5e509b41e", size = 150325, upload-time = "2025-11-07T00:44:29.29Z" }, - { url = "https://files.pythonhosted.org/packages/bb/b6/20ffcf2558596a7f58a2e69c89597128781f0b88e124bf5a4cadc05b8139/wrapt-2.0.1-cp313-cp313t-win32.whl", hash = "sha256:e76e3f91f864e89db8b8d2a8311d57df93f01ad6bb1e9b9976d1f2e83e18315c", size = 59943, upload-time = "2025-11-07T00:44:33.211Z" }, - { url = 
"https://files.pythonhosted.org/packages/87/6a/0e56111cbb3320151eed5d3821ee1373be13e05b376ea0870711f18810c3/wrapt-2.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:83ce30937f0ba0d28818807b303a412440c4b63e39d3d8fc036a94764b728c92", size = 63240, upload-time = "2025-11-07T00:44:30.935Z" }, - { url = "https://files.pythonhosted.org/packages/1d/54/5ab4c53ea1f7f7e5c3e7c1095db92932cc32fd62359d285486d00c2884c3/wrapt-2.0.1-cp313-cp313t-win_arm64.whl", hash = "sha256:4b55cacc57e1dc2d0991dbe74c6419ffd415fb66474a02335cb10efd1aa3f84f", size = 60416, upload-time = "2025-11-07T00:44:32.002Z" }, - { url = "https://files.pythonhosted.org/packages/73/81/d08d83c102709258e7730d3cd25befd114c60e43ef3891d7e6877971c514/wrapt-2.0.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:5e53b428f65ece6d9dad23cb87e64506392b720a0b45076c05354d27a13351a1", size = 78290, upload-time = "2025-11-07T00:44:34.691Z" }, - { url = "https://files.pythonhosted.org/packages/f6/14/393afba2abb65677f313aa680ff0981e829626fed39b6a7e3ec807487790/wrapt-2.0.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ad3ee9d0f254851c71780966eb417ef8e72117155cff04821ab9b60549694a55", size = 61255, upload-time = "2025-11-07T00:44:35.762Z" }, - { url = "https://files.pythonhosted.org/packages/c4/10/a4a1f2fba205a9462e36e708ba37e5ac95f4987a0f1f8fd23f0bf1fc3b0f/wrapt-2.0.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d7b822c61ed04ee6ad64bc90d13368ad6eb094db54883b5dde2182f67a7f22c0", size = 61797, upload-time = "2025-11-07T00:44:37.22Z" }, - { url = "https://files.pythonhosted.org/packages/12/db/99ba5c37cf1c4fad35349174f1e38bd8d992340afc1ff27f526729b98986/wrapt-2.0.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7164a55f5e83a9a0b031d3ffab4d4e36bbec42e7025db560f225489fa929e509", size = 120470, upload-time = "2025-11-07T00:44:39.425Z" }, - { url = 
"https://files.pythonhosted.org/packages/30/3f/a1c8d2411eb826d695fc3395a431757331582907a0ec59afce8fe8712473/wrapt-2.0.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e60690ba71a57424c8d9ff28f8d006b7ad7772c22a4af432188572cd7fa004a1", size = 122851, upload-time = "2025-11-07T00:44:40.582Z" }, - { url = "https://files.pythonhosted.org/packages/b3/8d/72c74a63f201768d6a04a8845c7976f86be6f5ff4d74996c272cefc8dafc/wrapt-2.0.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3cd1a4bd9a7a619922a8557e1318232e7269b5fb69d4ba97b04d20450a6bf970", size = 117433, upload-time = "2025-11-07T00:44:38.313Z" }, - { url = "https://files.pythonhosted.org/packages/c7/5a/df37cf4042cb13b08256f8e27023e2f9b3d471d553376616591bb99bcb31/wrapt-2.0.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b4c2e3d777e38e913b8ce3a6257af72fb608f86a1df471cb1d4339755d0a807c", size = 121280, upload-time = "2025-11-07T00:44:41.69Z" }, - { url = "https://files.pythonhosted.org/packages/54/34/40d6bc89349f9931e1186ceb3e5fbd61d307fef814f09fbbac98ada6a0c8/wrapt-2.0.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3d366aa598d69416b5afedf1faa539fac40c1d80a42f6b236c88c73a3c8f2d41", size = 116343, upload-time = "2025-11-07T00:44:43.013Z" }, - { url = "https://files.pythonhosted.org/packages/70/66/81c3461adece09d20781dee17c2366fdf0cb8754738b521d221ca056d596/wrapt-2.0.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c235095d6d090aa903f1db61f892fffb779c1eaeb2a50e566b52001f7a0f66ed", size = 119650, upload-time = "2025-11-07T00:44:44.523Z" }, - { url = "https://files.pythonhosted.org/packages/46/3a/d0146db8be8761a9e388cc9cc1c312b36d583950ec91696f19bbbb44af5a/wrapt-2.0.1-cp314-cp314-win32.whl", hash = "sha256:bfb5539005259f8127ea9c885bdc231978c06b7a980e63a8a61c8c4c979719d0", size = 58701, upload-time = "2025-11-07T00:44:48.277Z" }, - { url = 
"https://files.pythonhosted.org/packages/1a/38/5359da9af7d64554be63e9046164bd4d8ff289a2dd365677d25ba3342c08/wrapt-2.0.1-cp314-cp314-win_amd64.whl", hash = "sha256:4ae879acc449caa9ed43fc36ba08392b9412ee67941748d31d94e3cedb36628c", size = 60947, upload-time = "2025-11-07T00:44:46.086Z" }, - { url = "https://files.pythonhosted.org/packages/aa/3f/96db0619276a833842bf36343685fa04f987dd6e3037f314531a1e00492b/wrapt-2.0.1-cp314-cp314-win_arm64.whl", hash = "sha256:8639b843c9efd84675f1e100ed9e99538ebea7297b62c4b45a7042edb84db03e", size = 59359, upload-time = "2025-11-07T00:44:47.164Z" }, - { url = "https://files.pythonhosted.org/packages/71/49/5f5d1e867bf2064bf3933bc6cf36ade23505f3902390e175e392173d36a2/wrapt-2.0.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:9219a1d946a9b32bb23ccae66bdb61e35c62773ce7ca6509ceea70f344656b7b", size = 82031, upload-time = "2025-11-07T00:44:49.4Z" }, - { url = "https://files.pythonhosted.org/packages/2b/89/0009a218d88db66ceb83921e5685e820e2c61b59bbbb1324ba65342668bc/wrapt-2.0.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:fa4184e74197af3adad3c889a1af95b53bb0466bced92ea99a0c014e48323eec", size = 62952, upload-time = "2025-11-07T00:44:50.74Z" }, - { url = "https://files.pythonhosted.org/packages/ae/18/9b968e920dd05d6e44bcc918a046d02afea0fb31b2f1c80ee4020f377cbe/wrapt-2.0.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c5ef2f2b8a53b7caee2f797ef166a390fef73979b15778a4a153e4b5fedce8fa", size = 63688, upload-time = "2025-11-07T00:44:52.248Z" }, - { url = "https://files.pythonhosted.org/packages/a6/7d/78bdcb75826725885d9ea26c49a03071b10c4c92da93edda612910f150e4/wrapt-2.0.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e042d653a4745be832d5aa190ff80ee4f02c34b21f4b785745eceacd0907b815", size = 152706, upload-time = "2025-11-07T00:44:54.613Z" }, - { url = 
"https://files.pythonhosted.org/packages/dd/77/cac1d46f47d32084a703df0d2d29d47e7eb2a7d19fa5cbca0e529ef57659/wrapt-2.0.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2afa23318136709c4b23d87d543b425c399887b4057936cd20386d5b1422b6fa", size = 158866, upload-time = "2025-11-07T00:44:55.79Z" }, - { url = "https://files.pythonhosted.org/packages/8a/11/b521406daa2421508903bf8d5e8b929216ec2af04839db31c0a2c525eee0/wrapt-2.0.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6c72328f668cf4c503ffcf9434c2b71fdd624345ced7941bc6693e61bbe36bef", size = 146148, upload-time = "2025-11-07T00:44:53.388Z" }, - { url = "https://files.pythonhosted.org/packages/0c/c0/340b272bed297baa7c9ce0c98ef7017d9c035a17a6a71dce3184b8382da2/wrapt-2.0.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3793ac154afb0e5b45d1233cb94d354ef7a983708cc3bb12563853b1d8d53747", size = 155737, upload-time = "2025-11-07T00:44:56.971Z" }, - { url = "https://files.pythonhosted.org/packages/f3/93/bfcb1fb2bdf186e9c2883a4d1ab45ab099c79cbf8f4e70ea453811fa3ea7/wrapt-2.0.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:fec0d993ecba3991645b4857837277469c8cc4c554a7e24d064d1ca291cfb81f", size = 144451, upload-time = "2025-11-07T00:44:58.515Z" }, - { url = "https://files.pythonhosted.org/packages/d2/6b/dca504fb18d971139d232652656180e3bd57120e1193d9a5899c3c0b7cdd/wrapt-2.0.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:949520bccc1fa227274da7d03bf238be15389cd94e32e4297b92337df9b7a349", size = 150353, upload-time = "2025-11-07T00:44:59.753Z" }, - { url = "https://files.pythonhosted.org/packages/1d/f6/a1de4bd3653afdf91d250ca5c721ee51195df2b61a4603d4b373aa804d1d/wrapt-2.0.1-cp314-cp314t-win32.whl", hash = "sha256:be9e84e91d6497ba62594158d3d31ec0486c60055c49179edc51ee43d095f79c", size = 60609, upload-time = "2025-11-07T00:45:03.315Z" }, - { url = 
"https://files.pythonhosted.org/packages/01/3a/07cd60a9d26fe73efead61c7830af975dfdba8537632d410462672e4432b/wrapt-2.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:61c4956171c7434634401db448371277d07032a81cc21c599c22953374781395", size = 64038, upload-time = "2025-11-07T00:45:00.948Z" }, - { url = "https://files.pythonhosted.org/packages/41/99/8a06b8e17dddbf321325ae4eb12465804120f699cd1b8a355718300c62da/wrapt-2.0.1-cp314-cp314t-win_arm64.whl", hash = "sha256:35cdbd478607036fee40273be8ed54a451f5f23121bd9d4be515158f9498f7ad", size = 60634, upload-time = "2025-11-07T00:45:02.087Z" }, - { url = "https://files.pythonhosted.org/packages/4d/26/ed6979672ebe0e33f6059fdc8182c4c536e575b6f03d349a542082ca03fb/wrapt-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:90897ea1cf0679763b62e79657958cd54eae5659f6360fc7d2ccc6f906342183", size = 77192, upload-time = "2025-11-07T00:45:04.493Z" }, - { url = "https://files.pythonhosted.org/packages/b5/a5/fb0974e8d21ef17f75ffa365b395c04eefa23eb6e45548e94c781e93c306/wrapt-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:50844efc8cdf63b2d90cd3d62d4947a28311e6266ce5235a219d21b195b4ec2c", size = 60475, upload-time = "2025-11-07T00:45:05.671Z" }, - { url = "https://files.pythonhosted.org/packages/6b/7b/56bf38c8bd5e8a48749f1a13c743eddcbd7a616da342b4877f79ec3e7087/wrapt-2.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49989061a9977a8cbd6d20f2efa813f24bf657c6990a42967019ce779a878dbf", size = 61311, upload-time = "2025-11-07T00:45:06.822Z" }, - { url = "https://files.pythonhosted.org/packages/18/70/ba94af50f2145cb431163d74d405083beb16782818b20c956138e4f59299/wrapt-2.0.1-cp38-cp38-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:09c7476ab884b74dce081ad9bfd07fe5822d8600abade571cb1f66d5fc915af6", size = 118542, upload-time = "2025-11-07T00:45:08.324Z" }, - { url = 
"https://files.pythonhosted.org/packages/14/ac/537c8f9cec8a422cfed45b28665ea33344928fd67913e5ff98af0c11470c/wrapt-2.0.1-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1a8a09a004ef100e614beec82862d11fc17d601092c3599afd22b1f36e4137e", size = 120989, upload-time = "2025-11-07T00:45:09.928Z" }, - { url = "https://files.pythonhosted.org/packages/7f/b8/463284d8a74e56c88f5f2fb9b572178a294e0beb945b8ee2a7ca43a1696d/wrapt-2.0.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:89a82053b193837bf93c0f8a57ded6e4b6d88033a499dadff5067e912c2a41e9", size = 118937, upload-time = "2025-11-07T00:45:11.157Z" }, - { url = "https://files.pythonhosted.org/packages/3c/8e/08b8f9de6b3cfd269504b345d31679d283e50cc93cb0521a44475bb7311b/wrapt-2.0.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f26f8e2ca19564e2e1fdbb6a0e47f36e0efbab1acc31e15471fad88f828c75f6", size = 117150, upload-time = "2025-11-07T00:45:12.324Z" }, - { url = "https://files.pythonhosted.org/packages/4c/f3/0eab878bb4d0eadbec2b75e399cfa6aa802e634587756d59419080aae1f5/wrapt-2.0.1-cp38-cp38-win32.whl", hash = "sha256:115cae4beed3542e37866469a8a1f2b9ec549b4463572b000611e9946b86e6f6", size = 57936, upload-time = "2025-11-07T00:45:15.468Z" }, - { url = "https://files.pythonhosted.org/packages/03/e5/fc964b370bf568312deda176682138ccbd41960285a7de49002183e2aa08/wrapt-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:c4012a2bd37059d04f8209916aa771dfb564cccb86079072bdcd48a308b6a5c5", size = 60308, upload-time = "2025-11-07T00:45:13.573Z" }, - { url = "https://files.pythonhosted.org/packages/c6/1f/5af0ae22368ec69067a577f9e07a0dd2619a1f63aabc2851263679942667/wrapt-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:68424221a2dc00d634b54f92441914929c5ffb1c30b3b837343978343a3512a3", size = 77478, upload-time = "2025-11-07T00:45:16.65Z" }, - { url = 
"https://files.pythonhosted.org/packages/8c/b7/fd6b563aada859baabc55db6aa71b8afb4a3ceb8bc33d1053e4c7b5e0109/wrapt-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6bd1a18f5a797fe740cb3d7a0e853a8ce6461cc62023b630caec80171a6b8097", size = 60687, upload-time = "2025-11-07T00:45:17.896Z" }, - { url = "https://files.pythonhosted.org/packages/0f/8c/9ededfff478af396bcd081076986904bdca336d9664d247094150c877dcb/wrapt-2.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fb3a86e703868561c5cad155a15c36c716e1ab513b7065bd2ac8ed353c503333", size = 61563, upload-time = "2025-11-07T00:45:19.109Z" }, - { url = "https://files.pythonhosted.org/packages/ab/a7/d795a1aa2b6ab20ca21157fe03cbfc6aa7e870a88ac3b4ea189e2f6c79f0/wrapt-2.0.1-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5dc1b852337c6792aa111ca8becff5bacf576bf4a0255b0f05eb749da6a1643e", size = 113395, upload-time = "2025-11-07T00:45:21.551Z" }, - { url = "https://files.pythonhosted.org/packages/61/32/56cde2bbf95f2d5698a1850a765520aa86bc7ae0f95b8ec80b6f2e2049bb/wrapt-2.0.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c046781d422f0830de6329fa4b16796096f28a92c8aef3850674442cdcb87b7f", size = 115362, upload-time = "2025-11-07T00:45:22.809Z" }, - { url = "https://files.pythonhosted.org/packages/cf/53/8d3cc433847c219212c133a3e8305bd087b386ef44442ff39189e8fa62ac/wrapt-2.0.1-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f73f9f7a0ebd0db139253d27e5fc8d2866ceaeef19c30ab5d69dcbe35e1a6981", size = 111766, upload-time = "2025-11-07T00:45:20.294Z" }, - { url = "https://files.pythonhosted.org/packages/b8/d3/14b50c2d0463c0dcef8f388cb1527ed7bbdf0972b9fd9976905f36c77ebf/wrapt-2.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b667189cf8efe008f55bbda321890bef628a67ab4147ebf90d182f2dadc78790", size = 114560, upload-time = "2025-11-07T00:45:24.054Z" }, - { url = 
"https://files.pythonhosted.org/packages/3a/b8/4f731ff178f77ae55385586de9ff4b4261e872cf2ced4875e6c976fbcb8b/wrapt-2.0.1-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:a9a83618c4f0757557c077ef71d708ddd9847ed66b7cc63416632af70d3e2308", size = 110999, upload-time = "2025-11-07T00:45:25.596Z" }, - { url = "https://files.pythonhosted.org/packages/fe/bb/5f1bb0f9ae9d12e19f1d71993d052082062603e83fe3e978377f918f054d/wrapt-2.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e9b121e9aeb15df416c2c960b8255a49d44b4038016ee17af03975992d03931", size = 113164, upload-time = "2025-11-07T00:45:26.8Z" }, - { url = "https://files.pythonhosted.org/packages/ad/f6/f3a3c623d3065c7bf292ee0b73566236b562d5ed894891bd8e435762b618/wrapt-2.0.1-cp39-cp39-win32.whl", hash = "sha256:1f186e26ea0a55f809f232e92cc8556a0977e00183c3ebda039a807a42be1494", size = 58028, upload-time = "2025-11-07T00:45:30.943Z" }, - { url = "https://files.pythonhosted.org/packages/24/78/647c609dfa18063a7fcd5c23f762dd006be401cc9206314d29c9b0b12078/wrapt-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:bf4cb76f36be5de950ce13e22e7fdf462b35b04665a12b64f3ac5c1bbbcf3728", size = 60380, upload-time = "2025-11-07T00:45:28.341Z" }, - { url = "https://files.pythonhosted.org/packages/07/90/0c14b241d18d80ddf4c847a5f52071e126e8a6a9e5a8a7952add8ef0d766/wrapt-2.0.1-cp39-cp39-win_arm64.whl", hash = "sha256:d6cc985b9c8b235bd933990cdbf0f891f8e010b65a3911f7a55179cd7b0fc57b", size = 58895, upload-time = "2025-11-07T00:45:29.527Z" }, - { url = "https://files.pythonhosted.org/packages/15/d1/b51471c11592ff9c012bd3e2f7334a6ff2f42a7aed2caffcf0bdddc9cb89/wrapt-2.0.1-py3-none-any.whl", hash = "sha256:4d2ce1bf1a48c5277d7969259232b57645aae5686dba1eaeade39442277afbca", size = 44046, upload-time = "2025-11-07T00:45:32.116Z" }, -] - -[[package]] -name = "zipp" -version = "3.20.2" +name = "vcrpy" +version = "8.1.1" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.8.1' and python_full_version < 
'3.9'", - "python_full_version < '3.8.1'", +dependencies = [ + { name = "pyyaml" }, + { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/54/bf/5c0000c44ebc80123ecbdddba1f5dcd94a5ada602a9c225d84b5aaa55e86/zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29", size = 24199, upload-time = "2024-09-13T13:44:16.101Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/07/bcfd5ebd7cb308026ab78a353e091bd699593358be49197d39d004e5ad83/vcrpy-8.1.1.tar.gz", hash = "sha256:58e3053e33b423f3594031cb758c3f4d1df931307f1e67928e30cf352df7709f", size = 85770, upload-time = "2026-01-04T19:22:03.886Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/62/8b/5ba542fa83c90e09eac972fc9baca7a88e7e7ca4b221a89251954019308b/zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350", size = 9200, upload-time = "2024-09-13T13:44:14.38Z" }, + { url = "https://files.pythonhosted.org/packages/3a/d7/f79b05a5d728f8786876a7d75dfb0c5cae27e428081b2d60152fb52f155f/vcrpy-8.1.1-py3-none-any.whl", hash = "sha256:2d16f31ad56493efb6165182dd99767207031b0da3f68b18f975545ede8ac4b9", size = 42445, upload-time = "2026-01-04T19:22:02.532Z" }, ] [[package]] -name = "zipp" -version = "3.23.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.9.*'", -] -sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = 
"2025-06-08T17:06:38.034Z" }, +name = "wrapt" +version = "2.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/37/ae31f40bec90de2f88d9597d0b5281e23ffe85b893a47ca5d9c05c63a4f6/wrapt-2.1.1.tar.gz", hash = "sha256:5fdcb09bf6db023d88f312bd0767594b414655d58090fc1c46b3414415f67fac", size = 81329, upload-time = "2026-02-03T02:12:13.786Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/21/293b657a27accfbbbb6007ebd78af0efa2083dac83e8f523272ea09b4638/wrapt-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7e927375e43fd5a985b27a8992327c22541b6dede1362fc79df337d26e23604f", size = 60554, upload-time = "2026-02-03T02:11:17.362Z" }, + { url = "https://files.pythonhosted.org/packages/25/e9/96dd77728b54a899d4ce2798d7b1296989ce687ed3c0cb917d6b3154bf5d/wrapt-2.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c99544b6a7d40ca22195563b6d8bc3986ee8bb82f272f31f0670fe9440c869", size = 61496, upload-time = "2026-02-03T02:12:54.732Z" }, + { url = "https://files.pythonhosted.org/packages/44/79/4c755b45df6ef30c0dd628ecfaa0c808854be147ca438429da70a162833c/wrapt-2.1.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b2be3fa5f4efaf16ee7c77d0556abca35f5a18ad4ac06f0ef3904c3399010ce9", size = 113528, upload-time = "2026-02-03T02:12:26.405Z" }, + { url = "https://files.pythonhosted.org/packages/9f/63/23ce28f7b841217d9a6337a340fbb8d4a7fbd67a89d47f377c8550fa34aa/wrapt-2.1.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67c90c1ae6489a6cb1a82058902caa8006706f7b4e8ff766f943e9d2c8e608d0", size = 115536, upload-time = "2026-02-03T02:11:54.397Z" }, + { url = "https://files.pythonhosted.org/packages/23/7b/5ca8d3b12768670d16c8329e29960eedd56212770365a02a8de8bf73dc01/wrapt-2.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:05c0db35ccffd7480143e62df1e829d101c7b86944ae3be7e4869a7efa621f53", size = 114716, 
upload-time = "2026-02-03T02:12:20.771Z" }, + { url = "https://files.pythonhosted.org/packages/c7/3a/9789ccb14a096d30bb847bf3ee137bf682cc9750c2ce155f4c5ae1962abf/wrapt-2.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0c2ec9f616755b2e1e0bf4d0961f59bb5c2e7a77407e7e2c38ef4f7d2fdde12c", size = 113200, upload-time = "2026-02-03T02:12:07.688Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e5/4ec3526ce6ce920b267c8d35d2c2f0874d3fad2744c8b7259353f1132baa/wrapt-2.1.1-cp310-cp310-win32.whl", hash = "sha256:203ba6b3f89e410e27dbd30ff7dccaf54dcf30fda0b22aa1b82d560c7f9fe9a1", size = 57876, upload-time = "2026-02-03T02:11:42.61Z" }, + { url = "https://files.pythonhosted.org/packages/d1/4e/661c7c76ecd85375b2bc03488941a3a1078642af481db24949e2b9de01f4/wrapt-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:6f9426d9cfc2f8732922fc96198052e55c09bb9db3ddaa4323a18e055807410e", size = 60224, upload-time = "2026-02-03T02:11:19.096Z" }, + { url = "https://files.pythonhosted.org/packages/5f/b7/53c7252d371efada4cb119e72e774fa2c6b3011fc33e3e552cdf48fb9488/wrapt-2.1.1-cp310-cp310-win_arm64.whl", hash = "sha256:69c26f51b67076b40714cff81bdd5826c0b10c077fb6b0678393a6a2f952a5fc", size = 58645, upload-time = "2026-02-03T02:12:10.396Z" }, + { url = "https://files.pythonhosted.org/packages/b8/a8/9254e4da74b30a105935197015b18b31b7a298bf046e67d8952ef74967bd/wrapt-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c366434a7fb914c7a5de508ed735ef9c133367114e1a7cb91dfb5cd806a1549", size = 60554, upload-time = "2026-02-03T02:11:13.038Z" }, + { url = "https://files.pythonhosted.org/packages/9e/a1/378579880cc7af226354054a2c255f69615b379d8adad482bfe2f22a0dc2/wrapt-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d6a2068bd2e1e19e5a317c8c0b288267eec4e7347c36bc68a6e378a39f19ee7", size = 61491, upload-time = "2026-02-03T02:12:56.077Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/72/957b51c56acca35701665878ad31626182199fc4afecfe67dea072210f95/wrapt-2.1.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:891ab4713419217b2aed7dd106c9200f64e6a82226775a0d2ebd6bef2ebd1747", size = 113949, upload-time = "2026-02-03T02:11:04.516Z" }, + { url = "https://files.pythonhosted.org/packages/cd/74/36bbebb4a3d2ae9c3e6929639721f8606cd0710a82a777c371aa69e36504/wrapt-2.1.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8ef36a0df38d2dc9d907f6617f89e113c5892e0a35f58f45f75901af0ce7d81", size = 115989, upload-time = "2026-02-03T02:12:19.398Z" }, + { url = "https://files.pythonhosted.org/packages/ae/0d/f1177245a083c7be284bc90bddfe5aece32cdd5b858049cb69ce001a0e8d/wrapt-2.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76e9af3ebd86f19973143d4d592cbf3e970cf3f66ddee30b16278c26ae34b8ab", size = 115242, upload-time = "2026-02-03T02:11:08.111Z" }, + { url = "https://files.pythonhosted.org/packages/62/3e/3b7cf5da27e59df61b1eae2d07dd03ff5d6f75b5408d694873cca7a8e33c/wrapt-2.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ff562067485ebdeaef2fa3fe9b1876bc4e7b73762e0a01406ad81e2076edcebf", size = 113676, upload-time = "2026-02-03T02:12:41.026Z" }, + { url = "https://files.pythonhosted.org/packages/f7/65/8248d3912c705f2c66f81cb97c77436f37abcbedb16d633b5ab0d795d8cd/wrapt-2.1.1-cp311-cp311-win32.whl", hash = "sha256:9e60a30aa0909435ec4ea2a3c53e8e1b50ac9f640c0e9fe3f21fd248a22f06c5", size = 57863, upload-time = "2026-02-03T02:12:18.112Z" }, + { url = "https://files.pythonhosted.org/packages/6b/31/d29310ab335f71f00c50466153b3dc985aaf4a9fc03263e543e136859541/wrapt-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:7d79954f51fcf84e5ec4878ab4aea32610d70145c5bbc84b3370eabfb1e096c2", size = 60224, upload-time = "2026-02-03T02:12:29.289Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/90/a6ec319affa6e2894962a0cb9d73c67f88af1a726d15314bfb5c88b8a08d/wrapt-2.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:d3ffc6b0efe79e08fd947605fd598515aebefe45e50432dc3b5cd437df8b1ada", size = 58643, upload-time = "2026-02-03T02:12:43.022Z" }, + { url = "https://files.pythonhosted.org/packages/df/cb/4d5255d19bbd12be7f8ee2c1fb4269dddec9cef777ef17174d357468efaa/wrapt-2.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ab8e3793b239db021a18782a5823fcdea63b9fe75d0e340957f5828ef55fcc02", size = 61143, upload-time = "2026-02-03T02:11:46.313Z" }, + { url = "https://files.pythonhosted.org/packages/6f/07/7ed02daa35542023464e3c8b7cb937fa61f6c61c0361ecf8f5fecf8ad8da/wrapt-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c0300007836373d1c2df105b40777986accb738053a92fe09b615a7a4547e9f", size = 61740, upload-time = "2026-02-03T02:12:51.966Z" }, + { url = "https://files.pythonhosted.org/packages/c4/60/a237a4e4a36f6d966061ccc9b017627d448161b19e0a3ab80a7c7c97f859/wrapt-2.1.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2b27c070fd1132ab23957bcd4ee3ba707a91e653a9268dc1afbd39b77b2799f7", size = 121327, upload-time = "2026-02-03T02:11:06.796Z" }, + { url = "https://files.pythonhosted.org/packages/ae/fe/9139058a3daa8818fc67e6460a2340e8bbcf3aef8b15d0301338bbe181ca/wrapt-2.1.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b0e36d845e8b6f50949b6b65fc6cd279f47a1944582ed4ec8258cd136d89a64", size = 122903, upload-time = "2026-02-03T02:12:48.657Z" }, + { url = "https://files.pythonhosted.org/packages/91/10/b8479202b4164649675846a531763531f0a6608339558b5a0a718fc49a8d/wrapt-2.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4aeea04a9889370fcfb1ef828c4cc583f36a875061505cd6cd9ba24d8b43cc36", size = 121333, upload-time = "2026-02-03T02:11:32.148Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/75/75fc793b791d79444aca2c03ccde64e8b99eda321b003f267d570b7b0985/wrapt-2.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d88b46bb0dce9f74b6817bc1758ff2125e1ca9e1377d62ea35b6896142ab6825", size = 120458, upload-time = "2026-02-03T02:11:16.039Z" }, + { url = "https://files.pythonhosted.org/packages/d7/8f/c3f30d511082ca6d947c405f9d8f6c8eaf83cfde527c439ec2c9a30eb5ea/wrapt-2.1.1-cp312-cp312-win32.whl", hash = "sha256:63decff76ca685b5c557082dfbea865f3f5f6d45766a89bff8dc61d336348833", size = 58086, upload-time = "2026-02-03T02:12:35.041Z" }, + { url = "https://files.pythonhosted.org/packages/0a/c8/37625b643eea2849f10c3b90f69c7462faa4134448d4443234adaf122ae5/wrapt-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:b828235d26c1e35aca4107039802ae4b1411be0fe0367dd5b7e4d90e562fcbcd", size = 60328, upload-time = "2026-02-03T02:12:45.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/79/56242f07572d5682ba8065a9d4d9c2218313f576e3c3471873c2a5355ffd/wrapt-2.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:75128507413a9f1bcbe2db88fd18fbdbf80f264b82fa33a6996cdeaf01c52352", size = 58722, upload-time = "2026-02-03T02:12:27.949Z" }, + { url = "https://files.pythonhosted.org/packages/f7/ca/3cf290212855b19af9fcc41b725b5620b32f470d6aad970c2593500817eb/wrapt-2.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ce9646e17fa7c3e2e7a87e696c7de66512c2b4f789a8db95c613588985a2e139", size = 61150, upload-time = "2026-02-03T02:12:50.575Z" }, + { url = "https://files.pythonhosted.org/packages/9d/33/5b8f89a82a9859ce82da4870c799ad11ce15648b6e1c820fec3e23f4a19f/wrapt-2.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:428cfc801925454395aa468ba7ddb3ed63dc0d881df7b81626cdd433b4e2b11b", size = 61743, upload-time = "2026-02-03T02:11:55.733Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/2f/60c51304fbdf47ce992d9eefa61fbd2c0e64feee60aaa439baf42ea6f40b/wrapt-2.1.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5797f65e4d58065a49088c3b32af5410751cd485e83ba89e5a45e2aa8905af98", size = 121341, upload-time = "2026-02-03T02:11:20.461Z" }, + { url = "https://files.pythonhosted.org/packages/ad/03/ce5256e66dd94e521ad5e753c78185c01b6eddbed3147be541f4d38c0cb7/wrapt-2.1.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5a2db44a71202c5ae4bb5f27c6d3afbc5b23053f2e7e78aa29704541b5dad789", size = 122947, upload-time = "2026-02-03T02:11:33.596Z" }, + { url = "https://files.pythonhosted.org/packages/eb/ae/50ca8854b81b946a11a36fcd6ead32336e6db2c14b6e4a8b092b80741178/wrapt-2.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8d5350c3590af09c1703dd60ec78a7370c0186e11eaafb9dda025a30eee6492d", size = 121370, upload-time = "2026-02-03T02:11:09.886Z" }, + { url = "https://files.pythonhosted.org/packages/fb/d9/d6a7c654e0043319b4cc137a4caaf7aa16b46b51ee8df98d1060254705b7/wrapt-2.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2d9b076411bed964e752c01b49fd224cc385f3a96f520c797d38412d70d08359", size = 120465, upload-time = "2026-02-03T02:11:37.592Z" }, + { url = "https://files.pythonhosted.org/packages/55/90/65be41e40845d951f714b5a77e84f377a3787b1e8eee6555a680da6d0db5/wrapt-2.1.1-cp313-cp313-win32.whl", hash = "sha256:0bb7207130ce6486727baa85373503bf3334cc28016f6928a0fa7e19d7ecdc06", size = 58090, upload-time = "2026-02-03T02:12:53.342Z" }, + { url = "https://files.pythonhosted.org/packages/5f/66/6a09e0294c4fc8c26028a03a15191721c9271672467cc33e6617ee0d91d2/wrapt-2.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:cbfee35c711046b15147b0ae7db9b976f01c9520e6636d992cd9e69e5e2b03b1", size = 60341, upload-time = "2026-02-03T02:12:36.384Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/f0/20ceb8b701e9a71555c87a5ddecbed76ec16742cf1e4b87bbaf26735f998/wrapt-2.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:7d2756061022aebbf57ba14af9c16e8044e055c22d38de7bf40d92b565ecd2b0", size = 58731, upload-time = "2026-02-03T02:12:01.328Z" }, + { url = "https://files.pythonhosted.org/packages/80/b4/fe95beb8946700b3db371f6ce25115217e7075ca063663b8cca2888ba55c/wrapt-2.1.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4814a3e58bc6971e46baa910ecee69699110a2bf06c201e24277c65115a20c20", size = 62969, upload-time = "2026-02-03T02:11:51.245Z" }, + { url = "https://files.pythonhosted.org/packages/b8/89/477b0bdc784e3299edf69c279697372b8bd4c31d9c6966eae405442899df/wrapt-2.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:106c5123232ab9b9f4903692e1fa0bdc231510098f04c13c3081f8ad71c3d612", size = 63606, upload-time = "2026-02-03T02:12:02.64Z" }, + { url = "https://files.pythonhosted.org/packages/ed/55/9d0c1269ab76de87715b3b905df54dd25d55bbffd0b98696893eb613469f/wrapt-2.1.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1a40b83ff2535e6e56f190aff123821eea89a24c589f7af33413b9c19eb2c738", size = 152536, upload-time = "2026-02-03T02:11:24.492Z" }, + { url = "https://files.pythonhosted.org/packages/44/18/2004766030462f79ad86efaa62000b5e39b1ff001dcce86650e1625f40ae/wrapt-2.1.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:789cea26e740d71cf1882e3a42bb29052bc4ada15770c90072cb47bf73fb3dbf", size = 158697, upload-time = "2026-02-03T02:12:32.214Z" }, + { url = "https://files.pythonhosted.org/packages/e1/bb/0a880fa0f35e94ee843df4ee4dd52a699c9263f36881311cfb412c09c3e5/wrapt-2.1.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ba49c14222d5e5c0ee394495a8655e991dc06cbca5398153aefa5ac08cd6ccd7", size = 155563, upload-time = "2026-02-03T02:11:49.737Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/ff/cd1b7c4846c8678fac359a6eb975dc7ab5bd606030adb22acc8b4a9f53f1/wrapt-2.1.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ac8cda531fe55be838a17c62c806824472bb962b3afa47ecbd59b27b78496f4e", size = 150161, upload-time = "2026-02-03T02:12:33.613Z" }, + { url = "https://files.pythonhosted.org/packages/38/ec/67c90a7082f452964b4621e4890e9a490f1add23cdeb7483cc1706743291/wrapt-2.1.1-cp313-cp313t-win32.whl", hash = "sha256:b8af75fe20d381dd5bcc9db2e86a86d7fcfbf615383a7147b85da97c1182225b", size = 59783, upload-time = "2026-02-03T02:11:39.863Z" }, + { url = "https://files.pythonhosted.org/packages/ec/08/466afe4855847d8febdfa2c57c87e991fc5820afbdef01a273683dfd15a0/wrapt-2.1.1-cp313-cp313t-win_amd64.whl", hash = "sha256:45c5631c9b6c792b78be2d7352129f776dd72c605be2c3a4e9be346be8376d83", size = 63082, upload-time = "2026-02-03T02:12:09.075Z" }, + { url = "https://files.pythonhosted.org/packages/9a/62/60b629463c28b15b1eeadb3a0691e17568622b12aa5bfa7ebe9b514bfbeb/wrapt-2.1.1-cp313-cp313t-win_arm64.whl", hash = "sha256:da815b9263947ac98d088b6414ac83507809a1d385e4632d9489867228d6d81c", size = 60251, upload-time = "2026-02-03T02:11:21.794Z" }, + { url = "https://files.pythonhosted.org/packages/95/a0/1c2396e272f91efe6b16a6a8bce7ad53856c8f9ae4f34ceaa711d63ec9e1/wrapt-2.1.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:9aa1765054245bb01a37f615503290d4e207e3fd59226e78341afb587e9c1236", size = 61311, upload-time = "2026-02-03T02:12:44.41Z" }, + { url = "https://files.pythonhosted.org/packages/b0/9a/d2faba7e61072a7507b5722db63562fdb22f5a24e237d460d18755627f15/wrapt-2.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:feff14b63a6d86c1eee33a57f77573649f2550935981625be7ff3cb7342efe05", size = 61805, upload-time = "2026-02-03T02:11:59.905Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/56/073989deb4b5d7d6e7ea424476a4ae4bda02140f2dbeaafb14ba4864dd60/wrapt-2.1.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:81fc5f22d5fcfdbabde96bb3f5379b9f4476d05c6d524d7259dc5dfb501d3281", size = 120308, upload-time = "2026-02-03T02:12:04.46Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b6/84f37261295e38167a29eb82affaf1dc15948dc416925fe2091beee8e4ac/wrapt-2.1.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:951b228ecf66def855d22e006ab9a1fc12535111ae7db2ec576c728f8ddb39e8", size = 122688, upload-time = "2026-02-03T02:11:23.148Z" }, + { url = "https://files.pythonhosted.org/packages/ea/80/32db2eec6671f80c65b7ff175be61bc73d7f5223f6910b0c921bbc4bd11c/wrapt-2.1.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0ddf582a95641b9a8c8bd643e83f34ecbbfe1b68bc3850093605e469ab680ae3", size = 121115, upload-time = "2026-02-03T02:12:39.068Z" }, + { url = "https://files.pythonhosted.org/packages/49/ef/dcd00383df0cd696614127902153bf067971a5aabcd3c9dcb2d8ef354b2a/wrapt-2.1.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:fc5c500966bf48913f795f1984704e6d452ba2414207b15e1f8c339a059d5b16", size = 119484, upload-time = "2026-02-03T02:11:48.419Z" }, + { url = "https://files.pythonhosted.org/packages/76/29/0630280cdd2bd8f86f35cb6854abee1c9d6d1a28a0c6b6417cd15d378325/wrapt-2.1.1-cp314-cp314-win32.whl", hash = "sha256:4aa4baadb1f94b71151b8e44a0c044f6af37396c3b8bcd474b78b49e2130a23b", size = 58514, upload-time = "2026-02-03T02:11:58.616Z" }, + { url = "https://files.pythonhosted.org/packages/db/19/5bed84f9089ed2065f6aeda5dfc4f043743f642bc871454b261c3d7d322b/wrapt-2.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:860e9d3fd81816a9f4e40812f28be4439ab01f260603c749d14be3c0a1170d19", size = 60763, upload-time = "2026-02-03T02:12:24.553Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/cb/b967f2f9669e4249b4fe82e630d2a01bc6b9e362b9b12ed91bbe23ae8df4/wrapt-2.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:3c59e103017a2c1ea0ddf589cbefd63f91081d7ce9d491d69ff2512bb1157e23", size = 59051, upload-time = "2026-02-03T02:11:29.602Z" }, + { url = "https://files.pythonhosted.org/packages/eb/19/6fed62be29f97eb8a56aff236c3f960a4b4a86e8379dc7046a8005901a97/wrapt-2.1.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:9fa7c7e1bee9278fc4f5dd8275bc8d25493281a8ec6c61959e37cc46acf02007", size = 63059, upload-time = "2026-02-03T02:12:06.368Z" }, + { url = "https://files.pythonhosted.org/packages/0a/1c/b757fd0adb53d91547ed8fad76ba14a5932d83dde4c994846a2804596378/wrapt-2.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:39c35e12e8215628984248bd9c8897ce0a474be2a773db207eb93414219d8469", size = 63618, upload-time = "2026-02-03T02:12:23.197Z" }, + { url = "https://files.pythonhosted.org/packages/10/fe/e5ae17b1480957c7988d991b93df9f2425fc51f128cf88144d6a18d0eb12/wrapt-2.1.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:94ded4540cac9125eaa8ddf5f651a7ec0da6f5b9f248fe0347b597098f8ec14c", size = 152544, upload-time = "2026-02-03T02:11:43.915Z" }, + { url = "https://files.pythonhosted.org/packages/3e/cc/99aed210c6b547b8a6e4cb9d1425e4466727158a6aeb833aa7997e9e08dd/wrapt-2.1.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:da0af328373f97ed9bdfea24549ac1b944096a5a71b30e41c9b8b53ab3eec04a", size = 158700, upload-time = "2026-02-03T02:12:30.684Z" }, + { url = "https://files.pythonhosted.org/packages/81/0e/d442f745f4957944d5f8ad38bc3a96620bfff3562533b87e486e979f3d99/wrapt-2.1.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:4ad839b55f0bf235f8e337ce060572d7a06592592f600f3a3029168e838469d3", size = 155561, upload-time = "2026-02-03T02:11:28.164Z" }, + { url = 
"https://files.pythonhosted.org/packages/51/ac/9891816280e0018c48f8dfd61b136af7b0dcb4a088895db2531acde5631b/wrapt-2.1.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0d89c49356e5e2a50fa86b40e0510082abcd0530f926cbd71cf25bee6b9d82d7", size = 150188, upload-time = "2026-02-03T02:11:57.053Z" }, + { url = "https://files.pythonhosted.org/packages/24/98/e2f273b6d70d41f98d0739aa9a269d0b633684a5fb17b9229709375748d4/wrapt-2.1.1-cp314-cp314t-win32.whl", hash = "sha256:f4c7dd22cf7f36aafe772f3d88656559205c3af1b7900adfccb70edeb0d2abc4", size = 60425, upload-time = "2026-02-03T02:11:35.007Z" }, + { url = "https://files.pythonhosted.org/packages/1e/06/b500bfc38a4f82d89f34a13069e748c82c5430d365d9e6b75afb3ab74457/wrapt-2.1.1-cp314-cp314t-win_amd64.whl", hash = "sha256:f76bc12c583ab01e73ba0ea585465a41e48d968f6d1311b4daec4f8654e356e3", size = 63855, upload-time = "2026-02-03T02:12:15.47Z" }, + { url = "https://files.pythonhosted.org/packages/d9/cc/5f6193c32166faee1d2a613f278608e6f3b95b96589d020f0088459c46c9/wrapt-2.1.1-cp314-cp314t-win_arm64.whl", hash = "sha256:7ea74fc0bec172f1ae5f3505b6655c541786a5cabe4bbc0d9723a56ac32eb9b9", size = 60443, upload-time = "2026-02-03T02:11:30.869Z" }, + { url = "https://files.pythonhosted.org/packages/c4/da/5a086bf4c22a41995312db104ec2ffeee2cf6accca9faaee5315c790377d/wrapt-2.1.1-py3-none-any.whl", hash = "sha256:3b0f4629eb954394a3d7c7a1c8cca25f0b07cefe6aa8545e862e9778152de5b7", size = 43886, upload-time = "2026-02-03T02:11:45.048Z" }, ] From 4341ce99f72f37ffc59af184df636403bd11128c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maty=C3=A1=C5=A1=20Jir=C3=A1t?= Date: Tue, 24 Feb 2026 16:54:56 +0100 Subject: [PATCH 3/7] Added VCR replay if cassettes file is found in data/cassettes --- src/keboola/component/base.py | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/src/keboola/component/base.py b/src/keboola/component/base.py index 468d3a2..de04c6c 100644 --- a/src/keboola/component/base.py +++ 
b/src/keboola/component/base.py @@ -237,10 +237,15 @@ def execute_action(self): Executes action defined in the configuration. The default action is 'run'. See base._SYNC_ACTION_MAPPING - When ``KBC_COMPONENT_RUN_MODE=debug`` is set (platform debug mode), - the action execution is automatically wrapped with VCR recording - so that HTTP interactions are captured for later replay in tests. + Auto-detects two special modes (transparent to the caller): + + - If ``{KBC_DATADIR}/cassettes/requests.json`` exists, replays HTTP + interactions from the cassette instead of hitting the real API. + - If ``KBC_COMPONENT_RUN_MODE=debug`` is set (platform debug mode), + records HTTP interactions via keboola.vcr for later use as a cassette. """ + if self._should_vcr_replay(): + return self._execute_with_vcr_replay() if self._should_vcr_record(): return self._execute_with_vcr_recording() return self._do_execute_action() @@ -259,6 +264,20 @@ def _do_execute_action(self): raise AttributeError(f"The defined action {action} is not implemented!") from e return action_method() + @staticmethod + def _should_vcr_replay() -> bool: + """Check if a VCR cassette exists at the default location in data/cassettes/.""" + data_dir = os.environ.get("KBC_DATADIR", "/data") + return (Path(data_dir) / "cassettes" / "requests.json").exists() + + def _execute_with_vcr_replay(self) -> None: + """Replay HTTP interactions from data/cassettes/requests.json.""" + from keboola.vcr import VCRRecorder + + data_dir = os.environ.get("KBC_DATADIR", "/data") + recorder = VCRRecorder(cassette_dir=Path(data_dir) / "cassettes") + recorder.replay(self._do_execute_action) + @staticmethod def _should_vcr_record(): """Check if running in platform debug mode.""" From 96a1116a15042593c71d4e48ae04b92fe618f92f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maty=C3=A1=C5=A1=20Jir=C3=A1t?= Date: Tue, 24 Feb 2026 17:15:14 +0100 Subject: [PATCH 4/7] Ruff check --fix --- src/keboola/component/base.py | 193 ++-- src/keboola/component/dao.py 
| 1026 ++++++++++----------- src/keboola/component/interface.py | 874 +++++++++--------- src/keboola/component/sync_actions.py | 19 +- src/keboola/component/table_schema.py | 14 +- tests/__init__.py | 2 +- tests/test_base.py | 39 +- tests/test_dao.py | 1096 ++++++++++++----------- tests/test_interface.py | 1175 +++++++++++++------------ tests/test_schema.py | 112 ++- tests/test_sync_actions.py | 9 +- 11 files changed, 2390 insertions(+), 2169 deletions(-) diff --git a/src/keboola/component/base.py b/src/keboola/component/base.py index de04c6c..99414f9 100644 --- a/src/keboola/component/base.py +++ b/src/keboola/component/base.py @@ -3,19 +3,17 @@ import logging import os import sys -from abc import ABC -from abc import abstractmethod +from abc import ABC, abstractmethod from functools import wraps from pathlib import Path -from typing import Dict -from typing import Union, List, Optional +from typing import Dict, List, Optional, Union from . import dao from . import table_schema as ts from .interface import CommonInterface from .sync_actions import SyncActionResult, process_sync_action_result -KEY_DEBUG = 'debug' +KEY_DEBUG = "debug" # Mapping of sync actions "action name":"method_name" _SYNC_ACTION_MAPPING = {"run": "run"} @@ -76,14 +74,14 @@ def test_connection(self): def decorate(func): # to allow pythonic names / action name mapping - if action_name == 'run': + if action_name == "run": raise ValueError('Sync action name "run" is reserved base action! 
Use different name.') _SYNC_ACTION_MAPPING[action_name] = func.__name__ @wraps(func) def action_wrapper(self, *args, **kwargs): # override when run as sync action, because it could be also called normally within run - is_sync_action = self.configuration.action != 'run' + is_sync_action = self.configuration.action != "run" # do operations with func if is_sync_action: @@ -119,10 +117,13 @@ def action_wrapper(self, *args, **kwargs): class ComponentBase(ABC, CommonInterface): - def __init__(self, data_path_override: Optional[str] = None, - schema_path_override: Optional[str] = None, - required_parameters: Optional[list] = None, - required_image_parameters: Optional[list] = None): + def __init__( + self, + data_path_override: Optional[str] = None, + schema_path_override: Optional[str] = None, + required_parameters: Optional[list] = None, + required_image_parameters: Optional[list] = None, + ): """ Base class for general Python components. Initializes the CommonInterface and performs configuration validation. @@ -171,7 +172,7 @@ def _get_default_data_path() -> str: Returns: """ - return Path(os.getcwd()).resolve().parent.joinpath('data').as_posix() + return Path(os.getcwd()).resolve().parent.joinpath("data").as_posix() def _get_data_folder_override_path(self, data_path_override: str = None) -> str: """ @@ -189,14 +190,14 @@ def _get_data_folder_override_path(self, data_path_override: str = None) -> str: data_folder_path = None if data_path_override: data_folder_path = data_path_override - elif not os.environ.get('KBC_DATADIR'): + elif not os.environ.get("KBC_DATADIR"): data_folder_path = self._get_default_data_path() return data_folder_path def _get_schema_folder_path(self, schema_path_override: str = None) -> str: """ - Returns value of the schema_folder_path in case the schema_path_override variable is provided or - the default schema_folder_path is found. 
+ Returns value of the schema_folder_path in case the schema_path_override variable is provided or + the default schema_folder_path is found. """ return schema_path_override or self._get_default_schema_folder_path() @@ -204,7 +205,7 @@ def _get_schema_folder_path(self, schema_path_override: str = None) -> str: @staticmethod def _get_default_schema_folder_path() -> Optional[str]: """ - Finds the default schema_folder_path if it exists. + Finds the default schema_folder_path if it exists. """ container_schema_dir = Path("./src/schemas/").absolute().as_posix() @@ -255,7 +256,7 @@ def _do_execute_action(self): action = self.configuration.action if not action: logging.warning("No action defined in the configuration, using the default run action.") - action = 'run' + action = "run" try: action = _SYNC_ACTION_MAPPING[action] @@ -286,17 +287,18 @@ def _should_vcr_record(): def _execute_with_vcr_recording(self): """Wrap action execution with VCR recording for debug runs.""" import inspect + from keboola.vcr import VCRRecorder module = inspect.getmodule(type(self)) VCRRecorder.record_debug_run( self._do_execute_action, - sanitizers=getattr(module, 'VCR_SANITIZERS', None), + sanitizers=getattr(module, "VCR_SANITIZERS", None), ) def _generate_table_metadata_legacy(self, table_schema: ts.TableSchema) -> dao.TableMetadata: """ - Generates a TableMetadata object for the table definition using a TableSchema object. + Generates a TableMetadata object for the table definition using a TableSchema object. 
""" table_metadata = dao.TableMetadata() @@ -306,69 +308,78 @@ def _generate_table_metadata_legacy(self, table_schema: ts.TableSchema) -> dao.T table_metadata = self._add_field_data_types_to_table_metadata(table_schema, table_metadata) return table_metadata - def create_out_table_definition_from_schema(self, table_schema: ts.TableSchema, is_sliced: bool = False, - destination: str = '', incremental: bool = None, - enclosure: str = '"', delimiter: str = ',', - delete_where: dict = None) -> dao.TableDefinition: + def create_out_table_definition_from_schema( + self, + table_schema: ts.TableSchema, + is_sliced: bool = False, + destination: str = "", + incremental: bool = None, + enclosure: str = '"', + delimiter: str = ",", + delete_where: dict = None, + ) -> dao.TableDefinition: """ - Creates an out table definition using a defined table schema. - This method uses the given table schema and generates metadata of the table. Along with the additional - key word arguments it creates an out table definition. - - Args: - table_schema : table of the schema for which a table definition will be created - is_sliced: True if the full_path points to a folder with sliced tables - destination: String name of the table in Storage. - incremental: Set to true to enable incremental loading - enclosure: str: CSV enclosure, by default " - delimiter: str: CSV delimiter, by default , - delete_where: Dict with settings for deleting rows - - Returns: - TableDefinition object initialized with all table metadata defined in a schema + Creates an out table definition using a defined table schema. + This method uses the given table schema and generates metadata of the table. Along with the additional + key word arguments it creates an out table definition. + + Args: + table_schema : table of the schema for which a table definition will be created + is_sliced: True if the full_path points to a folder with sliced tables + destination: String name of the table in Storage. 
+ incremental: Set to true to enable incremental loading + enclosure: str: CSV enclosure, by default " + delimiter: str: CSV delimiter, by default , + delete_where: Dict with settings for deleting rows + + Returns: + TableDefinition object initialized with all table metadata defined in a schema """ if self._expects_legacy_manifest(): table_metadata = self._generate_table_metadata_legacy(table_schema) - table_def = self.create_out_table_definition(name=table_schema.csv_name, - columns=table_schema.field_names, - primary_key=table_schema.primary_keys, - table_metadata=table_metadata, - is_sliced=is_sliced, - destination=destination, - incremental=incremental, - enclosure=enclosure, - delimiter=delimiter, - delete_where=delete_where) + table_def = self.create_out_table_definition( + name=table_schema.csv_name, + columns=table_schema.field_names, + primary_key=table_schema.primary_keys, + table_metadata=table_metadata, + is_sliced=is_sliced, + destination=destination, + incremental=incremental, + enclosure=enclosure, + delimiter=delimiter, + delete_where=delete_where, + ) else: schema = self._generate_schema_definition(table_schema) - table_def = self.create_out_table_definition(name=table_schema.csv_name, - primary_key=table_schema.primary_keys, - schema=schema, - is_sliced=is_sliced, - destination=destination, - incremental=incremental, - enclosure=enclosure, - delimiter=delimiter, - delete_where=delete_where, - description=table_schema.description) + table_def = self.create_out_table_definition( + name=table_schema.csv_name, + primary_key=table_schema.primary_keys, + schema=schema, + is_sliced=is_sliced, + destination=destination, + incremental=incremental, + enclosure=enclosure, + delimiter=delimiter, + delete_where=delete_where, + description=table_schema.description, + ) return table_def - def get_table_schema_by_name(self, schema_name: str, - schema_folder_path: Optional[str] = None) -> ts.TableSchema: + def get_table_schema_by_name(self, schema_name: str, 
schema_folder_path: Optional[str] = None) -> ts.TableSchema: """ - The method finds a table schema JSON based on it's name in a defined schema_folder_path and generates - a TableSchema object. + The method finds a table schema JSON based on it's name in a defined schema_folder_path and generates + a TableSchema object. - Args: - schema_name : name of the schema in the schema_folder_path. e.g. for schema in 'src/schemas/order.json' - schema_name is 'order' - schema_folder_path : directory path to the schema folder, by default the schema folder is set at - 'src/schemas' - Returns: - TableSchema object initialized with all available table metadata + Args: + schema_name : name of the schema in the schema_folder_path. e.g. for schema in 'src/schemas/order.json' + schema_name is 'order' + schema_folder_path : directory path to the schema folder, by default the schema folder is set at + 'src/schemas' + Returns: + TableSchema object initialized with all available table metadata """ @@ -381,53 +392,57 @@ def get_table_schema_by_name(self, schema_name: str, @staticmethod def _load_table_schema_dict(schema_name: str, schema_folder_path: str) -> Dict: try: - with open(os.path.join(schema_folder_path, f"{schema_name}.json"), 'r') as schema_file: + with open(os.path.join(schema_folder_path, f"{schema_name}.json"), "r") as schema_file: json_schema = json.loads(schema_file.read()) except FileNotFoundError as file_err: raise FileNotFoundError( f"Schema for corresponding schema name : {schema_name} is not found in the schema directory. 
" f"Make sure that '{schema_name}'.json " - f"exists in the directory '{schema_folder_path}'") from file_err + f"exists in the directory '{schema_folder_path}'" + ) from file_err return json_schema @staticmethod def _validate_schema_folder_path(schema_folder_path: str): if not schema_folder_path or not os.path.isdir(schema_folder_path): - raise FileNotFoundError("A schema folder path must be defined in order to create a out table definition " - "from a schema. If a schema folder path is not defined, the schemas folder must be" - " located in the 'src' directory of a component : src/schemas") + raise FileNotFoundError( + "A schema folder path must be defined in order to create a out table definition " + "from a schema. If a schema folder path is not defined, the schemas folder must be" + " located in the 'src' directory of a component : src/schemas" + ) def _generate_schema_definition(self, table_schema: ts.TableSchema) -> Dict[str, dao.ColumnDefinition]: """ - Generates a TableMetadata object for the table definition using a TableSchema object. + Generates a TableMetadata object for the table definition using a TableSchema object. 
""" column_definitions = {} for field in table_schema.fields: if field.base_type: - data_types = dao.BaseType(field.base_type, - length=field.length, - default=field.default) + data_types = dao.BaseType(field.base_type, length=field.length, default=field.default) else: data_types = dao.BaseType() - column_definitions[field.name] = dao.ColumnDefinition(data_types=data_types, - nullable=field.nullable, - description=field.description) + column_definitions[field.name] = dao.ColumnDefinition( + data_types=data_types, nullable=field.nullable, description=field.description + ) return column_definitions @staticmethod - def _add_field_data_types_to_table_metadata(table_schema: ts.TableSchema, - table_metadata: dao.TableMetadata) -> dao.TableMetadata: + def _add_field_data_types_to_table_metadata( + table_schema: ts.TableSchema, table_metadata: dao.TableMetadata + ) -> dao.TableMetadata: """ - Adds data types of all fields specified in a TableSchema object to a given TableMetadata object + Adds data types of all fields specified in a TableSchema object to a given TableMetadata object """ for field in table_schema.fields: if field.base_type: - table_metadata.add_column_data_type(field.name, - data_type=field.base_type, - nullable=field.nullable, - length=field.length, - default=field.default) + table_metadata.add_column_data_type( + field.name, + data_type=field.base_type, + nullable=field.nullable, + length=field.length, + default=field.default, + ) return table_metadata diff --git a/src/keboola/component/dao.py b/src/keboola/component/dao.py index 25d9f60..8cbed0e 100644 --- a/src/keboola/component/dao.py +++ b/src/keboola/component/dao.py @@ -11,7 +11,8 @@ from datetime import datetime from enum import Enum from pathlib import Path -from typing import List, Union, Dict, Optional, OrderedDict as TypeOrderedDict +from typing import Dict, List, Optional, Union +from typing import OrderedDict as TypeOrderedDict from deprecated import deprecated @@ -22,7 +23,7 @@ except 
ImportError: from typing_extensions import Literal -KBC_DEFAULT_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S%z' +KBC_DEFAULT_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S%z" @dataclass @@ -45,6 +46,7 @@ class EnvironmentVariables: Dataclass for variables available in the docker environment https://developers.keboola.com/extend/common-interface/environment/#environment-variables """ + data_dir: str run_id: str project_id: str @@ -73,13 +75,14 @@ class SupportedDataTypes(str, Enum): """ Enum of [supported datatypes](https://help.keboola.com/storage/tables/data-types/) """ - STRING = 'STRING' - INTEGER = 'INTEGER' - NUMERIC = 'NUMERIC' - FLOAT = 'FLOAT' - BOOLEAN = 'BOOLEAN' - DATE = 'DATE' - TIMESTAMP = 'TIMESTAMP' + + STRING = "STRING" + INTEGER = "INTEGER" + NUMERIC = "NUMERIC" + FLOAT = "FLOAT" + BOOLEAN = "BOOLEAN" + DATE = "DATE" + TIMESTAMP = "TIMESTAMP" @classmethod def list(cls): @@ -91,19 +94,19 @@ def is_valid_type(cls, data_type: str): class KBCMetadataKeys(Enum): - base_data_type = 'KBC.datatype.basetype' # base type of a column as defined in php-datatypes - source_data_type = 'KBC.datatype.type' # data type of a column - extracted value from the source - data_type_nullable = 'KBC.datatype.nullable' - data_type_length = 'KBC.datatype.length' # data type length (e.g., VARCHAR(255) - this is the 255 - data_type_default = 'KBC.datatype.default' - description = 'KBC.description' - created_by_component = 'KBC.createdBy.component.id' - last_updated_by_component = 'KBC.lastUpdatedBy.component.id' - createdBy_configuration_id = 'KBC.createdBy.configuration.id' - createdBy_branch_id = 'KBC.createdBy.branch.id' # ID of the branch whose job created the table/bucket - lastUpdatedBy_configuration_id = 'KBC.lastUpdatedBy.configuration.id' - lastUpdatedBy_branch_id = 'KBC.lastUpdatedBy.branch.id' # ID of the branch whose job last touched the bucket/table - shared_description = 'KBC.sharedDescription' # description of the bucket; + base_data_type = "KBC.datatype.basetype" # base type of a column 
as defined in php-datatypes + source_data_type = "KBC.datatype.type" # data type of a column - extracted value from the source + data_type_nullable = "KBC.datatype.nullable" + data_type_length = "KBC.datatype.length" # data type length (e.g., VARCHAR(255) - this is the 255 + data_type_default = "KBC.datatype.default" + description = "KBC.description" + created_by_component = "KBC.createdBy.component.id" + last_updated_by_component = "KBC.lastUpdatedBy.component.id" + createdBy_configuration_id = "KBC.createdBy.configuration.id" + createdBy_branch_id = "KBC.createdBy.branch.id" # ID of the branch whose job created the table/bucket + lastUpdatedBy_configuration_id = "KBC.lastUpdatedBy.configuration.id" + lastUpdatedBy_branch_id = "KBC.lastUpdatedBy.branch.id" # ID of the branch whose job last touched the bucket/table + shared_description = "KBC.sharedDescription" # description of the bucket; # it will be used when the bucket is shared @@ -166,27 +169,27 @@ def load_table_metadata_from_manifest(self, manifest: dict): """ - if manifest.get('schema') and ( - manifest.get('metadata') or manifest.get('column_metadata') or manifest.get('columns')): # noqa + if manifest.get("schema") and ( + manifest.get("metadata") or manifest.get("column_metadata") or manifest.get("columns") + ): # noqa raise UserException("Manifest can't contain new 'schema' and old 'metadata'/'column_metadata'/'columns'") - if not manifest.get('schema'): - + if not manifest.get("schema"): # column metadata - for column, metadata_list in manifest.get('column_metadata', {}).items(): + for column, metadata_list in manifest.get("column_metadata", {}).items(): for metadata in metadata_list: - if not metadata.get('key') and metadata.get('value'): + if not metadata.get("key") and metadata.get("value"): continue - key = metadata['key'] - value = metadata['value'] + key = metadata["key"] + value = metadata["value"] self.add_column_metadata(column, key, value) # table metadata - for metadata in 
manifest.get('metadata', []): - if not metadata.get('key') and metadata.get('value'): + for metadata in manifest.get("metadata", []): + if not metadata.get("key") and metadata.get("value"): continue - key = metadata['key'] - value = metadata['value'] + key = metadata["key"] + value = metadata["value"] self.add_table_metadata(key, value) def get_table_metadata_for_manifest(self, legacy_manifest: bool = False) -> List[dict]: @@ -203,27 +206,26 @@ def get_table_metadata_for_manifest(self, legacy_manifest: bool = False) -> List """ if legacy_manifest: - final_metadata_list = [{'key': key, - 'value': value} - for key, value in self.table_metadata.items() if value not in [None, '']] + final_metadata_list = [ + {"key": key, "value": value} for key, value in self.table_metadata.items() if value not in [None, ""] + ] else: - final_metadata_list = {key: value - for key, value in self.table_metadata.items() if value not in [None, '']} + final_metadata_list = {key: value for key, value in self.table_metadata.items() if value not in [None, ""]} return final_metadata_list - @deprecated(version='1.5.1', reason="Please use schema instead of Column Metadata") + @deprecated(version="1.5.1", reason="Please use schema instead of Column Metadata") def get_column_metadata_for_manifest(self) -> dict: """ - Returns column metadata dict as required by the - [manifest format](https://developers.keboola.com/extend/common-interface/manifest-files/#dataintables - -manifests) + Returns column metadata dict as required by the + [manifest format](https://developers.keboola.com/extend/common-interface/manifest-files/#dataintables + -manifests) - e.g. - tm = TableMetadata() - manifest['column_metadata'] = tm.column_metadata + e.g. 
+ tm = TableMetadata() + manifest['column_metadata'] = tm.column_metadata - Returns: dict + Returns: dict """ @@ -231,15 +233,15 @@ def get_column_metadata_for_manifest(self) -> dict: def _get_legacy_column_metadata_for_manifest(self) -> dict: """ - Returns column metadata dict as required by the - [manifest format](https://developers.keboola.com/extend/common-interface/manifest-files/#dataintables - -manifests) + Returns column metadata dict as required by the + [manifest format](https://developers.keboola.com/extend/common-interface/manifest-files/#dataintables + -manifests) - e.g. - tm = TableMetadata() - manifest['column_metadata'] = tm.column_metadata + e.g. + tm = TableMetadata() + manifest['column_metadata'] = tm.column_metadata - Returns: dict + Returns: dict """ final_column_metadata = dict() @@ -250,15 +252,15 @@ def _get_legacy_column_metadata_for_manifest(self) -> dict: if not final_column_metadata.get(column): final_column_metadata[column] = list() - column_metadata = [{'key': key, - 'value': value} for key, value in - column_metadata_dicts.items() if value not in [None, '']] + column_metadata = [ + {"key": key, "value": value} for key, value in column_metadata_dicts.items() if value not in [None, ""] + ] final_column_metadata[column].extend(column_metadata) return final_column_metadata @property - @deprecated(version='1.5.1', reason="Please use TableDefinition.description instead of TableMetadata") + @deprecated(version="1.5.1", reason="Please use TableDefinition.description instead of TableMetadata") def table_description(self) -> str: """ Returns table description (KBC.description) @@ -269,8 +271,11 @@ def table_description(self) -> str: return self.table_metadata.get(KBCMetadataKeys.description.value) @property - @deprecated(version='1.5.1', reason="Column datatypes were moved to dao.TableDefinition.schema property." 
- "Please use the dao.ColumnDefinition objects") + @deprecated( + version="1.5.1", + reason="Column datatypes were moved to dao.TableDefinition.schema property." + "Please use the dao.ColumnDefinition objects", + ) def column_datatypes(self) -> dict: """ Return dictionary of column base datatypes @@ -283,8 +288,11 @@ def column_datatypes(self) -> dict: return self.get_columns_metadata_by_key(KBCMetadataKeys.base_data_type.value) @property - @deprecated(version='1.5.1', reason="Column datatypes were moved to dao.TableDefinition.schema property." - " Please use the dao.ColumnDefinition objects") + @deprecated( + version="1.5.1", + reason="Column datatypes were moved to dao.TableDefinition.schema property." + " Please use the dao.ColumnDefinition objects", + ) def column_descriptions(self) -> dict: """ Return dictionary of column descriptions @@ -296,7 +304,7 @@ def column_descriptions(self) -> dict: return self.get_columns_metadata_by_key(KBCMetadataKeys.description.value) - @deprecated(version='1.5.1', reason="Please use schema instead of Table Metadata") + @deprecated(version="1.5.1", reason="Please use schema instead of Table Metadata") def get_columns_metadata_by_key(self, metadata_key) -> dict: """ Returns all columns with specified metadata_key as dictionary of column:metadata_key pairs @@ -314,19 +322,22 @@ def get_columns_metadata_by_key(self, metadata_key) -> dict: def add_column_descriptions(self, column_descriptions: dict): """ - Add column description metadata. It will be shown in the KBC Storage UI. + Add column description metadata. It will be shown in the KBC Storage UI. - Args: - column_descriptions: dict -> {"colname":"description"} + Args: + column_descriptions: dict -> {"colname":"description"} - """ + """ for col in column_descriptions: self.add_column_metadata(col, KBCMetadataKeys.description.value, column_descriptions[col]) - @deprecated(version='1.5.1', reason="Column datatypes were moved to dao.TableDefinition.schema property." 
- "Please use the dao.ColumnDefinition objects and associated" - "dao.TableDefinition methods to define columns. e.g." - "dao.TableDefinition.add_columns()") + @deprecated( + version="1.5.1", + reason="Column datatypes were moved to dao.TableDefinition.schema property." + "Please use the dao.ColumnDefinition objects and associated" + "dao.TableDefinition methods to define columns. e.g." + "dao.TableDefinition.add_columns()", + ) def add_column_data_types(self, column_types: Dict[str, Union[SupportedDataTypes, str]]): """ Add column types metadata. Note that only supported datatypes @@ -343,14 +354,22 @@ def add_column_data_types(self, column_types: Dict[str, Union[SupportedDataTypes for col in column_types: self.add_column_data_type(col, column_types[col]) - @deprecated(version='1.5.1', reason="Column datatypes were moved to dao.TableDefinition.schema property." - "Please use the dao.ColumnDefinition objects and associated" - "dao.TableDefinition methods to define columns. e.g." - "dao.TableDefinition.add_column()") - def add_column_data_type(self, column: str, data_type: Union[SupportedDataTypes, str], - source_data_type: str = None, - nullable: bool = False, - length: str = None, default=None): + @deprecated( + version="1.5.1", + reason="Column datatypes were moved to dao.TableDefinition.schema property." + "Please use the dao.ColumnDefinition objects and associated" + "dao.TableDefinition methods to define columns. e.g." + "dao.TableDefinition.add_column()", + ) + def add_column_data_type( + self, + column: str, + data_type: Union[SupportedDataTypes, str], + source_data_type: str = None, + nullable: bool = False, + length: str = None, + default=None, + ): """ Add single column data type Args: @@ -399,16 +418,19 @@ def add_table_description(self, description: str): def add_table_metadata(self, key: str, value: str): """ - Add/Updates table metadata and ensures the Key is unique. - Args: + Add/Updates table metadata and ensures the Key is unique. 
+        Args: 
 
         """
         if value is None:
             return
 
         self.table_metadata = {**self.table_metadata, **{key: value}}
 
-    @deprecated(version='1.5.1', reason="Column metadata ere moved to dao.TableDefinition.schema property."
-                                        "Please use the dao.ColumnDefinition.metadata")
+    @deprecated(
+        version="1.5.1",
+        reason="Column metadata were moved to dao.TableDefinition.schema property."
+        "Please use the dao.ColumnDefinition.metadata",
+    )
     def add_column_metadata(self, column: str, key: str, value: Union[str, bool, int], backend="base"):
         """
         Add/Updates column metadata and ensures the Key is unique.
@@ -424,8 +446,11 @@ def add_column_metadata(self, column: str, key: str, value: Union[str, bool, int
 
         # self.schema = [ColumnDefinition(name=column, data_type={backend: DataType(type=value)})]
 
-    @deprecated(version='1.5.1', reason="Column metadata ere moved to dao.TableDefinition.schema property."
-                                        "Please use the dao.ColumnDefinition.metadata")
+    @deprecated(
+        version="1.5.1",
+        reason="Column metadata were moved to dao.TableDefinition.schema property."
+        "Please use the dao.ColumnDefinition.metadata",
+    )
     def add_multiple_column_metadata(self, column_metadata: Dict[str, List[dict]]):
         """
         Add key-value pairs to column metadata. 
@@ -448,7 +473,7 @@ def _validate_data_types(column_types: dict): if not SupportedDataTypes.is_valid_type(dtype): errors.append(f'Datatype "{dtype}" is not valid KBC Basetype!') if errors: - raise ValueError(', '.join(errors) + f'\n Supported base types are: [{SupportedDataTypes.list()}]') + raise ValueError(", ".join(errors) + f"\n Supported base types are: [{SupportedDataTypes.list()}]") @dataclass @@ -463,39 +488,40 @@ def __post_init__(self): class BaseType(dict): - def __init__(self, dtype: SupportedDataTypes = SupportedDataTypes.STRING, length: Optional[str] = None, - default: Optional[str] = None): + def __init__( + self, + dtype: SupportedDataTypes = SupportedDataTypes.STRING, + length: Optional[str] = None, + default: Optional[str] = None, + ): super().__init__(base=DataType(dtype=dtype, length=length, default=default)) @classmethod - def string(cls, length: Optional[str] = None, default: Optional[str] = None) -> 'BaseType': + def string(cls, length: Optional[str] = None, default: Optional[str] = None) -> "BaseType": return BaseType(dtype=SupportedDataTypes.STRING, length=length, default=default) @classmethod - def integer(cls, length: Optional[str] = None, - default: Optional[str] = None) -> 'BaseType': + def integer(cls, length: Optional[str] = None, default: Optional[str] = None) -> "BaseType": return BaseType(dtype=SupportedDataTypes.INTEGER, length=length, default=default) @classmethod - def numeric(cls, length: Optional[str] = None, - default: Optional[str] = None) -> 'BaseType': + def numeric(cls, length: Optional[str] = None, default: Optional[str] = None) -> "BaseType": return BaseType(dtype=SupportedDataTypes.NUMERIC, length=length, default=default) @classmethod - def float(cls, length: Optional[str] = None, - default: Optional[str] = None) -> 'BaseType': + def float(cls, length: Optional[str] = None, default: Optional[str] = None) -> "BaseType": return BaseType(dtype=SupportedDataTypes.FLOAT, length=length, default=default) @classmethod - def 
boolean(cls, default: Optional[str] = None) -> 'BaseType': + def boolean(cls, default: Optional[str] = None) -> "BaseType": return BaseType(dtype=SupportedDataTypes.BOOLEAN, default=default) @classmethod - def date(cls, default: Optional[str] = None) -> 'BaseType': + def date(cls, default: Optional[str] = None) -> "BaseType": return BaseType(dtype=SupportedDataTypes.DATE, default=default) @classmethod - def timestamp(cls, default: Optional[str] = None) -> 'BaseType': + def timestamp(cls, default: Optional[str] = None) -> "BaseType": return BaseType(dtype=SupportedDataTypes.TIMESTAMP, default=default) @@ -513,6 +539,7 @@ class ColumnDefinition: description (Optional[str]): A description of the column's purpose or contents. Defaults to None. metadata (Optional[Dict[str, str]]): Additional metadata associated with the column. Defaults to None. """ + data_types: Optional[Union[Dict[str, DataType], BaseType]] = field(default_factory=lambda: BaseType()) nullable: Optional[bool] = True primary_key: Optional[bool] = False @@ -528,12 +555,15 @@ def update_properties(self, **kwargs): def from_dict(self, col: dict): return ColumnDefinition( - data_types={key: DataType(dtype=v.get('type'), default=v.get('default'), length=v.get('length')) - for key, v in col.get('data_type', {}).items()}, - nullable=col.get('nullable'), - primary_key=col.get('primary_key'), - description=col.get('description'), - metadata=col.get('metadata')) + data_types={ + key: DataType(dtype=v.get("type"), default=v.get("default"), length=v.get("length")) + for key, v in col.get("data_type", {}).items() + }, + nullable=col.get("nullable"), + primary_key=col.get("primary_key"), + description=col.get("description"), + metadata=col.get("metadata"), + ) def add_datatype(self, backend: str, data_type: DataType): if backend in self.data_types: @@ -551,16 +581,17 @@ def to_dict(self, name: str): for key, value in self.data_types.items(): datatypes_dict[key] = dataclasses.asdict(value) - datatypes_dict = {key: 
{k.replace('dtype', 'type'): v for k, v in value.items()} - for key, value in datatypes_dict.items()} + datatypes_dict = { + key: {k.replace("dtype", "type"): v for k, v in value.items()} for key, value in datatypes_dict.items() + } result = { - 'name': name, - 'data_type': datatypes_dict, - 'nullable': self.nullable, - 'primary_key': self.primary_key, - 'description': self.description, - 'metadata': self.metadata + "name": name, + "data_type": datatypes_dict, + "nullable": self.nullable, + "primary_key": self.primary_key, + "description": self.description, + "metadata": self.metadata, } # TODO: tohle bych delal az pri zapisu manifestu celkove, chceme vyhodit None values, false nechat filtered = {k: v for k, v in result.items() if v not in [False]} @@ -575,50 +606,50 @@ class SupportedManifestAttributes(SubscriptableDataclass): out_legacy_exclude: List[str] = dataclasses.field(default_factory=lambda: []) in_legacy_exclude: List[str] = dataclasses.field(default_factory=lambda: []) - def get_attributes_by_stage(self, stage: Literal['in', 'out'], legacy_queue: bool = False, - legacy_manifest: bool = False) -> List[str]: - if stage == 'out': + def get_attributes_by_stage( + self, stage: Literal["in", "out"], legacy_queue: bool = False, legacy_manifest: bool = False + ) -> List[str]: + if stage == "out": attributes = self.out_attributes exclude = self.out_legacy_exclude if not legacy_manifest: - to_remove = ['primary_key', 'columns', 'distribution_key', 'column_metadata', 'metadata'] + to_remove = ["primary_key", "columns", "distribution_key", "column_metadata", "metadata"] attributes = list(set(attributes).difference(to_remove)) - to_add = ['manifest_type', 'has_header', 'table_metadata', 'schema'] + to_add = ["manifest_type", "has_header", "table_metadata", "schema"] attributes.extend(to_add) - elif stage == 'in': + elif stage == "in": attributes = self.in_attributes exclude = self.in_legacy_exclude else: - raise ValueError(f'Unsupported stage {stage}') + raise 
ValueError(f"Unsupported stage {stage}") if legacy_queue: - logging.warning(f'Running on legacy queue some manifest properties will be ignored: {exclude}') + logging.warning(f"Running on legacy queue some manifest properties will be ignored: {exclude}") attributes = list(set(attributes).difference(exclude)) return attributes class IODefinition(ABC): - def __init__(self, full_path): self.full_path = full_path @classmethod - def build_from_manifest(cls, - manifest_file_path: str - ): + def build_from_manifest(cls, manifest_file_path: str): raise NotImplementedError - def _filter_attributes_by_manifest_type(self, manifest_type: Literal["in", "out"], legacy_queue: bool = False, - native_types: bool = False): + def _filter_attributes_by_manifest_type( + self, manifest_type: Literal["in", "out"], legacy_queue: bool = False, native_types: bool = False + ): raise NotImplementedError - def get_manifest_dictionary(self, manifest_type: Optional[str] = None, legacy_queue: bool = False, - legacy_manifest: Optional[bool] = None) -> dict: + def get_manifest_dictionary( + self, manifest_type: Optional[str] = None, legacy_queue: bool = False, legacy_manifest: Optional[bool] = None + ) -> dict: raise NotImplementedError @property @@ -630,7 +661,7 @@ def stage(self) -> str: @stage.setter def stage(self, stage: str): - if stage not in ['in', 'out']: + if stage not in ["in", "out"]: raise ValueError(f'Invalid stage "{stage}", supported values are: "in", "out"') self._stage = stage @@ -675,14 +706,15 @@ class ABSStaging: def s3_staging(self) -> Union[S3Staging, None]: s3 = self._s3 if s3: - return IODefinition.S3Staging(is_sliced=s3['isSliced'], - region=s3['region'], - bucket=s3['bucket'], - key=s3['key'], - credentials_access_key_id=s3['credentials']['access_key_id'], - credentials_secret_access_key=s3['credentials']['secret_access_key'], - credentials_session_token=s3['credentials']['session_token'] - ) + return IODefinition.S3Staging( + is_sliced=s3["isSliced"], + 
region=s3["region"], + bucket=s3["bucket"], + key=s3["key"], + credentials_access_key_id=s3["credentials"]["access_key_id"], + credentials_secret_access_key=s3["credentials"]["secret_access_key"], + credentials_session_token=s3["credentials"]["session_token"], + ) else: return None @@ -690,14 +722,14 @@ def s3_staging(self) -> Union[S3Staging, None]: def abs_staging(self) -> Union[ABSStaging, None]: _abs = self._abs if _abs: - return IODefinition.ABSStaging(is_sliced=_abs['is_sliced'], - region=_abs['region'], - container=_abs['container'], - name=_abs['name'], - credentials_sas_connection_string=_abs['credentials'][ - 'sas_connection_string'], - credentials_expiration=_abs['credentials']['expiration'] - ) + return IODefinition.ABSStaging( + is_sliced=_abs["is_sliced"], + region=_abs["region"], + container=_abs["container"], + name=_abs["name"], + credentials_sas_connection_string=_abs["credentials"]["sas_connection_string"], + credentials_expiration=_abs["credentials"]["expiration"], + ) else: return None @@ -757,7 +789,7 @@ class TableDefinition(IODefinition): "data_size_bytes", "is_alias", "attributes", - "indexed_columns" + "indexed_columns", ] OUTPUT_MANIFEST_ATTRIBUTES = [ @@ -775,35 +807,33 @@ class TableDefinition(IODefinition): "delete_where_operator", ] - OUTPUT_MANIFEST_LEGACY_EXCLUDES = [ - "write_always" - ] + OUTPUT_MANIFEST_LEGACY_EXCLUDES = ["write_always"] - MANIFEST_ATTRIBUTES = {'in': INPUT_MANIFEST_ATTRIBUTES, - 'out': OUTPUT_MANIFEST_ATTRIBUTES} + MANIFEST_ATTRIBUTES = {"in": INPUT_MANIFEST_ATTRIBUTES, "out": OUTPUT_MANIFEST_ATTRIBUTES} SCHEMA_TYPE = Union[Dict[str, ColumnDefinition], TypeOrderedDict[str, ColumnDefinition], List[str]] - def __init__(self, name: str, - full_path: Optional[Union[str, None]] = None, - is_sliced: Optional[bool] = False, - destination: Optional[str] = '', - primary_key: Optional[List[str]] = None, - schema: SCHEMA_TYPE = None, - incremental: Optional[bool] = None, - table_metadata: Optional[TableMetadata] = None, - 
enclosure: Optional[str] = '"', - delimiter: Optional[str] = ',', - delete_where: Optional[dict] = None, - stage: Optional[str] = 'out', - write_always: Optional[bool] = False, - has_header: Optional[bool] = None, - description: Optional[str] = None, - - # storage staging - s3: Optional[dict] = None, - abs: Optional[dict] = None, - **kwargs - ): + def __init__( + self, + name: str, + full_path: Optional[Union[str, None]] = None, + is_sliced: Optional[bool] = False, + destination: Optional[str] = "", + primary_key: Optional[List[str]] = None, + schema: SCHEMA_TYPE = None, + incremental: Optional[bool] = None, + table_metadata: Optional[TableMetadata] = None, + enclosure: Optional[str] = '"', + delimiter: Optional[str] = ",", + delete_where: Optional[dict] = None, + stage: Optional[str] = "out", + write_always: Optional[bool] = False, + has_header: Optional[bool] = None, + description: Optional[str] = None, + # storage staging + s3: Optional[dict] = None, + abs: Optional[dict] = None, + **kwargs, + ): """ Args: @@ -844,10 +874,10 @@ def __init__(self, name: str, self.schema = schema # deprecated argument for backward compatibility self._legacy_mode = False - if kwargs.get('force_legacy_mode'): + if kwargs.get("force_legacy_mode"): self._legacy_mode = True - if kwargs.get('columns'): - self.columns = kwargs['columns'] + if kwargs.get("columns"): + self.columns = kwargs["columns"] self._legacy_primary_key = list() self.primary_key = primary_key @@ -866,29 +896,29 @@ def __init__(self, name: str, self.delete_where_column = None self.delete_where_operator = None - if kwargs.get('delete_where_values'): - self.delete_where_values = kwargs['delete_where_values'] + if kwargs.get("delete_where_values"): + self.delete_where_values = kwargs["delete_where_values"] - if kwargs.get('delete_where_column'): - self.delete_where_column = kwargs['delete_where_column'] + if kwargs.get("delete_where_column"): + self.delete_where_column = kwargs["delete_where_column"] - if 
kwargs.get('delete_where_operator'): - self.delete_where_operator = kwargs['delete_where_operator'] + if kwargs.get("delete_where_operator"): + self.delete_where_operator = kwargs["delete_where_operator"] self.set_delete_where_from_dict(delete_where) self.write_always = write_always # input manifest properties - self._id = kwargs.get('id') - self._uri = kwargs.get('uri') - self._created = kwargs.get('created') - self._last_change_date = kwargs.get('last_change_date') - self._last_import_date = kwargs.get('last_import_date') - self._rows_count = kwargs.get('rows_count') - self._data_size_bytes = kwargs.get('data_size_bytes') - self._is_alias = kwargs.get('is_alias') - self._indexed_columns = kwargs.get('indexed_columns') - self._attributes = kwargs.get('attributes') + self._id = kwargs.get("id") + self._uri = kwargs.get("uri") + self._created = kwargs.get("created") + self._last_change_date = kwargs.get("last_change_date") + self._last_import_date = kwargs.get("last_import_date") + self._rows_count = kwargs.get("rows_count") + self._data_size_bytes = kwargs.get("data_size_bytes") + self._is_alias = kwargs.get("is_alias") + self._indexed_columns = kwargs.get("indexed_columns") + self._attributes = kwargs.get("attributes") self.stage = stage self.has_header = has_header or self._has_header_in_file() @@ -899,24 +929,26 @@ def __init__(self, name: str, def __get_stage_inferred(self): if self._uri: - return 'in' - return 'out' + return "in" + return "out" @classmethod - def build_output_definition(cls, name: str, - destination: Optional[str] = '', - columns: Optional[List[str]] = None, - primary_key: Optional[List[str]] = None, - incremental: Optional[bool] = False, - table_metadata: Optional[TableMetadata] = None, - enclosure: Optional[str] = '"', - delimiter: Optional[str] = ',', - delete_where: Optional[dict] = None, - write_always: Optional[bool] = False, - schema: SCHEMA_TYPE = None, - description: Optional[str] = None, - **kwargs - ): + def build_output_definition( 
+ cls, + name: str, + destination: Optional[str] = "", + columns: Optional[List[str]] = None, + primary_key: Optional[List[str]] = None, + incremental: Optional[bool] = False, + table_metadata: Optional[TableMetadata] = None, + enclosure: Optional[str] = '"', + delimiter: Optional[str] = ",", + delete_where: Optional[dict] = None, + write_always: Optional[bool] = False, + schema: SCHEMA_TYPE = None, + description: Optional[str] = None, + **kwargs, + ): """ Factory method for creating a TableDefinition instance for output tables. @@ -940,53 +972,53 @@ def build_output_definition(cls, name: str, Returns: TableDefinition: An instance of TableDefinition configured for output tables. """ - return cls(name=name, - destination=destination, - columns=columns, - primary_key=primary_key, - incremental=incremental, - table_metadata=table_metadata, - enclosure=enclosure, - delimiter=delimiter, - delete_where=delete_where, - write_always=write_always, - schema=schema, - description=description, - **kwargs - ) + return cls( + name=name, + destination=destination, + columns=columns, + primary_key=primary_key, + incremental=incremental, + table_metadata=table_metadata, + enclosure=enclosure, + delimiter=delimiter, + delete_where=delete_where, + write_always=write_always, + schema=schema, + description=description, + **kwargs, + ) @classmethod - def build_input_definition(cls, name: str, - full_path: Optional[Union[str, None]] = None, - is_sliced: Optional[bool] = False, - - destination: Optional[str] = '', - primary_key: Optional[List[str]] = None, - columns: Optional[List[str]] = None, - incremental: Optional[bool] = None, - table_metadata: Optional[TableMetadata] = None, - enclosure: Optional[str] = '"', - delimiter: Optional[str] = ',', - delete_where: Optional[dict] = None, - stage: Optional[str] = 'in', - write_always: Optional[bool] = False, - schema: Optional[Union[TypeOrderedDict[str, ColumnDefinition], list[str]]] = None, - rows_count: Optional[int] = None, - 
data_size_bytes: Optional[int] = None, - is_alias: Optional[bool] = False, - - # input - uri: Optional[str] = None, - id: Optional[str] = '', - created: Optional[str] = None, - last_change_date: Optional[str] = None, - last_import_date: Optional[str] = None, - - # storage staging - s3: Optional[dict] = None, - abs: Optional[dict] = None, - **kwargs - ): + def build_input_definition( + cls, + name: str, + full_path: Optional[Union[str, None]] = None, + is_sliced: Optional[bool] = False, + destination: Optional[str] = "", + primary_key: Optional[List[str]] = None, + columns: Optional[List[str]] = None, + incremental: Optional[bool] = None, + table_metadata: Optional[TableMetadata] = None, + enclosure: Optional[str] = '"', + delimiter: Optional[str] = ",", + delete_where: Optional[dict] = None, + stage: Optional[str] = "in", + write_always: Optional[bool] = False, + schema: Optional[Union[TypeOrderedDict[str, ColumnDefinition], list[str]]] = None, + rows_count: Optional[int] = None, + data_size_bytes: Optional[int] = None, + is_alias: Optional[bool] = False, + # input + uri: Optional[str] = None, + id: Optional[str] = "", + created: Optional[str] = None, + last_change_date: Optional[str] = None, + last_import_date: Optional[str] = None, + # storage staging + s3: Optional[dict] = None, + abs: Optional[dict] = None, + **kwargs, + ): """ Factory method for creating a TableDefinition instance for input tables. @@ -1022,56 +1054,57 @@ def build_input_definition(cls, name: str, Returns: TableDefinition: An instance of TableDefinition configured for input tables. 
""" - return cls(name=name, - full_path=full_path, - is_sliced=is_sliced, - destination=destination, - primary_key=primary_key, - columns=columns, - incremental=incremental, - table_metadata=table_metadata, - enclosure=enclosure, - delimiter=delimiter, - delete_where=delete_where, - stage=stage, - write_always=write_always, - schema=schema, - rows_count=rows_count, - data_size_bytes=data_size_bytes, - is_alias=is_alias, - uri=uri, - id=id, - created=created, - last_change_date=last_change_date, - last_import_date=last_import_date, - s3=s3, - abs=abs, - **kwargs - ) + return cls( + name=name, + full_path=full_path, + is_sliced=is_sliced, + destination=destination, + primary_key=primary_key, + columns=columns, + incremental=incremental, + table_metadata=table_metadata, + enclosure=enclosure, + delimiter=delimiter, + delete_where=delete_where, + stage=stage, + write_always=write_always, + schema=schema, + rows_count=rows_count, + data_size_bytes=data_size_bytes, + is_alias=is_alias, + uri=uri, + id=id, + created=created, + last_change_date=last_change_date, + last_import_date=last_import_date, + s3=s3, + abs=abs, + **kwargs, + ) @classmethod def convert_to_column_definition(cls, column_name, column_metadata, primary_key=False): - data_type = {'base': DataType(dtype='STRING')} + data_type = {"base": DataType(dtype="STRING")} nullable = True for item in column_metadata: - if item['key'] == 'KBC.datatype.basetype': - data_type = {'base': DataType(dtype=item['value'])} - elif item['key'] == 'KBC.datatype.nullable': - nullable = item['value'] + if item["key"] == "KBC.datatype.basetype": + data_type = {"base": DataType(dtype=item["value"])} + elif item["key"] == "KBC.datatype.nullable": + nullable = item["value"] return ColumnDefinition(data_types=data_type, nullable=nullable, primary_key=primary_key) @classmethod def return_schema_from_manifest(cls, json_data): if TableDefinition.is_new_manifest(json_data): schema = OrderedDict() - for col in json_data.get('schema'): + for 
col in json_data.get("schema"): schema[col.get("name")] = ColumnDefinition().from_dict(col) else: # legacy support - columns_metadata = json_data.get('column_metadata', {}) - primary_key = json_data.get('primary_key', []) - columns = json_data.get('columns', []) + columns_metadata = json_data.get("column_metadata", {}) + primary_key = json_data.get("primary_key", []) + columns = json_data.get("columns", []) all_columns = columns schema = OrderedDict() @@ -1087,12 +1120,10 @@ def return_schema_from_manifest(cls, json_data): @classmethod def is_new_manifest(cls, json_data): - return json_data.get('schema') + return json_data.get("schema") @classmethod - def build_from_manifest(cls, - manifest_file_path: str - ): + def build_from_manifest(cls, manifest_file_path: str): """ Factory method for TableDefinition from the raw "manifest" path. @@ -1118,16 +1149,17 @@ def build_from_manifest(cls, with open(manifest_file_path) as in_file: manifest = json.load(in_file) - file_path = Path(manifest_file_path.replace('.manifest', '')) + file_path = Path(manifest_file_path.replace(".manifest", "")) if file_path.is_dir() and manifest: is_sliced = True elif file_path.is_dir() and not manifest: # skip folders that do not have matching manifest - raise ValueError(f'The manifest {manifest_file_path} does not exist ' - f'and it'f's matching file {file_path} is folder!') + raise ValueError( + f"The manifest {manifest_file_path} does not exist and its matching file {file_path} is folder!" 
+ ) elif not file_path.exists() and not manifest: - raise ValueError(f'Nor the manifest file or the corresponding file {file_path} exist!') + raise ValueError(f"Nor the manifest file or the corresponding file {file_path} exist!") if file_path.exists(): full_path = str(file_path) @@ -1135,78 +1167,75 @@ def build_from_manifest(cls, else: name = Path(manifest_file_path).stem - if manifest.get('name'): - name = manifest.get('name') + if manifest.get("name"): + name = manifest.get("name") # test if the manifest is output and incompatible force_legacy_mode = False - if not manifest.get('columns') and manifest.get('primary_key'): - warnings.warn('Primary key is set but columns are not. Forcing legacy mode for CSV file.', - DeprecationWarning) + if not manifest.get("columns") and manifest.get("primary_key"): + warnings.warn( + "Primary key is set but columns are not. Forcing legacy mode for CSV file.", DeprecationWarning + ) force_legacy_mode = True - if manifest.get('id'): - stage = 'in' + if manifest.get("id"): + stage = "in" table_def = cls.build_input_definition( # helper parameters stage=stage, force_legacy_mode=force_legacy_mode, is_sliced=is_sliced, full_path=full_path, - # basic in manifest parameters - id=manifest.get('id'), - uri=manifest.get('uri'), + id=manifest.get("id"), + uri=manifest.get("uri"), name=name, - primary_key=manifest.get('primary_key'), - created=manifest.get('created'), - last_change_date=manifest.get('last_change_date'), - last_import_date=manifest.get('last_import_date'), + primary_key=manifest.get("primary_key"), + created=manifest.get("created"), + last_change_date=manifest.get("last_change_date"), + last_import_date=manifest.get("last_import_date"), schema=cls.return_schema_from_manifest(manifest), table_metadata=TableMetadata(manifest), - # additional in manifest parameters - rows_count=manifest.get('rows_count'), - data_size_bytes=manifest.get('data_size_bytes'), - is_alias=manifest.get('is_alias'), - 
attributes=manifest.get('attributes'), - indexed_columns=manifest.get('indexed_columns'), - + rows_count=manifest.get("rows_count"), + data_size_bytes=manifest.get("data_size_bytes"), + is_alias=manifest.get("is_alias"), + attributes=manifest.get("attributes"), + indexed_columns=manifest.get("indexed_columns"), # storage staging - s3=manifest.get('s3'), - abs=manifest.get('abs'), + s3=manifest.get("s3"), + abs=manifest.get("abs"), ) else: - stage = 'out' + stage = "out" table_def = cls.build_output_definition( # helper parameters stage=stage, force_legacy_mode=force_legacy_mode, is_sliced=is_sliced, full_path=full_path, - # basic out manifest parameters name=name, - destination=manifest.get('destination'), + destination=manifest.get("destination"), schema=cls.return_schema_from_manifest(manifest), - incremental=manifest.get('incremental', False), - primary_key=manifest.get('primary_key'), - write_always=manifest.get('write_always', False), - delimiter=manifest.get('delimiter', ','), - enclosure=manifest.get('enclosure', '"'), + incremental=manifest.get("incremental", False), + primary_key=manifest.get("primary_key"), + write_always=manifest.get("write_always", False), + delimiter=manifest.get("delimiter", ","), + enclosure=manifest.get("enclosure", '"'), table_metadata=TableMetadata(manifest), - # additional in manifest parameters - delete_where_values=manifest.get('delete_where_values'), - delete_where_column=manifest.get('delete_where_column'), - delete_where_operator=manifest.get('delete_where_operator') + delete_where_values=manifest.get("delete_where_values"), + delete_where_column=manifest.get("delete_where_column"), + delete_where_operator=manifest.get("delete_where_operator"), ) return table_def - def get_manifest_dictionary(self, manifest_type: Optional[str] = None, legacy_queue: bool = False, - legacy_manifest: Optional[bool] = None) -> dict: + def get_manifest_dictionary( + self, manifest_type: Optional[str] = None, legacy_queue: bool = False, 
legacy_manifest: Optional[bool] = None + ) -> dict: """ Returns manifest dictionary in appropriate manifest_type: either 'in' or 'out'. By default, returns output manifest. @@ -1242,11 +1271,7 @@ def get_manifest_dictionary(self, manifest_type: Optional[str] = None, legacy_qu def _filter_dictionary(self, data): if isinstance(data, dict): - return { - k: self._filter_dictionary(v) - for k, v in data.items() - if v not in (None, [], {}, "") - } + return {k: self._filter_dictionary(v) for k, v in data.items() if v not in (None, [], {}, "")} elif isinstance(data, list): return [self._filter_dictionary(item) for item in data if item not in (None, [], {}, "")] else: @@ -1254,8 +1279,9 @@ def _filter_dictionary(self, data): # Usage - def _filter_attributes_by_manifest_type(self, manifest_type: Literal["in", "out"], legacy_queue: bool = False, - legacy_manifest: bool = False): + def _filter_attributes_by_manifest_type( + self, manifest_type: Literal["in", "out"], legacy_queue: bool = False, legacy_manifest: bool = False + ): """ Filter manifest to contain only supported fields Args: @@ -1265,41 +1291,42 @@ def _filter_attributes_by_manifest_type(self, manifest_type: Literal["in", "out" """ - supported_fields = self._manifest_attributes.get_attributes_by_stage(manifest_type, legacy_queue, - legacy_manifest) + supported_fields = self._manifest_attributes.get_attributes_by_stage( + manifest_type, legacy_queue, legacy_manifest + ) fields = { - 'id': self.id, - 'uri': self._uri, - 'name': self.name, - 'created': self._created, - 'last_change_date': self._last_change_date, - 'last_import_date': self._last_import_date, - 'rows_count': self._rows_count, - 'data_size_bytes': self._data_size_bytes, - 'is_alias': self._is_alias, - 'indexed_columns': self._indexed_columns, - 'attributes': self._attributes, - - 'destination': self.destination, - 'incremental': self.incremental, - 'primary_key': self.primary_key, - 'write_always': self.write_always, - 'delimiter': self.delimiter, - 
'enclosure': self.enclosure, - 'metadata': self.table_metadata.get_table_metadata_for_manifest(legacy_manifest=True), - 'column_metadata': self.table_metadata._get_legacy_column_metadata_for_manifest(), - 'manifest_type': manifest_type, - 'has_header': self.has_header, - 'table_metadata': self.table_metadata.get_table_metadata_for_manifest(), - 'delete_where_column': self.delete_where_column, - 'delete_where_values': self.delete_where_values, - 'delete_where_operator': self.delete_where_operator, - 'schema': [col.to_dict(name) - for name, col in self.schema.items()] if isinstance(self.schema, (OrderedDict, dict)) else [] + "id": self.id, + "uri": self._uri, + "name": self.name, + "created": self._created, + "last_change_date": self._last_change_date, + "last_import_date": self._last_import_date, + "rows_count": self._rows_count, + "data_size_bytes": self._data_size_bytes, + "is_alias": self._is_alias, + "indexed_columns": self._indexed_columns, + "attributes": self._attributes, + "destination": self.destination, + "incremental": self.incremental, + "primary_key": self.primary_key, + "write_always": self.write_always, + "delimiter": self.delimiter, + "enclosure": self.enclosure, + "metadata": self.table_metadata.get_table_metadata_for_manifest(legacy_manifest=True), + "column_metadata": self.table_metadata._get_legacy_column_metadata_for_manifest(), + "manifest_type": manifest_type, + "has_header": self.has_header, + "table_metadata": self.table_metadata.get_table_metadata_for_manifest(), + "delete_where_column": self.delete_where_column, + "delete_where_values": self.delete_where_values, + "delete_where_operator": self.delete_where_operator, + "schema": [col.to_dict(name) for name, col in self.schema.items()] + if isinstance(self.schema, (OrderedDict, dict)) + else [], } - if (legacy_manifest and not self.has_header) or self.stage == 'in': - fields['columns'] = self.column_names + if (legacy_manifest and not self.has_header) or self.stage == "in": + 
fields["columns"] = self.column_names new_dict = fields.copy() @@ -1312,7 +1339,7 @@ def _filter_attributes_by_manifest_type(self, manifest_type: Literal["in", "out" def _has_header_in_file(self): if self.is_sliced: has_header = False - elif self.column_names and not self.stage == 'in': + elif self.column_names and not self.stage == "in": has_header = False else: has_header = True @@ -1337,8 +1364,9 @@ def schema(self, value: Union[TypeOrderedDict[str, ColumnDefinition], list[str]] @property def _manifest_attributes(self) -> SupportedManifestAttributes: - return SupportedManifestAttributes(self.MANIFEST_ATTRIBUTES['out'], self.MANIFEST_ATTRIBUTES['in'], - self.OUTPUT_MANIFEST_LEGACY_EXCLUDES) + return SupportedManifestAttributes( + self.MANIFEST_ATTRIBUTES["out"], self.MANIFEST_ATTRIBUTES["in"], self.OUTPUT_MANIFEST_LEGACY_EXCLUDES + ) # #### Manifest properties @property @@ -1371,7 +1399,7 @@ def name(self) -> str: @property def rows_count(self) -> int: """ - int: rows_count property used in input manifest. + int: rows_count property used in input manifest. """ return self._rows_count @@ -1379,13 +1407,13 @@ def rows_count(self) -> int: @property def data_size_bytes(self) -> int: """ - int: data_size_bytes property used in input manifest. + int: data_size_bytes property used in input manifest. 
""" return self._data_size_bytes @property - @deprecated(version='1.5.1', reason="Please use new column_names method instead of columns property") + @deprecated(version="1.5.1", reason="Please use new column_names method instead of columns property") def columns(self) -> List[str]: if isinstance(self.schema, (OrderedDict, dict)): return list(self.schema.keys()) @@ -1393,7 +1421,7 @@ def columns(self) -> List[str]: return [] @columns.setter - @deprecated(version='1.5.1', reason="Please use new column_names method instead of schema property") + @deprecated(version="1.5.1", reason="Please use new column_names method instead of schema property") def columns(self, val: List[str]): """ Set columns for the table. @@ -1452,8 +1480,9 @@ def primary_key(self, primary_key: List[str]): if col in self.schema: self.schema[col].primary_key = True else: - raise UserException(f"Primary key column {col} not found in schema. " - f"Please specify all columns / schema") + raise UserException( + f"Primary key column {col} not found in schema. 
Please specify all columns / schema" + ) else: self._legacy_primary_key = primary_key @@ -1484,7 +1513,7 @@ def table_metadata(self, table_metadata: TableMetadata): for col, val in table_metadata._get_legacy_column_metadata_for_manifest().items(): if not self.schema.get(col): self.schema[col] = ColumnDefinition() - self.schema[col].metadata = {item['key']: item['value'] for item in val} + self.schema[col].metadata = {item["key"]: item["value"] for item in val} @property def created(self) -> Union[datetime, None]: # Created timestamp in the KBC Storage (read only input attribute) @@ -1530,7 +1559,6 @@ def update_column(self, name: str, column_definition: ColumnDefinition): raise ValueError(f'Column with name: "{name}" not found') def delete_column(self, column_name: str): - if column_name not in self.schema: raise ValueError(f"Column with name {column_name} not found") del self.schema[column_name] @@ -1562,20 +1590,19 @@ def set_delete_where_from_dict(self, delete_where): Manifest dict """ if delete_where: - if 'column' in delete_where and 'values' in delete_where: - if not isinstance(delete_where['column'], str): + if "column" in delete_where and "values" in delete_where: + if not isinstance(delete_where["column"], str): raise TypeError("Delete column must be a string") - if not isinstance(delete_where['values'], list): + if not isinstance(delete_where["values"], list): raise TypeError("Delete values must be a list") - op = delete_where['operator'] or 'eq' - if (not op == 'eq') and (not op == 'ne'): + op = delete_where["operator"] or "eq" + if (not op == "eq") and (not op == "ne"): raise ValueError("Delete operator must be 'eq' or 'ne'") - self.delete_where_values = delete_where['values'] - self.delete_where_column = delete_where['column'] + self.delete_where_values = delete_where["values"] + self.delete_where_column = delete_where["column"] self.delete_where_operator = op else: - raise ValueError("Delete where specification must contain " - "keys 'column' and 
'values'") + raise ValueError("Delete where specification must contain keys 'column' and 'values'") class FileDefinition(IODefinition): @@ -1612,32 +1639,27 @@ class FileDefinition(IODefinition): notify: Notifies project administrators that a file was uploaded. """ - SYSTEM_TAG_PREFIXES = ['componentId:', - 'configurationId:', - 'configurationRowId:', - 'runId:', - 'branchId:'] - - OUTPUT_MANIFEST_KEYS = ["tags", - "is_public", - "is_permanent", - "is_encrypted", - "notify"] - - def __init__(self, full_path: str, - stage: Optional[str] = 'out', - tags: Optional[List[str]] = None, - is_public: Optional[bool] = False, - is_permanent: Optional[bool] = False, - is_encrypted: Optional[bool] = False, - notify: Optional[bool] = False, - id: Optional[str] = None, - s3: Optional[dict] = None, - abs: Optional[dict] = None, - created: Optional[str] = None, - size_bytes: Optional[int] = None, - max_age_days: Optional[int] = None - ): + + SYSTEM_TAG_PREFIXES = ["componentId:", "configurationId:", "configurationRowId:", "runId:", "branchId:"] + + OUTPUT_MANIFEST_KEYS = ["tags", "is_public", "is_permanent", "is_encrypted", "notify"] + + def __init__( + self, + full_path: str, + stage: Optional[str] = "out", + tags: Optional[List[str]] = None, + is_public: Optional[bool] = False, + is_permanent: Optional[bool] = False, + is_encrypted: Optional[bool] = False, + notify: Optional[bool] = False, + id: Optional[str] = None, + s3: Optional[dict] = None, + abs: Optional[dict] = None, + created: Optional[str] = None, + size_bytes: Optional[int] = None, + max_age_days: Optional[int] = None, + ): """ Args: @@ -1670,13 +1692,15 @@ def __init__(self, full_path: str, self._max_age_days = max_age_days @classmethod - def build_output_definition(cls, - full_path: str, - tags: Optional[List[str]] = None, - is_public: Optional[bool] = False, - is_permanent: Optional[bool] = False, - is_encrypted: Optional[bool] = False, - notify: Optional[bool] = False): + def build_output_definition( + cls, + 
full_path: str, + tags: Optional[List[str]] = None, + is_public: Optional[bool] = False, + is_permanent: Optional[bool] = False, + is_encrypted: Optional[bool] = False, + notify: Optional[bool] = False, + ): """ Factory method to create an instance of FileDefinition for output files. @@ -1695,17 +1719,27 @@ def build_output_definition(cls, Returns: An instance of FileDefinition configured for output files. """ - return cls(full_path=full_path, stage="out", tags=tags, is_public=is_public, is_permanent=is_permanent, - is_encrypted=is_encrypted, notify=notify) + return cls( + full_path=full_path, + stage="out", + tags=tags, + is_public=is_public, + is_permanent=is_permanent, + is_encrypted=is_encrypted, + notify=notify, + ) @classmethod - def build_input_definition(cls, full_path: str, - id: Optional[str] = None, - s3: Optional[dict] = None, - abs: Optional[dict] = None, - created: Optional[str] = None, - size_bytes: Optional[int] = None, - max_age_days: Optional[int] = None): + def build_input_definition( + cls, + full_path: str, + id: Optional[str] = None, + s3: Optional[dict] = None, + abs: Optional[dict] = None, + created: Optional[str] = None, + size_bytes: Optional[int] = None, + max_age_days: Optional[int] = None, + ): """ Factory method to create an instance of FileDefinition for input files. @@ -1725,13 +1759,19 @@ def build_input_definition(cls, full_path: str, Returns: An instance of FileDefinition configured for input files. """ - return cls(full_path=full_path, stage="in", id=id, s3=s3, abs=abs, created=created, size_bytes=size_bytes, - max_age_days=max_age_days) + return cls( + full_path=full_path, + stage="in", + id=id, + s3=s3, + abs=abs, + created=created, + size_bytes=size_bytes, + max_age_days=max_age_days, + ) @classmethod - def build_from_manifest(cls, - manifest_file_path: str - ): + def build_from_manifest(cls, manifest_file_path: str): """ Factory method for FileDefinition from the raw "manifest" path. 
@@ -1753,31 +1793,32 @@ def build_from_manifest(cls, with open(manifest_file_path) as in_file: manifest = json.load(in_file) - file_path = Path(manifest_file_path.replace('.manifest', '')) + file_path = Path(manifest_file_path.replace(".manifest", "")) if not file_path.exists(): - raise ValueError(f'The corresponding file {file_path} does not exist!') + raise ValueError(f"The corresponding file {file_path} does not exist!") full_path = str(file_path) - if manifest.get('id'): - stage = 'in' + if manifest.get("id"): + stage = "in" else: - stage = 'out' - - file_def = cls(full_path=full_path, - stage=stage, - tags=manifest.get('tags', []), - is_public=manifest.get('is_public', False), - is_permanent=manifest.get('is_permanent', False), - is_encrypted=manifest.get('is_encrypted', False), - id=manifest.get('id', ''), - s3=manifest.get('s3'), - abs=manifest.get('abs'), - created=manifest.get('created'), - size_bytes=manifest.get('size_bytes', 0), - max_age_days=manifest.get('max_age_days', 0) - ) + stage = "out" + + file_def = cls( + full_path=full_path, + stage=stage, + tags=manifest.get("tags", []), + is_public=manifest.get("is_public", False), + is_permanent=manifest.get("is_permanent", False), + is_encrypted=manifest.get("is_encrypted", False), + id=manifest.get("id", ""), + s3=manifest.get("s3"), + abs=manifest.get("abs"), + created=manifest.get("created"), + size_bytes=manifest.get("size_bytes", 0), + max_age_days=manifest.get("max_age_days", 0), + ) return file_def @@ -1788,8 +1829,9 @@ def is_system_tag(cls, tag: str) -> bool: return True return False - def get_manifest_dictionary(self, manifest_type: Optional[str] = None, legacy_queue: bool = False, - legacy_manifest: Optional[bool] = None) -> dict: + def get_manifest_dictionary( + self, manifest_type: Optional[str] = None, legacy_queue: bool = False, legacy_manifest: Optional[bool] = None + ) -> dict: """ Returns manifest dictionary in appropriate manifest_type: either 'in' or 'out'. 
By default, returns output manifest. @@ -1820,8 +1862,9 @@ def get_manifest_dictionary(self, manifest_type: Optional[str] = None, legacy_qu return filtered_dictionary - def _filter_attributes_by_manifest_type(self, manifest_type: Literal["in", "out"], legacy_queue: bool = False, - legacy_manifest: bool = False): + def _filter_attributes_by_manifest_type( + self, manifest_type: Literal["in", "out"], legacy_queue: bool = False, legacy_manifest: bool = False + ): """ Filter manifest to contain only supported fields Args: @@ -1831,28 +1874,27 @@ def _filter_attributes_by_manifest_type(self, manifest_type: Literal["in", "out" """ - if manifest_type == 'in': - + if manifest_type == "in": manifest_dictionary = { - 'id': self.id, - 'created': self.created.strftime('%Y-%m-%dT%H:%M:%S%z') if self.created else None, - 'is_public': self.is_public, - 'is_encrypted': self.is_encrypted, - 'name': self.name, - 'size_bytes': self.size_bytes, - 'tags': self.tags, - 'notify': self.notify, - 'max_age_days': self.max_age_days, - 'is_permanent': self.is_permanent, + "id": self.id, + "created": self.created.strftime("%Y-%m-%dT%H:%M:%S%z") if self.created else None, + "is_public": self.is_public, + "is_encrypted": self.is_encrypted, + "name": self.name, + "size_bytes": self.size_bytes, + "tags": self.tags, + "notify": self.notify, + "max_age_days": self.max_age_days, + "is_permanent": self.is_permanent, } else: manifest_dictionary = { - 'is_public': self.is_public, - 'is_permanent': self.is_permanent, - 'is_encrypted': self.is_encrypted, - 'tags': self.tags, - 'notify': self.notify, + "is_public": self.is_public, + "is_permanent": self.is_permanent, + "is_encrypted": self.is_encrypted, + "tags": self.tags, + "notify": self.notify, } return manifest_dictionary @@ -1865,7 +1907,7 @@ def name(self) -> str: # separate id from name file_name = Path(self.full_path).name if self._id: - fsplit = file_name.split('_', 1) + fsplit = file_name.split("_", 1) if len(fsplit) > 1: self._id = fsplit[0] 
            file_name = fsplit[1]

@@ -1982,15 +2024,16 @@ class TableInputMapping(SubscriptableDataclass):
     Abstraction of [input mapping definition](
     https://developers.keboola.com/extend/common-interface/config-file/#tables) in the config file
     """
-    source: str = ''
+
+    source: str = ""
     destination: str = None
     limit: int = None
     columns: List[str] = dataclasses.field(default_factory=lambda: [])
     where_values: List[str] = None
     full_path: str = None
-    where_operator: str = ''
+    where_operator: str = ""
     days: int = 0
-    column_types: List[TableColumnTypes] = None,
+    column_types: List[TableColumnTypes] = None
     file_type: str = "csv"


@@ -2000,16 +2043,17 @@ class TableOutputMapping(SubscriptableDataclass):
     Abstraction of [output mapping definition](
     https://developers.keboola.com/extend/common-interface/config-file/#tables) in the config file
     """
+
     source: str
     destination: str
     incremental: bool = False
-    columns: str = ''
-    primary_key: str = ''
-    delete_where_column: str = ''
-    delete_where_operator: str = ''
-    delete_where_values: str = ''
-    delimiter: str = ''
-    enclosure: str = ''
+    columns: str = ""
+    primary_key: str = ""
+    delete_where_column: str = ""
+    delete_where_operator: str = ""
+    delete_where_values: str = ""
+    delimiter: str = ""
+    enclosure: str = ""


 @dataclass
@@ -2018,8 +2062,9 @@ class FileInputMapping(SubscriptableDataclass):
     Abstraction of [output mapping definition](
     https://developers.keboola.com/extend/common-interface/config-file/#files) in the config file
     """
+
     tags: List[str]
-    query: str = ''
+    query: str = ""
     filter_by_run_id: bool = False


@@ -2029,6 +2074,7 @@ class FileOutputMapping(SubscriptableDataclass):
     Abstraction of [output mapping definition](
     https://developers.keboola.com/extend/common-interface/config-file/#files) in the config file
     """
+
     source: str
     is_public: bool = False
     is_permanent: bool = False
diff --git a/src/keboola/component/interface.py b/src/keboola/component/interface.py
index 98c6e69..b366617 100644
--- 
a/src/keboola/component/interface.py +++ b/src/keboola/component/interface.py @@ -11,10 +11,10 @@ import warnings from datetime import datetime from pathlib import Path -from typing import List, Dict, Optional, Union, OrderedDict +from typing import Dict, List, Optional, OrderedDict, Union from deprecated import deprecated -from pygelf import GelfUdpHandler, GelfTcpHandler +from pygelf import GelfTcpHandler, GelfUdpHandler from pytz import utc from . import dao @@ -26,8 +26,7 @@ def register_csv_dialect(): """ Register the KBC CSV dialect """ - csv.register_dialect('kbc', lineterminator='\n', delimiter=',', - quotechar='"') + csv.register_dialect("kbc", lineterminator="\n", delimiter=",", quotechar='"') def init_environment_variables() -> dao.EnvironmentVariables: @@ -38,26 +37,27 @@ def init_environment_variables() -> dao.EnvironmentVariables: Returns: dao.EnvironmentVariables: """ - return dao.EnvironmentVariables(data_dir=os.environ.get('KBC_DATADIR', None), - run_id=os.environ.get('KBC_RUNID', None), - project_id=os.environ.get('KBC_PROJECTID', None), - stack_id=os.environ.get('KBC_STACKID', None), - config_id=os.environ.get('KBC_CONFIGID', None), - component_id=os.environ.get('KBC_COMPONENTID', None), - config_row_id=os.environ.get('KBC_CONFIGROWID', None), - branch_id=os.environ.get('KBC_BRANCHID', None), - staging_file_provider=os.environ.get('KBC_STAGING_FILE_PROVIDER', None), - project_name=os.environ.get('KBC_PROJECTNAME', None), - token_id=os.environ.get('KBC_TOKENID', None), - token_desc=os.environ.get('KBC_TOKENDESC', None), - token=os.environ.get('KBC_TOKEN', None), - url=os.environ.get('KBC_URL', None), - real_user=os.environ.get('KBC_REALUSER', None), - logger_addr=os.environ.get('KBC_LOGGER_ADDR', None), - logger_port=os.environ.get('KBC_LOGGER_PORT', None), - data_type_support=os.environ.get('KBC_DATA_TYPE_SUPPORT', None), - project_features=os.environ.get('KBC_PROJECT_FEATURE_GATES', '') - ) + return dao.EnvironmentVariables( + 
data_dir=os.environ.get("KBC_DATADIR", None), + run_id=os.environ.get("KBC_RUNID", None), + project_id=os.environ.get("KBC_PROJECTID", None), + stack_id=os.environ.get("KBC_STACKID", None), + config_id=os.environ.get("KBC_CONFIGID", None), + component_id=os.environ.get("KBC_COMPONENTID", None), + config_row_id=os.environ.get("KBC_CONFIGROWID", None), + branch_id=os.environ.get("KBC_BRANCHID", None), + staging_file_provider=os.environ.get("KBC_STAGING_FILE_PROVIDER", None), + project_name=os.environ.get("KBC_PROJECTNAME", None), + token_id=os.environ.get("KBC_TOKENID", None), + token_desc=os.environ.get("KBC_TOKENDESC", None), + token=os.environ.get("KBC_TOKEN", None), + url=os.environ.get("KBC_URL", None), + real_user=os.environ.get("KBC_REALUSER", None), + logger_addr=os.environ.get("KBC_LOGGER_ADDR", None), + logger_port=os.environ.get("KBC_LOGGER_PORT", None), + data_type_support=os.environ.get("KBC_DATA_TYPE_SUPPORT", None), + project_features=os.environ.get("KBC_PROJECT_FEATURE_GATES", ""), + ) class CommonInterface: @@ -74,8 +74,9 @@ class CommonInterface: Full path to the /data folder """ - LOGGING_TYPE_STD = 'std' - LOGGING_TYPE_GELF = 'gelf' + + LOGGING_TYPE_STD = "std" + LOGGING_TYPE_GELF = "gelf" def __init__(self, data_folder_path: str = None, log_level=logging.INFO, logging_type=None): """ @@ -95,8 +96,11 @@ def __init__(self, data_folder_path: str = None, log_level=logging.INFO, logging register_csv_dialect() # init logging - logging_type_inf = CommonInterface.LOGGING_TYPE_GELF if os.getenv('KBC_LOGGER_ADDR', - None) else CommonInterface.LOGGING_TYPE_STD + logging_type_inf = ( + CommonInterface.LOGGING_TYPE_GELF + if os.getenv("KBC_LOGGER_ADDR", None) + else CommonInterface.LOGGING_TYPE_STD + ) if not logging_type: logging_type = logging_type_inf @@ -112,8 +116,7 @@ def __init__(self, data_folder_path: str = None, log_level=logging.INFO, logging # validate if not os.path.exists(data_folder_path) and not os.path.isdir(data_folder_path): raise 
ValueError( - f"The data directory does not exist, verify that the data directory is correct. Dir: " - f"{data_folder_path}" + f"The data directory does not exist, verify that the data directory is correct. Dir: {data_folder_path}" ) self.data_folder_path = data_folder_path @@ -123,13 +126,7 @@ def _get_data_folder_from_context(self): # get from parameters argparser = argparse.ArgumentParser() - argparser.add_argument( - '-d', - '--data', - dest='data_dir', - default='', - help='Data directory' - ) + argparser.add_argument("-d", "--data", dest="data_dir", default="", help="Data directory") # unknown is to ignore extra arguments args, unknown = argparser.parse_known_args() data_folder_path = args.data_dir @@ -138,10 +135,10 @@ def _get_data_folder_from_context(self): cwd = Path(os.getcwd()) if self.environment_variables.data_dir: data_folder_path = self.environment_variables.data_dir - elif cwd.resolve().joinpath('data').is_dir(): - data_folder_path = cwd.resolve().joinpath('data').as_posix() - elif cwd.resolve().parent.joinpath('data').is_dir(): - data_folder_path = cwd.resolve().parent.joinpath('data').as_posix() + elif cwd.resolve().joinpath("data").is_dir(): + data_folder_path = cwd.resolve().joinpath("data").as_posix() + elif cwd.resolve().parent.joinpath("data").is_dir(): + data_folder_path = cwd.resolve().parent.joinpath("data").as_posix() return data_folder_path @@ -179,8 +176,9 @@ def filter(self, rec): return logger @staticmethod - def set_gelf_logger(log_level: int = logging.INFO, transport_layer='TCP', - stdout=False, include_extra_fields=True, **gelf_kwargs): # noqa: E301 + def set_gelf_logger( + log_level: int = logging.INFO, transport_layer="TCP", stdout=False, include_extra_fields=True, **gelf_kwargs + ): # noqa: E301 """ Sets gelf console logger. Handler for console output is not included by default, for testing in non-gelf environments use stdout=True. 
@@ -203,16 +201,16 @@ def set_gelf_logger(log_level: int = logging.INFO, transport_layer='TCP', CommonInterface.set_default_logger(log_level) # gelf handler setup - gelf_kwargs['include_extra_fields'] = include_extra_fields + gelf_kwargs["include_extra_fields"] = include_extra_fields - host = os.getenv('KBC_LOGGER_ADDR', 'localhost') - port = os.getenv('KBC_LOGGER_PORT', 12201) - if transport_layer == 'TCP': + host = os.getenv("KBC_LOGGER_ADDR", "localhost") + port = os.getenv("KBC_LOGGER_PORT", 12201) + if transport_layer == "TCP": gelf = GelfTcpHandler(host=host, port=port, **gelf_kwargs) - elif transport_layer == 'UDP': + elif transport_layer == "UDP": gelf = GelfUdpHandler(host=host, port=port, **gelf_kwargs) else: - raise ValueError(F'Unsupported gelf transport layer: {transport_layer}. Choose TCP or UDP') + raise ValueError(f"Unsupported gelf transport layer: {transport_layer}. Choose TCP or UDP") logging.getLogger().setLevel(log_level) logging.getLogger().addHandler(gelf) @@ -229,19 +227,16 @@ def get_state_file(self) -> dict: dict: """ - logging.info('Loading state file..') - state_file_path = os.path.join(self.data_folder_path, 'in', 'state.json') + logging.info("Loading state file..") + state_file_path = os.path.join(self.data_folder_path, "in", "state.json") if not os.path.isfile(state_file_path): - logging.info('State file not found. First run?') + logging.info("State file not found. 
First run?") return {} try: - with open(state_file_path, 'r') \ - as state_file: + with open(state_file_path, "r") as state_file: return json.load(state_file) except (OSError, IOError): - raise ValueError( - "State file state.json unable to read " - ) + raise ValueError("State file state.json unable to read ") def write_state_file(self, state_dict: dict): """ @@ -250,9 +245,9 @@ def write_state_file(self, state_dict: dict): state_dict (dict): """ if not isinstance(state_dict, dict): - raise TypeError('Dictionary expected as a state file datatype!') + raise TypeError("Dictionary expected as a state file datatype!") - with open(os.path.join(self.configuration.data_dir, 'out', 'state.json'), 'w+') as state_file: + with open(os.path.join(self.configuration.data_dir, "out", "state.json"), "w+") as state_file: json.dump(state_dict, state_file) def get_input_table_definition_by_name(self, table_name: str) -> dao.TableDefinition: @@ -270,10 +265,7 @@ def get_input_table_definition_by_name(self, table_name: str) -> dao.TableDefini Returns: dao.TableDefinition """ - manifest_path = os.path.join( - self.tables_in_path, - table_name + '.manifest' - ) + manifest_path = os.path.join(self.tables_in_path, table_name + ".manifest") return dao.TableDefinition.build_from_manifest(manifest_path) @@ -299,76 +291,82 @@ def get_input_tables_definitions(self, orphaned_manifests=False) -> List[dao.Tab """ - table_files = [f for f in glob.glob(self.tables_in_path + "/**", recursive=False) if - not f.endswith('.manifest')] + table_files = [ + f for f in glob.glob(self.tables_in_path + "/**", recursive=False) if not f.endswith(".manifest") + ] table_defs = list() for t in table_files: p = Path(t) - manifest_path = t + '.manifest' + manifest_path = t + ".manifest" if p.is_dir() and not Path(manifest_path).exists(): # skip folders that do not have matching manifest - logging.warning(f'Folder {t} does not have matching manifest, it will be ignored!') + logging.warning(f"Folder {t} does not have 
matching manifest, it will be ignored!") continue table_defs.append(dao.TableDefinition.build_from_manifest(manifest_path)) if orphaned_manifests: - files_w_manifest = [t.name + '.manifest' for t in table_defs] - manifest_files = [f for f in glob.glob(self.tables_in_path + "/**.manifest", recursive=False) - if Path(f).name not in files_w_manifest] + files_w_manifest = [t.name + ".manifest" for t in table_defs] + manifest_files = [ + f + for f in glob.glob(self.tables_in_path + "/**.manifest", recursive=False) + if Path(f).name not in files_w_manifest + ] for t in manifest_files: p = Path(t) if p.is_dir(): # skip folders that do not have matching manifest - logging.warning(f'Manifest {t} is folder,s skipping!') + logging.warning(f"Manifest {t} is folder,s skipping!") continue table_defs.append(dao.TableDefinition.build_from_manifest(t)) return table_defs - def _create_table_definition(self, name: str, - storage_stage: str = 'out', - is_sliced: bool = False, - destination: str = '', - primary_key: List[str] = None, - columns: List[str] = None, - incremental: bool = None, - table_metadata: dao.TableMetadata = None, - enclosure: str = '"', - delimiter: str = ',', - delete_where: dict = None, - write_always: bool = False, - schema: Union[ - OrderedDict[str, ColumnDefinition], list[str]] = None, - has_header: Optional[bool] = None, - description: Optional[str] = None) -> dao.TableDefinition: - """ - Helper method for dao.TableDefinition creation along with the "manifest". - It initializes path according to the storage_stage type. - - Args: - name: Table / file name. e.g. `'my_table.csv'`. - storage_stage: - default value: 'out' - either `'in'` or `'out'`. Determines the path to result file. - E.g. `data/tables/in/my_table.csv` - is_sliced: True if the full_path points to a folder with sliced tables - destination: String name of the table in Storage. - primary_key: List with names of columns used for primary key. 
- columns: List of columns for headless CSV files - incremental: Set to true to enable incremental loading - table_metadata: <.dao.TableMetadata> object containing column and table metadata - enclosure: str: CSV enclosure, by default " - delimiter: str: CSV delimiter, by default , - delete_where: Dict with settings for deleting rows - write_always: Bool: If true, the table will be saved to Storage even when the job execution - fails. - """ - if storage_stage == 'in': + def _create_table_definition( + self, + name: str, + storage_stage: str = "out", + is_sliced: bool = False, + destination: str = "", + primary_key: List[str] = None, + columns: List[str] = None, + incremental: bool = None, + table_metadata: dao.TableMetadata = None, + enclosure: str = '"', + delimiter: str = ",", + delete_where: dict = None, + write_always: bool = False, + schema: Union[OrderedDict[str, ColumnDefinition], list[str]] = None, + has_header: Optional[bool] = None, + description: Optional[str] = None, + ) -> dao.TableDefinition: + """ + Helper method for dao.TableDefinition creation along with the "manifest". + It initializes path according to the storage_stage type. + + Args: + name: Table / file name. e.g. `'my_table.csv'`. + storage_stage: + default value: 'out' + either `'in'` or `'out'`. Determines the path to result file. + E.g. `data/tables/in/my_table.csv` + is_sliced: True if the full_path points to a folder with sliced tables + destination: String name of the table in Storage. + primary_key: List with names of columns used for primary key. + columns: List of columns for headless CSV files + incremental: Set to true to enable incremental loading + table_metadata: <.dao.TableMetadata> object containing column and table metadata + enclosure: str: CSV enclosure, by default " + delimiter: str: CSV delimiter, by default , + delete_where: Dict with settings for deleting rows + write_always: Bool: If true, the table will be saved to Storage even when the job execution + fails. 
+ """ + if storage_stage == "in": full_path = os.path.join(self.tables_in_path, name) - elif storage_stage == 'out': + elif storage_stage == "out": full_path = os.path.join(self.tables_out_path, name) else: raise ValueError(f'Invalid storage_stage value "{storage_stage}". Supported values are: "in" or "out"!') @@ -376,125 +374,135 @@ def _create_table_definition(self, name: str, # for transition period we need to force legacy mode for csv files w headers. force_legacy_mode = False if not schema and not columns and primary_key: - warnings.warn('Primary key is set but columns are not. Forcing legacy mode for CSV file.', - DeprecationWarning) + warnings.warn( + "Primary key is set but columns are not. Forcing legacy mode for CSV file.", DeprecationWarning + ) force_legacy_mode = True - return dao.TableDefinition(name=name, - full_path=full_path, - is_sliced=is_sliced, - destination=destination, - primary_key=primary_key, - columns=columns, - incremental=incremental, - description=description, - table_metadata=table_metadata, - enclosure=enclosure, - delimiter=delimiter, - delete_where=delete_where, - stage=storage_stage, - write_always=write_always, - schema=schema, - has_header=has_header, - force_legacy_mode=force_legacy_mode) - - def create_in_table_definition(self, name: str, - is_sliced: bool = False, - destination: str = '', - primary_key: List[str] = None, - columns: List[str] = None, - incremental: bool = None, - table_metadata: dao.TableMetadata = None, - delete_where: str = None, - schema: List[ColumnDefinition] = None) -> dao.TableDefinition: - """ - Helper method for input dao.TableDefinition creation along with the "manifest". - It initializes path in data/tables/in/ folder. - - Args: - name: Table / file name. e.g. `'my_table.csv'`. - is_sliced: True if the full_path points to a folder with sliced tables - destination: String name of the table in Storage. - primary_key: List with names of columns used for primary key. 
- columns: List of columns for headless CSV files - incremental: Set to true to enable incremental loading - table_metadata: <.dao.TableMetadata> object containing column and table metadata - delete_where: Dict with settings for deleting rows - schema: Table schema - """ - - return self._create_table_definition(name=name, - storage_stage='in', - is_sliced=is_sliced, - destination=destination, - primary_key=primary_key, - columns=columns, - incremental=incremental, - table_metadata=table_metadata, - delete_where=delete_where, - schema=schema) - - def create_out_table_definition(self, name: str, - is_sliced: bool = False, - destination: str = '', - primary_key: List[str] = None, - schema: TableDefinition.SCHEMA_TYPE = None, - incremental: bool = None, - table_metadata: dao.TableMetadata = None, - enclosure: str = '"', - delimiter: str = ',', - delete_where: dict = None, - write_always: bool = False, - has_header: Optional[bool] = None, - description: Optional[str] = None, - **kwargs - ) -> dao.TableDefinition: - """ - Helper method for output dao.TableDefinition creation along with the "manifest". - It initializes path in data/tables/out/ folder. - - Args: - name: Table / file name. e.g. `'my_table.csv'`. - is_sliced: True if the full_path points to a folder with sliced tables - destination: String name of the table in Storage. - primary_key: List with names of columns used for primary key. - schema: List of columns or mapping of column names and ColumnDefinition objects. - if list of strings is provided, the columns will be created with default settings - (BaseType.string) - incremental: Set to true to enable incremental loading - table_metadata: <.dao.TableMetadata> object containing column and table metadata - enclosure: str: CSV enclosure, by default " - delimiter: str: CSV delimiter, by default , - delete_where: Dict with settings for deleting rows - write_always: Bool: If true, the table will be saved to Storage even when the job execution - fails. 
- has_header:Optional[bool] = flag whether the header is present in the file, - if None legacy method is used - description: Table description - """ - - return self._create_table_definition(name=name, - storage_stage='out', - is_sliced=is_sliced, - destination=destination, - primary_key=primary_key, - columns=kwargs.get('columns'), - incremental=incremental, - table_metadata=table_metadata, - enclosure=enclosure, - delimiter=delimiter, - delete_where=delete_where, - write_always=write_always, - schema=schema, - has_header=has_header, - description=description) + return dao.TableDefinition( + name=name, + full_path=full_path, + is_sliced=is_sliced, + destination=destination, + primary_key=primary_key, + columns=columns, + incremental=incremental, + description=description, + table_metadata=table_metadata, + enclosure=enclosure, + delimiter=delimiter, + delete_where=delete_where, + stage=storage_stage, + write_always=write_always, + schema=schema, + has_header=has_header, + force_legacy_mode=force_legacy_mode, + ) + + def create_in_table_definition( + self, + name: str, + is_sliced: bool = False, + destination: str = "", + primary_key: List[str] = None, + columns: List[str] = None, + incremental: bool = None, + table_metadata: dao.TableMetadata = None, + delete_where: str = None, + schema: List[ColumnDefinition] = None, + ) -> dao.TableDefinition: + """ + Helper method for input dao.TableDefinition creation along with the "manifest". + It initializes path in data/tables/in/ folder. + + Args: + name: Table / file name. e.g. `'my_table.csv'`. + is_sliced: True if the full_path points to a folder with sliced tables + destination: String name of the table in Storage. + primary_key: List with names of columns used for primary key. 
+ columns: List of columns for headless CSV files + incremental: Set to true to enable incremental loading + table_metadata: <.dao.TableMetadata> object containing column and table metadata + delete_where: Dict with settings for deleting rows + schema: Table schema + """ + + return self._create_table_definition( + name=name, + storage_stage="in", + is_sliced=is_sliced, + destination=destination, + primary_key=primary_key, + columns=columns, + incremental=incremental, + table_metadata=table_metadata, + delete_where=delete_where, + schema=schema, + ) + + def create_out_table_definition( + self, + name: str, + is_sliced: bool = False, + destination: str = "", + primary_key: List[str] = None, + schema: TableDefinition.SCHEMA_TYPE = None, + incremental: bool = None, + table_metadata: dao.TableMetadata = None, + enclosure: str = '"', + delimiter: str = ",", + delete_where: dict = None, + write_always: bool = False, + has_header: Optional[bool] = None, + description: Optional[str] = None, + **kwargs, + ) -> dao.TableDefinition: + """ + Helper method for output dao.TableDefinition creation along with the "manifest". + It initializes path in data/tables/out/ folder. + + Args: + name: Table / file name. e.g. `'my_table.csv'`. + is_sliced: True if the full_path points to a folder with sliced tables + destination: String name of the table in Storage. + primary_key: List with names of columns used for primary key. + schema: List of columns or mapping of column names and ColumnDefinition objects. + if list of strings is provided, the columns will be created with default settings + (BaseType.string) + incremental: Set to true to enable incremental loading + table_metadata: <.dao.TableMetadata> object containing column and table metadata + enclosure: str: CSV enclosure, by default " + delimiter: str: CSV delimiter, by default , + delete_where: Dict with settings for deleting rows + write_always: Bool: If true, the table will be saved to Storage even when the job execution + fails. 
+ has_header:Optional[bool] = flag whether the header is present in the file, + if None legacy method is used + description: Table description + """ + + return self._create_table_definition( + name=name, + storage_stage="out", + is_sliced=is_sliced, + destination=destination, + primary_key=primary_key, + columns=kwargs.get("columns"), + incremental=incremental, + table_metadata=table_metadata, + enclosure=enclosure, + delimiter=delimiter, + delete_where=delete_where, + write_always=write_always, + schema=schema, + has_header=has_header, + description=description, + ) # # File processing - def get_input_file_definitions_grouped_by_tag_group(self, orphaned_manifests=False, - only_latest_files=True, - tags: List[str] = None, - include_system_tags=False) \ - -> Dict[str, List[dao.FileDefinition]]: + def get_input_file_definitions_grouped_by_tag_group( + self, orphaned_manifests=False, only_latest_files=True, tags: List[str] = None, include_system_tags=False + ) -> Dict[str, List[dao.FileDefinition]]: """ Convenience method returning lists of files in dictionary grouped by tag group. @@ -516,8 +524,9 @@ def get_input_file_definitions_grouped_by_tag_group(self, orphaned_manifests=Fal file_definitions = self.get_input_files_definitions(orphaned_manifests, only_latest_files, tags) return self.__group_file_defs_by_tag_group(file_definitions, include_system_tags=include_system_tags) - def get_input_file_definitions_grouped_by_name(self, orphaned_manifests=False, only_latest_files=True, - tags: List[str] = None) -> Dict[str, List[dao.FileDefinition]]: + def get_input_file_definitions_grouped_by_name( + self, orphaned_manifests=False, only_latest_files=True, tags: List[str] = None + ) -> Dict[str, List[dao.FileDefinition]]: """ Convenience method returning lists of files in dictionary grouped by file name. 
@@ -533,23 +542,22 @@ def get_input_file_definitions_grouped_by_name(self, orphaned_manifests=False, o file_definitions = self.get_input_files_definitions(orphaned_manifests, only_latest_files, tags) return self.__group_files_by_name(file_definitions) - def __group_file_defs_by_tag_group(self, file_definitions: List[dao.FileDefinition], include_system_tags=False) \ - -> Dict[str, List[dao.FileDefinition]]: - + def __group_file_defs_by_tag_group( + self, file_definitions: List[dao.FileDefinition], include_system_tags=False + ) -> Dict[str, List[dao.FileDefinition]]: files_per_tag: dict = {} for f in file_definitions: - tag_group_v1 = f.tags if include_system_tags else f.user_tags tag_group_v1.sort() - tag_group_key = ';'.join(tag_group_v1) + tag_group_key = ";".join(tag_group_v1) if not files_per_tag.get(tag_group_key): files_per_tag[tag_group_key] = [] files_per_tag[tag_group_key].append(f) return files_per_tag - def _filter_files(self, file_definitions: List[dao.FileDefinition], tags: List[str] = None, - only_latest: bool = True) -> List[dao.FileDefinition]: - + def _filter_files( + self, file_definitions: List[dao.FileDefinition], tags: List[str] = None, only_latest: bool = True + ) -> List[dao.FileDefinition]: filtered_files = file_definitions if only_latest: @@ -598,9 +606,9 @@ def __filter_filedefs_by_latest(self, file_definitions: List[dao.FileDefinition] filtered_files.append(max_file) return filtered_files - def get_input_files_definitions(self, orphaned_manifests=False, - only_latest_files=True, - tags: Optional[List[str]] = None) -> List[dao.FileDefinition]: + def get_input_files_definitions( + self, orphaned_manifests=False, only_latest_files=True, tags: Optional[List[str]] = None + ) -> List[dao.FileDefinition]: """ Return dao.FileDefinition objects by scanning the `data/in/files` folder. 
@@ -625,56 +633,60 @@ def get_input_files_definitions(self, orphaned_manifests=False, """ - in_files = [f for f in glob.glob(self.files_in_path + "/**", recursive=False) if - not f.endswith('.manifest')] + in_files = [f for f in glob.glob(self.files_in_path + "/**", recursive=False) if not f.endswith(".manifest")] file_defs = list() for t in in_files: - manifest_path = t + '.manifest' + manifest_path = t + ".manifest" file_defs.append(dao.FileDefinition.build_from_manifest(manifest_path)) if orphaned_manifests: files_w_manifest = [t.full_path for t in file_defs] - manifest_files = [f for f in glob.glob(self.tables_in_path + "/**.manifest", recursive=False) - if Path(f).name not in files_w_manifest] + manifest_files = [ + f + for f in glob.glob(self.tables_in_path + "/**.manifest", recursive=False) + if Path(f).name not in files_w_manifest + ] for t in manifest_files: p = Path(t) if p.is_dir(): # skip folders that do not have matching manifest - logging.warning(f'Manifest {t} is folder,s skipping!') + logging.warning(f"Manifest {t} is folder,s skipping!") continue file_defs.append(dao.FileDefinition.build_from_manifest(t)) return self._filter_files(file_defs, tags, only_latest_files) - def _create_file_definition(self, - name: str, - storage_stage: str = 'out', - tags: List[str] = None, - is_public: bool = False, - is_permanent: bool = False, - is_encrypted: bool = False, - notify: bool = False) -> dao.FileDefinition: - """ - Helper method for dao.FileDefinition creation along with the "manifest". - It initializes path according to the storage_stage type. - - Args: - name (str): Name of the file, e.g. file.jpg. - tags (list): - List of tags that are assigned to this file - is_public: When true, the file URL will be permanent and publicly accessible. - is_permanent: Keeps a file forever. If false, the file will be deleted after default - period of time (e.g. - 15 days) - is_encrypted: If true, the file content will be encrypted in the storage. 
- notify: Notifies project administrators that a file was uploaded. - """ - if storage_stage == 'in': + def _create_file_definition( + self, + name: str, + storage_stage: str = "out", + tags: List[str] = None, + is_public: bool = False, + is_permanent: bool = False, + is_encrypted: bool = False, + notify: bool = False, + ) -> dao.FileDefinition: + """ + Helper method for dao.FileDefinition creation along with the "manifest". + It initializes path according to the storage_stage type. + + Args: + name (str): Name of the file, e.g. file.jpg. + tags (list): + List of tags that are assigned to this file + is_public: When true, the file URL will be permanent and publicly accessible. + is_permanent: Keeps a file forever. If false, the file will be deleted after default + period of time (e.g. + 15 days) + is_encrypted: If true, the file content will be encrypted in the storage. + notify: Notifies project administrators that a file was uploaded. + """ + if storage_stage == "in": full_path = os.path.join(self.files_in_path, name) - elif storage_stage == 'out': + elif storage_stage == "out": full_path = os.path.join(self.files_out_path, name) else: raise ValueError(f'Invalid storage_stage value "{storage_stage}". Supported values are: "in" or "out"!') @@ -685,37 +697,43 @@ def _create_file_definition(self, is_public=is_public, is_permanent=is_permanent, is_encrypted=is_encrypted, - notify=notify) - - def create_out_file_definition(self, name: str, - tags: List[str] = None, - is_public: bool = False, - is_permanent: bool = False, - is_encrypted: bool = False, - notify: bool = False) -> dao.FileDefinition: - """ - Helper method for input dao.FileDefinition creation along with the "manifest". - It initializes path in data/files/out/ folder. - - Args: - name (str): Name of the file, e.g. file.jpg. - tags (list): - List of tags that are assigned to this file - is_public: When true, the file URL will be permanent and publicly accessible. - is_permanent: Keeps a file forever. 
If false, the file will be deleted after default - period of time (e.g. - 15 days) - is_encrypted: If true, the file content will be encrypted in the storage. - notify: Notifies project administrators that a file was uploaded. - """ - - return self._create_file_definition(name=name, - storage_stage='out', - tags=tags, - is_public=is_public, - is_permanent=is_permanent, - is_encrypted=is_encrypted, - notify=notify) + notify=notify, + ) + + def create_out_file_definition( + self, + name: str, + tags: List[str] = None, + is_public: bool = False, + is_permanent: bool = False, + is_encrypted: bool = False, + notify: bool = False, + ) -> dao.FileDefinition: + """ + Helper method for input dao.FileDefinition creation along with the "manifest". + It initializes path in data/files/out/ folder. + + Args: + name (str): Name of the file, e.g. file.jpg. + tags (list): + List of tags that are assigned to this file + is_public: When true, the file URL will be permanent and publicly accessible. + is_permanent: Keeps a file forever. If false, the file will be deleted after default + period of time (e.g. + 15 days) + is_encrypted: If true, the file content will be encrypted in the storage. + notify: Notifies project administrators that a file was uploaded. + """ + + return self._create_file_definition( + name=name, + storage_stage="out", + tags=tags, + is_public=is_public, + is_permanent=is_permanent, + is_encrypted=is_encrypted, + notify=notify, + ) # TODO: refactor the validate config so it's more userfriendly """ @@ -724,88 +742,89 @@ def create_out_file_definition(self, name: str, def validate_configuration_parameters(self, mandatory_params=None): """ - Validates config parameters based on provided mandatory parameters. - All provided parameters must be present in config to pass. 
- ex1.: - par1 = 'par1' - par2 = 'par2' - mandatory_params = [par1, par2] - Validation will fail when one of the above parameters is not found - - Two levels of nesting: - Parameters can be grouped as arrays par3 = [groupPar1, groupPar2] - => at least one of the pars has to be present - ex2. - par1 = 'par1' - par2 = 'par2' - par3 = 'par3' - groupPar1 = 'groupPar1' - groupPar2 = 'groupPar2' - group1 = [groupPar1, groupPar2] - group3 = [par3, group1] - mandatory_params = [par1, par2, group1] - - Folowing logical expression is evaluated: - Par1 AND Par2 AND (groupPar1 OR groupPar2) - - ex3 - par1 = 'par1' - par2 = 'par2' - par3 = 'par3' - groupPar1 = 'groupPar1' - groupPar2 = 'groupPar2' - group1 = [groupPar1, groupPar2] - group3 = [par3, group1] - mandatory_params = [par1, par2, group3] - - Following logical expression is evaluated: - par1 AND par2 AND (par3 OR (groupPar1 AND groupPar2)) - """ + Validates config parameters based on provided mandatory parameters. + All provided parameters must be present in config to pass. + ex1.: + par1 = 'par1' + par2 = 'par2' + mandatory_params = [par1, par2] + Validation will fail when one of the above parameters is not found + + Two levels of nesting: + Parameters can be grouped as arrays par3 = [groupPar1, groupPar2] + => at least one of the pars has to be present + ex2. 
+ par1 = 'par1' + par2 = 'par2' + par3 = 'par3' + groupPar1 = 'groupPar1' + groupPar2 = 'groupPar2' + group1 = [groupPar1, groupPar2] + group3 = [par3, group1] + mandatory_params = [par1, par2, group1] + + Folowing logical expression is evaluated: + Par1 AND Par2 AND (groupPar1 OR groupPar2) + + ex3 + par1 = 'par1' + par2 = 'par2' + par3 = 'par3' + groupPar1 = 'groupPar1' + groupPar2 = 'groupPar2' + group1 = [groupPar1, groupPar2] + group3 = [par3, group1] + mandatory_params = [par1, par2, group3] + + Following logical expression is evaluated: + par1 AND par2 AND (par3 OR (groupPar1 AND groupPar2)) + """ if not mandatory_params: mandatory_params = [] - return self._validate_parameters(self.configuration.parameters, mandatory_params, 'config parameters') + return self._validate_parameters(self.configuration.parameters, mandatory_params, "config parameters") def validate_image_parameters(self, mandatory_params): """ - Validates image parameters based on provided mandatory parameters. - All provided parameters must be present in config to pass. - ex1.: - par1 = 'par1' - par2 = 'par2' - mandatory_params = [par1, par2] - Validation will fail when one of the above parameters is not found - - Two levels of nesting: - Parameters can be grouped as arrays par3 = [groupPar1, groupPar2] - => at least one of the pars has to be present - ex2. 
- par1 = 'par1' - par2 = 'par2' - par3 = 'par3' - groupPar1 = 'groupPar1' - groupPar2 = 'groupPar2' - group1 = [groupPar1, groupPar2] - group3 = [par3, group1] - mandatory_params = [par1, par2, group1] - - Folowing logical expression is evaluated: - Par1 AND Par2 AND (groupPar1 OR groupPar2) - - ex3 - par1 = 'par1' - par2 = 'par2' - par3 = 'par3' - groupPar1 = 'groupPar1' - groupPar2 = 'groupPar2' - group1 = [groupPar1, groupPar2] - group3 = [par3, group1] - mandatory_params = [par1, par2, group3] - - Following logical expression is evaluated: - par1 AND par2 AND (par3 OR (groupPar1 AND groupPar2)) - """ - return self._validate_parameters(self.configuration.image_parameters, - mandatory_params, 'image/stack parameters') + Validates image parameters based on provided mandatory parameters. + All provided parameters must be present in config to pass. + ex1.: + par1 = 'par1' + par2 = 'par2' + mandatory_params = [par1, par2] + Validation will fail when one of the above parameters is not found + + Two levels of nesting: + Parameters can be grouped as arrays par3 = [groupPar1, groupPar2] + => at least one of the pars has to be present + ex2. 
+ par1 = 'par1' + par2 = 'par2' + par3 = 'par3' + groupPar1 = 'groupPar1' + groupPar2 = 'groupPar2' + group1 = [groupPar1, groupPar2] + group3 = [par3, group1] + mandatory_params = [par1, par2, group1] + + Folowing logical expression is evaluated: + Par1 AND Par2 AND (groupPar1 OR groupPar2) + + ex3 + par1 = 'par1' + par2 = 'par2' + par3 = 'par3' + groupPar1 = 'groupPar1' + groupPar2 = 'groupPar2' + group1 = [groupPar1, groupPar2] + group3 = [par3, group1] + mandatory_params = [par1, par2, group3] + + Following logical expression is evaluated: + par1 AND par2 AND (par3 OR (groupPar1 AND groupPar2)) + """ + return self._validate_parameters( + self.configuration.image_parameters, mandatory_params, "image/stack parameters" + ) def _validate_parameters(self, parameters, mandatory_params, _type): """ @@ -857,8 +876,7 @@ def _validate_parameters(self, parameters, mandatory_params, _type): missing_fields.append(par) if missing_fields: - raise UserException( - 'Missing mandatory {} fields: [{}] '.format(_type, ', '.join(missing_fields))) + raise UserException("Missing mandatory {} fields: [{}] ".format(_type, ", ".join(missing_fields))) def _validate_par_group(self, par_group, parameters): missing_fields = [] @@ -896,19 +914,19 @@ def configuration(self): @property def tables_out_path(self): - return os.path.join(self.data_folder_path, 'out', 'tables') + return os.path.join(self.data_folder_path, "out", "tables") @property def tables_in_path(self): - return os.path.join(self.data_folder_path, 'in', 'tables') + return os.path.join(self.data_folder_path, "in", "tables") @property def files_out_path(self): - return os.path.join(self.data_folder_path, 'out', 'files') + return os.path.join(self.data_folder_path, "out", "files") @property def files_in_path(self): - return os.path.join(self.data_folder_path, 'in', 'files') + return os.path.join(self.data_folder_path, "in", "files") @property def _running_in_kbc(self): @@ -924,12 +942,13 @@ def is_legacy_queue(self) -> bool: 
features = self.environment_variables.project_features is_legacy_queue = True - if not self._running_in_kbc or 'queuev2' in features: + if not self._running_in_kbc or "queuev2" in features: is_legacy_queue = False return is_legacy_queue - def write_manifest(self, io_definition: Union[dao.FileDefinition, dao.TableDefinition], - legacy_manifest: Optional[bool] = None): + def write_manifest( + self, io_definition: Union[dao.FileDefinition, dao.TableDefinition], legacy_manifest: Optional[bool] = None + ): """ Write a table manifest from dao.IODefinition. Creates the appropriate manifest file in the proper location. @@ -965,24 +984,30 @@ def write_manifest(self, io_definition: Union[dao.FileDefinition, dao.TableDefin if not legacy_manifest: legacy_manifest = self._expects_legacy_manifest() - manifest = io_definition.get_manifest_dictionary(legacy_queue=self.is_legacy_queue, - legacy_manifest=legacy_manifest) + manifest = io_definition.get_manifest_dictionary( + legacy_queue=self.is_legacy_queue, legacy_manifest=legacy_manifest + ) # make dirs if not exist os.makedirs(os.path.dirname(io_definition.full_path), exist_ok=True) - with open(io_definition.full_path + '.manifest', 'w') as manifest_file: + with open(io_definition.full_path + ".manifest", "w") as manifest_file: json.dump(manifest, manifest_file) def _expects_legacy_manifest(self) -> bool: - legacy_manifest = \ - (self._running_in_kbc and self.environment_variables.data_type_support not in ('authoritative', 'hints')) + legacy_manifest = self._running_in_kbc and self.environment_variables.data_type_support not in ( + "authoritative", + "hints", + ) - om_override = self.configuration.config_data.get('storage', {}).get('output', {}).get('data_type_support') + om_override = self.configuration.config_data.get("storage", {}).get("output", {}).get("data_type_support") if om_override: - legacy_manifest = om_override not in ('authoritative', 'hints') + legacy_manifest = om_override not in ("authoritative", "hints") return 
legacy_manifest - def write_manifests(self, io_definitions: List[Union[dao.FileDefinition, dao.TableDefinition]], - legacy_manifest: Optional[bool] = None): + def write_manifests( + self, + io_definitions: List[Union[dao.FileDefinition, dao.TableDefinition]], + legacy_manifest: Optional[bool] = None, + ): """ Process all table definition objects and create appropriate manifest files. Args: @@ -997,7 +1022,7 @@ def write_manifests(self, io_definitions: List[Union[dao.FileDefinition, dao.Tab # ############# DEPRECATED METHODS, TODO: remove - @deprecated(version='1.3.0', reason="You should use write_manifest function") + @deprecated(version="1.3.0", reason="You should use write_manifest function") def write_filedef_manifest(self, file_definition: dao.FileDefinition): """ Write a table manifest from dao.FileDefinition. Creates the appropriate manifest file in the proper location. @@ -1024,7 +1049,7 @@ def write_filedef_manifest(self, file_definition: dao.FileDefinition): """ self.write_manifest(file_definition) - @deprecated(version='1.3.0', reason="You should use write_manifests function") + @deprecated(version="1.3.0", reason="You should use write_manifests function") def write_filedef_manifests(self, file_definitions: List[dao.FileDefinition]): """ Process all table definition objects and create appropriate manifest files. @@ -1036,7 +1061,7 @@ def write_filedef_manifests(self, file_definitions: List[dao.FileDefinition]): """ self.write_manifests(file_definitions) - @deprecated(version='1.3.0', reason="You should use write_manifest function") + @deprecated(version="1.3.0", reason="You should use write_manifest function") def write_tabledef_manifest(self, table_definition: dao.TableDefinition): """ Write a table manifest from dao.TableDefinition. Creates the appropriate manifest file in the proper location. 
@@ -1068,7 +1093,7 @@ def write_tabledef_manifest(self, table_definition: dao.TableDefinition): """ self.write_manifest(table_definition, legacy_manifest=True) - @deprecated(version='1.3.0', reason="You should use write_manifests function") + @deprecated(version="1.3.0", reason="You should use write_manifests function") def write_tabledef_manifests(self, table_definitions: List[dao.TableDefinition]): """ Process all table definition objects and create appropriate manifest files. @@ -1083,6 +1108,7 @@ def write_tabledef_manifests(self, table_definitions: List[dao.TableDefinition]) # ########## CONFIGURATION + class Configuration: """ Class representing configuration file generated and read @@ -1101,8 +1127,7 @@ def __init__(self, data_folder_path: str): self.data_dir = data_folder_path try: - with open(os.path.join(data_folder_path, 'config.json'), 'r') \ - as config_file: + with open(os.path.join(data_folder_path, "config.json"), "r") as config_file: self.config_data = json.load(config_file) except (OSError, IOError): raise ValueError( @@ -1111,10 +1136,10 @@ def __init__(self, data_folder_path: str): f"{self.data_dir}" ) - self.parameters = self.config_data.get('parameters', {}) - self.image_parameters = self.config_data.get('image_parameters', {}) - self.action = self.config_data.get('action', '') - self.workspace_credentials = self.config_data.get('authorization', {}).get('workspace', {}) + self.parameters = self.config_data.get("parameters", {}) + self.image_parameters = self.config_data.get("image_parameters", {}) + self.action = self.config_data.get("action", "") + self.workspace_credentials = self.config_data.get("authorization", {}).get("workspace", {}) # ################ PROPERTIES @property @@ -1125,16 +1150,16 @@ def oauth_credentials(self) -> dao.OauthCredentials: Returns: OauthCredentials """ - oauth_credentials = self.config_data.get('authorization', {}).get('oauth_api', {}).get('credentials', {}) + oauth_credentials = 
self.config_data.get("authorization", {}).get("oauth_api", {}).get("credentials", {}) credentials = None if oauth_credentials: credentials = dao.OauthCredentials( - id=oauth_credentials.get("id", ''), - created=oauth_credentials.get("created", ''), - data=json.loads(oauth_credentials.get("#data", '{}')), - oauthVersion=oauth_credentials.get("oauthVersion", ''), - appKey=oauth_credentials.get("appKey", ''), - appSecret=oauth_credentials.get("#appSecret", '') + id=oauth_credentials.get("id", ""), + created=oauth_credentials.get("created", ""), + data=json.loads(oauth_credentials.get("#data", "{}")), + oauthVersion=oauth_credentials.get("oauthVersion", ""), + appKey=oauth_credentials.get("appKey", ""), + appSecret=oauth_credentials.get("#appSecret", ""), ) return credentials @@ -1149,22 +1174,17 @@ def tables_input_mapping(self) -> List[dao.TableInputMapping]: """ - tables_defs = self.config_data.get('storage', {}).get('input', {}).get('tables', []) + tables_defs = self.config_data.get("storage", {}).get("input", {}).get("tables", []) tables = [] for table in tables_defs: # nested dataclass - table['column_types'] = [dao.build_dataclass_from_dict(dao.TableColumnTypes, coltype) for coltype in - table.get('column_types', [])] + table["column_types"] = [ + dao.build_dataclass_from_dict(dao.TableColumnTypes, coltype) + for coltype in table.get("column_types", []) + ] im = dao.build_dataclass_from_dict(dao.TableInputMapping, table) - im.full_path = os.path.normpath( - os.path.join( - self.data_dir, - 'in', - 'tables', - table['destination'] - ) - ) + im.full_path = os.path.normpath(os.path.join(self.data_dir, "in", "tables", table["destination"])) tables.append(im) return tables @@ -1178,7 +1198,7 @@ def tables_output_mapping(self) -> List[dao.TableOutputMapping]: Returns: List[TableOutputMapping] """ - tables_defs = self.config_data.get('storage', {}).get('output', {}).get('tables', []) + tables_defs = self.config_data.get("storage", {}).get("output", {}).get("tables", 
[]) tables = [] for table in tables_defs: om = dao.build_dataclass_from_dict(dao.TableOutputMapping, table) @@ -1195,7 +1215,7 @@ def files_input_mapping(self) -> List[dao.FileInputMapping]: Returns: List[FileInputMapping] """ - defs = self.config_data.get('storage', {}).get('input', {}).get('files', []) + defs = self.config_data.get("storage", {}).get("input", {}).get("files", []) files = [] for file in defs: om = dao.build_dataclass_from_dict(dao.FileInputMapping, file) @@ -1212,7 +1232,7 @@ def files_output_mapping(self) -> List[dao.FileOutputMapping]: Returns: """ - defs = self.config_data.get('storage', {}).get('output', {}).get('files', []) + defs = self.config_data.get("storage", {}).get("output", {}).get("files", []) files = [] for file in defs: om = dao.build_dataclass_from_dict(dao.FileOutputMapping, file) diff --git a/src/keboola/component/sync_actions.py b/src/keboola/component/sync_actions.py index 4223c4b..c22f94f 100644 --- a/src/keboola/component/sync_actions.py +++ b/src/keboola/component/sync_actions.py @@ -8,7 +8,7 @@ from abc import ABC from dataclasses import dataclass from enum import Enum -from typing import Union, List, Optional +from typing import List, Optional, Union @dataclass @@ -23,15 +23,14 @@ def __post_init__(self): In other cases exception is thrown and printed via stderr. """ - self.status = 'success' + self.status = "success" def __str__(self): # the None values / attributes will be ignored. - dict_obj = dataclasses.asdict(self, dict_factory=lambda x: {k: v for (k, v) in x if - v is not None}) + dict_obj = dataclasses.asdict(self, dict_factory=lambda x: {k: v for (k, v) in x if v is not None}) # hack to add default status if self.status: - dict_obj['status'] = self.status + dict_obj["status"] = self.status return json.dumps(dict_obj) @@ -56,6 +55,7 @@ class SelectElement(SyncActionResult): """ For select elements. 
Label is optional and value will be used """ + value: str label: Optional[str] = None @@ -77,13 +77,14 @@ def process_sync_action_result(result: Union[None, List[dict], dict, SyncActionR if isinstance(result, SyncActionResult): result_str = str(result) elif isinstance(result, list): - result_str = f'[{", ".join([json.dumps(r) if isinstance(r, dict) else str(r) for r in result])}]' + result_str = f"[{', '.join([json.dumps(r) if isinstance(r, dict) else str(r) for r in result])}]" elif result is None: - result_str = json.dumps({'status': 'success'}) + result_str = json.dumps({"status": "success"}) elif isinstance(result, dict): # for backward compatibility result_str = json.dumps(result) else: - raise ValueError("Result of sync action must be either None or an instance of SyncActionResult " - "or a List[SyncActionResult]") + raise ValueError( + "Result of sync action must be either None or an instance of SyncActionResult or a List[SyncActionResult]" + ) return result_str diff --git a/src/keboola/component/table_schema.py b/src/keboola/component/table_schema.py index 107d22f..31e0ec3 100644 --- a/src/keboola/component/table_schema.py +++ b/src/keboola/component/table_schema.py @@ -1,7 +1,7 @@ -from typing import List, Dict -from typing import Optional, Union -from keboola.component.dao import SupportedDataTypes from dataclasses import dataclass +from typing import Dict, List, Optional, Union + +from keboola.component.dao import SupportedDataTypes @dataclass @@ -9,6 +9,7 @@ class FieldSchema: """ Defines the name and type specifications of a single field in a table """ + name: str base_type: Optional[Union[SupportedDataTypes, str]] = None description: Optional[str] = None @@ -22,6 +23,7 @@ class TableSchema: """ TableSchema class is used to define the schema and metadata of a table. 
""" + name: str fields: List[FieldSchema] primary_keys: Optional[List[str]] = None @@ -79,10 +81,12 @@ def init_table_schema_from_dict(json_table_schema: Dict) -> TableSchema: json_table_schema["fields"] = [FieldSchema(**field) for field in json_table_schema["fields"]] except TypeError as type_error: raise KeyError( - f"When creating the table schema the definition of columns failed : {type_error}") from type_error + f"When creating the table schema the definition of columns failed : {type_error}" + ) from type_error try: ts = TableSchema(**json_table_schema) except TypeError as type_error: raise KeyError( - f"When creating the table schema the definition of the table failed : {type_error}") from type_error + f"When creating the table schema the definition of the table failed : {type_error}" + ) from type_error return ts diff --git a/tests/__init__.py b/tests/__init__.py index a4a24e0..9ef5481 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -2,4 +2,4 @@ import sys # just in case include in path -sys.path.insert(0, str(pathlib.Path(__file__).resolve().parents[1].joinpath('src'))) +sys.path.insert(0, str(pathlib.Path(__file__).resolve().parents[1].joinpath("src"))) diff --git a/tests/test_base.py b/tests/test_base.py index d8e298e..1989657 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -10,7 +10,7 @@ class MockComponent(ComponentBase): def run(self): - return 'run_executed' + return "run_executed" class MockComponentFail(ComponentBase): @@ -19,10 +19,8 @@ def run(self): class TestCommonInterface(unittest.TestCase): - def setUp(self): - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data1') + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data1") os.environ["KBC_DATADIR"] = path def test_default_arguments_pass(self): @@ -30,33 +28,31 @@ def test_default_arguments_pass(self): def test_missing_config_parameters_fail(self): with self.assertRaises(UserException): - 
MockComponent(required_parameters=['missing']) + MockComponent(required_parameters=["missing"]) def test_missing_image_parameters_fail(self): with self.assertRaises(UserException): - c = MockComponent(required_image_parameters=['missing']) + c = MockComponent(required_image_parameters=["missing"]) c.execute_action() def test_missing_action_fail(self): - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data_custom_action') + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data_custom_action") os.environ["KBC_DATADIR"] = path with self.assertRaises(AttributeError): MockComponent().execute_action() def test_run_action_passes(self): - self.assertEqual(MockComponent().execute_action(), 'run_executed') + self.assertEqual(MockComponent().execute_action(), "run_executed") def test_custom_action_passes(self): - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data_custom_action') + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data_custom_action") os.environ["KBC_DATADIR"] = path class CustomActionComponent(ComponentBase): def run(self): pass - @sync_action('custom_action') + @sync_action("custom_action") def test_action(self): return [SelectElement("test")] @@ -68,40 +64,37 @@ def test_run_action_fails_with_user_error(self): def test_system_action_name_fail(self): with self.assertRaises(ValueError): + class ComponentInvalidActionName(ComponentBase): def run(self): pass - @sync_action('run') + @sync_action("run") def test_action(self): pass - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data_custom_action') + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data_custom_action") os.environ["KBC_DATADIR"] = path ComponentInvalidActionName().execute_action() - @patch('sys.stdout', new_callable=StringIO) + @patch("sys.stdout", new_callable=StringIO) def 
test_sync_action_prints_valid_message(self, stdout): - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data_custom_action') + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data_custom_action") os.environ["KBC_DATADIR"] = path class CustomActionComponent(ComponentBase): def run(self): pass - @sync_action('custom_action') + @sync_action("custom_action") def get_columns(self): - return [SelectElement("value_a", "label_a"), - SelectElement("value_b") - ] + return [SelectElement("value_a", "label_a"), SelectElement("value_b")] CustomActionComponent().execute_action() expected = '[{"value": "value_a", "label": "label_a"}, {"value": "value_b", "label": "value_b"}]' self.assertEqual(stdout.getvalue(), expected) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_dao.py b/tests/test_dao.py index 8673806..8a5f9fd 100644 --- a/tests/test_dao.py +++ b/tests/test_dao.py @@ -2,7 +2,6 @@ import os import tempfile import unittest - from datetime import datetime from keboola.component import dao @@ -19,58 +18,39 @@ class TestTableMetadata(unittest.TestCase): - def test_full_column_datatypes_for_manifest_is_valid(self): - column_metadata_full = {"col_1": [{ - "key": "KBC.datatype.basetype", - "value": "NUMERIC" - }, { - "key": "KBC.datatype.nullable", - "value": True - }, { - "key": "KBC.datatype.length", - "value": "39,8" - }, { - "key": "KBC.datatype.default", - "value": 0 - } - ], "col_2": [{ - "key": "KBC.datatype.basetype", - "value": "STRING" - }, { - "key": "KBC.datatype.nullable", - "value": False - }, { - "key": "KBC.datatype.length", - "value": "4000" + column_metadata_full = { + "col_1": [ + {"key": "KBC.datatype.basetype", "value": "NUMERIC"}, + {"key": "KBC.datatype.nullable", "value": True}, + {"key": "KBC.datatype.length", "value": "39,8"}, + {"key": "KBC.datatype.default", "value": 0}, + ], + "col_2": [ + {"key": "KBC.datatype.basetype", "value": 
"STRING"}, + {"key": "KBC.datatype.nullable", "value": False}, + {"key": "KBC.datatype.length", "value": "4000"}, + ], } - ]} tmetadata = TableMetadata() # col 1 - tmetadata.add_column_data_type("col_1", data_type='NUMERIC', nullable=True, length='39,8', default=0) + tmetadata.add_column_data_type("col_1", data_type="NUMERIC", nullable=True, length="39,8", default=0) # col 2 - tmetadata.add_column_data_type("col_2", data_type='STRING', nullable=False, length='4000') + tmetadata.add_column_data_type("col_2", data_type="STRING", nullable=False, length="4000") self.assertDictEqual(tmetadata.get_column_metadata_for_manifest(), column_metadata_full) def test_multi_column_datatypes_for_manifest_is_valid(self): - column_metadata_full = {"col_1": [{ - "key": "KBC.datatype.basetype", - "value": "NUMERIC" - }, - { - "key": "KBC.datatype.nullable", - "value": False - } - ], "col_2": [{ - "key": "KBC.datatype.basetype", - "value": "STRING" - }, - { - "key": "KBC.datatype.nullable", - "value": False + column_metadata_full = { + "col_1": [ + {"key": "KBC.datatype.basetype", "value": "NUMERIC"}, + {"key": "KBC.datatype.nullable", "value": False}, + ], + "col_2": [ + {"key": "KBC.datatype.basetype", "value": "STRING"}, + {"key": "KBC.datatype.nullable", "value": False}, + ], } - ]} tmetadata = TableMetadata() tmetadata.add_column_data_types({"col_1": "NUMERIC", "col_2": "STRING"}) @@ -78,23 +58,16 @@ def test_multi_column_datatypes_for_manifest_is_valid(self): self.assertDictEqual(tmetadata.get_column_metadata_for_manifest(), column_metadata_full) def test_datatype_accepts_enum_for_manifest_valid(self): - column_metadata_full = {"col_1": [{ - "key": "KBC.datatype.basetype", - "value": "NUMERIC" - }, - { - "key": "KBC.datatype.nullable", - "value": False - } - ], "col_2": [{ - "key": "KBC.datatype.basetype", - "value": "STRING" - }, - { - "key": "KBC.datatype.nullable", - "value": False + column_metadata_full = { + "col_1": [ + {"key": "KBC.datatype.basetype", "value": "NUMERIC"}, 
+ {"key": "KBC.datatype.nullable", "value": False}, + ], + "col_2": [ + {"key": "KBC.datatype.basetype", "value": "STRING"}, + {"key": "KBC.datatype.nullable", "value": False}, + ], } - ]} tmetadata = TableMetadata() tmetadata.add_column_data_types({"col_1": SupportedDataTypes.NUMERIC, "col_2": SupportedDataTypes.STRING}) @@ -104,19 +77,14 @@ def test_datatype_accepts_enum_for_manifest_valid(self): def test_invalid_datatype_fails(self): tmetadata = TableMetadata() with self.assertRaises(ValueError): - tmetadata.add_column_data_type('col', 'invalid type') + tmetadata.add_column_data_type("col", "invalid type") def test_table_description_metadata_for_legacy_manifest_is_valid(self): tmetadata = TableMetadata() - table_metadata = [{ - "key": "KBC.description", - "value": "Description of table" - }, - { - "key": "custom_key", - "value": "custom_value" - } + table_metadata = [ + {"key": "KBC.description", "value": "Description of table"}, + {"key": "custom_key", "value": "custom_value"}, ] tmetadata.add_table_description("Description of table") tmetadata.add_table_metadata("custom_key", "custom_value") @@ -125,9 +93,7 @@ def test_table_description_metadata_for_legacy_manifest_is_valid(self): def test_table_description_metadata_for_manifest_is_valid(self): tmetadata = TableMetadata() - table_metadata = {"KBC.description": "Description of table", - "custom_key": "custom_value" - } + table_metadata = {"KBC.description": "Description of table", "custom_key": "custom_value"} tmetadata.add_table_description("Description of table") tmetadata.add_table_metadata("custom_key", "custom_value") @@ -135,15 +101,15 @@ def test_table_description_metadata_for_manifest_is_valid(self): def test_build_from_manifest_valid(self): raw_manifest = { - 'destination': 'some-destination', - 'columns': ['foo', 'bar'], - 'primary_key': ['foo'], - 'incremental': True, - 'metadata': [{'key': 'bar', 'value': 'kochba'}], - 'column_metadata': {'bar': [{'key': 'foo', 'value': 'gogo'}]}, - 
'delete_where_column': 'lilly', - 'delete_where_values': ['a', 'b'], - 'delete_where_operator': 'eq' + "destination": "some-destination", + "columns": ["foo", "bar"], + "primary_key": ["foo"], + "incremental": True, + "metadata": [{"key": "bar", "value": "kochba"}], + "column_metadata": {"bar": [{"key": "foo", "value": "gogo"}]}, + "delete_where_column": "lilly", + "delete_where_values": ["a", "b"], + "delete_where_operator": "eq", } table_metadata = TableMetadata(raw_manifest) @@ -157,151 +123,191 @@ def test_build_from_manifest_valid(self): self.assertEqual(table_metadata.table_metadata, expected_tmetadata.table_metadata) def test_build_manifest_legacy_none_metadata_skipped(self): - table_def = TableDefinition("testDef", "somepath", is_sliced=False, - destination='some-destination', - incremental=True, - table_metadata=TableMetadata() - ) - table_def.table_metadata.add_column_metadata('foo', 'KBC.description', None) + table_def = TableDefinition( + "testDef", + "somepath", + is_sliced=False, + destination="some-destination", + incremental=True, + table_metadata=TableMetadata(), + ) + table_def.table_metadata.add_column_metadata("foo", "KBC.description", None) self.assertDictEqual({}, table_def.table_metadata.column_metadata) class TestTableDefinition(unittest.TestCase): - def test_legacy_order_out(self): - table_def = TableDefinition("testDef", "somepath", False, 'some-destination', ['foo'], ['foo', 'bar'], True, - TableMetadata(), '"', ',', - {'column': 'lilly', 'values': ['a', 'b'], 'operator': 'eq'}, - 'out', False - ) + table_def = TableDefinition( + "testDef", + "somepath", + False, + "some-destination", + ["foo"], + ["foo", "bar"], + True, + TableMetadata(), + '"', + ",", + {"column": "lilly", "values": ["a", "b"], "operator": "eq"}, + "out", + False, + ) self.assertEqual( - {'destination': 'some-destination', 'incremental': True, 'primary_key': ['foo'], 'write_always': False, - 'delimiter': ',', 'enclosure': '"', 'delete_where_column': 'lilly', 
'delete_where_values': ['a', 'b'], - 'delete_where_operator': 'eq', 'columns': ['foo', 'bar']}, - table_def.get_manifest_dictionary(legacy_manifest=True)) + { + "destination": "some-destination", + "incremental": True, + "primary_key": ["foo"], + "write_always": False, + "delimiter": ",", + "enclosure": '"', + "delete_where_column": "lilly", + "delete_where_values": ["a", "b"], + "delete_where_operator": "eq", + "columns": ["foo", "bar"], + }, + table_def.get_manifest_dictionary(legacy_manifest=True), + ) def test_legacy_order_in(self): - table_def = TableDefinition("data", - metadata=TableMetadata({ - "id": "228956", - "key": "KBC.createdBy.component.id", - "value": "keboola.python-transformation", - "provider": "system", - "timestamp": "2017-05-26 00:39:07" - }), - stage='in', - is_sliced=False, - schema=["id", "name", "text"], - created="2015-01-25T01:35:14+0100", - last_change_date="2015-01-25T01:35:14+0100", - last_import_date="2015-01-25T01:35:14+0100") + table_def = TableDefinition( + "data", + metadata=TableMetadata( + { + "id": "228956", + "key": "KBC.createdBy.component.id", + "value": "keboola.python-transformation", + "provider": "system", + "timestamp": "2017-05-26 00:39:07", + } + ), + stage="in", + is_sliced=False, + schema=["id", "name", "text"], + created="2015-01-25T01:35:14+0100", + last_change_date="2015-01-25T01:35:14+0100", + last_import_date="2015-01-25T01:35:14+0100", + ) self.assertEqual( - {'columns': ['id', 'name', 'text'], 'created': '2015-01-25T01:35:14+0100', - 'last_change_date': '2015-01-25T01:35:14+0100', 'last_import_date': '2015-01-25T01:35:14+0100', - 'name': 'data'}, - table_def.get_manifest_dictionary(legacy_manifest=True)) + { + "columns": ["id", "name", "text"], + "created": "2015-01-25T01:35:14+0100", + "last_change_date": "2015-01-25T01:35:14+0100", + "last_import_date": "2015-01-25T01:35:14+0100", + "name": "data", + }, + table_def.get_manifest_dictionary(legacy_manifest=True), + ) def 
test_out_old_to_new_has_headers_sliced(self): - sample_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data1', 'in', 'tables') + sample_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), "data_examples", "data1", "in", "tables" + ) - table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, 'sliced.csv.manifest')) + table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, "sliced.csv.manifest")) manifest = table_def.get_manifest_dictionary() - self.assertEqual(manifest['has_header'], False) + self.assertEqual(manifest["has_header"], False) def test_out_old_to_new_has_headers_columns(self): - sample_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data1', 'in', 'tables') + sample_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), "data_examples", "data1", "in", "tables" + ) - table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, 'sample_output.csv.manifest')) + table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, "sample_output.csv.manifest")) manifest = table_def.get_manifest_dictionary() - self.assertEqual(manifest['has_header'], False) + self.assertEqual(manifest["has_header"], False) def test_out_pkey_and_no_columns_incompatible(self): with self.assertRaises(UserException): - TableDefinition("testDef", "somepath", primary_key=['foo']) + TableDefinition("testDef", "somepath", primary_key=["foo"]) def test_out_legacy_to_new_compatible(self): - sample_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data1', 'in', 'tables') + sample_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), "data_examples", "data1", "in", "tables" + ) - res = TableDefinition.build_from_manifest(os.path.join(sample_path, 'sample_output_header.csv.manifest')) + res = TableDefinition.build_from_manifest(os.path.join(sample_path, 
"sample_output_header.csv.manifest")) res_manifest = res.get_manifest_dictionary() self.assertDictEqual( - {'delimiter': ',', 'enclosure': '"', 'incremental': True, 'primary_key': ['x'], 'write_always': False}, - res_manifest) + {"delimiter": ",", "enclosure": '"', "incremental": True, "primary_key": ["x"], "write_always": False}, + res_manifest, + ) def test_table_manifest_minimal(self): - table_def = TableDefinition("testDef", "somepath", is_sliced=False, - columns=['foo', 'bar'], - primary_key=['foo', 'bar'] - ) + table_def = TableDefinition( + "testDef", "somepath", is_sliced=False, columns=["foo", "bar"], primary_key=["foo", "bar"] + ) self.assertEqual( - {'columns': ['foo', 'bar'], 'delimiter': ',', 'enclosure': '"', 'primary_key': ['foo', 'bar'], - 'write_always': False}, - table_def.get_manifest_dictionary(legacy_manifest=True) + { + "columns": ["foo", "bar"], + "delimiter": ",", + "enclosure": '"', + "primary_key": ["foo", "bar"], + "write_always": False, + }, + table_def.get_manifest_dictionary(legacy_manifest=True), ) def test_table_manifest_missing_key(self): with self.assertRaises(UserException) as e: - TableDefinition("testDef", "somepath", is_sliced=False, - primary_key=['foo', 'bar']) + TableDefinition("testDef", "somepath", is_sliced=False, primary_key=["foo", "bar"]) - self.assertEqual(str(e.exception), - "Primary key column foo not found in schema. Please specify all columns / schema") + self.assertEqual( + str(e.exception), "Primary key column foo not found in schema. 
Please specify all columns / schema" + ) def test_table_manifest_full(self): - table_def = TableDefinition("testDef", "somepath", is_sliced=False, - columns=['foo', 'bar'], - destination='some-destination', - primary_key=['foo'], - incremental=True, - delete_where={'column': 'lilly', - 'values': ['a', 'b'], - 'operator': 'eq'} - ) + table_def = TableDefinition( + "testDef", + "somepath", + is_sliced=False, + columns=["foo", "bar"], + destination="some-destination", + primary_key=["foo"], + incremental=True, + delete_where={"column": "lilly", "values": ["a", "b"], "operator": "eq"}, + ) # add metadata - table_def.table_metadata.add_column_metadata('bar', 'foo', 'gogo') - table_def.table_metadata.add_table_metadata('bar', 'kochba') + table_def.table_metadata.add_column_metadata("bar", "foo", "gogo") + table_def.table_metadata.add_table_metadata("bar", "kochba") self.assertDictEqual( { - 'destination': 'some-destination', - 'columns': ['foo', 'bar'], - 'primary_key': ['foo'], - 'incremental': True, - 'delimiter': ',', - 'enclosure': '"', - 'metadata': [{'key': 'bar', 'value': 'kochba'}], - 'column_metadata': {'bar': [{'key': 'foo', 'value': 'gogo'}]}, - 'delete_where_column': 'lilly', - 'delete_where_values': ['a', 'b'], - 'delete_where_operator': 'eq', - 'write_always': False + "destination": "some-destination", + "columns": ["foo", "bar"], + "primary_key": ["foo"], + "incremental": True, + "delimiter": ",", + "enclosure": '"', + "metadata": [{"key": "bar", "value": "kochba"}], + "column_metadata": {"bar": [{"key": "foo", "value": "gogo"}]}, + "delete_where_column": "lilly", + "delete_where_values": ["a", "b"], + "delete_where_operator": "eq", + "write_always": False, }, - table_def.get_manifest_dictionary('out', legacy_manifest=True) + table_def.get_manifest_dictionary("out", legacy_manifest=True), ) def test_build_from_table_manifest_metadata_equals(self): raw_manifest = { - 'destination': 'some-destination', - 'columns': ['foo', 'bar'], - 'primary_key': ['foo'], - 
'incremental': True, - 'metadata': [{'key': 'bar', 'value': 'kochba'}], - 'column_metadata': {'bar': [{'key': 'foo', 'value': 'gogo'}]}, - 'delete_where_column': 'lilly', - 'delete_where_values': ['a', 'b'], - 'delete_where_operator': 'eq' + "destination": "some-destination", + "columns": ["foo", "bar"], + "primary_key": ["foo"], + "incremental": True, + "metadata": [{"key": "bar", "value": "kochba"}], + "column_metadata": {"bar": [{"key": "foo", "value": "gogo"}]}, + "delete_where_column": "lilly", + "delete_where_values": ["a", "b"], + "delete_where_operator": "eq", } - manifest_file = os.path.join(tempfile.mkdtemp('kbc-test') + 'table.manifest') - with open(manifest_file, 'w') as out_f: + manifest_file = os.path.join(tempfile.mkdtemp("kbc-test") + "table.manifest") + with open(manifest_file, "w") as out_f: json.dump(raw_manifest, out_f) table_def = TableDefinition.build_from_manifest(manifest_file) @@ -317,31 +323,30 @@ def test_build_from_table_manifest_metadata_equals(self): os.remove(manifest_file) def test_build_from_manifest_matching_table_valid_attributes(self): - sample_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data1', 'in', 'tables') + sample_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), "data_examples", "data1", "in", "tables" + ) - table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, 'sample.csv.manifest')) + table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, "sample.csv.manifest")) - expected_table_def = TableDefinition(name='sample.csv', - full_path=os.path.join(sample_path, 'sample.csv'), - is_sliced=False - ) + expected_table_def = TableDefinition( + name="sample.csv", full_path=os.path.join(sample_path, "sample.csv"), is_sliced=False + ) self.assertEqual(expected_table_def.full_path, table_def.full_path) self.assertEqual(expected_table_def.name, table_def.name) self.assertEqual(expected_table_def.is_sliced, table_def.is_sliced) def 
test_build_from_manifest_orphaned_table_valid_attributes(self): - sample_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data1', 'in', 'tables') + sample_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), "data_examples", "data1", "in", "tables" + ) - table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, 'orphaned.csv.manifest')) + table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, "orphaned.csv.manifest")) - expected_table_def = TableDefinition(name='orphaned.csv', - full_path=os.path.join(sample_path, 'orphaned.csv'), - is_sliced=False, - incremental=False - ) + expected_table_def = TableDefinition( + name="orphaned.csv", full_path=os.path.join(sample_path, "orphaned.csv"), is_sliced=False, incremental=False + ) self.assertEqual(expected_table_def.full_path, table_def.full_path) self.assertEqual(expected_table_def.name, table_def.name) @@ -349,74 +354,100 @@ def test_build_from_manifest_orphaned_table_valid_attributes(self): self.assertEqual(expected_table_def.get_manifest_dictionary(), table_def.get_manifest_dictionary()) def test_build_from_manifest_sliced_table_valid_attributes(self): - sample_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data1', 'in', 'tables') + sample_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), "data_examples", "data1", "in", "tables" + ) - table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, 'sliced.csv.manifest')) + table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, "sliced.csv.manifest")) - expected_table_def = TableDefinition(name='sliced.csv', - full_path=os.path.join(sample_path, 'sliced.csv'), - is_sliced=True - ) + expected_table_def = TableDefinition( + name="sliced.csv", full_path=os.path.join(sample_path, "sliced.csv"), is_sliced=True + ) self.assertEqual(expected_table_def.full_path, table_def.full_path) 
self.assertEqual(expected_table_def.name, table_def.name) self.assertEqual(expected_table_def.is_sliced, table_def.is_sliced) def test_build_from_manifest_orphaned_manifest_valid_attributes(self): - sample_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data1', 'in', 'tables') + sample_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), "data_examples", "data1", "in", "tables" + ) - table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, 'orphaned_manifest.csv.manifest')) + table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, "orphaned_manifest.csv.manifest")) - expected_table_def = TableDefinition(name='orphaned_manifest.csv', - full_path=None, - is_sliced=False - ) + expected_table_def = TableDefinition(name="orphaned_manifest.csv", full_path=None, is_sliced=False) self.assertEqual(expected_table_def.full_path, table_def.full_path) self.assertEqual(expected_table_def.name, table_def.name) self.assertEqual(expected_table_def.is_sliced, table_def.is_sliced) def test_build_manifest_legacy_none_metadata_skipped(self): - table_def = TableDefinition("testDef", "somepath", is_sliced=False, - destination='some-destination', - incremental=True, - table_metadata=TableMetadata() - ) - table_def.table_metadata.add_column_metadata('foo', 'KBC.description', None) - expected = {'delimiter': ',', 'destination': 'some-destination', 'enclosure': '"', 'incremental': True, - 'write_always': False} - manifest_dict = table_def.get_manifest_dictionary('out', legacy_manifest=True) + table_def = TableDefinition( + "testDef", + "somepath", + is_sliced=False, + destination="some-destination", + incremental=True, + table_metadata=TableMetadata(), + ) + table_def.table_metadata.add_column_metadata("foo", "KBC.description", None) + expected = { + "delimiter": ",", + "destination": "some-destination", + "enclosure": '"', + "incremental": True, + "write_always": False, + } + manifest_dict = 
table_def.get_manifest_dictionary("out", legacy_manifest=True) self.assertDictEqual(expected, manifest_dict) def test_build_manifest_new_to_legacy_none_metadata_skipped(self): - table_def = TableDefinition("testDef", "somepath", is_sliced=False, - destination='some-destination', - incremental=True, - schema={ - 'foo': ColumnDefinition(metadata={'KBC.description': None, 'some': 'value'})}, - ) - table_def.table_metadata.add_column_metadata('foo', 'KBC.description', '') - expected = {'column_metadata': {}, 'columns': ['foo'], 'delimiter': ',', 'destination': 'some-destination', - 'enclosure': '"', - 'incremental': True, - 'write_always': False} - manifest_dict = table_def.get_manifest_dictionary('out', legacy_manifest=True) + table_def = TableDefinition( + "testDef", + "somepath", + is_sliced=False, + destination="some-destination", + incremental=True, + schema={"foo": ColumnDefinition(metadata={"KBC.description": None, "some": "value"})}, + ) + table_def.table_metadata.add_column_metadata("foo", "KBC.description", "") + expected = { + "column_metadata": {}, + "columns": ["foo"], + "delimiter": ",", + "destination": "some-destination", + "enclosure": '"', + "incremental": True, + "write_always": False, + } + manifest_dict = table_def.get_manifest_dictionary("out", legacy_manifest=True) self.assertDictEqual(expected, manifest_dict) def test_incremental_defaults_to_false(self): source_m = { - 'columns': ['x', 'Sales', 'CompPrice', 'Income', 'Advertising', 'Population', 'Price', 'ShelveLoc', 'Age', - 'Education', 'Urban', 'US', 'High'], - 'delimiter': ',', - 'enclosure': '"', - 'write_always': False + "columns": [ + "x", + "Sales", + "CompPrice", + "Income", + "Advertising", + "Population", + "Price", + "ShelveLoc", + "Age", + "Education", + "Urban", + "US", + "High", + ], + "delimiter": ",", + "enclosure": '"', + "write_always": False, } with tempfile.NamedTemporaryFile(delete=False) as temp_file: temp_file_path = temp_file.name - with open(temp_file_path, 'w') as f: + 
with open(temp_file_path, "w") as f: json.dump(source_m, f) td = TableDefinition.build_from_manifest(temp_file_path) @@ -424,7 +455,7 @@ def test_incremental_defaults_to_false(self): def test_table_manifest_error_destination(self): with self.assertRaises(TypeError): - TableDefinition("testDef", "somepath", is_sliced=False, destination=['foo', 'bar']) + TableDefinition("testDef", "somepath", is_sliced=False, destination=["foo", "bar"]) def test_table_manifest_error_primary_key(self): with self.assertRaises(TypeError): @@ -440,392 +471,434 @@ def test_table_manifest_error_column_delete_1(self): def test_table_manifest_error_column_delete_2(self): with self.assertRaises(TypeError): - TableDefinition("testDef", "somepath", is_sliced=False, delete_where={"column": "a", - "values": "b"}) + TableDefinition("testDef", "somepath", is_sliced=False, delete_where={"column": "a", "values": "b"}) def test_table_manifest_error_column_delete_3(self): with self.assertRaises(TypeError): - TableDefinition("testDef", "somepath", is_sliced=False, delete_where={"column": "a", - "values": "b", - "operator": "c"}) + TableDefinition( + "testDef", "somepath", is_sliced=False, delete_where={"column": "a", "values": "b", "operator": "c"} + ) def test_unsupported_legacy_queue_properties_log(self): - with self.assertLogs(level='WARNING') as log: - td = TableDefinition("testDef", "somepath", - write_always=True, stage='out') + with self.assertLogs(level="WARNING") as log: + td = TableDefinition("testDef", "somepath", write_always=True, stage="out") td.get_manifest_dictionary(legacy_queue=True) self.assertEqual(len(log.output), 1) self.assertEqual(len(log.records), 1) - self.assertIn("WARNING:root:Running on legacy queue " - "some manifest properties will be ignored: ['write_always']", - log.output[0]) + self.assertIn( + "WARNING:root:Running on legacy queue some manifest properties will be ignored: ['write_always']", + log.output[0], + ) def 
test_unsupported_legacy_queue_properties_excluded(self): - td = TableDefinition("testDef", "somepath", - write_always=True, stage='out') + td = TableDefinition("testDef", "somepath", write_always=True, stage="out") manifest = td.get_manifest_dictionary(legacy_queue=True) - self.assertTrue('write_always' not in manifest) + self.assertTrue("write_always" not in manifest) manifest = td.get_manifest_dictionary(legacy_queue=False) - self.assertTrue('write_always' in manifest) + self.assertTrue("write_always" in manifest) def test_new_manifest_full(self): - table_def = TableDefinition("testDef", "somepath", is_sliced=False, - schema=['foo', 'bar'], - destination='some-destination', - has_header=True, - primary_key=['foo'], - incremental=True, - delete_where={'column': 'lilly', - 'values': ['a', 'b'], - 'operator': 'eq'}, - description='some description' - ) + table_def = TableDefinition( + "testDef", + "somepath", + is_sliced=False, + schema=["foo", "bar"], + destination="some-destination", + has_header=True, + primary_key=["foo"], + incremental=True, + delete_where={"column": "lilly", "values": ["a", "b"], "operator": "eq"}, + description="some description", + ) # add metadata - table_def.table_metadata.add_column_metadata('bar', 'foo', 'gogo') - table_def.table_metadata.add_table_metadata('bar', 'kochba') + table_def.table_metadata.add_column_metadata("bar", "foo", "gogo") + table_def.table_metadata.add_table_metadata("bar", "kochba") self.maxDiff = None - os.environ['KBC_DATA_TYPE_SUPPORT'] = "authoritative" - - self.assertDictEqual({ - 'destination': 'some-destination', - 'incremental': True, - 'write_always': False, - 'delimiter': ',', - 'enclosure': '"', - 'manifest_type': 'out', - 'has_header': True, - 'delete_where_column': 'lilly', - 'delete_where_values': ['a', 'b'], - 'delete_where_operator': 'eq', - 'table_metadata': {'KBC.description': "some description", 'bar': 'kochba'}, - 'schema': [ - {'name': 'foo', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': 
True, 'primary_key': True}, - {'name': 'bar', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}] - }, - table_def.get_manifest_dictionary('out') - ) - - del os.environ['KBC_DATA_TYPE_SUPPORT'] + os.environ["KBC_DATA_TYPE_SUPPORT"] = "authoritative" + + self.assertDictEqual( + { + "destination": "some-destination", + "incremental": True, + "write_always": False, + "delimiter": ",", + "enclosure": '"', + "manifest_type": "out", + "has_header": True, + "delete_where_column": "lilly", + "delete_where_values": ["a", "b"], + "delete_where_operator": "eq", + "table_metadata": {"KBC.description": "some description", "bar": "kochba"}, + "schema": [ + {"name": "foo", "data_type": {"base": {"type": "STRING"}}, "nullable": True, "primary_key": True}, + {"name": "bar", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + ], + }, + table_def.get_manifest_dictionary("out"), + ) + + del os.environ["KBC_DATA_TYPE_SUPPORT"] def test_new_manifest_native_types(self): - table_def = TableDefinition("testDef", "somepath", is_sliced=False, - stage='out', - schema=['foo', 'bar'], - destination='some-destination', - has_header=True, - primary_key=['foo'], - incremental=True, - delete_where={'column': 'lilly', - 'values': ['a', 'b'], - 'operator': 'eq'} - ) + table_def = TableDefinition( + "testDef", + "somepath", + is_sliced=False, + stage="out", + schema=["foo", "bar"], + destination="some-destination", + has_header=True, + primary_key=["foo"], + incremental=True, + delete_where={"column": "lilly", "values": ["a", "b"], "operator": "eq"}, + ) # update column - table_def.update_column('foo', - ColumnDefinition(data_types=BaseType.integer(length='20'))) + table_def.update_column("foo", ColumnDefinition(data_types=BaseType.integer(length="20"))) # add new columns - table_def.add_column('note', ColumnDefinition(nullable=False)) - table_def.add_column('test1') - table_def.add_columns(['test2', 'test3', 'test4']) + table_def.add_column("note", 
ColumnDefinition(nullable=False)) + table_def.add_column("test1") + table_def.add_columns(["test2", "test3", "test4"]) # add new typed column - table_def.add_column('id', ColumnDefinition(primary_key=True, - data_types=BaseType.numeric(length='200'))) + table_def.add_column("id", ColumnDefinition(primary_key=True, data_types=BaseType.numeric(length="200"))) table_def.add_columns( - {'new2': ColumnDefinition(data_types=BaseType.float(length='200')), - 'new3': ColumnDefinition(data_types=BaseType.date())}) + { + "new2": ColumnDefinition(data_types=BaseType.float(length="200")), + "new3": ColumnDefinition(data_types=BaseType.date()), + } + ) # delete columns - table_def.delete_column('bar') - table_def.delete_columns(['test2', 'test3']) + table_def.delete_column("bar") + table_def.delete_columns(["test2", "test3"]) self.maxDiff = None - os.environ['KBC_DATA_TYPE_SUPPORT'] = "authoritative" - - self.assertDictEqual({ - 'destination': 'some-destination', - 'incremental': True, - 'write_always': False, - 'delimiter': ',', - 'enclosure': '"', - 'manifest_type': 'out', - 'has_header': True, - 'delete_where_column': 'lilly', - 'delete_where_values': ['a', 'b'], 'delete_where_operator': 'eq', - 'schema': [{'name': 'foo', 'data_type': {'base': {'type': 'INTEGER', 'length': '20'}}, 'nullable': True}, - {'name': 'note', 'data_type': {'base': {'type': 'STRING'}}}, - {'name': 'test1', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'test4', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'id', 'data_type': {'base': {'type': 'NUMERIC', 'length': '200'}}, 'nullable': True, - 'primary_key': True}, - {'name': 'new2', 'data_type': {'base': {'type': 'FLOAT', 'length': '200'}}, 'nullable': True}, - {'name': 'new3', 'data_type': {'base': {'type': 'DATE'}}, 'nullable': True}]}, - table_def.get_manifest_dictionary('out') - ) - - del os.environ['KBC_DATA_TYPE_SUPPORT'] + os.environ["KBC_DATA_TYPE_SUPPORT"] = "authoritative" + + 
self.assertDictEqual( + { + "destination": "some-destination", + "incremental": True, + "write_always": False, + "delimiter": ",", + "enclosure": '"', + "manifest_type": "out", + "has_header": True, + "delete_where_column": "lilly", + "delete_where_values": ["a", "b"], + "delete_where_operator": "eq", + "schema": [ + {"name": "foo", "data_type": {"base": {"type": "INTEGER", "length": "20"}}, "nullable": True}, + {"name": "note", "data_type": {"base": {"type": "STRING"}}}, + {"name": "test1", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "test4", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + { + "name": "id", + "data_type": {"base": {"type": "NUMERIC", "length": "200"}}, + "nullable": True, + "primary_key": True, + }, + {"name": "new2", "data_type": {"base": {"type": "FLOAT", "length": "200"}}, "nullable": True}, + {"name": "new3", "data_type": {"base": {"type": "DATE"}}, "nullable": True}, + ], + }, + table_def.get_manifest_dictionary("out"), + ) + + del os.environ["KBC_DATA_TYPE_SUPPORT"] def test_new_manifest_base_type_columns(self): - table_def = TableDefinition("testDef", "somepath", is_sliced=False, - destination='some-destination', - incremental=True, - delete_where={'column': 'lilly', - 'values': ['a', 'b'], - 'operator': 'eq'} - ) + table_def = TableDefinition( + "testDef", + "somepath", + is_sliced=False, + destination="some-destination", + incremental=True, + delete_where={"column": "lilly", "values": ["a", "b"], "operator": "eq"}, + ) # add new columns - table_def.add_column('string1', ColumnDefinition(BaseType.string())) - table_def.schema["string1"].add_datatype('redshift', DataType(dtype='STRING', length='255')) + table_def.add_column("string1", ColumnDefinition(BaseType.string())) + table_def.schema["string1"].add_datatype("redshift", DataType(dtype="STRING", length="255")) - table_def.add_column('numeric', ColumnDefinition(BaseType.numeric())) + table_def.add_column("numeric", 
ColumnDefinition(BaseType.numeric())) - table_def.add_column('id', ColumnDefinition(primary_key=True, data_types=BaseType.integer(length='200'))) + table_def.add_column("id", ColumnDefinition(primary_key=True, data_types=BaseType.integer(length="200"))) - os.environ['KBC_DATA_TYPE_SUPPORT'] = "hints" + os.environ["KBC_DATA_TYPE_SUPPORT"] = "hints" self.assertDictEqual( - {'destination': 'some-destination', 'incremental': True, 'write_always': False, 'delimiter': ',', - 'enclosure': '"', 'manifest_type': 'out', 'has_header': True, 'delete_where_column': 'lilly', - 'delete_where_values': ['a', 'b'], 'delete_where_operator': 'eq', 'schema': [{'name': 'string1', - 'data_type': { - 'base': { - 'type': 'STRING'}, - 'redshift': { - 'type': 'STRING', - 'length': '255'} - }, 'nullable': True}, - {'name': 'numeric', - 'data_type': { - 'base': { - 'type': 'NUMERIC'} - }, - 'nullable': True}, - {'name': 'id', - 'data_type': { - 'base': { - 'type': 'INTEGER', - 'length': '200'}}, - 'nullable': True, - 'primary_key': True}]}, - table_def.get_manifest_dictionary('out') - ) - - del os.environ['KBC_DATA_TYPE_SUPPORT'] + { + "destination": "some-destination", + "incremental": True, + "write_always": False, + "delimiter": ",", + "enclosure": '"', + "manifest_type": "out", + "has_header": True, + "delete_where_column": "lilly", + "delete_where_values": ["a", "b"], + "delete_where_operator": "eq", + "schema": [ + { + "name": "string1", + "data_type": {"base": {"type": "STRING"}, "redshift": {"type": "STRING", "length": "255"}}, + "nullable": True, + }, + {"name": "numeric", "data_type": {"base": {"type": "NUMERIC"}}, "nullable": True}, + { + "name": "id", + "data_type": {"base": {"type": "INTEGER", "length": "200"}}, + "nullable": True, + "primary_key": True, + }, + ], + }, + table_def.get_manifest_dictionary("out"), + ) + + del os.environ["KBC_DATA_TYPE_SUPPORT"] def test_new_manifest_column_methods(self): - table_def = TableDefinition("testDef", "somepath", - stage='out', - 
schema=['foo', 'bar', 'to_delete'], - destination='some-destination', - has_header=True, - primary_key=['foo'], - incremental=True, - delete_where={'column': 'lilly', - 'values': ['a', 'b'], - 'operator': 'eq'} - ) + table_def = TableDefinition( + "testDef", + "somepath", + stage="out", + schema=["foo", "bar", "to_delete"], + destination="some-destination", + has_header=True, + primary_key=["foo"], + incremental=True, + delete_where={"column": "lilly", "values": ["a", "b"], "operator": "eq"}, + ) - table_def.add_column('note', ColumnDefinition(nullable=False)) + table_def.add_column("note", ColumnDefinition(nullable=False)) - table_def.update_column('foo', ColumnDefinition(data_types=BaseType.integer())) - table_def.schema['bar'].add_datatype('redshift', DataType(dtype='STRING', length='255')) + table_def.update_column("foo", ColumnDefinition(data_types=BaseType.integer())) + table_def.schema["bar"].add_datatype("redshift", DataType(dtype="STRING", length="255")) - table_def.delete_column('to_delete') + table_def.delete_column("to_delete") self.assertDictEqual( - {'destination': 'some-destination', 'incremental': True, 'write_always': False, 'delimiter': ',', - 'enclosure': '"', 'manifest_type': 'out', 'has_header': True, 'delete_where_column': 'lilly', - 'delete_where_values': ['a', 'b'], 'delete_where_operator': 'eq', - 'schema': [{'name': 'foo', 'data_type': {'base': {'type': 'INTEGER'}}, 'nullable': True}, - {'name': 'bar', - 'data_type': { - 'base': { - 'type': 'STRING'}, - 'redshift': { - 'type': 'STRING', - 'length': '255'}}, - 'nullable': True}, - {'name': 'note', 'data_type': {'base': {'type': 'STRING'}}}]}, - table_def.get_manifest_dictionary()) + { + "destination": "some-destination", + "incremental": True, + "write_always": False, + "delimiter": ",", + "enclosure": '"', + "manifest_type": "out", + "has_header": True, + "delete_where_column": "lilly", + "delete_where_values": ["a", "b"], + "delete_where_operator": "eq", + "schema": [ + {"name": "foo", 
"data_type": {"base": {"type": "INTEGER"}}, "nullable": True}, + { + "name": "bar", + "data_type": {"base": {"type": "STRING"}, "redshift": {"type": "STRING", "length": "255"}}, + "nullable": True, + }, + {"name": "note", "data_type": {"base": {"type": "STRING"}}}, + ], + }, + table_def.get_manifest_dictionary(), + ) def test_build_from_manifest_full_input(self): - sample_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data_full_input_manifest', 'in', 'tables') + sample_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), "data_examples", "data_full_input_manifest", "in", "tables" + ) - table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, 'sample.csv.manifest')) + table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, "sample.csv.manifest")) - self.assertEqual(table_def.id, 'in.c-main.test') - self.assertEqual(table_def.uri, 'https://connection.keboola.com//v2/storage/tables/in.c-main.test') - self.assertEqual(table_def.name, 'sample.csv') + self.assertEqual(table_def.id, "in.c-main.test") + self.assertEqual(table_def.uri, "https://connection.keboola.com//v2/storage/tables/in.c-main.test") + self.assertEqual(table_def.name, "sample.csv") self.assertEqual(table_def.created, datetime.strptime("2015-11-02T09:11:37+0100", "%Y-%m-%dT%H:%M:%S%z")) self.assertEqual(table_def.last_change_date, "2015-11-02T09:11:37+0100") self.assertEqual(table_def.last_import_date, "2015-11-02T09:11:37+0100") self.assertEqual(table_def.rows_count, 400) self.assertEqual(table_def.data_size_bytes, 81920) self.assertEqual(table_def.is_alias, False) - self.assertEqual(table_def._indexed_columns, ['x']) - self.assertEqual(table_def.primary_key, ['x']) - self.assertEqual(table_def.table_metadata.column_metadata, {'x': {'foo': 'gogo'}}) - self.assertEqual(table_def.column_names, ['x', 'Sales', 'CompPrice', 'Income', 'Advertising', - 'Population', 'Price', 'ShelveLoc', 'Age', 'Education', - 'Urban', 'US', 
'High']) + self.assertEqual(table_def._indexed_columns, ["x"]) + self.assertEqual(table_def.primary_key, ["x"]) + self.assertEqual(table_def.table_metadata.column_metadata, {"x": {"foo": "gogo"}}) + self.assertEqual( + table_def.column_names, + [ + "x", + "Sales", + "CompPrice", + "Income", + "Advertising", + "Population", + "Price", + "ShelveLoc", + "Age", + "Education", + "Urban", + "US", + "High", + ], + ) def test_build_from_manifest_full_output(self): - sample_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data_full_output_manifest', 'in', 'tables') + sample_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), "data_examples", "data_full_output_manifest", "in", "tables" + ) - table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, 'sample_output.csv.manifest')) + table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, "sample_output.csv.manifest")) - self.assertEqual(table_def.name, 'sample_output.csv') - self.assertEqual(table_def.destination, 'out.c-adform_masterdata-processor-test.sample_output') - self.assertEqual(table_def.column_names, ['x', 'Sales', 'CompPrice', 'Income', 'Advertising', - 'Population', 'Price', 'ShelveLoc', 'Age', 'Education', - 'Urban', 'US', 'High']) + self.assertEqual(table_def.name, "sample_output.csv") + self.assertEqual(table_def.destination, "out.c-adform_masterdata-processor-test.sample_output") + self.assertEqual( + table_def.column_names, + [ + "x", + "Sales", + "CompPrice", + "Income", + "Advertising", + "Population", + "Price", + "ShelveLoc", + "Age", + "Education", + "Urban", + "US", + "High", + ], + ) self.assertEqual(table_def.incremental, True) - self.assertEqual(table_def.primary_key, ['x']) + self.assertEqual(table_def.primary_key, ["x"]) self.assertEqual(table_def.write_always, True) - self.assertEqual(table_def.delimiter, '\t') - self.assertEqual(table_def.enclosure, '\'') - 
self.assertEqual(table_def.table_metadata.column_metadata, {'x': {'foo': 'gogo'}}) - self.assertEqual(table_def.delete_where_column, 'Advertising') - self.assertEqual(table_def.delete_where_values, ['Video', 'Search']) - self.assertEqual(table_def.delete_where_operator, 'eq') + self.assertEqual(table_def.delimiter, "\t") + self.assertEqual(table_def.enclosure, "'") + self.assertEqual(table_def.table_metadata.column_metadata, {"x": {"foo": "gogo"}}) + self.assertEqual(table_def.delete_where_column, "Advertising") + self.assertEqual(table_def.delete_where_values, ["Video", "Search"]) + self.assertEqual(table_def.delete_where_operator, "eq") class TestFileDefinition(unittest.TestCase): - def setUp(self): - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data1') + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data1") os.environ["KBC_DATADIR"] = path def test_file_manifest_minimal(self): - file_path = os.path.join(os.environ["KBC_DATADIR"], 'in', 'files', '151971405_21702.strip.print.gif') + file_path = os.path.join(os.environ["KBC_DATADIR"], "in", "files", "151971405_21702.strip.print.gif") file_def = FileDefinition(file_path) self.assertDictEqual( - { - 'is_public': False, - 'is_permanent': False, - 'is_encrypted': False, - 'notify': False}, - file_def.get_manifest_dictionary() + {"is_public": False, "is_permanent": False, "is_encrypted": False, "notify": False}, + file_def.get_manifest_dictionary(), ) def test_file_manifest_full(self): - file_def = FileDefinition("123_test_Def", is_permanent=True, - is_encrypted=True, - is_public=True, - tags=['foo', 'bar'], - notify=True, - id="123" - ) + file_def = FileDefinition( + "123_test_Def", + is_permanent=True, + is_encrypted=True, + is_public=True, + tags=["foo", "bar"], + notify=True, + id="123", + ) self.assertDictEqual( - {'tags': ['foo', 'bar'], - 'is_public': True, - 'is_permanent': True, - 'is_encrypted': True, - 'notify': True, - }, - 
file_def.get_manifest_dictionary('out') + { + "tags": ["foo", "bar"], + "is_public": True, + "is_permanent": True, + "is_encrypted": True, + "notify": True, + }, + file_def.get_manifest_dictionary("out"), ) - self.assertEqual(file_def.name, 'test_Def') - self.assertEqual(file_def.id, '123') + self.assertEqual(file_def.name, "test_Def") + self.assertEqual(file_def.id, "123") def test_file_output_manifest_ignores_unrecognized(self): - file_path = os.path.join(os.environ["KBC_DATADIR"], 'in', 'files', - '151971405_21702.strip.print.gif.manifest') + file_path = os.path.join(os.environ["KBC_DATADIR"], "in", "files", "151971405_21702.strip.print.gif.manifest") file_def = FileDefinition.build_from_manifest(file_path) # change stage - file_def.stage = 'out' + file_def.stage = "out" self.assertDictEqual( - {'is_encrypted': True, 'is_permanent': False, - 'is_public': False, 'notify': False, 'tags': ['dilbert']}, - file_def.get_manifest_dictionary() + {"is_encrypted": True, "is_permanent": False, "is_public": False, "notify": False, "tags": ["dilbert"]}, + file_def.get_manifest_dictionary(), ) def test_build_from_manifest_matching_file_valid_attributes(self): - sample_path = os.path.join(os.environ["KBC_DATADIR"], 'in', 'files', '151971405_21702.strip.print.gif') - manifest_path = sample_path + '.manifest' - file_def = FileDefinition.build_from_manifest( - manifest_path) + sample_path = os.path.join(os.environ["KBC_DATADIR"], "in", "files", "151971405_21702.strip.print.gif") + manifest_path = sample_path + ".manifest" + file_def = FileDefinition.build_from_manifest(manifest_path) expected_manifest = json.load(open(manifest_path)) self.assertEqual(sample_path, file_def.full_path) - self.assertEqual(expected_manifest['name'], file_def.name) - self.assertEqual(datetime.strptime(expected_manifest['created'], dao.KBC_DEFAULT_TIME_FORMAT), - file_def.created) - self.assertEqual(expected_manifest['is_public'], file_def.is_public) - self.assertEqual(expected_manifest['is_encrypted'], 
file_def.is_encrypted) - self.assertEqual(expected_manifest['tags'], file_def.tags) - self.assertEqual(expected_manifest['max_age_days'], file_def.max_age_days) - self.assertEqual(expected_manifest['size_bytes'], file_def.size_bytes) + self.assertEqual(expected_manifest["name"], file_def.name) + self.assertEqual(datetime.strptime(expected_manifest["created"], dao.KBC_DEFAULT_TIME_FORMAT), file_def.created) + self.assertEqual(expected_manifest["is_public"], file_def.is_public) + self.assertEqual(expected_manifest["is_encrypted"], file_def.is_encrypted) + self.assertEqual(expected_manifest["tags"], file_def.tags) + self.assertEqual(expected_manifest["max_age_days"], file_def.max_age_days) + self.assertEqual(expected_manifest["size_bytes"], file_def.size_bytes) def test_build_from_manifest_nonexistentfile_fails(self): - sample_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data1', 'in', 'files') + sample_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data1", "in", "files") with self.assertRaises(ValueError): - FileDefinition.build_from_manifest(os.path.join(sample_path, 'orphaned.csv.manifest')) + FileDefinition.build_from_manifest(os.path.join(sample_path, "orphaned.csv.manifest")) def test_user_tags(self): - all_tags = ['foo', - 'bar', - 'componentId: 1234', - 'configurationId: 12345', - 'configurationRowId: 12345', - 'runId: 22123', - 'branchId: 312321' - ] - file_def = FileDefinition("123_test_Def", is_permanent=True, - is_encrypted=True, - is_public=True, - tags=all_tags, - notify=True - ) + all_tags = [ + "foo", + "bar", + "componentId: 1234", + "configurationId: 12345", + "configurationRowId: 12345", + "runId: 22123", + "branchId: 312321", + ] + file_def = FileDefinition( + "123_test_Def", is_permanent=True, is_encrypted=True, is_public=True, tags=all_tags, notify=True + ) self.assertDictEqual( - {'tags': all_tags, - 'is_public': True, - 'is_permanent': True, - 'is_encrypted': True, - 
'notify': True - }, - file_def.get_manifest_dictionary() + {"tags": all_tags, "is_public": True, "is_permanent": True, "is_encrypted": True, "notify": True}, + file_def.get_manifest_dictionary(), ) - self.assertEqual(['foo', 'bar'], file_def.user_tags) + self.assertEqual(["foo", "bar"], file_def.user_tags) def test_all_tags(self): - all_tags = ['foo', - 'bar', - 'componentId: 1234', - 'configurationId: 12345', - 'configurationRowId: 12345', - 'runId: 22123', - 'branchId: 312321' - ] - file_def = FileDefinition("123_test_Def", - tags=all_tags - ) + all_tags = [ + "foo", + "bar", + "componentId: 1234", + "configurationId: 12345", + "configurationRowId: 12345", + "runId: 22123", + "branchId: 312321", + ] + file_def = FileDefinition("123_test_Def", tags=all_tags) self.assertEqual(all_tags, file_def.tags) def test_build_from_manifest_s3_staging(self): - sample_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data_storage_staging_s3', 'in', 'tables') + sample_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), "data_examples", "data_storage_staging_s3", "in", "tables" + ) - table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, 'sample.csv.manifest')) + table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, "sample.csv.manifest")) self.assertEqual(table_def.s3_staging.bucket, "test") self.assertEqual(table_def.s3_staging.credentials_access_key_id, "ASDF") @@ -836,17 +909,20 @@ def test_build_from_manifest_s3_staging(self): self.assertEqual(table_def.s3_staging.region, "eu-central-1") def test_build_from_manifest_abs_staging(self): - sample_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data_storage_staging_abs', 'in', 'tables') + sample_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), "data_examples", "data_storage_staging_abs", "in", "tables" + ) - table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, 
'sample.csv.manifest')) + table_def = TableDefinition.build_from_manifest(os.path.join(sample_path, "sample.csv.manifest")) self.assertEqual(table_def.abs_staging.container, "exp-2-export-test-test") self.assertEqual(table_def.abs_staging.credentials_expiration, "2020-08-27T22:42:08+0200") self.assertEqual( table_def.abs_staging.credentials_sas_connection_string, - ("BlobEndpoint=https://asdf.blob.core.windows.net;SharedAccessSignature=" - "sv=2017-11-09&sr=c&st=2020-08-27T08:42:08Z&se=2020-08-27T20:42:08Z&sp=rl&sig=UJW4DPh%2Baaaaaaaaaa") + ( + "BlobEndpoint=https://asdf.blob.core.windows.net;SharedAccessSignature=" + "sv=2017-11-09&sr=c&st=2020-08-27T08:42:08Z&se=2020-08-27T20:42:08Z&sp=rl&sig=UJW4DPh%2Baaaaaaaaaa" + ), ) self.assertEqual(table_def.abs_staging.is_sliced, True) self.assertEqual(table_def.abs_staging.name, "12345.csv.gzmanifest") diff --git a/tests/test_interface.py b/tests/test_interface.py index af0b629..30e0ed1 100644 --- a/tests/test_interface.py +++ b/tests/test_interface.py @@ -6,43 +6,41 @@ class TestCommonInterface(unittest.TestCase): - def setUp(self): - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data1') + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data1") os.environ["KBC_DATADIR"] = path - os.environ['KBC_STACKID'] = 'connection.keboola.com' - os.environ['KBC_PROJECT_FEATURE_GATES'] = 'queuev2' + os.environ["KBC_STACKID"] = "connection.keboola.com" + os.environ["KBC_PROJECT_FEATURE_GATES"] = "queuev2" def test_all_env_variables_initialized(self): # set all variables - os.environ['KBC_RUNID'] = 'KBC_RUNID' - os.environ['KBC_PROJECTID'] = 'KBC_PROJECTID' - os.environ['KBC_STACKID'] = 'KBC_STACKID' - os.environ['KBC_CONFIGID'] = 'KBC_CONFIGID' - os.environ['KBC_COMPONENTID'] = 'KBC_COMPONENTID' - os.environ['KBC_PROJECTNAME'] = 'KBC_PROJECTNAME' - os.environ['KBC_TOKENID'] = 'KBC_TOKENID' - os.environ['KBC_TOKENDESC'] = 'KBC_TOKENDESC' - os.environ['KBC_TOKEN'] 
= 'KBC_TOKEN' - os.environ['KBC_URL'] = 'KBC_URL' - os.environ['KBC_LOGGER_ADDR'] = 'KBC_LOGGER_ADDR' - os.environ['KBC_LOGGER_PORT'] = 'KBC_LOGGER_PORT' + os.environ["KBC_RUNID"] = "KBC_RUNID" + os.environ["KBC_PROJECTID"] = "KBC_PROJECTID" + os.environ["KBC_STACKID"] = "KBC_STACKID" + os.environ["KBC_CONFIGID"] = "KBC_CONFIGID" + os.environ["KBC_COMPONENTID"] = "KBC_COMPONENTID" + os.environ["KBC_PROJECTNAME"] = "KBC_PROJECTNAME" + os.environ["KBC_TOKENID"] = "KBC_TOKENID" + os.environ["KBC_TOKENDESC"] = "KBC_TOKENDESC" + os.environ["KBC_TOKEN"] = "KBC_TOKEN" + os.environ["KBC_URL"] = "KBC_URL" + os.environ["KBC_LOGGER_ADDR"] = "KBC_LOGGER_ADDR" + os.environ["KBC_LOGGER_PORT"] = "KBC_LOGGER_PORT" ci = CommonInterface() self.assertEqual(ci.environment_variables.data_dir, os.environ["KBC_DATADIR"]) - self.assertEqual(ci.environment_variables.run_id, 'KBC_RUNID') - self.assertEqual(ci.environment_variables.project_id, 'KBC_PROJECTID') - self.assertEqual(ci.environment_variables.stack_id, 'KBC_STACKID') - self.assertEqual(ci.environment_variables.config_id, 'KBC_CONFIGID') - self.assertEqual(ci.environment_variables.component_id, 'KBC_COMPONENTID') - self.assertEqual(ci.environment_variables.project_name, 'KBC_PROJECTNAME') - self.assertEqual(ci.environment_variables.token_id, 'KBC_TOKENID') - self.assertEqual(ci.environment_variables.token_desc, 'KBC_TOKENDESC') - self.assertEqual(ci.environment_variables.token, 'KBC_TOKEN') - self.assertEqual(ci.environment_variables.url, 'KBC_URL') - self.assertEqual(ci.environment_variables.logger_addr, 'KBC_LOGGER_ADDR') - self.assertEqual(ci.environment_variables.logger_port, 'KBC_LOGGER_PORT') + self.assertEqual(ci.environment_variables.run_id, "KBC_RUNID") + self.assertEqual(ci.environment_variables.project_id, "KBC_PROJECTID") + self.assertEqual(ci.environment_variables.stack_id, "KBC_STACKID") + self.assertEqual(ci.environment_variables.config_id, "KBC_CONFIGID") + self.assertEqual(ci.environment_variables.component_id, 
"KBC_COMPONENTID") + self.assertEqual(ci.environment_variables.project_name, "KBC_PROJECTNAME") + self.assertEqual(ci.environment_variables.token_id, "KBC_TOKENID") + self.assertEqual(ci.environment_variables.token_desc, "KBC_TOKENDESC") + self.assertEqual(ci.environment_variables.token, "KBC_TOKEN") + self.assertEqual(ci.environment_variables.url, "KBC_URL") + self.assertEqual(ci.environment_variables.logger_addr, "KBC_LOGGER_ADDR") + self.assertEqual(ci.environment_variables.logger_port, "KBC_LOGGER_PORT") def test_empty_required_params_pass(self): return True @@ -70,234 +68,227 @@ def test_unknown_config_tables_input_mapping_properties_pass(self): def test_missing_dir(self): os.environ["KBC_DATADIR"] = "asdf" - with self.assertRaisesRegex( - ValueError, - "The data directory does not exist"): + with self.assertRaisesRegex(ValueError, "The data directory does not exist"): CommonInterface() # ########## PROPERTIES def test_missing_config(self): - os.environ["KBC_DATADIR"] = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples') - with self.assertRaisesRegex( - ValueError, - "Configuration file config.json not found"): + os.environ["KBC_DATADIR"] = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples") + with self.assertRaisesRegex(ValueError, "Configuration file config.json not found"): ci = CommonInterface() ci.configuration def test_get_data_dir(self): ci = CommonInterface() - self.assertEqual(os.getenv('KBC_DATADIR', ''), ci.data_folder_path) + self.assertEqual(os.getenv("KBC_DATADIR", ""), ci.data_folder_path) def test_get_tables_out_dir(self): ci = CommonInterface() - tables_out = os.path.join(os.getenv('KBC_DATADIR', ''), 'out', 'tables') + tables_out = os.path.join(os.getenv("KBC_DATADIR", ""), "out", "tables") self.assertEqual(tables_out, ci.tables_out_path) def test_get_tables_in_dir(self): ci = CommonInterface() - tables_out = os.path.join(os.getenv('KBC_DATADIR', ''), 'in', 'files') + tables_out = 
os.path.join(os.getenv("KBC_DATADIR", ""), "in", "files") self.assertEqual(tables_out, ci.files_in_path) def test_get_files_out_dir(self): ci = CommonInterface() - tables_out = os.path.join(os.getenv('KBC_DATADIR', ''), 'out', 'files') + tables_out = os.path.join(os.getenv("KBC_DATADIR", ""), "out", "files") self.assertEqual(tables_out, ci.files_out_path) def test_get_files_in_dir(self): ci = CommonInterface() - tables_out = os.path.join(os.getenv('KBC_DATADIR', ''), 'in', 'tables') + tables_out = os.path.join(os.getenv("KBC_DATADIR", ""), "in", "tables") self.assertEqual(tables_out, ci.tables_in_path) def test_legacy_queue(self): - os.environ['KBC_PROJECT_FEATURE_GATES'] = '' + os.environ["KBC_PROJECT_FEATURE_GATES"] = "" ci = CommonInterface() # with no KBC_PROJECT_FEATURE_GATES env default to legacy queue self.assertEqual(True, ci.is_legacy_queue) # otherwise check for queuev2 - os.environ['KBC_PROJECT_FEATURE_GATES'] = 'queuev2;someotherfeature' + os.environ["KBC_PROJECT_FEATURE_GATES"] = "queuev2;someotherfeature" ci = CommonInterface() self.assertEqual(False, ci.is_legacy_queue) # If feature gates exists but doesn't contain queuev2 it's old queue - os.environ['KBC_PROJECT_FEATURE_GATES'] = 'feature1;someotherfeature' + os.environ["KBC_PROJECT_FEATURE_GATES"] = "feature1;someotherfeature" ci = CommonInterface() self.assertEqual(True, ci.is_legacy_queue) # when running locally default to queue v2 - os.environ['KBC_STACKID'] = '' + os.environ["KBC_STACKID"] = "" ci = CommonInterface() self.assertEqual(False, ci.is_legacy_queue) def test_create_and_write_table_manifest_deprecated(self): ci = CommonInterface() # create table def - out_table = ci.create_out_table_definition('some-table.csv', - columns=['foo', 'bar'], - destination='some-destination', - primary_key=['foo'], - incremental=True, - delete_where={'column': 'lilly', - 'values': ['a', 'b'], - 'operator': 'eq'} - ) - out_table.table_metadata.add_table_metadata('bar', 'kochba') - 
out_table.table_metadata.add_column_metadata('bar', 'foo', 'gogo') + out_table = ci.create_out_table_definition( + "some-table.csv", + columns=["foo", "bar"], + destination="some-destination", + primary_key=["foo"], + incremental=True, + delete_where={"column": "lilly", "values": ["a", "b"], "operator": "eq"}, + ) + out_table.table_metadata.add_table_metadata("bar", "kochba") + out_table.table_metadata.add_column_metadata("bar", "foo", "gogo") # write ci.write_tabledef_manifest(out_table) - manifest_filename = out_table.full_path + '.manifest' + manifest_filename = out_table.full_path + ".manifest" with open(manifest_filename) as manifest_file: config = json.load(manifest_file) self.assertEqual( { - 'destination': 'some-destination', - 'columns': ['foo', 'bar'], - 'primary_key': ['foo'], - 'incremental': True, - 'delimiter': ',', - 'enclosure': '"', - 'metadata': [{'key': 'bar', 'value': 'kochba'}], - 'column_metadata': {'bar': [{'key': 'foo', 'value': 'gogo'}]}, - 'delete_where_column': 'lilly', - 'delete_where_values': ['a', 'b'], - 'delete_where_operator': 'eq', - 'write_always': False + "destination": "some-destination", + "columns": ["foo", "bar"], + "primary_key": ["foo"], + "incremental": True, + "delimiter": ",", + "enclosure": '"', + "metadata": [{"key": "bar", "value": "kochba"}], + "column_metadata": {"bar": [{"key": "foo", "value": "gogo"}]}, + "delete_where_column": "lilly", + "delete_where_values": ["a", "b"], + "delete_where_operator": "eq", + "write_always": False, }, - config + config, ) os.remove(manifest_filename) def test_create_and_write_table_manifest(self): ci = CommonInterface() # create table def - out_table = ci.create_out_table_definition('some-table.csv', - columns=['foo', 'bar'], - destination='some-destination', - primary_key=['foo'], - incremental=True, - delete_where={'column': 'lilly', - 'values': ['a', 'b'], - 'operator': 'eq'}, - write_always=True, - description='some-description' - ) - 
out_table.table_metadata.add_table_metadata('bar', 'kochba') - out_table.table_metadata.add_column_metadata('bar', 'foo', 'gogo') + out_table = ci.create_out_table_definition( + "some-table.csv", + columns=["foo", "bar"], + destination="some-destination", + primary_key=["foo"], + incremental=True, + delete_where={"column": "lilly", "values": ["a", "b"], "operator": "eq"}, + write_always=True, + description="some-description", + ) + out_table.table_metadata.add_table_metadata("bar", "kochba") + out_table.table_metadata.add_column_metadata("bar", "foo", "gogo") # write ci.write_manifest(out_table, legacy_manifest=True) - manifest_filename = out_table.full_path + '.manifest' + manifest_filename = out_table.full_path + ".manifest" with open(manifest_filename) as manifest_file: config = json.load(manifest_file) self.assertEqual( { - 'destination': 'some-destination', - 'columns': ['foo', 'bar'], - 'primary_key': ['foo'], - 'incremental': True, - 'write_always': True, - 'delimiter': ',', - 'enclosure': '"', - 'metadata': [{'key': 'KBC.description', 'value': 'some-description'}, - {'key': 'bar', 'value': 'kochba'}], - 'column_metadata': {'bar': [{'key': 'foo', 'value': 'gogo'}]}, - 'delete_where_column': 'lilly', - 'delete_where_values': ['a', 'b'], - 'delete_where_operator': 'eq' + "destination": "some-destination", + "columns": ["foo", "bar"], + "primary_key": ["foo"], + "incremental": True, + "write_always": True, + "delimiter": ",", + "enclosure": '"', + "metadata": [ + {"key": "KBC.description", "value": "some-description"}, + {"key": "bar", "value": "kochba"}, + ], + "column_metadata": {"bar": [{"key": "foo", "value": "gogo"}]}, + "delete_where_column": "lilly", + "delete_where_values": ["a", "b"], + "delete_where_operator": "eq", }, - config + config, ) os.remove(manifest_filename) def test_create_and_write_table_manifest_old_queue(self): # If feature gates exists but doesn't contain queuev2 it's old queue - os.environ['KBC_PROJECT_FEATURE_GATES'] = 
'feature1;someotherfeature' + os.environ["KBC_PROJECT_FEATURE_GATES"] = "feature1;someotherfeature" ci = CommonInterface() # create table def - out_table = ci.create_out_table_definition('some-table.csv', - columns=['foo', 'bar'], - destination='some-destination', - primary_key=['foo'], - incremental=True, - # the write_always will then not be present - # in the manifest even if set - write_always=True, - delete_where={'column': 'lilly', - 'values': ['a', 'b'], - 'operator': 'eq'} - ) - out_table.table_metadata.add_table_metadata('bar', 'kochba') - out_table.table_metadata.add_column_metadata('bar', 'foo', 'gogo') + out_table = ci.create_out_table_definition( + "some-table.csv", + columns=["foo", "bar"], + destination="some-destination", + primary_key=["foo"], + incremental=True, + # the write_always will then not be present + # in the manifest even if set + write_always=True, + delete_where={"column": "lilly", "values": ["a", "b"], "operator": "eq"}, + ) + out_table.table_metadata.add_table_metadata("bar", "kochba") + out_table.table_metadata.add_column_metadata("bar", "foo", "gogo") # write ci.write_manifest(out_table, legacy_manifest=True) - manifest_filename = out_table.full_path + '.manifest' + manifest_filename = out_table.full_path + ".manifest" with open(manifest_filename) as manifest_file: config = json.load(manifest_file) self.assertEqual( { - 'destination': 'some-destination', - 'columns': ['foo', 'bar'], - 'primary_key': ['foo'], - 'incremental': True, - 'delimiter': ',', - 'enclosure': '"', - 'metadata': [{'key': 'bar', 'value': 'kochba'}], - 'column_metadata': {'bar': [{'key': 'foo', 'value': 'gogo'}]}, - 'delete_where_column': 'lilly', - 'delete_where_values': ['a', 'b'], - 'delete_where_operator': 'eq' + "destination": "some-destination", + "columns": ["foo", "bar"], + "primary_key": ["foo"], + "incremental": True, + "delimiter": ",", + "enclosure": '"', + "metadata": [{"key": "bar", "value": "kochba"}], + "column_metadata": {"bar": [{"key": "foo", 
"value": "gogo"}]}, + "delete_where_column": "lilly", + "delete_where_values": ["a", "b"], + "delete_where_operator": "eq", }, - config + config, ) os.remove(manifest_filename) def test_legacy_manifest_without_columns_with_header(self): # If feature gates exists but doesn't contain queuev2 it's old queue - os.environ['KBC_PROJECT_FEATURE_GATES'] = 'feature1;someotherfeature' + os.environ["KBC_PROJECT_FEATURE_GATES"] = "feature1;someotherfeature" ci = CommonInterface() # create table def - out_table = ci.create_out_table_definition('some-table.csv', - columns=['foo', 'bar'], - destination='some-destination', - primary_key=['foo'], - incremental=True, - # the write_always will then not be present - # in the manifest even if set - write_always=True, - has_header=True, - delete_where={'column': 'lilly', - 'values': ['a', 'b'], - 'operator': 'eq'} - ) - out_table.table_metadata.add_table_metadata('bar', 'kochba') - out_table.table_metadata.add_column_metadata('bar', 'foo', 'gogo') + out_table = ci.create_out_table_definition( + "some-table.csv", + columns=["foo", "bar"], + destination="some-destination", + primary_key=["foo"], + incremental=True, + # the write_always will then not be present + # in the manifest even if set + write_always=True, + has_header=True, + delete_where={"column": "lilly", "values": ["a", "b"], "operator": "eq"}, + ) + out_table.table_metadata.add_table_metadata("bar", "kochba") + out_table.table_metadata.add_column_metadata("bar", "foo", "gogo") # write ci.write_manifest(out_table, legacy_manifest=True) - manifest_filename = out_table.full_path + '.manifest' + manifest_filename = out_table.full_path + ".manifest" with open(manifest_filename) as manifest_file: config = json.load(manifest_file) self.assertEqual( { - 'destination': 'some-destination', - 'primary_key': ['foo'], - 'incremental': True, - 'delimiter': ',', - 'enclosure': '"', - 'metadata': [{'key': 'bar', 'value': 'kochba'}], - 'column_metadata': {'bar': [{'key': 'foo', 'value': 
'gogo'}]}, - 'delete_where_column': 'lilly', - 'delete_where_values': ['a', 'b'], - 'delete_where_operator': 'eq' + "destination": "some-destination", + "primary_key": ["foo"], + "incremental": True, + "delimiter": ",", + "enclosure": '"', + "metadata": [{"key": "bar", "value": "kochba"}], + "column_metadata": {"bar": [{"key": "foo", "value": "gogo"}]}, + "delete_where_column": "lilly", + "delete_where_values": ["a", "b"], + "delete_where_operator": "eq", }, - config + config, ) os.remove(manifest_filename) @@ -305,172 +296,172 @@ def test_legacy_manifest_without_columns_with_header(self): def test_create_and_write_table_manifest_multi_deprecated(self): ci = CommonInterface() # create table def - out_table = ci.create_out_table_definition('some-table.csv', - columns=['foo', 'bar'], - destination='some-destination', - primary_key=['foo'], - incremental=True, - delete_where={'column': 'lilly', - 'values': ['a', 'b'], - 'operator': 'eq'} - ) - out_table.table_metadata.add_table_metadata('bar', 'kochba') - out_table.table_metadata.add_column_metadata('bar', 'foo', 'gogo') + out_table = ci.create_out_table_definition( + "some-table.csv", + columns=["foo", "bar"], + destination="some-destination", + primary_key=["foo"], + incremental=True, + delete_where={"column": "lilly", "values": ["a", "b"], "operator": "eq"}, + ) + out_table.table_metadata.add_table_metadata("bar", "kochba") + out_table.table_metadata.add_column_metadata("bar", "foo", "gogo") # write ci.write_tabledef_manifests([out_table]) - manifest_filename = out_table.full_path + '.manifest' + manifest_filename = out_table.full_path + ".manifest" with open(manifest_filename) as manifest_file: config = json.load(manifest_file) self.assertEqual( { - 'destination': 'some-destination', - 'columns': ['foo', 'bar'], - 'primary_key': ['foo'], - 'incremental': True, - 'metadata': [{'key': 'bar', 'value': 'kochba'}], - 'delimiter': ',', - 'enclosure': '"', - 'column_metadata': {'bar': [{'key': 'foo', 'value': 'gogo'}]}, 
- 'delete_where_column': 'lilly', - 'delete_where_values': ['a', 'b'], - 'delete_where_operator': 'eq', - 'write_always': False + "destination": "some-destination", + "columns": ["foo", "bar"], + "primary_key": ["foo"], + "incremental": True, + "metadata": [{"key": "bar", "value": "kochba"}], + "delimiter": ",", + "enclosure": '"', + "column_metadata": {"bar": [{"key": "foo", "value": "gogo"}]}, + "delete_where_column": "lilly", + "delete_where_values": ["a", "b"], + "delete_where_operator": "eq", + "write_always": False, }, - config + config, ) os.remove(manifest_filename) def test_create_and_write_table_manifest_multi(self): ci = CommonInterface() # create table def - out_table = ci.create_out_table_definition('some-table.csv', - columns=['foo', 'bar'], - destination='some-destination', - primary_key=['foo'], - incremental=True, - delete_where={'column': 'lilly', - 'values': ['a', 'b'], - 'operator': 'eq'} - ) - out_table.table_metadata.add_table_metadata('bar', 'kochba') - out_table.table_metadata.add_column_metadata('bar', 'foo', 'gogo') + out_table = ci.create_out_table_definition( + "some-table.csv", + columns=["foo", "bar"], + destination="some-destination", + primary_key=["foo"], + incremental=True, + delete_where={"column": "lilly", "values": ["a", "b"], "operator": "eq"}, + ) + out_table.table_metadata.add_table_metadata("bar", "kochba") + out_table.table_metadata.add_column_metadata("bar", "foo", "gogo") # write ci.write_manifests([out_table], legacy_manifest=True) - manifest_filename = out_table.full_path + '.manifest' + manifest_filename = out_table.full_path + ".manifest" with open(manifest_filename) as manifest_file: config = json.load(manifest_file) self.assertEqual( { - 'destination': 'some-destination', - 'columns': ['foo', 'bar'], - 'primary_key': ['foo'], - 'incremental': True, - 'metadata': [{'key': 'bar', 'value': 'kochba'}], - 'delimiter': ',', - 'enclosure': '"', - 'column_metadata': {'bar': [{'key': 'foo', 'value': 'gogo'}]}, - 
'delete_where_column': 'lilly', - 'delete_where_values': ['a', 'b'], - 'delete_where_operator': 'eq', - 'write_always': False + "destination": "some-destination", + "columns": ["foo", "bar"], + "primary_key": ["foo"], + "incremental": True, + "metadata": [{"key": "bar", "value": "kochba"}], + "delimiter": ",", + "enclosure": '"', + "column_metadata": {"bar": [{"key": "foo", "value": "gogo"}]}, + "delete_where_column": "lilly", + "delete_where_values": ["a", "b"], + "delete_where_operator": "eq", + "write_always": False, }, - config + config, ) os.remove(manifest_filename) def test_create_and_write_table_manifest_new(self): - os.environ['KBC_DATA_TYPE_SUPPORT'] = "authoritative" + os.environ["KBC_DATA_TYPE_SUPPORT"] = "authoritative" ci = CommonInterface() - del os.environ['KBC_DATA_TYPE_SUPPORT'] + del os.environ["KBC_DATA_TYPE_SUPPORT"] # create table def - out_table = ci.create_out_table_definition('some-table.csv', - schema=['foo', 'bar'], - has_header=True, - destination='some-destination', - description='some-description', - primary_key=['foo'], - incremental=True, - delete_where={'column': 'lilly', - 'values': ['a', 'b'], - 'operator': 'eq'} - ) - out_table.table_metadata.add_table_metadata('bar', 'kochba') - out_table.table_metadata.add_column_metadata('bar', 'foo', 'gogo') + out_table = ci.create_out_table_definition( + "some-table.csv", + schema=["foo", "bar"], + has_header=True, + destination="some-destination", + description="some-description", + primary_key=["foo"], + incremental=True, + delete_where={"column": "lilly", "values": ["a", "b"], "operator": "eq"}, + ) + out_table.table_metadata.add_table_metadata("bar", "kochba") + out_table.table_metadata.add_column_metadata("bar", "foo", "gogo") # write ci.write_manifests([out_table]) - manifest_filename = out_table.full_path + '.manifest' + manifest_filename = out_table.full_path + ".manifest" with open(manifest_filename) as manifest_file: config = json.load(manifest_file) self.assertEqual( - 
{'delete_where_column': 'lilly', - 'delete_where_operator': 'eq', - 'delete_where_values': ['a', 'b'], - 'delimiter': ',', - 'destination': 'some-destination', - 'enclosure': '"', - 'has_header': True, - 'incremental': True, - 'manifest_type': 'out', - 'schema': [{'data_type': {'base': {'type': 'STRING'}}, - 'name': 'foo', - 'nullable': True, - 'primary_key': True}, - {'data_type': {'base': {'type': 'STRING'}}, - 'name': 'bar', - 'nullable': True}], - 'table_metadata': {'KBC.description': 'some-description', 'bar': 'kochba'}, - 'write_always': False}, - config + { + "delete_where_column": "lilly", + "delete_where_operator": "eq", + "delete_where_values": ["a", "b"], + "delimiter": ",", + "destination": "some-destination", + "enclosure": '"', + "has_header": True, + "incremental": True, + "manifest_type": "out", + "schema": [ + {"data_type": {"base": {"type": "STRING"}}, "name": "foo", "nullable": True, "primary_key": True}, + {"data_type": {"base": {"type": "STRING"}}, "name": "bar", "nullable": True}, + ], + "table_metadata": {"KBC.description": "some-description", "bar": "kochba"}, + "write_always": False, + }, + config, ) os.remove(manifest_filename) def test_legacy_column_metadata_ignored_on_new_schema(self): # TODO: this is not implemented on purpose - os.environ['KBC_DATA_TYPE_SUPPORT'] = "authoritative" + os.environ["KBC_DATA_TYPE_SUPPORT"] = "authoritative" ci = CommonInterface() # create table def - out_table = ci.create_out_table_definition('some-table.csv', - columns=['foo', 'bar'], - destination='some-destination', - primary_key=['foo'], - incremental=True, - delete_where={'column': 'lilly', - 'values': ['a', 'b'], - 'operator': 'eq'} - ) + out_table = ci.create_out_table_definition( + "some-table.csv", + columns=["foo", "bar"], + destination="some-destination", + primary_key=["foo"], + incremental=True, + delete_where={"column": "lilly", "values": ["a", "b"], "operator": "eq"}, + ) # this will be ignored - 
out_table.table_metadata.add_table_metadata('bar', 'kochba') + out_table.table_metadata.add_table_metadata("bar", "kochba") # this will be ignored - out_table.table_metadata.add_column_metadata('bar', 'foo', 'gogo') + out_table.table_metadata.add_column_metadata("bar", "foo", "gogo") # this will be ignored - out_table.table_metadata.add_column_data_type('bar', 'NUMERIC') + out_table.table_metadata.add_column_data_type("bar", "NUMERIC") # write ci.write_manifest(out_table) - del os.environ['KBC_DATA_TYPE_SUPPORT'] - manifest_filename = out_table.full_path + '.manifest' + del os.environ["KBC_DATA_TYPE_SUPPORT"] + manifest_filename = out_table.full_path + ".manifest" with open(manifest_filename) as manifest_file: config = json.load(manifest_file) self.assertEqual( - {'destination': 'some-destination', - 'incremental': True, - 'manifest_type': 'out', - 'write_always': False, - 'delimiter': ',', - 'enclosure': '"', - 'table_metadata': {'bar': 'kochba'}, - 'has_header': False, - 'delete_where_column': 'lilly', 'delete_where_values': ['a', 'b'], 'delete_where_operator': 'eq', - 'schema': [ - {'name': 'foo', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True, 'primary_key': True}, - {'name': 'bar', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}]}, - config + { + "destination": "some-destination", + "incremental": True, + "manifest_type": "out", + "write_always": False, + "delimiter": ",", + "enclosure": '"', + "table_metadata": {"bar": "kochba"}, + "has_header": False, + "delete_where_column": "lilly", + "delete_where_values": ["a", "b"], + "delete_where_operator": "eq", + "schema": [ + {"name": "foo", "data_type": {"base": {"type": "STRING"}}, "nullable": True, "primary_key": True}, + {"name": "bar", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + ], + }, + config, ) def test_get_input_tables_definition(self): @@ -480,28 +471,31 @@ def test_get_input_tables_definition(self): self.assertEqual(6, len(tables)) for table in tables: - if 
table.name == 'sample.csv': - self.assertEqual(table.columns, [ - "x", - "Sales", - "CompPrice", - "Income", - "Advertising", - "Population", - "Price", - "ShelveLoc", - "Age", - "Education", - "Urban", - "US", - "High" - ]) + if table.name == "sample.csv": + self.assertEqual( + table.columns, + [ + "x", + "Sales", + "CompPrice", + "Income", + "Advertising", + "Population", + "Price", + "ShelveLoc", + "Age", + "Education", + "Urban", + "US", + "High", + ], + ) self.assertEqual(table.rows_count, 400) self.assertEqual(table.data_size_bytes, 81920) - elif table.name == 'fooBar': - self.assertEqual(table.id, 'in.c-main.test2') - self.assertEqual(table.full_path, os.path.join(ci.tables_in_path, 'fooBar')) - self.assertEqual(table.name, 'fooBar') + elif table.name == "fooBar": + self.assertEqual(table.id, "in.c-main.test2") + self.assertEqual(table.full_path, os.path.join(ci.tables_in_path, "fooBar")) + self.assertEqual(table.name, "fooBar") def test_get_input_tables_definition_orphaned_manifest(self): ci = CommonInterface() @@ -510,33 +504,36 @@ def test_get_input_tables_definition_orphaned_manifest(self): self.assertEqual(7, len(tables)) for table in tables: - if table.name == 'sample.csv': - self.assertEqual(table.columns, [ - "x", - "Sales", - "CompPrice", - "Income", - "Advertising", - "Population", - "Price", - "ShelveLoc", - "Age", - "Education", - "Urban", - "US", - "High" - ]) + if table.name == "sample.csv": + self.assertEqual( + table.columns, + [ + "x", + "Sales", + "CompPrice", + "Income", + "Advertising", + "Population", + "Price", + "ShelveLoc", + "Age", + "Education", + "Urban", + "US", + "High", + ], + ) self.assertEqual(table.rows_count, 400) self.assertEqual(table.data_size_bytes, 81920) - elif table.name == 'fooBar': - self.assertEqual(table.id, 'in.c-main.test2') - self.assertEqual(table.full_path, os.path.join(ci.tables_in_path, 'fooBar')) - self.assertEqual(table.name, 'fooBar') + elif table.name == "fooBar": + self.assertEqual(table.id, 
"in.c-main.test2") + self.assertEqual(table.full_path, os.path.join(ci.tables_in_path, "fooBar")) + self.assertEqual(table.name, "fooBar") def test_state_file_initialized(self): ci = CommonInterface() state = ci.get_state_file() - self.assertEqual(state['test_state'], 1234) + self.assertEqual(state["test_state"], 1234) def test_state_file_created(self): ci = CommonInterface() @@ -544,14 +541,11 @@ def test_state_file_created(self): ci.write_state_file({"some_state": 1234}) # load - state_filename = os.path.join(ci.data_folder_path, 'out', 'state.json') + state_filename = os.path.join(ci.data_folder_path, "out", "state.json") with open(state_filename) as state_file: state = json.load(state_file) - self.assertEqual( - {"some_state": 1234}, - state - ) + self.assertEqual({"some_state": 1234}, state) # cleanup os.remove(state_filename) @@ -559,67 +553,51 @@ def test_state_file_created(self): def test_get_input_table_by_name_fails_on_nonexistent(self): ci = CommonInterface() with self.assertRaises(ValueError): - ci.get_input_table_definition_by_name('nonexistent.csv') + ci.get_input_table_definition_by_name("nonexistent.csv") def test_get_input_table_by_name_existing_passes(self): ci = CommonInterface() - in_table = ci.get_input_table_definition_by_name('fooBar') - self.assertEqual(in_table.id, 'in.c-main.test2') - self.assertEqual(in_table.full_path, os.path.join(ci.tables_in_path, 'fooBar')) - self.assertEqual(in_table.name, 'fooBar') + in_table = ci.get_input_table_definition_by_name("fooBar") + self.assertEqual(in_table.id, "in.c-main.test2") + self.assertEqual(in_table.full_path, os.path.join(ci.tables_in_path, "fooBar")) + self.assertEqual(in_table.name, "fooBar") # Files def test_create_and_write_file_manifest_deprecated(self): ci = CommonInterface() # create table def - out_file = ci.create_out_file_definition('some-file.jpg', - is_permanent=True, - is_encrypted=True, - is_public=True, - tags=['foo', 'bar'], - notify=True - ) + out_file = 
ci.create_out_file_definition( + "some-file.jpg", is_permanent=True, is_encrypted=True, is_public=True, tags=["foo", "bar"], notify=True + ) # write ci.write_filedef_manifest(out_file) - manifest_filename = out_file.full_path + '.manifest' + manifest_filename = out_file.full_path + ".manifest" with open(manifest_filename) as manifest_file: config = json.load(manifest_file) self.assertEqual( - {'tags': ['foo', 'bar'], - 'is_public': True, - 'is_permanent': True, - 'is_encrypted': True, - 'notify': True}, - config + {"tags": ["foo", "bar"], "is_public": True, "is_permanent": True, "is_encrypted": True, "notify": True}, + config, ) os.remove(manifest_filename) def test_create_and_write_file_manifest(self): ci = CommonInterface() # create table def - out_file = ci.create_out_file_definition('some-file.jpg', - is_permanent=True, - is_encrypted=True, - is_public=True, - tags=['foo', 'bar'], - notify=True - ) + out_file = ci.create_out_file_definition( + "some-file.jpg", is_permanent=True, is_encrypted=True, is_public=True, tags=["foo", "bar"], notify=True + ) # write ci.write_manifest(out_file) - manifest_filename = out_file.full_path + '.manifest' + manifest_filename = out_file.full_path + ".manifest" with open(manifest_filename) as manifest_file: config = json.load(manifest_file) self.assertEqual( - {'tags': ['foo', 'bar'], - 'is_public': True, - 'is_permanent': True, - 'is_encrypted': True, - 'notify': True}, - config + {"tags": ["foo", "bar"], "is_public": True, "is_permanent": True, "is_encrypted": True, "notify": True}, + config, ) os.remove(manifest_filename) @@ -630,62 +608,68 @@ def test_get_input_files_definition_latest(self): self.assertEqual(len(files), 5) for file in files: - if file.name == 'duty_calls.png': - self.assertEqual(file.id, '151971455') + if file.name == "duty_calls.png": + self.assertEqual(file.id, "151971455") def test_get_input_files_definition_by_tag(self): ci = CommonInterface() - files = ci.get_input_files_definitions(tags=['dilbert']) + 
files = ci.get_input_files_definitions(tags=["dilbert"]) self.assertEqual(len(files), 3) for file in files: - if file.name == '21702.strip.print.gif': - self.assertEqual(file.tags, [ - "dilbert" - ]) + if file.name == "21702.strip.print.gif": + self.assertEqual(file.tags, ["dilbert"]) self.assertEqual(file.max_age_days, 180) self.assertEqual(file.size_bytes, 4931) def test_get_input_files_definition_by_tag_w_system(self): - ci = CommonInterface(os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data_system_tags')) + ci = CommonInterface( + os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data_system_tags") + ) - files = ci.get_input_files_definitions(tags=['dilbert']) + files = ci.get_input_files_definitions(tags=["dilbert"]) self.assertEqual(len(files), 3) for file in files: - if file.name == '21702.strip.print.gif': - self.assertEqual(file.tags, [ - "dilbert", - "componentId: 1234", - "configurationId: 12345", - "configurationRowId: 12345", - "runId: 22123", - "branchId: 312321" - ]) + if file.name == "21702.strip.print.gif": + self.assertEqual( + file.tags, + [ + "dilbert", + "componentId: 1234", + "configurationId: 12345", + "configurationRowId: 12345", + "runId: 22123", + "branchId: 312321", + ], + ) self.assertEqual(file.max_age_days, 180) self.assertEqual(file.size_bytes, 4931) def test_get_input_files_definition_tag_group_w_system(self): - ci = CommonInterface(os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data_system_tags')) + ci = CommonInterface( + os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data_system_tags") + ) files = ci.get_input_file_definitions_grouped_by_tag_group(only_latest_files=False) self.assertEqual(len(files), 2) self.assertEqual(len(files["bar;foo"]), 3) for file in files["bar;foo"]: - if file.name == 'compiler_complaint.png': - self.assertEqual(file.tags, [ - "foo", - "bar", - "componentId: 1234", - "configurationId: 
12345", - "configurationRowId: 12345", - "runId: 22123", - "branchId: 312321" - ]) + if file.name == "compiler_complaint.png": + self.assertEqual( + file.tags, + [ + "foo", + "bar", + "componentId: 1234", + "configurationId: 12345", + "configurationRowId: 12345", + "runId: 22123", + "branchId: 312321", + ], + ) def test_get_input_files_definition_nofilter(self): ci = CommonInterface() @@ -694,16 +678,13 @@ def test_get_input_files_definition_nofilter(self): self.assertEqual(len(files), 6) for file in files: - if file.name == 'duty_calls': - self.assertEqual(file.tags, [ - "xkcd" - ]) + if file.name == "duty_calls": + self.assertEqual(file.tags, ["xkcd"]) self.assertEqual(file.max_age_days, 180) self.assertEqual(file.size_bytes, 30027) def test_get_input_files_definition_no_manifest_passes(self): - ci = CommonInterface(os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data2')) + ci = CommonInterface(os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data2")) files = ci.get_input_files_definitions(only_latest_files=True) @@ -714,251 +695,324 @@ def test_get_input_files_definition_no_manifest_passes(self): self.assertEqual(file.created, None) def test_convert_old_to_new_manifest(self): - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data_examples', 'data4') + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data4") os.environ["KBC_DATADIR"] = path ci = CommonInterface() tables = ci.get_input_tables_definitions() - os.environ['KBC_DATA_TYPE_SUPPORT'] = "authoritative" - - new_manifest = tables[0].get_manifest_dictionary('out') - - self.assertEqual({ - 'write_always': False, - 'delimiter': ',', - 'enclosure': '"', - 'manifest_type': 'out', - 'has_header': True, - 'schema': [ - {'name': 'x', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True, 'metadata': {'foo': 'gogo'}}, - {'name': 'Sales', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - 
{'name': 'CompPrice', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'Income', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'Advertising', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'Population', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'Price', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'ShelveLoc', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'Age', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'Education', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'Urban', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'US', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'High', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}] - }, new_manifest) - - del os.environ['KBC_DATA_TYPE_SUPPORT'] + os.environ["KBC_DATA_TYPE_SUPPORT"] = "authoritative" + + new_manifest = tables[0].get_manifest_dictionary("out") + + self.assertEqual( + { + "write_always": False, + "delimiter": ",", + "enclosure": '"', + "manifest_type": "out", + "has_header": True, + "schema": [ + { + "name": "x", + "data_type": {"base": {"type": "STRING"}}, + "nullable": True, + "metadata": {"foo": "gogo"}, + }, + {"name": "Sales", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "CompPrice", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "Income", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "Advertising", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "Population", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "Price", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "ShelveLoc", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "Age", "data_type": 
{"base": {"type": "STRING"}}, "nullable": True}, + {"name": "Education", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "Urban", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "US", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "High", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + ], + }, + new_manifest, + ) + + del os.environ["KBC_DATA_TYPE_SUPPORT"] def test_convert_new_to_old_manifest_has_header_false(self): - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data_examples', 'data_new_manifest') + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data_new_manifest") os.environ["KBC_DATADIR"] = path ci = CommonInterface() tables = ci.get_input_tables_definitions() - old_manifest = tables[0].get_manifest_dictionary('out', legacy_manifest=True) + old_manifest = tables[0].get_manifest_dictionary("out", legacy_manifest=True) - self.assertEqual({ - 'columns': ['x', 'Sales', 'CompPrice', 'Income', 'Advertising', 'Population', 'Price', 'ShelveLoc', 'Age', - 'Education', 'Urban', 'US', 'High'], - 'delimiter': ',', - 'enclosure': '"', - 'incremental': False, - 'write_always': False - }, old_manifest) + self.assertEqual( + { + "columns": [ + "x", + "Sales", + "CompPrice", + "Income", + "Advertising", + "Population", + "Price", + "ShelveLoc", + "Age", + "Education", + "Urban", + "US", + "High", + ], + "delimiter": ",", + "enclosure": '"', + "incremental": False, + "write_always": False, + }, + old_manifest, + ) def test_convert_new_to_old_manifest_storage_param(self): - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data_examples', - 'data_storage_parameter_data_types') + path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), "data_examples", "data_storage_parameter_data_types" + ) os.environ["KBC_DATADIR"] = path - os.environ['KBC_DATA_TYPE_SUPPORT'] = 'authoritative' + 
os.environ["KBC_DATA_TYPE_SUPPORT"] = "authoritative" ci = CommonInterface() tables = ci.get_input_tables_definitions() ci.write_manifests([tables[0]]) - manifest_filename = tables[0].full_path + '.manifest' + manifest_filename = tables[0].full_path + ".manifest" with open(manifest_filename) as manifest_file: old_manifest = json.load(manifest_file) - self.assertEqual({ - 'columns': ['x', 'Sales', 'CompPrice', 'Income', 'Advertising', 'Population', 'Price', 'ShelveLoc', 'Age', - 'Education', 'Urban', 'US', 'High'], - 'delimiter': ',', - 'enclosure': '"', - 'incremental': False, - 'write_always': False - }, old_manifest) + self.assertEqual( + { + "columns": [ + "x", + "Sales", + "CompPrice", + "Income", + "Advertising", + "Population", + "Price", + "ShelveLoc", + "Age", + "Education", + "Urban", + "US", + "High", + ], + "delimiter": ",", + "enclosure": '"', + "incremental": False, + "write_always": False, + }, + old_manifest, + ) def test_full_input_manifest(self): - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data_examples', 'data_full_input_manifest') + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data_full_input_manifest") os.environ["KBC_DATADIR"] = path ci = CommonInterface() tables = ci.get_input_tables_definitions() ci.write_manifests([tables[0]]) - manifest_filename = tables[0].full_path + '.manifest' + manifest_filename = tables[0].full_path + ".manifest" with open(manifest_filename) as manifest_file: old_manifest = json.load(manifest_file) - self.assertEqual({ - 'id': 'in.c-main.test', - 'uri': 'https://connection.keboola.com//v2/storage/tables/in.c-main.test', - 'name': 'sample.csv', - 'created': '2015-11-02T09:11:37+0100', - 'last_change_date': '2015-11-02T09:11:37+0100', - 'last_import_date': '2015-11-02T09:11:37+0100', - 'rows_count': 400, - 'data_size_bytes': 81920, - 'is_alias': False, - 'indexed_columns': ['x'], - 'primary_key': ['x'], - 'column_metadata': {'x': [{'key': 'foo', 'value': 
'gogo'}]}, - 'columns': ['x', 'Sales', 'CompPrice', 'Income', 'Advertising', 'Population', 'Price', 'ShelveLoc', 'Age', - 'Education', 'Urban', 'US', 'High'] - }, old_manifest) + self.assertEqual( + { + "id": "in.c-main.test", + "uri": "https://connection.keboola.com//v2/storage/tables/in.c-main.test", + "name": "sample.csv", + "created": "2015-11-02T09:11:37+0100", + "last_change_date": "2015-11-02T09:11:37+0100", + "last_import_date": "2015-11-02T09:11:37+0100", + "rows_count": 400, + "data_size_bytes": 81920, + "is_alias": False, + "indexed_columns": ["x"], + "primary_key": ["x"], + "column_metadata": {"x": [{"key": "foo", "value": "gogo"}]}, + "columns": [ + "x", + "Sales", + "CompPrice", + "Income", + "Advertising", + "Population", + "Price", + "ShelveLoc", + "Age", + "Education", + "Urban", + "US", + "High", + ], + }, + old_manifest, + ) def test_full_input_manifest_dtypes_support(self): - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data_examples', 'data_full_input_manifest') + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data_full_input_manifest") os.environ["KBC_DATADIR"] = path - os.environ['KBC_DATA_TYPE_SUPPORT'] = 'authoritative' + os.environ["KBC_DATA_TYPE_SUPPORT"] = "authoritative" ci = CommonInterface() tables = ci.get_input_tables_definitions() ci.write_manifests([tables[0]]) - manifest_filename = tables[0].full_path + '.manifest' + manifest_filename = tables[0].full_path + ".manifest" with open(manifest_filename) as manifest_file: old_manifest = json.load(manifest_file) self.maxDiff = None - self.assertEqual({ - 'id': 'in.c-main.test', - 'uri': 'https://connection.keboola.com//v2/storage/tables/in.c-main.test', - 'name': 'sample.csv', - 'created': '2015-11-02T09:11:37+0100', - 'last_change_date': '2015-11-02T09:11:37+0100', - 'last_import_date': '2015-11-02T09:11:37+0100', - 'rows_count': 400, - 'data_size_bytes': 81920, - 'is_alias': False, - 'indexed_columns': ['x'], - 'primary_key': 
['x'], - 'column_metadata': {'x': [{'key': 'foo', 'value': 'gogo'}]}, - 'columns': ['x', 'Sales', 'CompPrice', 'Income', 'Advertising', 'Population', 'Price', 'ShelveLoc', 'Age', - 'Education', 'Urban', 'US', 'High'] - }, old_manifest) + self.assertEqual( + { + "id": "in.c-main.test", + "uri": "https://connection.keboola.com//v2/storage/tables/in.c-main.test", + "name": "sample.csv", + "created": "2015-11-02T09:11:37+0100", + "last_change_date": "2015-11-02T09:11:37+0100", + "last_import_date": "2015-11-02T09:11:37+0100", + "rows_count": 400, + "data_size_bytes": 81920, + "is_alias": False, + "indexed_columns": ["x"], + "primary_key": ["x"], + "column_metadata": {"x": [{"key": "foo", "value": "gogo"}]}, + "columns": [ + "x", + "Sales", + "CompPrice", + "Income", + "Advertising", + "Population", + "Price", + "ShelveLoc", + "Age", + "Education", + "Urban", + "US", + "High", + ], + }, + old_manifest, + ) def test_separator_delimiter(self): - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data_examples', 'data5') + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data5") os.environ["KBC_DATADIR"] = path ci = CommonInterface() tables = ci.get_input_tables_definitions() - old_manifest = tables[0].get_manifest_dictionary('out', legacy_manifest=True) - - self.assertEqual({ - 'columns': [ - 'x', - 'Sales', - 'CompPrice', - 'Income', - 'Advertising', - 'Population', - 'Price', - 'ShelveLoc', - 'Age', - 'Education', - 'Urban', - 'US', - 'High' - ], - 'delimiter': '\t', - 'enclosure': "'", - 'incremental': True, - 'primary_key': [ - 'x' - ], - 'write_always': False, - 'delete_where_column': 'Advertising', - 'delete_where_values': ['Video', 'Search'], - 'delete_where_operator': 'eq', - 'destination': 'out.c-main.Leads' - }, old_manifest) + old_manifest = tables[0].get_manifest_dictionary("out", legacy_manifest=True) + + self.assertEqual( + { + "columns": [ + "x", + "Sales", + "CompPrice", + "Income", + "Advertising", + 
"Population", + "Price", + "ShelveLoc", + "Age", + "Education", + "Urban", + "US", + "High", + ], + "delimiter": "\t", + "enclosure": "'", + "incremental": True, + "primary_key": ["x"], + "write_always": False, + "delete_where_column": "Advertising", + "delete_where_values": ["Video", "Search"], + "delete_where_operator": "eq", + "destination": "out.c-main.Leads", + }, + old_manifest, + ) def test_separator_delimiter_dtypes(self): - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data_examples', 'data5') + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data5") os.environ["KBC_DATADIR"] = path ci = CommonInterface() tables = ci.get_input_tables_definitions() - os.environ['KBC_DATA_TYPE_SUPPORT'] = "authoritative" - - new_manifest = tables[0].get_manifest_dictionary('out') - - self.assertEqual({ - 'write_always': False, - 'delimiter': '\t', - 'enclosure': '\'', - 'manifest_type': 'out', - 'has_header': False, - 'incremental': True, - 'schema': [ - {'name': 'x', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True, 'primary_key': True}, - {'name': 'Sales', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'CompPrice', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'Income', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'Advertising', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'Population', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'Price', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'ShelveLoc', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'Age', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'Education', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'Urban', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}, - {'name': 'US', 'data_type': {'base': 
{'type': 'STRING'}}, 'nullable': True}, - {'name': 'High', 'data_type': {'base': {'type': 'STRING'}}, 'nullable': True}], - 'delete_where_column': 'Advertising', - 'delete_where_values': ['Video', 'Search'], - 'delete_where_operator': 'eq', - 'destination': 'out.c-main.Leads' - }, new_manifest) - - del os.environ['KBC_DATA_TYPE_SUPPORT'] + os.environ["KBC_DATA_TYPE_SUPPORT"] = "authoritative" + new_manifest = tables[0].get_manifest_dictionary("out") -class TestConfiguration(unittest.TestCase): + self.assertEqual( + { + "write_always": False, + "delimiter": "\t", + "enclosure": "'", + "manifest_type": "out", + "has_header": False, + "incremental": True, + "schema": [ + {"name": "x", "data_type": {"base": {"type": "STRING"}}, "nullable": True, "primary_key": True}, + {"name": "Sales", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "CompPrice", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "Income", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "Advertising", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "Population", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "Price", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "ShelveLoc", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "Age", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "Education", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "Urban", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "US", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + {"name": "High", "data_type": {"base": {"type": "STRING"}}, "nullable": True}, + ], + "delete_where_column": "Advertising", + "delete_where_values": ["Video", "Search"], + "delete_where_operator": "eq", + "destination": "out.c-main.Leads", + }, + new_manifest, + ) + + del 
os.environ["KBC_DATA_TYPE_SUPPORT"] + +class TestConfiguration(unittest.TestCase): def setUp(self): - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data1') + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data1") os.environ["KBC_DATADIR"] = path def test_missing_config(self): - with self.assertRaisesRegex( - ValueError, - "Configuration file config.json not found"): - Configuration('/non-existent/') + with self.assertRaisesRegex(ValueError, "Configuration file config.json not found"): + Configuration("/non-existent/") def test_get_parameters(self): cfg = Configuration(os.environ["KBC_DATADIR"]) params = cfg.parameters - self.assertEqual({'fooBar': {'bar': 24, 'foo': 42}, 'baz': 'bazBar'}, - params) - self.assertEqual(params['fooBar']['foo'], 42) - self.assertEqual(params['fooBar']['bar'], 24) + self.assertEqual({"fooBar": {"bar": 24, "foo": 42}, "baz": "bazBar"}, params) + self.assertEqual(params["fooBar"]["foo"], 42) + self.assertEqual(params["fooBar"]["bar"], 24) def test_get_action(self): cfg = Configuration(os.environ["KBC_DATADIR"]) - self.assertEqual(cfg.action, 'run') + self.assertEqual(cfg.action, "run") def test_get_action_empty_config(self): - cfg = Configuration(os.path.join(os.getenv('KBC_DATADIR', ''), '..', - 'data2')) - self.assertEqual(cfg.action, '') + cfg = Configuration(os.path.join(os.getenv("KBC_DATADIR", ""), "..", "data2")) + self.assertEqual(cfg.action, "") def test_get_input_mappings(self): cfg = Configuration(os.environ["KBC_DATADIR"]) @@ -966,14 +1020,13 @@ def test_get_input_mappings(self): self.assertEqual(len(tables), 2) for table in tables: - if table['destination'] == 'sample.csv': - self.assertEqual(table['source'], 'in.c-main.test') + if table["destination"] == "sample.csv": + self.assertEqual(table["source"], "in.c-main.test") else: - self.assertEqual('in.c-main.test2', table['source']) + self.assertEqual("in.c-main.test2", table["source"]) def 
test_get_input_mappings_with_column_types(self): - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'data_examples', 'data4') + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data4") cfg = Configuration(path) tables = cfg.tables_input_mapping coltypes = tables[0].column_types[0] @@ -994,12 +1047,11 @@ def test_get_output_mapping(self): cfg = Configuration(os.environ["KBC_DATADIR"]) tables = cfg.tables_output_mapping self.assertEqual(len(tables), 2) - self.assertEqual(tables[0]['source'], 'results.csv') - self.assertEqual(tables[1]['source'], 'results-new.csv') + self.assertEqual(tables[0]["source"], "results.csv") + self.assertEqual(tables[1]["source"], "results-new.csv") def test_empty_storage(self): - cfg = Configuration(os.path.join(os.getenv('KBC_DATADIR', ''), '..', - 'data2')) + cfg = Configuration(os.path.join(os.getenv("KBC_DATADIR", ""), "..", "data2")) self.assertEqual(cfg.tables_output_mapping, []) self.assertEqual(cfg.files_output_mapping, []) self.assertEqual(cfg.tables_input_mapping, []) @@ -1007,8 +1059,7 @@ def test_empty_storage(self): self.assertEqual(cfg.parameters, {}) def test_empty_params(self): - cfg = Configuration(os.path.join(os.getenv('KBC_DATADIR', ''), '..', - 'data3')) + cfg = Configuration(os.path.join(os.getenv("KBC_DATADIR", ""), "..", "data3")) self.assertEqual([], cfg.tables_output_mapping) self.assertEqual([], cfg.files_output_mapping) self.assertEqual({}, cfg.parameters) @@ -1048,5 +1099,5 @@ def test_get_oauthapi_appkey(self): # os.remove(manifest_filename) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_schema.py b/tests/test_schema.py index 9690688..49a2e30 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -6,19 +6,18 @@ class MockComponent(ComponentBase): def run(self): - return 'run_executed' + return "run_executed" class TestCommonInterface(unittest.TestCase): - def setUp(self): - path = 
os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data_examples', 'data1') + path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_examples", "data1") os.environ["KBC_DATADIR"] = path # to simulate kbc run - os.environ["KBC_STACKID"] = 'test' + os.environ["KBC_STACKID"] = "test" def test_create_out_table_definition_from_schema_name(self): - schema_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'schema_examples', 'schemas') + schema_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "schema_examples", "schemas") comp = MockComponent(schema_path_override=schema_path) order_schema = comp.get_table_schema_by_name(schema_name="order") order_table_definition_from_schema = comp.create_out_table_definition_from_schema(order_schema) @@ -27,92 +26,111 @@ def test_create_out_table_definition_from_schema_name(self): self.assertEqual(["id"], order_table_definition_from_schema.primary_key) def test_created_manifest_against_schema(self): - schema_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'schema_examples', 'schemas') + schema_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "schema_examples", "schemas") comp = MockComponent(schema_path_override=schema_path) order_schema = comp.get_table_schema_by_name(schema_name="order") order_table_definition_from_schema = comp.create_out_table_definition_from_schema(order_schema) manifest_dict = order_table_definition_from_schema.get_manifest_dictionary(legacy_manifest=True) - expected_manifest = {'primary_key': ['id'], 'columns': ['id', 'product_id', 'quantity'], 'enclosure': '"', - 'delimiter': ',', - 'write_always': False, - 'metadata': [{'key': 'KBC.description', 'value': 'this table holds data on orders'}], - 'column_metadata': {'id': [{'key': 'KBC.description', 'value': 'ID of the order'}, - {'key': 'KBC.datatype.basetype', 'value': 'STRING'}, - {'key': 'KBC.datatype.nullable', 'value': False}], - 'product_id': [ - {'key': 'KBC.description', 
'value': 'Id of the product in order'}, - {'key': 'KBC.datatype.basetype', 'value': 'NUMERIC'}, - {'key': 'KBC.datatype.nullable', 'value': False}], - 'quantity': [ - {'key': 'KBC.description', - 'value': 'Quantity of the product in order'}, - {'key': 'KBC.datatype.basetype', 'value': 'STRING'}, - {'key': 'KBC.datatype.nullable', 'value': False}]}} + expected_manifest = { + "primary_key": ["id"], + "columns": ["id", "product_id", "quantity"], + "enclosure": '"', + "delimiter": ",", + "write_always": False, + "metadata": [{"key": "KBC.description", "value": "this table holds data on orders"}], + "column_metadata": { + "id": [ + {"key": "KBC.description", "value": "ID of the order"}, + {"key": "KBC.datatype.basetype", "value": "STRING"}, + {"key": "KBC.datatype.nullable", "value": False}, + ], + "product_id": [ + {"key": "KBC.description", "value": "Id of the product in order"}, + {"key": "KBC.datatype.basetype", "value": "NUMERIC"}, + {"key": "KBC.datatype.nullable", "value": False}, + ], + "quantity": [ + {"key": "KBC.description", "value": "Quantity of the product in order"}, + {"key": "KBC.datatype.basetype", "value": "STRING"}, + {"key": "KBC.datatype.nullable", "value": False}, + ], + }, + } self.assertEqual(expected_manifest, manifest_dict) def test_created_manifest_against_schema_new_manifest(self): - os.environ['KBC_DATA_TYPE_SUPPORT'] = "authoritative" + os.environ["KBC_DATA_TYPE_SUPPORT"] = "authoritative" - schema_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'schema_examples', 'schemas') + schema_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "schema_examples", "schemas") comp = MockComponent(schema_path_override=schema_path) order_schema = comp.get_table_schema_by_name(schema_name="order") order_table_definition_from_schema = comp.create_out_table_definition_from_schema(order_schema) manifest_dict = order_table_definition_from_schema.get_manifest_dictionary(legacy_manifest=False) - expected_manifest = {'delimiter': 
',', - 'enclosure': '"', - 'has_header': False, - 'manifest_type': 'out', - 'table_metadata': {'KBC.description': 'this table holds data on orders'}, - 'schema': [{'data_type': {'base': {'type': 'STRING'}}, - 'description': 'ID of the order', - 'name': 'id', - 'primary_key': True}, - {'data_type': {'base': {'type': 'NUMERIC'}}, - 'description': 'Id of the product in order', - 'name': 'product_id'}, - {'data_type': {'base': {'type': 'STRING'}}, - 'description': 'Quantity of the product in order', - 'name': 'quantity'}], - 'write_always': False} - del os.environ['KBC_DATA_TYPE_SUPPORT'] + expected_manifest = { + "delimiter": ",", + "enclosure": '"', + "has_header": False, + "manifest_type": "out", + "table_metadata": {"KBC.description": "this table holds data on orders"}, + "schema": [ + { + "data_type": {"base": {"type": "STRING"}}, + "description": "ID of the order", + "name": "id", + "primary_key": True, + }, + { + "data_type": {"base": {"type": "NUMERIC"}}, + "description": "Id of the product in order", + "name": "product_id", + }, + { + "data_type": {"base": {"type": "STRING"}}, + "description": "Quantity of the product in order", + "name": "quantity", + }, + ], + "write_always": False, + } + del os.environ["KBC_DATA_TYPE_SUPPORT"] self.assertEqual(expected_manifest, manifest_dict) def test_invalid_column_schema_raises_key_error(self): with self.assertRaises(KeyError): - schema_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'schema_examples', 'faulty-schemas') + schema_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "schema_examples", "faulty-schemas") comp = MockComponent(schema_path_override=schema_path) table_schema = comp.get_table_schema_by_name(schema_name="invalid_column_schema") comp.create_out_table_definition_from_schema(table_schema) def test_invalid_schema_raises_key_error(self): with self.assertRaises(KeyError): - schema_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'schema_examples', 
'faulty-schemas') + schema_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "schema_examples", "faulty-schemas") comp = MockComponent(schema_path_override=schema_path) table_schema = comp.get_table_schema_by_name(schema_name="invalid_table_schema") comp.create_out_table_definition_from_schema(table_schema) def test_missing_schema_raises_key_error(self): with self.assertRaises(FileNotFoundError): - schema_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'schema_examples', 'faulty-schemas') + schema_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "schema_examples", "faulty-schemas") comp = MockComponent(schema_path_override=schema_path) table_schema = comp.get_table_schema_by_name(schema_name="missing") comp.create_out_table_definition_from_schema(table_schema) def test_invalid_schema_path_raises_key_error(self): with self.assertRaises(FileNotFoundError): - schema_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'schema_examples', 'missing') + schema_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "schema_examples", "missing") comp = MockComponent(schema_path_override=schema_path) table_schema = comp.get_table_schema_by_name(schema_name="missing") comp.create_out_table_definition_from_schema(table_schema) def test_invalid_base_type_raises_key_error(self): with self.assertRaises(ValueError): - schema_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'schema_examples', 'faulty-schemas') + schema_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "schema_examples", "faulty-schemas") comp = MockComponent(schema_path_override=schema_path) table_schema = comp.get_table_schema_by_name(schema_name="invalid_base_type") comp.create_out_table_definition_from_schema(table_schema) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_sync_actions.py b/tests/test_sync_actions.py index d462ccc..07f7f79 100644 --- 
a/tests/test_sync_actions.py +++ b/tests/test_sync_actions.py @@ -1,19 +1,16 @@ import unittest -from keboola.component.sync_actions import SelectElement, ValidationResult, process_sync_action_result, MessageType +from keboola.component.sync_actions import MessageType, SelectElement, ValidationResult, process_sync_action_result class TestSyncActions(unittest.TestCase): - def test_select_element_return_value(self): - select_options = [SelectElement("value_a", "label_a"), - SelectElement("value_b")] + select_options = [SelectElement("value_a", "label_a"), SelectElement("value_b")] expected = '[{"value": "value_a", "label": "label_a"}, {"value": "value_b", "label": "value_b"}]' self.assertEqual(process_sync_action_result(select_options), expected) def test_select_element_return_value_legacy(self): - select_options = [dict(value="value_a", label="label_a"), - dict(value="value_b", label="value_b")] + select_options = [dict(value="value_a", label="label_a"), dict(value="value_b", label="value_b")] expected = '[{"value": "value_a", "label": "label_a"}, {"value": "value_b", "label": "value_b"}]' self.assertEqual(process_sync_action_result(select_options), expected) From 36e41febf809b41eae323df0056e09c17a751cec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maty=C3=A1=C5=A1=20Jir=C3=A1t?= Date: Tue, 24 Feb 2026 17:47:15 +0100 Subject: [PATCH 5/7] Removed old Type hints --- pyproject.toml | 3 +- src/keboola/component/base.py | 21 +-- src/keboola/component/dao.py | 259 +++++++++++++------------- src/keboola/component/interface.py | 81 ++++---- src/keboola/component/sync_actions.py | 5 +- src/keboola/component/table_schema.py | 21 +-- 6 files changed, 190 insertions(+), 200 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index abfbe03..3370d5a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,6 +52,7 @@ explicit = true [tool.ruff] line-length = 120 +target-version = "py310" [tool.ruff.lint] -extend-select = ["I"] \ No newline at end of file +extend-select = ["I", 
"UP"] \ No newline at end of file diff --git a/src/keboola/component/base.py b/src/keboola/component/base.py index 99414f9..48c6613 100644 --- a/src/keboola/component/base.py +++ b/src/keboola/component/base.py @@ -6,7 +6,6 @@ from abc import ABC, abstractmethod from functools import wraps from pathlib import Path -from typing import Dict, List, Optional, Union from . import dao from . import table_schema as ts @@ -94,7 +93,7 @@ def action_wrapper(self, *args, **kwargs): try: # when success, only supported syntax can be in output / log, so redirect stdout before. with contextlib.redirect_stdout(stdout_redirect): - result: Union[None, SyncActionResult, List[SyncActionResult]] = func(self, *args, **kwargs) + result: None | SyncActionResult | list[SyncActionResult] = func(self, *args, **kwargs) if is_sync_action: # sync action expects valid JSON in stdout on success. @@ -119,10 +118,10 @@ def action_wrapper(self, *args, **kwargs): class ComponentBase(ABC, CommonInterface): def __init__( self, - data_path_override: Optional[str] = None, - schema_path_override: Optional[str] = None, - required_parameters: Optional[list] = None, - required_image_parameters: Optional[list] = None, + data_path_override: str | None = None, + schema_path_override: str | None = None, + required_parameters: list | None = None, + required_image_parameters: list | None = None, ): """ Base class for general Python components. Initializes the CommonInterface @@ -203,7 +202,7 @@ def _get_schema_folder_path(self, schema_path_override: str = None) -> str: return schema_path_override or self._get_default_schema_folder_path() @staticmethod - def _get_default_schema_folder_path() -> Optional[str]: + def _get_default_schema_folder_path() -> str | None: """ Finds the default schema_folder_path if it exists. 
@@ -368,7 +367,7 @@ def create_out_table_definition_from_schema( return table_def - def get_table_schema_by_name(self, schema_name: str, schema_folder_path: Optional[str] = None) -> ts.TableSchema: + def get_table_schema_by_name(self, schema_name: str, schema_folder_path: str | None = None) -> ts.TableSchema: """ The method finds a table schema JSON based on it's name in a defined schema_folder_path and generates a TableSchema object. @@ -390,9 +389,9 @@ def get_table_schema_by_name(self, schema_name: str, schema_folder_path: Optiona return ts.init_table_schema_from_dict(schema_dict) @staticmethod - def _load_table_schema_dict(schema_name: str, schema_folder_path: str) -> Dict: + def _load_table_schema_dict(schema_name: str, schema_folder_path: str) -> dict: try: - with open(os.path.join(schema_folder_path, f"{schema_name}.json"), "r") as schema_file: + with open(os.path.join(schema_folder_path, f"{schema_name}.json")) as schema_file: json_schema = json.loads(schema_file.read()) except FileNotFoundError as file_err: raise FileNotFoundError( @@ -411,7 +410,7 @@ def _validate_schema_folder_path(schema_folder_path: str): " located in the 'src' directory of a component : src/schemas" ) - def _generate_schema_definition(self, table_schema: ts.TableSchema) -> Dict[str, dao.ColumnDefinition]: + def _generate_schema_definition(self, table_schema: ts.TableSchema) -> dict[str, dao.ColumnDefinition]: """ Generates a TableMetadata object for the table definition using a TableSchema object. 
diff --git a/src/keboola/component/dao.py b/src/keboola/component/dao.py index 8cbed0e..4f79146 100644 --- a/src/keboola/component/dao.py +++ b/src/keboola/component/dao.py @@ -1,4 +1,3 @@ -# Python 3.7 support from __future__ import annotations import dataclasses @@ -11,18 +10,12 @@ from datetime import datetime from enum import Enum from pathlib import Path -from typing import Dict, List, Optional, Union -from typing import OrderedDict as TypeOrderedDict +from typing import Literal from deprecated import deprecated from .exceptions import UserException -try: - from typing import Literal -except ImportError: - from typing_extensions import Literal - KBC_DEFAULT_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S%z" @@ -192,7 +185,7 @@ def load_table_metadata_from_manifest(self, manifest: dict): value = metadata["value"] self.add_table_metadata(key, value) - def get_table_metadata_for_manifest(self, legacy_manifest: bool = False) -> List[dict]: + def get_table_metadata_for_manifest(self, legacy_manifest: bool = False) -> list[dict]: """ Returns table metadata list as required by the [manifest format] @@ -338,7 +331,7 @@ def add_column_descriptions(self, column_descriptions: dict): "dao.TableDefinition methods to define columns. e.g." "dao.TableDefinition.add_columns()", ) - def add_column_data_types(self, column_types: Dict[str, Union[SupportedDataTypes, str]]): + def add_column_data_types(self, column_types: dict[str, SupportedDataTypes | str]): """ Add column types metadata. Note that only supported datatypes () may be provided. 
The value accepts either instance of ColumnDataTypes @@ -364,7 +357,7 @@ def add_column_data_types(self, column_types: Dict[str, Union[SupportedDataTypes def add_column_data_type( self, column: str, - data_type: Union[SupportedDataTypes, str], + data_type: SupportedDataTypes | str, source_data_type: str = None, nullable: bool = False, length: str = None, @@ -431,7 +424,7 @@ def add_table_metadata(self, key: str, value: str): reason="Column metadata ere moved to dao.TableDefinition.schema property." "Please use the dao.ColumnDefinition.metadata", ) - def add_column_metadata(self, column: str, key: str, value: Union[str, bool, int], backend="base"): + def add_column_metadata(self, column: str, key: str, value: str | bool | int, backend="base"): """ Add/Updates column metadata and ensures the Key is unique. Args: @@ -451,7 +444,7 @@ def add_column_metadata(self, column: str, key: str, value: Union[str, bool, int reason="Column metadata ere moved to dao.TableDefinition.schema property." "Please use the dao.ColumnDefinition.metadata", ) - def add_multiple_column_metadata(self, column_metadata: Dict[str, List[dict]]): + def add_multiple_column_metadata(self, column_metadata: dict[str, list[dict]]): """ Add key-value pairs to column metadata. 
@@ -479,8 +472,8 @@ def _validate_data_types(column_types: dict): @dataclass class DataType: dtype: str - length: Optional[str] = None - default: Optional[str] = None + length: str | None = None + default: str | None = None def __post_init__(self): if isinstance(self.dtype, SupportedDataTypes): @@ -491,37 +484,37 @@ class BaseType(dict): def __init__( self, dtype: SupportedDataTypes = SupportedDataTypes.STRING, - length: Optional[str] = None, - default: Optional[str] = None, + length: str | None = None, + default: str | None = None, ): super().__init__(base=DataType(dtype=dtype, length=length, default=default)) @classmethod - def string(cls, length: Optional[str] = None, default: Optional[str] = None) -> "BaseType": + def string(cls, length: str | None = None, default: str | None = None) -> BaseType: return BaseType(dtype=SupportedDataTypes.STRING, length=length, default=default) @classmethod - def integer(cls, length: Optional[str] = None, default: Optional[str] = None) -> "BaseType": + def integer(cls, length: str | None = None, default: str | None = None) -> BaseType: return BaseType(dtype=SupportedDataTypes.INTEGER, length=length, default=default) @classmethod - def numeric(cls, length: Optional[str] = None, default: Optional[str] = None) -> "BaseType": + def numeric(cls, length: str | None = None, default: str | None = None) -> BaseType: return BaseType(dtype=SupportedDataTypes.NUMERIC, length=length, default=default) @classmethod - def float(cls, length: Optional[str] = None, default: Optional[str] = None) -> "BaseType": + def float(cls, length: str | None = None, default: str | None = None) -> BaseType: return BaseType(dtype=SupportedDataTypes.FLOAT, length=length, default=default) @classmethod - def boolean(cls, default: Optional[str] = None) -> "BaseType": + def boolean(cls, default: str | None = None) -> BaseType: return BaseType(dtype=SupportedDataTypes.BOOLEAN, default=default) @classmethod - def date(cls, default: Optional[str] = None) -> "BaseType": + 
def date(cls, default: str | None = None) -> BaseType: return BaseType(dtype=SupportedDataTypes.DATE, default=default) @classmethod - def timestamp(cls, default: Optional[str] = None) -> "BaseType": + def timestamp(cls, default: str | None = None) -> BaseType: return BaseType(dtype=SupportedDataTypes.TIMESTAMP, default=default) @@ -540,11 +533,11 @@ class ColumnDefinition: metadata (Optional[Dict[str, str]]): Additional metadata associated with the column. Defaults to None. """ - data_types: Optional[Union[Dict[str, DataType], BaseType]] = field(default_factory=lambda: BaseType()) - nullable: Optional[bool] = True - primary_key: Optional[bool] = False - description: Optional[str] = None - metadata: Optional[Dict[str, str]] = None + data_types: dict[str, DataType] | BaseType | None = field(default_factory=lambda: BaseType()) + nullable: bool | None = True + primary_key: bool | None = False + description: str | None = None + metadata: dict[str, str] | None = None def update_properties(self, **kwargs): for key, value in kwargs.items(): @@ -601,14 +594,14 @@ def to_dict(self, name: str): @dataclass class SupportedManifestAttributes(SubscriptableDataclass): - out_attributes: List[str] - in_attributes: List[str] - out_legacy_exclude: List[str] = dataclasses.field(default_factory=lambda: []) - in_legacy_exclude: List[str] = dataclasses.field(default_factory=lambda: []) + out_attributes: list[str] + in_attributes: list[str] + out_legacy_exclude: list[str] = dataclasses.field(default_factory=lambda: []) + in_legacy_exclude: list[str] = dataclasses.field(default_factory=lambda: []) def get_attributes_by_stage( self, stage: Literal["in", "out"], legacy_queue: bool = False, legacy_manifest: bool = False - ) -> List[str]: + ) -> list[str]: if stage == "out": attributes = self.out_attributes exclude = self.out_legacy_exclude @@ -648,7 +641,7 @@ def _filter_attributes_by_manifest_type( raise NotImplementedError def get_manifest_dictionary( - self, manifest_type: Optional[str] = 
None, legacy_queue: bool = False, legacy_manifest: Optional[bool] = None + self, manifest_type: str | None = None, legacy_queue: bool = False, legacy_manifest: bool | None = None ) -> dict: raise NotImplementedError @@ -703,7 +696,7 @@ class ABSStaging: credentials_expiration: str @property - def s3_staging(self) -> Union[S3Staging, None]: + def s3_staging(self) -> S3Staging | None: s3 = self._s3 if s3: return IODefinition.S3Staging( @@ -719,7 +712,7 @@ def s3_staging(self) -> Union[S3Staging, None]: return None @property - def abs_staging(self) -> Union[ABSStaging, None]: + def abs_staging(self) -> ABSStaging | None: _abs = self._abs if _abs: return IODefinition.ABSStaging( @@ -810,28 +803,28 @@ class TableDefinition(IODefinition): OUTPUT_MANIFEST_LEGACY_EXCLUDES = ["write_always"] MANIFEST_ATTRIBUTES = {"in": INPUT_MANIFEST_ATTRIBUTES, "out": OUTPUT_MANIFEST_ATTRIBUTES} - SCHEMA_TYPE = Union[Dict[str, ColumnDefinition], TypeOrderedDict[str, ColumnDefinition], List[str]] + SCHEMA_TYPE = dict[str, ColumnDefinition] | OrderedDict[str, ColumnDefinition] | list[str] def __init__( self, name: str, - full_path: Optional[Union[str, None]] = None, - is_sliced: Optional[bool] = False, - destination: Optional[str] = "", - primary_key: Optional[List[str]] = None, + full_path: str | None | None = None, + is_sliced: bool | None = False, + destination: str | None = "", + primary_key: list[str] | None = None, schema: SCHEMA_TYPE = None, - incremental: Optional[bool] = None, - table_metadata: Optional[TableMetadata] = None, - enclosure: Optional[str] = '"', - delimiter: Optional[str] = ",", - delete_where: Optional[dict] = None, - stage: Optional[str] = "out", - write_always: Optional[bool] = False, - has_header: Optional[bool] = None, - description: Optional[str] = None, + incremental: bool | None = None, + table_metadata: TableMetadata | None = None, + enclosure: str | None = '"', + delimiter: str | None = ",", + delete_where: dict | None = None, + stage: str | None = "out", + 
write_always: bool | None = False, + has_header: bool | None = None, + description: str | None = None, # storage staging - s3: Optional[dict] = None, - abs: Optional[dict] = None, + s3: dict | None = None, + abs: dict | None = None, **kwargs, ): """ @@ -868,7 +861,7 @@ def __init__( # initialize manifest properties self._destination = None self.destination = destination - self._schema: Dict[str, ColumnDefinition] = dict() + self._schema: dict[str, ColumnDefinition] = dict() if schema: self.schema = schema @@ -936,17 +929,17 @@ def __get_stage_inferred(self): def build_output_definition( cls, name: str, - destination: Optional[str] = "", - columns: Optional[List[str]] = None, - primary_key: Optional[List[str]] = None, - incremental: Optional[bool] = False, - table_metadata: Optional[TableMetadata] = None, - enclosure: Optional[str] = '"', - delimiter: Optional[str] = ",", - delete_where: Optional[dict] = None, - write_always: Optional[bool] = False, + destination: str | None = "", + columns: list[str] | None = None, + primary_key: list[str] | None = None, + incremental: bool | None = False, + table_metadata: TableMetadata | None = None, + enclosure: str | None = '"', + delimiter: str | None = ",", + delete_where: dict | None = None, + write_always: bool | None = False, schema: SCHEMA_TYPE = None, - description: Optional[str] = None, + description: str | None = None, **kwargs, ): """ @@ -992,31 +985,31 @@ def build_output_definition( def build_input_definition( cls, name: str, - full_path: Optional[Union[str, None]] = None, - is_sliced: Optional[bool] = False, - destination: Optional[str] = "", - primary_key: Optional[List[str]] = None, - columns: Optional[List[str]] = None, - incremental: Optional[bool] = None, - table_metadata: Optional[TableMetadata] = None, - enclosure: Optional[str] = '"', - delimiter: Optional[str] = ",", - delete_where: Optional[dict] = None, - stage: Optional[str] = "in", - write_always: Optional[bool] = False, - schema: 
Optional[Union[TypeOrderedDict[str, ColumnDefinition], list[str]]] = None, - rows_count: Optional[int] = None, - data_size_bytes: Optional[int] = None, - is_alias: Optional[bool] = False, + full_path: str | None | None = None, + is_sliced: bool | None = False, + destination: str | None = "", + primary_key: list[str] | None = None, + columns: list[str] | None = None, + incremental: bool | None = None, + table_metadata: TableMetadata | None = None, + enclosure: str | None = '"', + delimiter: str | None = ",", + delete_where: dict | None = None, + stage: str | None = "in", + write_always: bool | None = False, + schema: OrderedDict[str, ColumnDefinition] | list[str] | None = None, + rows_count: int | None = None, + data_size_bytes: int | None = None, + is_alias: bool | None = False, # input - uri: Optional[str] = None, - id: Optional[str] = "", - created: Optional[str] = None, - last_change_date: Optional[str] = None, - last_import_date: Optional[str] = None, + uri: str | None = None, + id: str | None = "", + created: str | None = None, + last_change_date: str | None = None, + last_import_date: str | None = None, # storage staging - s3: Optional[dict] = None, - abs: Optional[dict] = None, + s3: dict | None = None, + abs: dict | None = None, **kwargs, ): """ @@ -1234,7 +1227,7 @@ def build_from_manifest(cls, manifest_file_path: str): return table_def def get_manifest_dictionary( - self, manifest_type: Optional[str] = None, legacy_queue: bool = False, legacy_manifest: Optional[bool] = None + self, manifest_type: str | None = None, legacy_queue: bool = False, legacy_manifest: bool | None = None ) -> dict: """ Returns manifest dictionary in appropriate manifest_type: either 'in' or 'out'. 
@@ -1346,11 +1339,11 @@ def _has_header_in_file(self): return has_header @property - def schema(self) -> TypeOrderedDict[str, ColumnDefinition]: + def schema(self) -> OrderedDict[str, ColumnDefinition]: return self._schema @schema.setter - def schema(self, value: Union[TypeOrderedDict[str, ColumnDefinition], list[str]]): + def schema(self, value: OrderedDict[str, ColumnDefinition] | list[str]): if value: if not isinstance(value, (list, dict, OrderedDict)): raise TypeError("Columns must be a list or a mapping of column names and ColumnDefinition objects") @@ -1414,7 +1407,7 @@ def data_size_bytes(self) -> int: @property @deprecated(version="1.5.1", reason="Please use new column_names method instead of columns property") - def columns(self) -> List[str]: + def columns(self) -> list[str]: if isinstance(self.schema, (OrderedDict, dict)): return list(self.schema.keys()) else: @@ -1422,7 +1415,7 @@ def columns(self) -> List[str]: @columns.setter @deprecated(version="1.5.1", reason="Please use new column_names method instead of schema property") - def columns(self, val: List[str]): + def columns(self, val: list[str]): """ Set columns for the table. If list of names provided, the columns will be created with default settings Basetype.String. 
@@ -1438,7 +1431,7 @@ def columns(self, val: List[str]): self.schema = val @property - def column_names(self) -> List[str]: + def column_names(self) -> list[str]: if self.schema: return list(self.schema.keys()) else: @@ -1462,14 +1455,14 @@ def write_always(self, write_always: bool): self._write_always = write_always @property - def primary_key(self) -> List[str]: + def primary_key(self) -> list[str]: if not self._legacy_mode: return [column_name for column_name, column_def in self.schema.items() if column_def.primary_key] else: return self._legacy_primary_key @primary_key.setter - def primary_key(self, primary_key: List[str]): + def primary_key(self, primary_key: list[str]): if not primary_key: return @@ -1516,7 +1509,7 @@ def table_metadata(self, table_metadata: TableMetadata): self.schema[col].metadata = {item["key"]: item["value"] for item in val} @property - def created(self) -> Union[datetime, None]: # Created timestamp in the KBC Storage (read only input attribute) + def created(self) -> datetime | None: # Created timestamp in the KBC Storage (read only input attribute) if self._created: return datetime.strptime(self._created, KBC_DEFAULT_TIME_FORMAT) else: @@ -1563,7 +1556,7 @@ def delete_column(self, column_name: str): raise ValueError(f"Column with name {column_name} not found") del self.schema[column_name] - def add_columns(self, columns: Union[List[str], Dict[str, ColumnDefinition]]): + def add_columns(self, columns: list[str] | dict[str, ColumnDefinition]): if isinstance(columns, list): for name in columns: self.add_column(name) @@ -1571,11 +1564,11 @@ def add_columns(self, columns: Union[List[str], Dict[str, ColumnDefinition]]): for name, column in columns.items(): self.add_column(name, column) - def update_columns(self, columns: Dict[str, ColumnDefinition]): + def update_columns(self, columns: dict[str, ColumnDefinition]): for name, column in columns: self.update_column(name, column) - def delete_columns(self, column_names: List[str]): + def 
delete_columns(self, column_names: list[str]): for name in column_names: self.delete_column(name) @@ -1647,18 +1640,18 @@ class FileDefinition(IODefinition): def __init__( self, full_path: str, - stage: Optional[str] = "out", - tags: Optional[List[str]] = None, - is_public: Optional[bool] = False, - is_permanent: Optional[bool] = False, - is_encrypted: Optional[bool] = False, - notify: Optional[bool] = False, - id: Optional[str] = None, - s3: Optional[dict] = None, - abs: Optional[dict] = None, - created: Optional[str] = None, - size_bytes: Optional[int] = None, - max_age_days: Optional[int] = None, + stage: str | None = "out", + tags: list[str] | None = None, + is_public: bool | None = False, + is_permanent: bool | None = False, + is_encrypted: bool | None = False, + notify: bool | None = False, + id: str | None = None, + s3: dict | None = None, + abs: dict | None = None, + created: str | None = None, + size_bytes: int | None = None, + max_age_days: int | None = None, ): """ @@ -1695,11 +1688,11 @@ def __init__( def build_output_definition( cls, full_path: str, - tags: Optional[List[str]] = None, - is_public: Optional[bool] = False, - is_permanent: Optional[bool] = False, - is_encrypted: Optional[bool] = False, - notify: Optional[bool] = False, + tags: list[str] | None = None, + is_public: bool | None = False, + is_permanent: bool | None = False, + is_encrypted: bool | None = False, + notify: bool | None = False, ): """ Factory method to create an instance of FileDefinition for output files. 
@@ -1733,12 +1726,12 @@ def build_output_definition( def build_input_definition( cls, full_path: str, - id: Optional[str] = None, - s3: Optional[dict] = None, - abs: Optional[dict] = None, - created: Optional[str] = None, - size_bytes: Optional[int] = None, - max_age_days: Optional[int] = None, + id: str | None = None, + s3: dict | None = None, + abs: dict | None = None, + created: str | None = None, + size_bytes: int | None = None, + max_age_days: int | None = None, ): """ Factory method to create an instance of FileDefinition for input files. @@ -1830,7 +1823,7 @@ def is_system_tag(cls, tag: str) -> bool: return False def get_manifest_dictionary( - self, manifest_type: Optional[str] = None, legacy_queue: bool = False, legacy_manifest: Optional[bool] = None + self, manifest_type: str | None = None, legacy_queue: bool = False, legacy_manifest: bool | None = None ) -> dict: """ Returns manifest dictionary in appropriate manifest_type: either 'in' or 'out'. @@ -1927,23 +1920,23 @@ def _manifest_attributes(self) -> SupportedManifestAttributes: # ########### Output manifest properties - R/W @property - def user_tags(self) -> List[str]: + def user_tags(self) -> list[str]: """ User defined tags excluding the system tags """ # filter system tags - tags: List[str] = [tag for tag in self._tags if not self.is_system_tag(tag)] + tags: list[str] = [tag for tag in self._tags if not self.is_system_tag(tag)] return tags @property - def tags(self) -> List[str]: + def tags(self) -> list[str]: """ All tags specified on the file """ return self._tags @tags.setter - def tags(self, tags: List[str]): + def tags(self, tags: list[str]): if tags is None: tags = list() self._tags = tags @@ -1986,7 +1979,7 @@ def id(self) -> str: # File ID in the KBC Storage (read only input attribute) return self._id @property - def created(self) -> Union[datetime, None]: # Created timestamp in the KBC Storage (read only input attribute) + def created(self) -> datetime | None: # Created timestamp in the KBC 
Storage (read only input attribute) if self._created: return datetime.strptime(self._created, KBC_DEFAULT_TIME_FORMAT) else: @@ -2028,12 +2021,12 @@ class TableInputMapping(SubscriptableDataclass): source: str = "" destination: str = None limit: int = None - columns: List[str] = dataclasses.field(default_factory=lambda: []) - where_values: List[str] = None + columns: list[str] = dataclasses.field(default_factory=lambda: []) + where_values: list[str] = None full_path: str = None where_operator: str = "" days: int = 0 - column_types: List[TableColumnTypes] = (None,) + column_types: list[TableColumnTypes] = (None,) file_type: str = "csv" @@ -2063,7 +2056,7 @@ class FileInputMapping(SubscriptableDataclass): https://developers.keboola.com/extend/common-interface/config-file/#files) in the config file """ - tags: List[str] + tags: list[str] query: str = "" filter_by_run_id: bool = False @@ -2078,7 +2071,7 @@ class FileOutputMapping(SubscriptableDataclass): source: str is_public: bool = False is_permanent: bool = False - tags: List[str] = dataclasses.field(default_factory=lambda: []) + tags: list[str] = dataclasses.field(default_factory=lambda: []) @dataclass diff --git a/src/keboola/component/interface.py b/src/keboola/component/interface.py index b366617..00bfd27 100644 --- a/src/keboola/component/interface.py +++ b/src/keboola/component/interface.py @@ -1,4 +1,3 @@ -# Python 3.7 support from __future__ import annotations import argparse @@ -9,9 +8,9 @@ import os import sys import warnings +from collections import OrderedDict from datetime import datetime from pathlib import Path -from typing import Dict, List, Optional, OrderedDict, Union from deprecated import deprecated from pygelf import GelfTcpHandler, GelfUdpHandler @@ -233,9 +232,9 @@ def get_state_file(self) -> dict: logging.info("State file not found. 
First run?") return {} try: - with open(state_file_path, "r") as state_file: + with open(state_file_path) as state_file: return json.load(state_file) - except (OSError, IOError): + except OSError: raise ValueError("State file state.json unable to read ") def write_state_file(self, state_dict: dict): @@ -269,7 +268,7 @@ def get_input_table_definition_by_name(self, table_name: str) -> dao.TableDefini return dao.TableDefinition.build_from_manifest(manifest_path) - def get_input_tables_definitions(self, orphaned_manifests=False) -> List[dao.TableDefinition]: + def get_input_tables_definitions(self, orphaned_manifests=False) -> list[dao.TableDefinition]: """ Return dao.TableDefinition objects by scanning the `data/in/tables` folder. @@ -330,17 +329,17 @@ def _create_table_definition( storage_stage: str = "out", is_sliced: bool = False, destination: str = "", - primary_key: List[str] = None, - columns: List[str] = None, + primary_key: list[str] = None, + columns: list[str] = None, incremental: bool = None, table_metadata: dao.TableMetadata = None, enclosure: str = '"', delimiter: str = ",", delete_where: dict = None, write_always: bool = False, - schema: Union[OrderedDict[str, ColumnDefinition], list[str]] = None, - has_header: Optional[bool] = None, - description: Optional[str] = None, + schema: OrderedDict[str, ColumnDefinition] | list[str] = None, + has_header: bool | None = None, + description: str | None = None, ) -> dao.TableDefinition: """ Helper method for dao.TableDefinition creation along with the "manifest". 
@@ -403,12 +402,12 @@ def create_in_table_definition( name: str, is_sliced: bool = False, destination: str = "", - primary_key: List[str] = None, - columns: List[str] = None, + primary_key: list[str] = None, + columns: list[str] = None, incremental: bool = None, table_metadata: dao.TableMetadata = None, delete_where: str = None, - schema: List[ColumnDefinition] = None, + schema: list[ColumnDefinition] = None, ) -> dao.TableDefinition: """ Helper method for input dao.TableDefinition creation along with the "manifest". @@ -444,7 +443,7 @@ def create_out_table_definition( name: str, is_sliced: bool = False, destination: str = "", - primary_key: List[str] = None, + primary_key: list[str] = None, schema: TableDefinition.SCHEMA_TYPE = None, incremental: bool = None, table_metadata: dao.TableMetadata = None, @@ -452,8 +451,8 @@ def create_out_table_definition( delimiter: str = ",", delete_where: dict = None, write_always: bool = False, - has_header: Optional[bool] = None, - description: Optional[str] = None, + has_header: bool | None = None, + description: str | None = None, **kwargs, ) -> dao.TableDefinition: """ @@ -501,8 +500,8 @@ def create_out_table_definition( # # File processing def get_input_file_definitions_grouped_by_tag_group( - self, orphaned_manifests=False, only_latest_files=True, tags: List[str] = None, include_system_tags=False - ) -> Dict[str, List[dao.FileDefinition]]: + self, orphaned_manifests=False, only_latest_files=True, tags: list[str] = None, include_system_tags=False + ) -> dict[str, list[dao.FileDefinition]]: """ Convenience method returning lists of files in dictionary grouped by tag group. 
@@ -525,8 +524,8 @@ def get_input_file_definitions_grouped_by_tag_group( return self.__group_file_defs_by_tag_group(file_definitions, include_system_tags=include_system_tags) def get_input_file_definitions_grouped_by_name( - self, orphaned_manifests=False, only_latest_files=True, tags: List[str] = None - ) -> Dict[str, List[dao.FileDefinition]]: + self, orphaned_manifests=False, only_latest_files=True, tags: list[str] = None + ) -> dict[str, list[dao.FileDefinition]]: """ Convenience method returning lists of files in dictionary grouped by file name. @@ -543,8 +542,8 @@ def get_input_file_definitions_grouped_by_name( return self.__group_files_by_name(file_definitions) def __group_file_defs_by_tag_group( - self, file_definitions: List[dao.FileDefinition], include_system_tags=False - ) -> Dict[str, List[dao.FileDefinition]]: + self, file_definitions: list[dao.FileDefinition], include_system_tags=False + ) -> dict[str, list[dao.FileDefinition]]: files_per_tag: dict = {} for f in file_definitions: tag_group_v1 = f.tags if include_system_tags else f.user_tags @@ -556,8 +555,8 @@ def __group_file_defs_by_tag_group( return files_per_tag def _filter_files( - self, file_definitions: List[dao.FileDefinition], tags: List[str] = None, only_latest: bool = True - ) -> List[dao.FileDefinition]: + self, file_definitions: list[dao.FileDefinition], tags: list[str] = None, only_latest: bool = True + ) -> list[dao.FileDefinition]: filtered_files = file_definitions if only_latest: @@ -575,7 +574,7 @@ def _filter_files( return filtered_files - def __group_files_by_name(self, file_definitions: List[dao.FileDefinition]) -> Dict[str, List[dao.FileDefinition]]: + def __group_files_by_name(self, file_definitions: list[dao.FileDefinition]) -> dict[str, list[dao.FileDefinition]]: files_per_name: dict = {} for f in file_definitions: if not files_per_name.get(f.name): @@ -583,7 +582,7 @@ def __group_files_by_name(self, file_definitions: List[dao.FileDefinition]) -> D 
files_per_name[f.name].append(f) return files_per_name - def __filter_filedefs_by_latest(self, file_definitions: List[dao.FileDefinition]) -> List[dao.FileDefinition]: + def __filter_filedefs_by_latest(self, file_definitions: list[dao.FileDefinition]) -> list[dao.FileDefinition]: """ Get latest file (according to the timestamp) by each filename Args: @@ -607,8 +606,8 @@ def __filter_filedefs_by_latest(self, file_definitions: List[dao.FileDefinition] return filtered_files def get_input_files_definitions( - self, orphaned_manifests=False, only_latest_files=True, tags: Optional[List[str]] = None - ) -> List[dao.FileDefinition]: + self, orphaned_manifests=False, only_latest_files=True, tags: list[str] | None = None + ) -> list[dao.FileDefinition]: """ Return dao.FileDefinition objects by scanning the `data/in/files` folder. @@ -663,7 +662,7 @@ def _create_file_definition( self, name: str, storage_stage: str = "out", - tags: List[str] = None, + tags: list[str] = None, is_public: bool = False, is_permanent: bool = False, is_encrypted: bool = False, @@ -703,7 +702,7 @@ def _create_file_definition( def create_out_file_definition( self, name: str, - tags: List[str] = None, + tags: list[str] = None, is_public: bool = False, is_permanent: bool = False, is_encrypted: bool = False, @@ -947,7 +946,7 @@ def is_legacy_queue(self) -> bool: return is_legacy_queue def write_manifest( - self, io_definition: Union[dao.FileDefinition, dao.TableDefinition], legacy_manifest: Optional[bool] = None + self, io_definition: dao.FileDefinition | dao.TableDefinition, legacy_manifest: bool | None = None ): """ Write a table manifest from dao.IODefinition. Creates the appropriate manifest file in the proper location. 
@@ -1005,8 +1004,8 @@ def _expects_legacy_manifest(self) -> bool: def write_manifests( self, - io_definitions: List[Union[dao.FileDefinition, dao.TableDefinition]], - legacy_manifest: Optional[bool] = None, + io_definitions: list[dao.FileDefinition | dao.TableDefinition], + legacy_manifest: bool | None = None, ): """ Process all table definition objects and create appropriate manifest files. @@ -1050,7 +1049,7 @@ def write_filedef_manifest(self, file_definition: dao.FileDefinition): self.write_manifest(file_definition) @deprecated(version="1.3.0", reason="You should use write_manifests function") - def write_filedef_manifests(self, file_definitions: List[dao.FileDefinition]): + def write_filedef_manifests(self, file_definitions: list[dao.FileDefinition]): """ Process all table definition objects and create appropriate manifest files. Args: @@ -1094,7 +1093,7 @@ def write_tabledef_manifest(self, table_definition: dao.TableDefinition): self.write_manifest(table_definition, legacy_manifest=True) @deprecated(version="1.3.0", reason="You should use write_manifests function") - def write_tabledef_manifests(self, table_definitions: List[dao.TableDefinition]): + def write_tabledef_manifests(self, table_definitions: list[dao.TableDefinition]): """ Process all table definition objects and create appropriate manifest files. Args: @@ -1127,9 +1126,9 @@ def __init__(self, data_folder_path: str): self.data_dir = data_folder_path try: - with open(os.path.join(data_folder_path, "config.json"), "r") as config_file: + with open(os.path.join(data_folder_path, "config.json")) as config_file: self.config_data = json.load(config_file) - except (OSError, IOError): + except OSError: raise ValueError( f"Configuration file config.json not found, verify that the data directory is correct and that the " f"config file is present. 
Dir: " @@ -1164,7 +1163,7 @@ def oauth_credentials(self) -> dao.OauthCredentials: return credentials @property - def tables_input_mapping(self) -> List[dao.TableInputMapping]: + def tables_input_mapping(self) -> list[dao.TableInputMapping]: """ List of table [input mappings](https://developers.keboola.com/extend/common-interface/config-file/#tables) @@ -1189,7 +1188,7 @@ def tables_input_mapping(self) -> List[dao.TableInputMapping]: return tables @property - def tables_output_mapping(self) -> List[dao.TableOutputMapping]: + def tables_output_mapping(self) -> list[dao.TableOutputMapping]: """ List of table [output mappings](https://developers.keboola.com/extend/common-interface/config-file/#tables) @@ -1206,7 +1205,7 @@ def tables_output_mapping(self) -> List[dao.TableOutputMapping]: return tables @property - def files_input_mapping(self) -> List[dao.FileInputMapping]: + def files_input_mapping(self) -> list[dao.FileInputMapping]: """ List of file [input mappings](https://developers.keboola.com/extend/common-interface/config-file/#files) @@ -1223,7 +1222,7 @@ def files_input_mapping(self) -> List[dao.FileInputMapping]: return files @property - def files_output_mapping(self) -> List[dao.FileOutputMapping]: + def files_output_mapping(self) -> list[dao.FileOutputMapping]: """ List of file [output mappings](https://developers.keboola.com/extend/common-interface/config-file/#files) diff --git a/src/keboola/component/sync_actions.py b/src/keboola/component/sync_actions.py index c22f94f..ad8d507 100644 --- a/src/keboola/component/sync_actions.py +++ b/src/keboola/component/sync_actions.py @@ -8,7 +8,6 @@ from abc import ABC from dataclasses import dataclass from enum import Enum -from typing import List, Optional, Union @dataclass @@ -57,7 +56,7 @@ class SelectElement(SyncActionResult): """ value: str - label: Optional[str] = None + label: str | None = None def __post_init__(self): self.label = self.label or self.value @@ -65,7 +64,7 @@ def __post_init__(self): self.status 
= None -def process_sync_action_result(result: Union[None, List[dict], dict, SyncActionResult, List[SyncActionResult]]) -> str: +def process_sync_action_result(result: None | list[dict] | dict | SyncActionResult | list[SyncActionResult]) -> str: """ Converts Sync Action result into valid string (expected by Sync Action). Args: diff --git a/src/keboola/component/table_schema.py b/src/keboola/component/table_schema.py index 31e0ec3..43ec602 100644 --- a/src/keboola/component/table_schema.py +++ b/src/keboola/component/table_schema.py @@ -1,5 +1,4 @@ from dataclasses import dataclass -from typing import Dict, List, Optional, Union from keboola.component.dao import SupportedDataTypes @@ -11,11 +10,11 @@ class FieldSchema: """ name: str - base_type: Optional[Union[SupportedDataTypes, str]] = None - description: Optional[str] = None + base_type: SupportedDataTypes | str | None = None + description: str | None = None nullable: bool = False - length: Optional[str] = None - default: Optional[str] = None + length: str | None = None + default: str | None = None @dataclass @@ -25,13 +24,13 @@ class TableSchema: """ name: str - fields: List[FieldSchema] - primary_keys: Optional[List[str]] = None - parent_tables: Optional[List[str]] = None - description: Optional[str] = None + fields: list[FieldSchema] + primary_keys: list[str] | None = None + parent_tables: list[str] | None = None + description: str | None = None @property - def field_names(self) -> List[str]: + def field_names(self) -> list[str]: return [column.name for column in self.fields] @property @@ -48,7 +47,7 @@ def add_field(self, new_field: FieldSchema) -> None: self.fields.append(new_field) -def init_table_schema_from_dict(json_table_schema: Dict) -> TableSchema: +def init_table_schema_from_dict(json_table_schema: dict) -> TableSchema: """ Function to initialize a Table Schema from a dictionary. 
Example of the json_table_schema structure: From 1eb473f71837fb40f29abd6c630855906becb211 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maty=C3=A1=C5=A1=20Jir=C3=A1t?= Date: Tue, 24 Feb 2026 17:50:54 +0100 Subject: [PATCH 6/7] Flake Removal --- .github/workflows/deploy.yml | 4 ---- .github/workflows/push.yml | 4 ---- 2 files changed, 8 deletions(-) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 91052b3..efbab67 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -33,10 +33,6 @@ jobs: run: | uv sync --all-groups --frozen - - name: Lint with flake8 ❄️ - run: | - uv run flake8 - - name: Test with pytest ✅ run: | uv run pytest tests diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 15462a4..fbdc716 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -31,10 +31,6 @@ jobs: run: | uv sync --all-groups --frozen - - name: Lint with flake8 ❄️ - run: | - uv run flake8 - - name: Test with pytest ✅ run: | uv run pytest tests From 4b11e63baf4cd61b3260e269cce50b5e98b26045 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maty=C3=A1=C5=A1=20Jir=C3=A1t?= Date: Wed, 25 Feb 2026 14:38:49 +0100 Subject: [PATCH 7/7] Remove pytz dependency in favor of stdlib datetime.timezone pytz was only used for utc.localize() in one place. Since the project requires Python >= 3.10, datetime.timezone.utc is a direct stdlib replacement. Also removes the overly restrictive <2021.0 version pin. 
Co-Authored-By: Claude Sonnet 4.6 --- pyproject.toml | 9 ++++----- src/keboola/component/interface.py | 5 ++--- uv.lock | 17 +++-------------- 3 files changed, 9 insertions(+), 22 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3370d5a..179ca3c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,11 +1,10 @@ [project] name = "keboola.component" -version = "0.0.0" # replaced by the actual version based on the release tag in github actions +version = "1.9.0" # replaced by the actual version based on the release tag in github actions dependencies = [ "pygelf", - "pytz<2021.0", "deprecated", - "keboola.vcr", + "keboola.vcr>=0.1.1", ] requires-python = ">=3.10" @@ -40,7 +39,7 @@ Repository = "https://github.com/keboola/python-component" [dependency-groups] dev = [ "pytest>=8.3.5", - "ruff>=0.13.2", + "ruff>=0.15.2", "pdoc3", ] @@ -55,4 +54,4 @@ line-length = 120 target-version = "py310" [tool.ruff.lint] -extend-select = ["I", "UP"] \ No newline at end of file +extend-select = ["I", "UP"] diff --git a/src/keboola/component/interface.py b/src/keboola/component/interface.py index 00bfd27..6d83b4a 100644 --- a/src/keboola/component/interface.py +++ b/src/keboola/component/interface.py @@ -9,12 +9,11 @@ import sys import warnings from collections import OrderedDict -from datetime import datetime +from datetime import datetime, timezone from pathlib import Path from deprecated import deprecated from pygelf import GelfTcpHandler, GelfUdpHandler -from pytz import utc from . 
import dao from .dao import ColumnDefinition, TableDefinition @@ -595,7 +594,7 @@ def __filter_filedefs_by_latest(self, file_definitions: list[dao.FileDefinition] files_per_name = self.__group_files_by_name(file_definitions) for group in files_per_name: max_file = None - max_timestamp = utc.localize(datetime(1900, 5, 17)) + max_timestamp = datetime(1900, 5, 17, tzinfo=timezone.utc) for f in files_per_name[group]: creation_date = f.created # if date not present ignore and add anyway diff --git a/uv.lock b/uv.lock index 8746ca3..57d6fc8 100644 --- a/uv.lock +++ b/uv.lock @@ -58,13 +58,12 @@ wheels = [ [[package]] name = "keboola-component" -version = "0.0.0" +version = "1.9.0" source = { virtual = "." } dependencies = [ { name = "deprecated" }, { name = "keboola-vcr" }, { name = "pygelf" }, - { name = "pytz" }, ] [package.dev-dependencies] @@ -77,16 +76,15 @@ dev = [ [package.metadata] requires-dist = [ { name = "deprecated" }, - { name = "keboola-vcr" }, + { name = "keboola-vcr", specifier = ">=0.1.1" }, { name = "pygelf" }, - { name = "pytz", specifier = "<2021.0" }, ] [package.metadata.requires-dev] dev = [ { name = "pdoc3" }, { name = "pytest", specifier = ">=8.3.5" }, - { name = "ruff", specifier = ">=0.13.2" }, + { name = "ruff", specifier = ">=0.15.2" }, ] [[package]] @@ -287,15 +285,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] -[[package]] -name = "pytz" -version = "2020.5" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/70/44/404ec10dca553032900a65bcded8b8280cf7c64cc3b723324e2181bf93c9/pytz-2020.5.tar.gz", hash = "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5", size = 314194, upload-time = 
"2020-12-24T20:58:07.498Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/89/06/2c2d3034b4d6bf22f2a4ae546d16925898658a33b4400cfb7e2c1e2871a3/pytz-2020.5-py2.py3-none-any.whl", hash = "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4", size = 510773, upload-time = "2020-12-24T20:58:04.098Z" }, -] - [[package]] name = "pyyaml" version = "6.0.3"