-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathpyproject.toml
More file actions
69 lines (58 loc) · 1.47 KB
/
pyproject.toml
File metadata and controls
69 lines (58 loc) · 1.47 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
[build-system]
requires = ["setuptools>=61.0", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "ttd-databricks"
version = "0.2.1"
description = "Client implementation and helper functions for integrating with the TTD Databricks services."
readme = "README.md"
requires-python = ">=3.10"
license = {text = "Apache-2.0"}
classifiers = [
    "License :: OSI Approved :: Apache Software License",
]
authors = [
    {name = "The Trade Desk"},
]
dependencies = [
    "ttd-data>=0.0.1",
]

[project.optional-dependencies]
dev = [
    "pytest>=7.0.0",
    "pytest-cov>=4.0.0",
    "mypy>=1.0.0",
    "ruff>=0.9.0",
    "pyspark>=3.5",  # for schema unit tests only; runtime provides its own on Databricks
]

[tool.setuptools.packages.find]
where = ["."]
include = ["ttd_databricks_python*"]

[tool.mypy]
python_version = "3.10"
strict = true
ignore_missing_imports = true

# Ruff configuration: all [tool.ruff*] tables kept together, parent before child.
[tool.ruff]
target-version = "py310"
line-length = 120

[tool.ruff.lint]
select = [
    "E",   # pycodestyle errors
    "W",   # pycodestyle warnings
    "F",   # pyflakes
    "I",   # isort
    "B",   # flake8-bugbear (common bug patterns)
    "UP",  # pyupgrade (modernise syntax)
]
ignore = [
    "UP045",  # prefer Optional[X] over X | None
]

[tool.ruff.lint.isort]
known-first-party = ["ttd_databricks_python"]

[tool.ruff.format]
quote-style = "double"
indent-style = "space"

[tool.pytest.ini_options]
markers = [
    "spark: marks tests that require a running SparkSession (deselect with '-m \"not spark\"')",
]