# Workflow file for: Ensure tests run across combinations of python and spark
# versions and update readme and example notebook (#144)

name: Python CI
# Runs lint/type checks, pure-Python unit tests across supported Python
# versions, and Spark-marked tests across supported Python x PySpark combos.
on:
  push:
    branches:
      - main
  pull_request:

jobs:
  # Static analysis: ruff (lint + format) and mypy on a single Python version.
  lint:
    name: Lint & type check
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -e .[dev]
      - name: Ruff lint
        run: ruff check ttd_databricks_python
      - name: Ruff format check
        run: ruff format --check ttd_databricks_python
      - name: Mypy
        run: mypy ttd_databricks_python

  # Spark-independent tests on every supported Python version.
  python-unit-tests:
    name: Python unit tests (${{ matrix.python-version }})
    runs-on: ubuntu-latest
    strategy:
      # Let the remaining matrix entries finish even if one version fails.
      fail-fast: false
      matrix:
        # Versions quoted so YAML does not read 3.10 as the float 3.1.
        python-version: ["3.10", "3.11", "3.12", "3.13"]
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
          cache: "pip"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -e .[dev]
      - name: Run pure unit tests
        # Everything not marked "spark" runs without a Spark session.
        run: pytest tests/ -m "not spark"

  # Tests marked "spark", run on explicit Python/PySpark pairs rather than
  # a full cross-product, since each PySpark line supports a limited range
  # of Python versions.
  # NOTE(review): PySpark needs a JRE on the runner; ubuntu-latest ships
  # with Java preinstalled — confirm the default version satisfies each
  # PySpark line (4.x requires a newer Java than 3.x).
  spark-compatibility-tests:
    name: Spark compatibility tests (Python ${{ matrix.python-version }}, PySpark ${{ matrix.pyspark-version }})
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        include:
          - python-version: "3.10"
            pyspark-version: "3.4.0"
          - python-version: "3.10"
            pyspark-version: "3.5.0"
          - python-version: "3.11"
            pyspark-version: "3.5.0"
          - python-version: "3.12"
            pyspark-version: "4.0.0"
          - python-version: "3.13"
            pyspark-version: "4.0.0"
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
          cache: "pip"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -e .[dev]
          pip install "pyspark==${{ matrix.pyspark-version }}"
      - name: Run Spark tests
        run: pytest tests/ -m spark