diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 8ac04d24..925f29c4 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -117,6 +117,26 @@ jobs:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
OLLAMA_BASE_URL: "URL_PLACEHOLDER"
+ test-distributed:
+ name: test distributed
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: true
+
+ steps:
+ - name: Clone repo
+ uses: actions/checkout@v4
+
+ - name: Set up uv
+ uses: astral-sh/setup-uv@v6
+ with:
+ enable-cache: true
+
+ - name: Run tests
+ run: uv run pytest -m distributed -n 1
+ env:
+ OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+
build-docs:
name: build docs
runs-on: ubuntu-latest
diff --git a/docs/examples/dask.ipynb b/docs/examples/dask.ipynb
new file mode 100644
index 00000000..87a6faf0
--- /dev/null
+++ b/docs/examples/dask.ipynb
@@ -0,0 +1,789 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "22900abf",
+ "metadata": {},
+ "source": [
+ "# Using Dask with TimeCopilot"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "978cbc41",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ " See https://github.com/google-research/timesfm/blob/master/README.md for updated APIs.\n"
+ ]
+ }
+ ],
+ "source": [
+ "import nest_asyncio\n",
+ "\n",
+ "nest_asyncio.apply()\n",
+ "\n",
+ "from timecopilot import TimeCopilotForecaster\n",
+ "\n",
+ "import dask.dataframe as dd\n",
+ "import pandas as pd\n",
+ "\n",
+ "from timecopilot.models import SeasonalNaive\n",
+ "from timecopilot.models.foundation.chronos import Chronos"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "c9caf312",
+ "metadata": {},
+ "source": [
+ "## Create the dataframe"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "2514ac97",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " unique_id | \n",
+ " ds | \n",
+ " y | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " | 0 | \n",
+ " Oktoberfest | \n",
+ " 2020-01-31 | \n",
+ " 25376 | \n",
+ "
\n",
+ " \n",
+ " | 1 | \n",
+ " Oktoberfest | \n",
+ " 2020-02-29 | \n",
+ " 28470 | \n",
+ "
\n",
+ " \n",
+ " | 2 | \n",
+ " Oktoberfest | \n",
+ " 2020-03-31 | \n",
+ " 23816 | \n",
+ "
\n",
+ " \n",
+ " | 3 | \n",
+ " Oktoberfest | \n",
+ " 2020-04-30 | \n",
+ " 46186 | \n",
+ "
\n",
+ " \n",
+ " | 4 | \n",
+ " Oktoberfest | \n",
+ " 2020-05-31 | \n",
+ " 31213 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " unique_id ds y\n",
+ "0 Oktoberfest 2020-01-31 25376\n",
+ "1 Oktoberfest 2020-02-29 28470\n",
+ "2 Oktoberfest 2020-03-31 23816\n",
+ "3 Oktoberfest 2020-04-30 46186\n",
+ "4 Oktoberfest 2020-05-31 31213"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "pd_df = pd.read_csv(\"https://timecopilot.s3.amazonaws.com/public/data/events_pageviews.csv\", parse_dates=['ds'])\n",
+ "df = dd.from_pandas(pd_df)\n",
+ "df.head()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "b648995b",
+ "metadata": {},
+ "source": [
+ "## Create a TimeCopilotForecaster"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "3219bab9",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "tcf = TimeCopilotForecaster(\n",
+ " models=[\n",
+ " SeasonalNaive(),\n",
+ " Chronos(\"autogluon/chronos-2-small\")\n",
+ " ]\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "b300ac5b",
+ "metadata": {},
+ "source": [
+ "## Create a Forecast"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "d3547827",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2026-03-23 15:22:02,445\tINFO util.py:154 -- Missing packages: ['ipywidgets']. Run `pip install -U ipywidgets`, then restart the notebook server for rich notebook output.\n",
+ "2026-03-23 15:22:02,514\tINFO util.py:154 -- Missing packages: ['ipywidgets']. Run `pip install -U ipywidgets`, then restart the notebook server for rich notebook output.\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2026-03-23 15:22:04,635 - distributed.shuffle._scheduler_plugin - WARNING - Shuffle 7f748466334296bfdf8578237049791a initialized by task ('shuffle-transfer-7f748466334296bfdf8578237049791a', 1) executed on worker tcp://127.0.0.1:51589\n",
+ "2026-03-23 15:22:04,693 - distributed.shuffle._scheduler_plugin - WARNING - Shuffle 7f748466334296bfdf8578237049791a deactivated due to stimulus 'task-finished-1774293724.693407'\n",
+ "2026-03-23 15:22:22,637 - distributed.shuffle._scheduler_plugin - WARNING - Shuffle 7f748466334296bfdf8578237049791a initialized by task ('shuffle-transfer-7f748466334296bfdf8578237049791a', 1) executed on worker tcp://127.0.0.1:51587\n",
+ "2026-03-23 15:22:22,660 - distributed.shuffle._scheduler_plugin - WARNING - Shuffle 7f748466334296bfdf8578237049791a deactivated due to stimulus 'task-finished-1774293742.660038'\n",
+ "2026-03-23 15:31:59,193 - distributed.shuffle._scheduler_plugin - WARNING - Shuffle 7f748466334296bfdf8578237049791a initialized by task ('shuffle-transfer-7f748466334296bfdf8578237049791a', 0) executed on worker tcp://127.0.0.1:51587\n",
+ "2026-03-23 15:31:59,214 - distributed.shuffle._scheduler_plugin - WARNING - Shuffle 7f748466334296bfdf8578237049791a deactivated due to stimulus 'task-finished-1774294319.214437'\n",
+ "2026-03-23 15:32:04,851 - distributed.shuffle._scheduler_plugin - WARNING - Shuffle 7f748466334296bfdf8578237049791a initialized by task ('shuffle-transfer-7f748466334296bfdf8578237049791a', 0) executed on worker tcp://127.0.0.1:51601\n",
+ "2026-03-23 15:32:04,874 - distributed.shuffle._scheduler_plugin - WARNING - Shuffle 7f748466334296bfdf8578237049791a deactivated due to stimulus 'task-finished-1774294324.874474'\n",
+ "2026-03-23 15:32:10,789 - distributed.shuffle._scheduler_plugin - WARNING - Shuffle 7f748466334296bfdf8578237049791a initialized by task ('shuffle-transfer-7f748466334296bfdf8578237049791a', 1) executed on worker tcp://127.0.0.1:51601\n",
+ "2026-03-23 15:32:10,811 - distributed.shuffle._scheduler_plugin - WARNING - Shuffle 7f748466334296bfdf8578237049791a deactivated due to stimulus 'task-finished-1774294330.810997'\n"
+ ]
+ }
+ ],
+ "source": [
+ "result = tcf.forecast(\n",
+ " df=df,\n",
+ " h=12\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "id": "3b4d9765",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "`torch_dtype` is deprecated! Use `dtype` instead!\n",
+ "`torch_dtype` is deprecated! Use `dtype` instead!\n",
+ "100%|██████████| 1/1 [00:00<00:00, 71.25it/s]\n",
+ "100%|██████████| 1/1 [00:00<00:00, 53.71it/s]\n",
+ "100%|██████████| 1/1 [00:00<00:00, 80.45it/s]\n",
+ "100%|██████████| 1/1 [00:00<00:00, 75.77it/s]\n",
+ "100%|██████████| 1/1 [00:00<00:00, 87.50it/s]\n"
+ ]
+ }
+ ],
+ "source": [
+ "result_pd = result.compute()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "id": "43f3fc19",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " \n",
+ " | \n",
+ " unique_id | \n",
+ " ds | \n",
+ " SeasonalNaive | \n",
+ " Chronos | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " | 0 | \n",
+ " Oktoberfest | \n",
+ " 2025-09-30 00:00:00 | \n",
+ " 181474.0 | \n",
+ " 205939.90625 | \n",
+ "
\n",
+ " \n",
+ " | 1 | \n",
+ " Oktoberfest | \n",
+ " 2025-10-31 00:00:00 | \n",
+ " 142094.0 | \n",
+ " 161174.0625 | \n",
+ "
\n",
+ " \n",
+ " | 2 | \n",
+ " Oktoberfest | \n",
+ " 2025-11-30 00:00:00 | \n",
+ " 36200.0 | \n",
+ " 35124.382812 | \n",
+ "
\n",
+ " \n",
+ " | 3 | \n",
+ " Oktoberfest | \n",
+ " 2025-12-31 00:00:00 | \n",
+ " 26387.0 | \n",
+ " 30676.568359 | \n",
+ "
\n",
+ " \n",
+ " | 4 | \n",
+ " Oktoberfest | \n",
+ " 2026-01-31 00:00:00 | \n",
+ " 27944.0 | \n",
+ " 31133.820312 | \n",
+ "
\n",
+ " \n",
+ " | 5 | \n",
+ " Oktoberfest | \n",
+ " 2026-02-28 00:00:00 | \n",
+ " 25764.0 | \n",
+ " 30749.806641 | \n",
+ "
\n",
+ " \n",
+ " | 6 | \n",
+ " Oktoberfest | \n",
+ " 2026-03-31 00:00:00 | \n",
+ " 30708.0 | \n",
+ " 31413.792969 | \n",
+ "
\n",
+ " \n",
+ " | 7 | \n",
+ " Oktoberfest | \n",
+ " 2026-04-30 00:00:00 | \n",
+ " 31258.0 | \n",
+ " 31530.412109 | \n",
+ "
\n",
+ " \n",
+ " | 8 | \n",
+ " Oktoberfest | \n",
+ " 2026-05-31 00:00:00 | \n",
+ " 36179.0 | \n",
+ " 32509.914062 | \n",
+ "
\n",
+ " \n",
+ " | 9 | \n",
+ " Oktoberfest | \n",
+ " 2026-06-30 00:00:00 | \n",
+ " 33992.0 | \n",
+ " 32520.060547 | \n",
+ "
\n",
+ " \n",
+ " | 10 | \n",
+ " Oktoberfest | \n",
+ " 2026-07-31 00:00:00 | \n",
+ " 42317.0 | \n",
+ " 38071.007812 | \n",
+ "
\n",
+ " \n",
+ " | 11 | \n",
+ " Oktoberfest | \n",
+ " 2026-08-31 00:00:00 | \n",
+ " 74220.0 | \n",
+ " 72824.234375 | \n",
+ "
\n",
+ " \n",
+ " | 12 | \n",
+ " Pride Month | \n",
+ " 2025-09-30 00:00:00 | \n",
+ " 11521.0 | \n",
+ " 13015.404297 | \n",
+ "
\n",
+ " \n",
+ " | 13 | \n",
+ " Pride Month | \n",
+ " 2025-10-31 00:00:00 | \n",
+ " 11757.0 | \n",
+ " 12585.115234 | \n",
+ "
\n",
+ " \n",
+ " | 14 | \n",
+ " Pride Month | \n",
+ " 2025-11-30 00:00:00 | \n",
+ " 10126.0 | \n",
+ " 13316.660156 | \n",
+ "
\n",
+ " \n",
+ " | 15 | \n",
+ " Pride Month | \n",
+ " 2025-12-31 00:00:00 | \n",
+ " 8381.0 | \n",
+ " 12959.761719 | \n",
+ "
\n",
+ " \n",
+ " | 16 | \n",
+ " Pride Month | \n",
+ " 2026-01-31 00:00:00 | \n",
+ " 12556.0 | \n",
+ " 14463.102539 | \n",
+ "
\n",
+ " \n",
+ " | 17 | \n",
+ " Pride Month | \n",
+ " 2026-02-28 00:00:00 | \n",
+ " 19852.0 | \n",
+ " 13103.345703 | \n",
+ "
\n",
+ " \n",
+ " | 18 | \n",
+ " Pride Month | \n",
+ " 2026-03-31 00:00:00 | \n",
+ " 15691.0 | \n",
+ " 12801.917969 | \n",
+ "
\n",
+ " \n",
+ " | 19 | \n",
+ " Pride Month | \n",
+ " 2026-04-30 00:00:00 | \n",
+ " 16590.0 | \n",
+ " 14078.150391 | \n",
+ "
\n",
+ " \n",
+ " | 20 | \n",
+ " Pride Month | \n",
+ " 2026-05-31 00:00:00 | \n",
+ " 45511.0 | \n",
+ " 19983.867188 | \n",
+ "
\n",
+ " \n",
+ " | 21 | \n",
+ " Pride Month | \n",
+ " 2026-06-30 00:00:00 | \n",
+ " 242600.0 | \n",
+ " 70240.101562 | \n",
+ "
\n",
+ " \n",
+ " | 22 | \n",
+ " Pride Month | \n",
+ " 2026-07-31 00:00:00 | \n",
+ " 21471.0 | \n",
+ " 15968.902344 | \n",
+ "
\n",
+ " \n",
+ " | 23 | \n",
+ " Pride Month | \n",
+ " 2026-08-31 00:00:00 | \n",
+ " 13936.0 | \n",
+ " 10876.75 | \n",
+ "
\n",
+ " \n",
+ " | 0 | \n",
+ " Cyber Monday | \n",
+ " 2025-09-30 00:00:00 | \n",
+ " 8519.0 | \n",
+ " 7327.383789 | \n",
+ "
\n",
+ " \n",
+ " | 1 | \n",
+ " Cyber Monday | \n",
+ " 2025-10-31 00:00:00 | \n",
+ " 14608.0 | \n",
+ " 10634.869141 | \n",
+ "
\n",
+ " \n",
+ " | 2 | \n",
+ " Cyber Monday | \n",
+ " 2025-11-30 00:00:00 | \n",
+ " 62796.0 | \n",
+ " 96634.460938 | \n",
+ "
\n",
+ " \n",
+ " | 3 | \n",
+ " Cyber Monday | \n",
+ " 2025-12-31 00:00:00 | \n",
+ " 50174.0 | \n",
+ " 27630.857422 | \n",
+ "
\n",
+ " \n",
+ " | 4 | \n",
+ " Cyber Monday | \n",
+ " 2026-01-31 00:00:00 | \n",
+ " 6861.0 | \n",
+ " 6786.728516 | \n",
+ "
\n",
+ " \n",
+ " | 5 | \n",
+ " Cyber Monday | \n",
+ " 2026-02-28 00:00:00 | \n",
+ " 5517.0 | \n",
+ " 6774.277344 | \n",
+ "
\n",
+ " \n",
+ " | 6 | \n",
+ " Cyber Monday | \n",
+ " 2026-03-31 00:00:00 | \n",
+ " 5748.0 | \n",
+ " 6714.381836 | \n",
+ "
\n",
+ " \n",
+ " | 7 | \n",
+ " Cyber Monday | \n",
+ " 2026-04-30 00:00:00 | \n",
+ " 6329.0 | \n",
+ " 6664.470703 | \n",
+ "
\n",
+ " \n",
+ " | 8 | \n",
+ " Cyber Monday | \n",
+ " 2026-05-31 00:00:00 | \n",
+ " 5379.0 | \n",
+ " 6736.751953 | \n",
+ "
\n",
+ " \n",
+ " | 9 | \n",
+ " Cyber Monday | \n",
+ " 2026-06-30 00:00:00 | \n",
+ " 5032.0 | \n",
+ " 6536.330078 | \n",
+ "
\n",
+ " \n",
+ " | 10 | \n",
+ " Cyber Monday | \n",
+ " 2026-07-31 00:00:00 | \n",
+ " 4618.0 | \n",
+ " 6528.674805 | \n",
+ "
\n",
+ " \n",
+ " | 11 | \n",
+ " Cyber Monday | \n",
+ " 2026-08-31 00:00:00 | \n",
+ " 5582.0 | \n",
+ " 6723.951172 | \n",
+ "
\n",
+ " \n",
+ " | 0 | \n",
+ " Black Friday | \n",
+ " 2025-09-30 00:00:00 | \n",
+ " 2607.0 | \n",
+ " 1740.371094 | \n",
+ "
\n",
+ " \n",
+ " | 1 | \n",
+ " Black Friday | \n",
+ " 2025-10-31 00:00:00 | \n",
+ " 2470.0 | \n",
+ " 2226.007568 | \n",
+ "
\n",
+ " \n",
+ " | 2 | \n",
+ " Black Friday | \n",
+ " 2025-11-30 00:00:00 | \n",
+ " 11058.0 | \n",
+ " 12542.421875 | \n",
+ "
\n",
+ " \n",
+ " | 3 | \n",
+ " Black Friday | \n",
+ " 2025-12-31 00:00:00 | \n",
+ " 3548.0 | \n",
+ " 3203.52417 | \n",
+ "
\n",
+ " \n",
+ " | 4 | \n",
+ " Black Friday | \n",
+ " 2026-01-31 00:00:00 | \n",
+ " 1724.0 | \n",
+ " 1596.014893 | \n",
+ "
\n",
+ " \n",
+ " | 5 | \n",
+ " Black Friday | \n",
+ " 2026-02-28 00:00:00 | \n",
+ " 1730.0 | \n",
+ " 1594.216553 | \n",
+ "
\n",
+ " \n",
+ " | 6 | \n",
+ " Black Friday | \n",
+ " 2026-03-31 00:00:00 | \n",
+ " 1874.0 | \n",
+ " 1539.523438 | \n",
+ "
\n",
+ " \n",
+ " | 7 | \n",
+ " Black Friday | \n",
+ " 2026-04-30 00:00:00 | \n",
+ " 2311.0 | \n",
+ " 1530.233643 | \n",
+ "
\n",
+ " \n",
+ " | 8 | \n",
+ " Black Friday | \n",
+ " 2026-05-31 00:00:00 | \n",
+ " 1332.0 | \n",
+ " 1492.126953 | \n",
+ "
\n",
+ " \n",
+ " | 9 | \n",
+ " Black Friday | \n",
+ " 2026-06-30 00:00:00 | \n",
+ " 1215.0 | \n",
+ " 1452.619141 | \n",
+ "
\n",
+ " \n",
+ " | 10 | \n",
+ " Black Friday | \n",
+ " 2026-07-31 00:00:00 | \n",
+ " 1108.0 | \n",
+ " 1441.932861 | \n",
+ "
\n",
+ " \n",
+ " | 11 | \n",
+ " Black Friday | \n",
+ " 2026-08-31 00:00:00 | \n",
+ " 1690.0 | \n",
+ " 1470.735107 | \n",
+ "
\n",
+ " \n",
+ " | 0 | \n",
+ " Halloween | \n",
+ " 2025-09-30 00:00:00 | \n",
+ " 224365.0 | \n",
+ " 169542.75 | \n",
+ "
\n",
+ " \n",
+ " | 1 | \n",
+ " Halloween | \n",
+ " 2025-10-31 00:00:00 | \n",
+ " 1338038.0 | \n",
+ " 1695037.25 | \n",
+ "
\n",
+ " \n",
+ " | 2 | \n",
+ " Halloween | \n",
+ " 2025-11-30 00:00:00 | \n",
+ " 436338.0 | \n",
+ " 495548.25 | \n",
+ "
\n",
+ " \n",
+ " | 3 | \n",
+ " Halloween | \n",
+ " 2025-12-31 00:00:00 | \n",
+ " 93458.0 | \n",
+ " 90944.203125 | \n",
+ "
\n",
+ " \n",
+ " | 4 | \n",
+ " Halloween | \n",
+ " 2026-01-31 00:00:00 | \n",
+ " 75272.0 | \n",
+ " 82160.71875 | \n",
+ "
\n",
+ " \n",
+ " | 5 | \n",
+ " Halloween | \n",
+ " 2026-02-28 00:00:00 | \n",
+ " 67539.0 | \n",
+ " 87817.421875 | \n",
+ "
\n",
+ " \n",
+ " | 6 | \n",
+ " Halloween | \n",
+ " 2026-03-31 00:00:00 | \n",
+ " 70087.0 | \n",
+ " 87218.734375 | \n",
+ "
\n",
+ " \n",
+ " | 7 | \n",
+ " Halloween | \n",
+ " 2026-04-30 00:00:00 | \n",
+ " 71045.0 | \n",
+ " 84555.140625 | \n",
+ "
\n",
+ " \n",
+ " | 8 | \n",
+ " Halloween | \n",
+ " 2026-05-31 00:00:00 | \n",
+ " 68043.0 | \n",
+ " 85918.15625 | \n",
+ "
\n",
+ " \n",
+ " | 9 | \n",
+ " Halloween | \n",
+ " 2026-06-30 00:00:00 | \n",
+ " 69383.0 | \n",
+ " 89361.453125 | \n",
+ "
\n",
+ " \n",
+ " | 10 | \n",
+ " Halloween | \n",
+ " 2026-07-31 00:00:00 | \n",
+ " 67284.0 | \n",
+ " 80965.171875 | \n",
+ "
\n",
+ " \n",
+ " | 11 | \n",
+ " Halloween | \n",
+ " 2026-08-31 00:00:00 | \n",
+ " 89136.0 | \n",
+ " 86530.6875 | \n",
+ "
\n",
+ " \n",
+ " | 0 | \n",
+ " Thanksgiving | \n",
+ " 2025-09-30 00:00:00 | \n",
+ " 91935.0 | \n",
+ " 78129.5 | \n",
+ "
\n",
+ " \n",
+ " | 1 | \n",
+ " Thanksgiving | \n",
+ " 2025-10-31 00:00:00 | \n",
+ " 225173.0 | \n",
+ " 233912.59375 | \n",
+ "
\n",
+ " \n",
+ " | 2 | \n",
+ " Thanksgiving | \n",
+ " 2025-11-30 00:00:00 | \n",
+ " 1033092.0 | \n",
+ " 1220213.875 | \n",
+ "
\n",
+ " \n",
+ " | 3 | \n",
+ " Thanksgiving | \n",
+ " 2025-12-31 00:00:00 | \n",
+ " 169452.0 | \n",
+ " 171734.734375 | \n",
+ "
\n",
+ " \n",
+ " | 4 | \n",
+ " Thanksgiving | \n",
+ " 2026-01-31 00:00:00 | \n",
+ " 58590.0 | \n",
+ " 64222.875 | \n",
+ "
\n",
+ " \n",
+ " | 5 | \n",
+ " Thanksgiving | \n",
+ " 2026-02-28 00:00:00 | \n",
+ " 43547.0 | \n",
+ " 60092.242188 | \n",
+ "
\n",
+ " \n",
+ " | 6 | \n",
+ " Thanksgiving | \n",
+ " 2026-03-31 00:00:00 | \n",
+ " 58384.0 | \n",
+ " 59032.820312 | \n",
+ "
\n",
+ " \n",
+ " | 7 | \n",
+ " Thanksgiving | \n",
+ " 2026-04-30 00:00:00 | \n",
+ " 47655.0 | \n",
+ " 57731.476562 | \n",
+ "
\n",
+ " \n",
+ " | 8 | \n",
+ " Thanksgiving | \n",
+ " 2026-05-31 00:00:00 | \n",
+ " 48667.0 | \n",
+ " 57141.867188 | \n",
+ "
\n",
+ " \n",
+ " | 9 | \n",
+ " Thanksgiving | \n",
+ " 2026-06-30 00:00:00 | \n",
+ " 56856.0 | \n",
+ " 56497.53125 | \n",
+ "
\n",
+ " \n",
+ " | 10 | \n",
+ " Thanksgiving | \n",
+ " 2026-07-31 00:00:00 | \n",
+ " 38935.0 | \n",
+ " 57184.0 | \n",
+ "
\n",
+ " \n",
+ " | 11 | \n",
+ " Thanksgiving | \n",
+ " 2026-08-31 00:00:00 | \n",
+ " 50620.0 | \n",
+ " 57194.375 | \n",
+ "
\n",
+ " \n",
+ "
"
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from IPython.display import HTML\n",
+ "display(HTML(result_pd.to_html()))"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "timecopilot",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.11.14"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/examples/pandas_baseline.ipynb b/docs/examples/pandas_baseline.ipynb
new file mode 100644
index 00000000..6488d6d3
--- /dev/null
+++ b/docs/examples/pandas_baseline.ipynb
@@ -0,0 +1,757 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "edcef84f",
+ "metadata": {},
+ "source": [
+ "# Baseline Pandas Example\n",
+ "\n",
+ "This is a reference point for using a TimeCopilotForecaster with pandas rather than with distributed dataframe libraries such as PySpark, Ray, and Dask."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "288c3467",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ " See https://github.com/google-research/timesfm/blob/master/README.md for updated APIs.\n"
+ ]
+ }
+ ],
+ "source": [
+ "import nest_asyncio\n",
+ "\n",
+ "nest_asyncio.apply()\n",
+ "\n",
+ "from timecopilot import TimeCopilotForecaster\n",
+ "\n",
+ "from timecopilot.models import SeasonalNaive\n",
+ "from timecopilot.models.foundation.chronos import Chronos\n",
+ "\n",
+ "import pandas as pd"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "88c8c321",
+ "metadata": {},
+ "source": [
+ "## Create the dataframe"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "327db160",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " unique_id | \n",
+ " ds | \n",
+ " y | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " | 0 | \n",
+ " Oktoberfest | \n",
+ " 2020-01-31 | \n",
+ " 25376 | \n",
+ "
\n",
+ " \n",
+ " | 1 | \n",
+ " Oktoberfest | \n",
+ " 2020-02-29 | \n",
+ " 28470 | \n",
+ "
\n",
+ " \n",
+ " | 2 | \n",
+ " Oktoberfest | \n",
+ " 2020-03-31 | \n",
+ " 23816 | \n",
+ "
\n",
+ " \n",
+ " | 3 | \n",
+ " Oktoberfest | \n",
+ " 2020-04-30 | \n",
+ " 46186 | \n",
+ "
\n",
+ " \n",
+ " | 4 | \n",
+ " Oktoberfest | \n",
+ " 2020-05-31 | \n",
+ " 31213 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " unique_id ds y\n",
+ "0 Oktoberfest 2020-01-31 25376\n",
+ "1 Oktoberfest 2020-02-29 28470\n",
+ "2 Oktoberfest 2020-03-31 23816\n",
+ "3 Oktoberfest 2020-04-30 46186\n",
+ "4 Oktoberfest 2020-05-31 31213"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "df = pd.read_csv(\"https://timecopilot.s3.amazonaws.com/public/data/events_pageviews.csv\", parse_dates=['ds'])\n",
+ "display(df.head())"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "31fb8b36",
+ "metadata": {},
+ "source": [
+ "## Create the TimeCopilotForecaster"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "51a5693e",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "tcf = TimeCopilotForecaster(\n",
+ " models=[\n",
+ " SeasonalNaive(),\n",
+ " Chronos()\n",
+ " ]\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "a374e8f7",
+ "metadata": {},
+ "source": [
+ "## Generate a forecast"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "afdfdc58",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "`torch_dtype` is deprecated! Use `dtype` instead!\n",
+ "100%|██████████| 1/1 [00:07<00:00, 7.92s/it]\n"
+ ]
+ }
+ ],
+ "source": [
+ "result = tcf.forecast(\n",
+ " df=df,\n",
+ " h=12\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6ea5fe9c",
+ "metadata": {},
+ "source": [
+ "## Display the results"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "id": "b82c018e",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " \n",
+ " | \n",
+ " unique_id | \n",
+ " ds | \n",
+ " SeasonalNaive | \n",
+ " Chronos | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " | 0 | \n",
+ " Black Friday | \n",
+ " 2025-09-30 | \n",
+ " 2607.0 | \n",
+ " 1.910466e+03 | \n",
+ "
\n",
+ " \n",
+ " | 1 | \n",
+ " Black Friday | \n",
+ " 2025-10-31 | \n",
+ " 2470.0 | \n",
+ " 2.187718e+03 | \n",
+ "
\n",
+ " \n",
+ " | 2 | \n",
+ " Black Friday | \n",
+ " 2025-11-30 | \n",
+ " 11058.0 | \n",
+ " 2.151357e+03 | \n",
+ "
\n",
+ " \n",
+ " | 3 | \n",
+ " Black Friday | \n",
+ " 2025-12-31 | \n",
+ " 3548.0 | \n",
+ " 1.902890e+03 | \n",
+ "
\n",
+ " \n",
+ " | 4 | \n",
+ " Black Friday | \n",
+ " 2026-01-31 | \n",
+ " 1724.0 | \n",
+ " 1.692300e+03 | \n",
+ "
\n",
+ " \n",
+ " | 5 | \n",
+ " Black Friday | \n",
+ " 2026-02-28 | \n",
+ " 1730.0 | \n",
+ " 1.736236e+03 | \n",
+ "
\n",
+ " \n",
+ " | 6 | \n",
+ " Black Friday | \n",
+ " 2026-03-31 | \n",
+ " 1874.0 | \n",
+ " 1.681695e+03 | \n",
+ "
\n",
+ " \n",
+ " | 7 | \n",
+ " Black Friday | \n",
+ " 2026-04-30 | \n",
+ " 2311.0 | \n",
+ " 1.640788e+03 | \n",
+ "
\n",
+ " \n",
+ " | 8 | \n",
+ " Black Friday | \n",
+ " 2026-05-31 | \n",
+ " 1332.0 | \n",
+ " 1.634728e+03 | \n",
+ "
\n",
+ " \n",
+ " | 9 | \n",
+ " Black Friday | \n",
+ " 2026-06-30 | \n",
+ " 1215.0 | \n",
+ " 1.743811e+03 | \n",
+ "
\n",
+ " \n",
+ " | 10 | \n",
+ " Black Friday | \n",
+ " 2026-07-31 | \n",
+ " 1108.0 | \n",
+ " 1.624123e+03 | \n",
+ "
\n",
+ " \n",
+ " | 11 | \n",
+ " Black Friday | \n",
+ " 2026-08-31 | \n",
+ " 1690.0 | \n",
+ " 1.636243e+03 | \n",
+ "
\n",
+ " \n",
+ " | 12 | \n",
+ " Cyber Monday | \n",
+ " 2025-09-30 | \n",
+ " 8519.0 | \n",
+ " 8.100767e+03 | \n",
+ "
\n",
+ " \n",
+ " | 13 | \n",
+ " Cyber Monday | \n",
+ " 2025-10-31 | \n",
+ " 14608.0 | \n",
+ " 1.566477e+04 | \n",
+ "
\n",
+ " \n",
+ " | 14 | \n",
+ " Cyber Monday | \n",
+ " 2025-11-30 | \n",
+ " 62796.0 | \n",
+ " 9.580252e+04 | \n",
+ "
\n",
+ " \n",
+ " | 15 | \n",
+ " Cyber Monday | \n",
+ " 2025-12-31 | \n",
+ " 50174.0 | \n",
+ " 1.515886e+04 | \n",
+ "
\n",
+ " \n",
+ " | 16 | \n",
+ " Cyber Monday | \n",
+ " 2026-01-31 | \n",
+ " 6861.0 | \n",
+ " 7.058093e+03 | \n",
+ "
\n",
+ " \n",
+ " | 17 | \n",
+ " Cyber Monday | \n",
+ " 2026-02-28 | \n",
+ " 5517.0 | \n",
+ " 6.114135e+03 | \n",
+ "
\n",
+ " \n",
+ " | 18 | \n",
+ " Cyber Monday | \n",
+ " 2026-03-31 | \n",
+ " 5748.0 | \n",
+ " 6.027760e+03 | \n",
+ "
\n",
+ " \n",
+ " | 19 | \n",
+ " Cyber Monday | \n",
+ " 2026-04-30 | \n",
+ " 6329.0 | \n",
+ " 6.095626e+03 | \n",
+ "
\n",
+ " \n",
+ " | 20 | \n",
+ " Cyber Monday | \n",
+ " 2026-05-31 | \n",
+ " 5379.0 | \n",
+ " 6.330073e+03 | \n",
+ "
\n",
+ " \n",
+ " | 21 | \n",
+ " Cyber Monday | \n",
+ " 2026-06-30 | \n",
+ " 5032.0 | \n",
+ " 6.441127e+03 | \n",
+ "
\n",
+ " \n",
+ " | 22 | \n",
+ " Cyber Monday | \n",
+ " 2026-07-31 | \n",
+ " 4618.0 | \n",
+ " 6.490484e+03 | \n",
+ "
\n",
+ " \n",
+ " | 23 | \n",
+ " Cyber Monday | \n",
+ " 2026-08-31 | \n",
+ " 5582.0 | \n",
+ " 7.644211e+03 | \n",
+ "
\n",
+ " \n",
+ " | 24 | \n",
+ " Halloween | \n",
+ " 2025-09-30 | \n",
+ " 224365.0 | \n",
+ " 2.702025e+05 | \n",
+ "
\n",
+ " \n",
+ " | 25 | \n",
+ " Halloween | \n",
+ " 2025-10-31 | \n",
+ " 1338038.0 | \n",
+ " 1.894165e+06 | \n",
+ "
\n",
+ " \n",
+ " | 26 | \n",
+ " Halloween | \n",
+ " 2025-11-30 | \n",
+ " 436338.0 | \n",
+ " 5.341704e+05 | \n",
+ "
\n",
+ " \n",
+ " | 27 | \n",
+ " Halloween | \n",
+ " 2025-12-31 | \n",
+ " 93458.0 | \n",
+ " 9.499884e+04 | \n",
+ "
\n",
+ " \n",
+ " | 28 | \n",
+ " Halloween | \n",
+ " 2026-01-31 | \n",
+ " 75272.0 | \n",
+ " 8.200123e+04 | \n",
+ "
\n",
+ " \n",
+ " | 29 | \n",
+ " Halloween | \n",
+ " 2026-02-28 | \n",
+ " 67539.0 | \n",
+ " 7.513257e+04 | \n",
+ "
\n",
+ " \n",
+ " | 30 | \n",
+ " Halloween | \n",
+ " 2026-03-31 | \n",
+ " 70087.0 | \n",
+ " 7.788003e+04 | \n",
+ "
\n",
+ " \n",
+ " | 31 | \n",
+ " Halloween | \n",
+ " 2026-04-30 | \n",
+ " 71045.0 | \n",
+ " 7.502690e+04 | \n",
+ "
\n",
+ " \n",
+ " | 32 | \n",
+ " Halloween | \n",
+ " 2026-05-31 | \n",
+ " 68043.0 | \n",
+ " 7.153973e+04 | \n",
+ "
\n",
+ " \n",
+ " | 33 | \n",
+ " Halloween | \n",
+ " 2026-06-30 | \n",
+ " 69383.0 | \n",
+ " 6.551645e+04 | \n",
+ "
\n",
+ " \n",
+ " | 34 | \n",
+ " Halloween | \n",
+ " 2026-07-31 | \n",
+ " 67284.0 | \n",
+ " 8.200123e+04 | \n",
+ "
\n",
+ " \n",
+ " | 35 | \n",
+ " Halloween | \n",
+ " 2026-08-31 | \n",
+ " 89136.0 | \n",
+ " 1.106382e+05 | \n",
+ "
\n",
+ " \n",
+ " | 36 | \n",
+ " Oktoberfest | \n",
+ " 2025-09-30 | \n",
+ " 181474.0 | \n",
+ " 2.297598e+05 | \n",
+ "
\n",
+ " \n",
+ " | 37 | \n",
+ " Oktoberfest | \n",
+ " 2025-10-31 | \n",
+ " 142094.0 | \n",
+ " 1.654765e+05 | \n",
+ "
\n",
+ " \n",
+ " | 38 | \n",
+ " Oktoberfest | \n",
+ " 2025-11-30 | \n",
+ " 36200.0 | \n",
+ " 4.120208e+04 | \n",
+ "
\n",
+ " \n",
+ " | 39 | \n",
+ " Oktoberfest | \n",
+ " 2025-12-31 | \n",
+ " 26387.0 | \n",
+ " 3.109897e+04 | \n",
+ "
\n",
+ " \n",
+ " | 40 | \n",
+ " Oktoberfest | \n",
+ " 2026-01-31 | \n",
+ " 27944.0 | \n",
+ " 3.146341e+04 | \n",
+ "
\n",
+ " \n",
+ " | 41 | \n",
+ " Oktoberfest | \n",
+ " 2026-02-28 | \n",
+ " 25764.0 | \n",
+ " 2.972219e+04 | \n",
+ "
\n",
+ " \n",
+ " | 42 | \n",
+ " Oktoberfest | \n",
+ " 2026-03-31 | \n",
+ " 30708.0 | \n",
+ " 3.077502e+04 | \n",
+ "
\n",
+ " \n",
+ " | 43 | \n",
+ " Oktoberfest | \n",
+ " 2026-04-30 | \n",
+ " 31258.0 | \n",
+ " 3.389301e+04 | \n",
+ "
\n",
+ " \n",
+ " | 44 | \n",
+ " Oktoberfest | \n",
+ " 2026-05-31 | \n",
+ " 36179.0 | \n",
+ " 3.559374e+04 | \n",
+ "
\n",
+ " \n",
+ " | 45 | \n",
+ " Oktoberfest | \n",
+ " 2026-06-30 | \n",
+ " 33992.0 | \n",
+ " 3.385252e+04 | \n",
+ "
\n",
+ " \n",
+ " | 46 | \n",
+ " Oktoberfest | \n",
+ " 2026-07-31 | \n",
+ " 42317.0 | \n",
+ " 3.852952e+04 | \n",
+ "
\n",
+ " \n",
+ " | 47 | \n",
+ " Oktoberfest | \n",
+ " 2026-08-31 | \n",
+ " 74220.0 | \n",
+ " 6.201571e+04 | \n",
+ "
\n",
+ " \n",
+ " | 48 | \n",
+ " Pride Month | \n",
+ " 2025-09-30 | \n",
+ " 11521.0 | \n",
+ " 1.161106e+04 | \n",
+ "
\n",
+ " \n",
+ " | 49 | \n",
+ " Pride Month | \n",
+ " 2025-10-31 | \n",
+ " 11757.0 | \n",
+ " 1.181722e+04 | \n",
+ "
\n",
+ " \n",
+ " | 50 | \n",
+ " Pride Month | \n",
+ " 2025-11-30 | \n",
+ " 10126.0 | \n",
+ " 1.046247e+04 | \n",
+ "
\n",
+ " \n",
+ " | 51 | \n",
+ " Pride Month | \n",
+ " 2025-12-31 | \n",
+ " 8381.0 | \n",
+ " 1.034467e+04 | \n",
+ "
\n",
+ " \n",
+ " | 52 | \n",
+ " Pride Month | \n",
+ " 2026-01-31 | \n",
+ " 12556.0 | \n",
+ " 1.781050e+04 | \n",
+ "
\n",
+ " \n",
+ " | 53 | \n",
+ " Pride Month | \n",
+ " 2026-02-28 | \n",
+ " 19852.0 | \n",
+ " 1.810501e+04 | \n",
+ "
\n",
+ " \n",
+ " | 54 | \n",
+ " Pride Month | \n",
+ " 2026-03-31 | \n",
+ " 15691.0 | \n",
+ " 1.757489e+04 | \n",
+ "
\n",
+ " \n",
+ " | 55 | \n",
+ " Pride Month | \n",
+ " 2026-04-30 | \n",
+ " 16590.0 | \n",
+ " 1.966591e+04 | \n",
+ "
\n",
+ " \n",
+ " | 56 | \n",
+ " Pride Month | \n",
+ " 2026-05-31 | \n",
+ " 45511.0 | \n",
+ " 4.811559e+04 | \n",
+ "
\n",
+ " \n",
+ " | 57 | \n",
+ " Pride Month | \n",
+ " 2026-06-30 | \n",
+ " 242600.0 | \n",
+ " 2.083732e+05 | \n",
+ "
\n",
+ " \n",
+ " | 58 | \n",
+ " Pride Month | \n",
+ " 2026-07-31 | \n",
+ " 21471.0 | \n",
+ " 4.162900e+04 | \n",
+ "
\n",
+ " \n",
+ " | 59 | \n",
+ " Pride Month | \n",
+ " 2026-08-31 | \n",
+ " 13936.0 | \n",
+ " 1.243569e+04 | \n",
+ "
\n",
+ " \n",
+ " | 60 | \n",
+ " Thanksgiving | \n",
+ " 2025-09-30 | \n",
+ " 91935.0 | \n",
+ " 9.539117e+04 | \n",
+ "
\n",
+ " \n",
+ " | 61 | \n",
+ " Thanksgiving | \n",
+ " 2025-10-31 | \n",
+ " 225173.0 | \n",
+ " 2.561442e+05 | \n",
+ "
\n",
+ " \n",
+ " | 62 | \n",
+ " Thanksgiving | \n",
+ " 2025-11-30 | \n",
+ " 1033092.0 | \n",
+ " 1.193636e+06 | \n",
+ "
\n",
+ " \n",
+ " | 63 | \n",
+ " Thanksgiving | \n",
+ " 2025-12-31 | \n",
+ " 169452.0 | \n",
+ " 1.859904e+05 | \n",
+ "
\n",
+ " \n",
+ " | 64 | \n",
+ " Thanksgiving | \n",
+ " 2026-01-31 | \n",
+ " 58590.0 | \n",
+ " 7.111211e+04 | \n",
+ "
\n",
+ " \n",
+ " | 65 | \n",
+ " Thanksgiving | \n",
+ " 2026-02-28 | \n",
+ " 43547.0 | \n",
+ " 5.520293e+04 | \n",
+ "
\n",
+ " \n",
+ " | 66 | \n",
+ " Thanksgiving | \n",
+ " 2026-03-31 | \n",
+ " 58384.0 | \n",
+ " 5.303060e+04 | \n",
+ "
\n",
+ " \n",
+ " | 67 | \n",
+ " Thanksgiving | \n",
+ " 2026-04-30 | \n",
+ " 47655.0 | \n",
+ " 4.817478e+04 | \n",
+ "
\n",
+ " \n",
+ " | 68 | \n",
+ " Thanksgiving | \n",
+ " 2026-05-31 | \n",
+ " 48667.0 | \n",
+ " 4.472460e+04 | \n",
+ "
\n",
+ " \n",
+ " | 69 | \n",
+ " Thanksgiving | \n",
+ " 2026-06-30 | \n",
+ " 56856.0 | \n",
+ " 4.306340e+04 | \n",
+ "
\n",
+ " \n",
+ " | 70 | \n",
+ " Thanksgiving | \n",
+ " 2026-07-31 | \n",
+ " 38935.0 | \n",
+ " 4.613023e+04 | \n",
+ "
\n",
+ " \n",
+ " | 71 | \n",
+ " Thanksgiving | \n",
+ " 2026-08-31 | \n",
+ " 50620.0 | \n",
+ " 5.386120e+04 | \n",
+ "
\n",
+ " \n",
+ "
"
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from IPython.display import HTML\n",
+ "\n",
+ "display(HTML(result.to_html()))"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "timecopilot",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.11.14"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/examples/pyspark.ipynb b/docs/examples/pyspark.ipynb
new file mode 100644
index 00000000..55e65f4a
--- /dev/null
+++ b/docs/examples/pyspark.ipynb
@@ -0,0 +1,354 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "b68555af",
+ "metadata": {},
+ "source": [
+ "# Using PySpark with TimeCopilot\n",
+ "\n",
+ "When using PySpark Java needs to be installed with a `JAVA_HOME` environment variable set. The environment variable may have been set when installing Java.\n",
+ "\n",
+ "The required Java version may vary depending on the PySpark version in use.\n",
+ "For this example PySpark version `4.0.2` was used with OpenJDK version `21.0.10` used as the Java version."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "0e729daf",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ " See https://github.com/google-research/timesfm/blob/master/README.md for updated APIs.\n"
+ ]
+ }
+ ],
+ "source": [
+ "import nest_asyncio\n",
+ "\n",
+ "nest_asyncio.apply()\n",
+ "\n",
+ "from timecopilot import TimeCopilotForecaster\n",
+ "\n",
+ "from pyspark.sql import SparkSession\n",
+ "import pandas as pd\n",
+ "\n",
+ "from timecopilot.models import SeasonalNaive\n",
+ "from timecopilot.models.foundation.chronos import Chronos"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "e41b85bd",
+ "metadata": {},
+ "source": [
+ "checking the `JAVA_HOME` environment variable.\n",
+ "\n",
+ "the path below for a version of openjdk installed via homebrew on a mac"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "c4289472",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "/opt/homebrew/opt/openjdk@21/libexec/openjdk.jdk/Contents/Home\n"
+ ]
+ }
+ ],
+ "source": [
+ "import os\n",
+ "print(os.environ['JAVA_HOME'])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "b09469a4",
+ "metadata": {},
+ "source": [
+ "## Start the Spark session"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "10307391",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "WARNING: Using incubator modules: jdk.incubator.vector\n",
+ "Using Spark's default log4j profile: org/apache/spark/log4j2-defaults.properties\n",
+ "Setting default log level to \"WARN\".\n",
+ "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n",
+ "26/03/23 15:41:56 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n"
+ ]
+ }
+ ],
+ "source": [
+ "\n",
+ "spark = SparkSession.builder.getOrCreate()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "c2dfb6ac",
+ "metadata": {},
+ "source": [
+ "## Create the dataframe"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "77e9bb47",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "DataFrame[unique_id: string, ds: timestamp, y: bigint]\n",
+ "+-----------+-------------------+-----+\n",
+ "| unique_id| ds| y|\n",
+ "+-----------+-------------------+-----+\n",
+ "|Oktoberfest|2020-01-31 00:00:00|25376|\n",
+ "|Oktoberfest|2020-02-29 00:00:00|28470|\n",
+ "|Oktoberfest|2020-03-31 00:00:00|23816|\n",
+ "|Oktoberfest|2020-04-30 00:00:00|46186|\n",
+ "|Oktoberfest|2020-05-31 00:00:00|31213|\n",
+ "+-----------+-------------------+-----+\n",
+ "only showing top 5 rows\n"
+ ]
+ }
+ ],
+ "source": [
+ "df = pd.read_csv(\"https://timecopilot.s3.amazonaws.com/public/data/events_pageviews.csv\", parse_dates=[\"ds\"])\n",
+ "s_df = spark.createDataFrame(df)\n",
+ "print(s_df)\n",
+ "s_df.show(n=5)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "554a7732",
+ "metadata": {},
+ "source": [
+ "## Create a TimeCopilotForecaster"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "id": "989502e7",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "tcf = TimeCopilotForecaster(\n",
+ " models=[\n",
+ " SeasonalNaive(),\n",
+ " Chronos(repo_id=\"autogluon/chronos-2-small\")\n",
+ " ]\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4f0f026e",
+ "metadata": {},
+ "source": [
+ "## Create a forecast"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "id": "dbb483e1",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2026-03-23 15:42:30,213\tINFO util.py:154 -- Missing packages: ['ipywidgets']. Run `pip install -U ipywidgets`, then restart the notebook server for rich notebook output.\n",
+ "2026-03-23 15:42:30,268\tINFO util.py:154 -- Missing packages: ['ipywidgets']. Run `pip install -U ipywidgets`, then restart the notebook server for rich notebook output.\n"
+ ]
+ }
+ ],
+ "source": [
+ "result = tcf.forecast(\n",
+ " df=s_df,\n",
+ " h=12\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "efdc0afc",
+ "metadata": {},
+ "source": [
+ "## View forecast results"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "id": "da664a68",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "DataFrame[unique_id: string, ds: timestamp, SeasonalNaive: double, Chronos: double]\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ " See https://github.com/google-research/timesfm/blob/master/README.md for updated APIs.\n",
+ "`torch_dtype` is deprecated! Use `dtype` instead!\n",
+ "`torch_dtype` is deprecated! Use `dtype` instead!\n",
+ " 0%| | 0/1 [00:00, ?it/s]"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "+------------+-------------------+-------------+-----------------+\n",
+ "| unique_id| ds|SeasonalNaive| Chronos|\n",
+ "+------------+-------------------+-------------+-----------------+\n",
+ "|Black Friday|2025-09-30 00:00:00| 2607.0| 1740.37158203125|\n",
+ "|Black Friday|2025-10-31 00:00:00| 2470.0| 2226.00732421875|\n",
+ "|Black Friday|2025-11-30 00:00:00| 11058.0| 12542.42578125|\n",
+ "|Black Friday|2025-12-31 00:00:00| 3548.0| 3203.5224609375|\n",
+ "|Black Friday|2026-01-31 00:00:00| 1724.0| 1596.01318359375|\n",
+ "|Black Friday|2026-02-28 00:00:00| 1730.0|1594.218017578125|\n",
+ "|Black Friday|2026-03-31 00:00:00| 1874.0|1539.522705078125|\n",
+ "|Black Friday|2026-04-30 00:00:00| 2311.0|1530.237548828125|\n",
+ "|Black Friday|2026-05-31 00:00:00| 1332.0| 1492.1279296875|\n",
+ "|Black Friday|2026-06-30 00:00:00| 1215.0| 1452.61865234375|\n",
+ "|Black Friday|2026-07-31 00:00:00| 1108.0| 1441.93115234375|\n",
+ "|Black Friday|2026-08-31 00:00:00| 1690.0| 1470.73583984375|\n",
+ "|Cyber Monday|2025-09-30 00:00:00| 8519.0| 7327.388671875|\n",
+ "|Cyber Monday|2025-10-31 00:00:00| 14608.0| 10634.865234375|\n",
+ "|Cyber Monday|2025-11-30 00:00:00| 62796.0| 96634.4453125|\n",
+ "|Cyber Monday|2025-12-31 00:00:00| 50174.0| 27630.859375|\n",
+ "|Cyber Monday|2026-01-31 00:00:00| 6861.0| 6786.732421875|\n",
+ "|Cyber Monday|2026-02-28 00:00:00| 5517.0| 6774.2783203125|\n",
+ "|Cyber Monday|2026-03-31 00:00:00| 5748.0| 6714.388671875|\n",
+ "|Cyber Monday|2026-04-30 00:00:00| 6329.0| 6664.4873046875|\n",
+ "|Cyber Monday|2026-05-31 00:00:00| 5379.0| 6736.76171875|\n",
+ "|Cyber Monday|2026-06-30 00:00:00| 5032.0| 6536.314453125|\n",
+ "|Cyber Monday|2026-07-31 00:00:00| 4618.0| 6528.6708984375|\n",
+ "|Cyber Monday|2026-08-31 00:00:00| 5582.0| 6723.9482421875|\n",
+ "| Halloween|2025-09-30 00:00:00| 224365.0| 169542.90625|\n",
+ "| Halloween|2025-10-31 00:00:00| 1338038.0| 1695037.25|\n",
+ "| Halloween|2025-11-30 00:00:00| 436338.0| 495548.5|\n",
+ "| Halloween|2025-12-31 00:00:00| 93458.0| 90944.046875|\n",
+ "| Halloween|2026-01-31 00:00:00| 75272.0| 82160.671875|\n",
+ "| Halloween|2026-02-28 00:00:00| 67539.0| 87817.421875|\n",
+ "| Halloween|2026-03-31 00:00:00| 70087.0| 87218.671875|\n",
+ "| Halloween|2026-04-30 00:00:00| 71045.0| 84555.078125|\n",
+ "| Halloween|2026-05-31 00:00:00| 68043.0| 85918.078125|\n",
+ "| Halloween|2026-06-30 00:00:00| 69383.0| 89361.375|\n",
+ "| Halloween|2026-07-31 00:00:00| 67284.0| 80965.171875|\n",
+ "| Halloween|2026-08-31 00:00:00| 89136.0| 86530.6875|\n",
+ "| Oktoberfest|2025-09-30 00:00:00| 181474.0| 205939.90625|\n",
+ "| Oktoberfest|2025-10-31 00:00:00| 142094.0| 161174.0625|\n",
+ "| Oktoberfest|2025-11-30 00:00:00| 36200.0| 35124.3828125|\n",
+ "| Oktoberfest|2025-12-31 00:00:00| 26387.0| 30676.568359375|\n",
+ "| Oktoberfest|2026-01-31 00:00:00| 27944.0| 31133.8203125|\n",
+ "| Oktoberfest|2026-02-28 00:00:00| 25764.0| 30749.806640625|\n",
+ "| Oktoberfest|2026-03-31 00:00:00| 30708.0| 31413.79296875|\n",
+ "| Oktoberfest|2026-04-30 00:00:00| 31258.0| 31530.412109375|\n",
+ "| Oktoberfest|2026-05-31 00:00:00| 36179.0| 32509.9140625|\n",
+ "| Oktoberfest|2026-06-30 00:00:00| 33992.0| 32520.060546875|\n",
+ "| Oktoberfest|2026-07-31 00:00:00| 42317.0| 38071.0078125|\n",
+ "| Oktoberfest|2026-08-31 00:00:00| 74220.0| 72824.234375|\n",
+ "| Pride Month|2025-09-30 00:00:00| 11521.0| 13015.404296875|\n",
+ "| Pride Month|2025-10-31 00:00:00| 11757.0| 12585.115234375|\n",
+ "| Pride Month|2025-11-30 00:00:00| 10126.0| 13316.66015625|\n",
+ "| Pride Month|2025-12-31 00:00:00| 8381.0| 12959.76171875|\n",
+ "| Pride Month|2026-01-31 00:00:00| 12556.0| 14463.1025390625|\n",
+ "| Pride Month|2026-02-28 00:00:00| 19852.0| 13103.345703125|\n",
+ "| Pride Month|2026-03-31 00:00:00| 15691.0| 12801.91796875|\n",
+ "| Pride Month|2026-04-30 00:00:00| 16590.0| 14078.150390625|\n",
+ "| Pride Month|2026-05-31 00:00:00| 45511.0| 19983.8671875|\n",
+ "| Pride Month|2026-06-30 00:00:00| 242600.0| 70240.1015625|\n",
+ "| Pride Month|2026-07-31 00:00:00| 21471.0| 15968.90234375|\n",
+ "| Pride Month|2026-08-31 00:00:00| 13936.0| 10876.75|\n",
+ "|Thanksgiving|2025-09-30 00:00:00| 91935.0| 78129.40625|\n",
+ "|Thanksgiving|2025-10-31 00:00:00| 225173.0| 233912.421875|\n",
+ "|Thanksgiving|2025-11-30 00:00:00| 1033092.0| 1220213.5|\n",
+ "|Thanksgiving|2025-12-31 00:00:00| 169452.0| 171734.765625|\n",
+ "|Thanksgiving|2026-01-31 00:00:00| 58590.0| 64222.796875|\n",
+ "|Thanksgiving|2026-02-28 00:00:00| 43547.0| 60092.15625|\n",
+ "|Thanksgiving|2026-03-31 00:00:00| 58384.0| 59032.8828125|\n",
+ "|Thanksgiving|2026-04-30 00:00:00| 47655.0| 57731.4375|\n",
+ "|Thanksgiving|2026-05-31 00:00:00| 48667.0| 57141.8984375|\n",
+ "|Thanksgiving|2026-06-30 00:00:00| 56856.0| 56497.7421875|\n",
+ "|Thanksgiving|2026-07-31 00:00:00| 38935.0| 57184.03125|\n",
+ "|Thanksgiving|2026-08-31 00:00:00| 50620.0| 57194.4453125|\n",
+ "+------------+-------------------+-------------+-----------------+\n",
+ "\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "100%|██████████| 1/1 [00:00<00:00, 9.61it/s]\n",
+ " \r"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "/Users/shane/.local/share/uv/python/cpython-3.11.14-macos-aarch64-none/lib/python3.11/multiprocessing/resource_tracker.py:254: UserWarning: resource_tracker: There appear to be 1 leaked semaphore objects to clean up at shutdown\n",
+ " warnings.warn('resource_tracker: There appear to be %d '\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(result)\n",
+ "result.show(n=72)"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "timecopilot",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.11.14"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/examples/ray.ipynb b/docs/examples/ray.ipynb
new file mode 100644
index 00000000..b447a2cc
--- /dev/null
+++ b/docs/examples/ray.ipynb
@@ -0,0 +1,1539 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "3cf37056",
+ "metadata": {},
+ "source": [
+ "# Using Ray with TimeCopilot"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "0e729daf",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2026-03-23 15:35:03,721\tINFO util.py:154 -- Missing packages: ['ipywidgets']. Run `pip install -U ipywidgets`, then restart the notebook server for rich notebook output.\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ " See https://github.com/google-research/timesfm/blob/master/README.md for updated APIs.\n"
+ ]
+ }
+ ],
+ "source": [
+ "\n",
+ "import nest_asyncio\n",
+ "\n",
+ "nest_asyncio.apply()\n",
+ "\n",
+ "from timecopilot import TimeCopilotForecaster\n",
+ "\n",
+ "import ray \n",
+ "\n",
+ "from timecopilot.models import SeasonalNaive\n",
+ "from timecopilot.models.foundation.chronos import Chronos"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4a08e5fe",
+ "metadata": {},
+ "source": [
+ "## Create the dataframe"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "77e9bb47",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2026-03-23 15:35:05,235\tINFO util.py:154 -- Missing packages: ['ipywidgets']. Run `pip install -U ipywidgets`, then restart the notebook server for rich notebook output.\n",
+ "2026-03-23 15:35:07,522\tINFO worker.py:1927 -- Started a local Ray instance.\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ " \n",
+ "\u001b[A \n",
+ "\n",
+ "\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 3/14 CPU, 768.0MB/1.0GB object store: : 0.00 row [00:05, ? row/s] \n",
+ "\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\u001b[36m(MapBatches(_udf) pid=21501)\u001b[0m See https://github.com/google-research/timesfm/blob/master/README.md for updated APIs.\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ " \n",
+ "\u001b[A \n",
+ "\n",
+ "\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \u001b[36m(MapBatches(_udf) pid=21491)\u001b[0m `torch_dtype` is deprecated! Use `dtype` instead!\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 3/14 CPU, 768.0MB/1.0GB object store: : 0.00 row [00:07, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ " \n",
+ "\u001b[A \n",
+ "\n",
+ "\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \u001b[36m(MapBatches(_udf) pid=21491)\u001b[0m `torch_dtype` is deprecated! Use `dtype` instead!\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 12/14 CPU, 10.9KB/1.0GB object store: : 0.00 row [00:07, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ " \n",
+ "\u001b[A \n",
+ "\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ " 0%| | 0/1 [00:00, ?it/s] \u001b[36m(MapBatches(_udf) pid=21491)\u001b[0m \n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 12/14 CPU, 10.9KB/1.0GB object store: : 0.00 row [00:07, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ " \n",
+ "\u001b[A \n",
+ "\n",
+ "\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "100%|██████████| 1/1 [00:00<00:00, 35.09it/s]91)\u001b[0m \n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 12/14 CPU, 10.9KB/1.0GB object store: : 0.00 row [00:08, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ " \n",
+ "\u001b[A \n",
+ "\n",
+ "\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 11/14 CPU, 9.1KB/1.0GB object store: : 0.00 row [00:11, ? row/s] \n",
+ "\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\u001b[36m(MapBatches(_udf) pid=21497)\u001b[0m See https://github.com/google-research/timesfm/blob/master/README.md for updated APIs.\u001b[32m [repeated 3x across cluster] (Ray deduplicates logs by default. Set RAY_DEDUP_LOGS=0 to disable log deduplication, or see https://docs.ray.io/en/master/ray-observability/user-guides/configure-logging.html#log-deduplication for more options.)\u001b[0m\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ " \n",
+ "\u001b[A \n",
+ "\n",
+ "\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \u001b[36m(MapBatches(_udf) pid=21494)\u001b[0m `torch_dtype` is deprecated! Use `dtype` instead!\u001b[32m [repeated 5x across cluster]\u001b[0m\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 11/14 CPU, 9.1KB/1.0GB object store: : 0.00 row [00:15, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ " \n",
+ "\u001b[A \n",
+ "\n",
+ "\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "100%|██████████| 1/1 [00:00<00:00, 53.20it/s]\u001b[32m [repeated 2x across cluster]\u001b[0m \u001b[36m(MapBatches(_udf) pid=21501)\u001b[0m \n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 11/14 CPU, 9.1KB/1.0GB object store: : 0.00 row [00:15, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ " \n",
+ "\u001b[A \n",
+ "\n",
+ "\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "100%|██████████| 1/1 [00:00<00:00, 58.28it/s] \u001b[36m(MapBatches(_udf) pid=21495)\u001b[0m \n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 1/14 CPU, 6.4KB/1.0GB object store: 0%| | 0.00/39.0 [00:16, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A"
+ ]
+ }
+ ],
+ "source": [
+ "df = ray.data.read_csv(\"https://timecopilot.s3.amazonaws.com/public/data/events_pageviews.csv\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "0dcb83c0",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2026-03-23 15:35:08,973\tINFO dataset.py:3055 -- Tip: Use `take_batch()` instead of `take() / show()` to return records in pandas or numpy batch format.\n",
+ "2026-03-23 15:35:08,978\tINFO logging.py:295 -- Registered dataset logger for dataset dataset_1_0\n",
+ "2026-03-23 15:35:08,997\tINFO streaming_executor.py:117 -- Starting execution of Dataset dataset_1_0. Full logs are in /tmp/ray/session_2026-03-23_15-35-05_459775_21356/logs/ray-data\n",
+ "2026-03-23 15:35:08,997\tINFO streaming_executor.py:118 -- Execution plan of Dataset dataset_1_0: InputDataBuffer[Input] -> TaskPoolMapOperator[ReadCSV] -> LimitOperator[limit=5]\n",
+ "Running 0: 0.00 row [00:00, ? row/s]\n",
+ "\u001b[A2026-03-23 15:35:09,013\tWARNING resource_manager.py:130 -- ⚠️ Ray's object store is configured to use only 6.9% of available memory (2.1GB out of 31.3GB total). For optimal Ray Data performance, we recommend setting the object store to at least 50% of available memory. You can do this by setting the 'object_store_memory' parameter when calling ray.init() or by setting the RAY_DEFAULT_OBJECT_STORE_MEMORY_PROPORTION environment variable.\n",
+ "2026-03-23 15:35:09,738\tINFO streaming_executor.py:231 -- ✔️ Dataset dataset_1_0 execution finished in 0.74 seconds\n",
+ " \n",
+ "\u001b[A \n",
+ "\n",
+ "✔️ Dataset dataset_1_0 execution finished in 0.74 seconds: : 5.00 row [00:00, 6.81 row/s]\n",
+ "\n",
+ "\u001b[A\n",
+ "\u001b[A \n",
+ "\n",
+ "- ReadCSV->SplitBlocks(28): Tasks: 1; Actors: 0; Queued blocks: 0; Resources: 1.0 CPU, 1.6KB object store: : 75.0 row [00:00, 102 row/s]\n",
+ "\n",
+ "\u001b[A\n",
+ "\u001b[A\n",
+ "\u001b[A\n",
+ "\u001b[A\n",
+ "\n",
+ "- limit=5: Tasks: 0; Actors: 0; Queued blocks: 2; Resources: 0.0 CPU, 135.0B object store: : 5.00 row [00:00, 6.79 row/s]"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "{'unique_id': 'Oktoberfest', 'ds': datetime.date(2020, 1, 31), 'y': 25376}\n",
+ "{'unique_id': 'Oktoberfest', 'ds': datetime.date(2020, 2, 29), 'y': 28470}\n",
+ "{'unique_id': 'Oktoberfest', 'ds': datetime.date(2020, 3, 31), 'y': 23816}\n",
+ "{'unique_id': 'Oktoberfest', 'ds': datetime.date(2020, 4, 30), 'y': 46186}\n",
+ "{'unique_id': 'Oktoberfest', 'ds': datetime.date(2020, 5, 31), 'y': 31213}\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "\n"
+ ]
+ }
+ ],
+ "source": [
+ "df.show(5)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "079631ca",
+ "metadata": {},
+ "source": [
+ "## Create a TimeCopilotForecaster"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "id": "989502e7",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "tcf = TimeCopilotForecaster(\n",
+ " models=[\n",
+ " SeasonalNaive(),\n",
+ " Chronos(\"autogluon/chronos-2-small\")\n",
+ " ]\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "10a5d1e5",
+ "metadata": {},
+ "source": [
+ "## Create a forecast"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "id": "dbb483e1",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2026-03-23 15:35:18,354\tINFO logging.py:295 -- Registered dataset logger for dataset dataset_2_0\n",
+ "2026-03-23 15:35:18,356\tINFO streaming_executor.py:117 -- Starting execution of Dataset dataset_2_0. Full logs are in /tmp/ray/session_2026-03-23_15-35-05_459775_21356/logs/ray-data\n",
+ "2026-03-23 15:35:18,356\tINFO streaming_executor.py:118 -- Execution plan of Dataset dataset_2_0: InputDataBuffer[Input] -> TaskPoolMapOperator[ReadCSV]\n",
+ "Running 0: 0.00 row [00:00, ? row/s]2026-03-23 15:35:18,502\tINFO streaming_executor.py:231 -- ✔️ Dataset dataset_2_0 execution finished in 0.15 seconds\n",
+ " \n",
+ "✔️ Dataset dataset_2_0 execution finished in 0.15 seconds: 100%|██████████| 408/408 [00:00<00:00, 2.76k row/s]\n",
+ "\n",
+ "- ReadCSV->SplitBlocks(28): Tasks: 0; Actors: 0; Queued blocks: 0; Resources: 0.0 CPU, 7.2KB object store: : 408 row [00:00, 2.75k row/s]\n",
+ "2026-03-23 15:35:18,551\tINFO logging.py:295 -- Registered dataset logger for dataset dataset_4_0\n",
+ "2026-03-23 15:35:18,554\tINFO streaming_executor.py:117 -- Starting execution of Dataset dataset_4_0. Full logs are in /tmp/ray/session_2026-03-23_15-35-05_459775_21356/logs/ray-data\n",
+ "2026-03-23 15:35:18,554\tINFO streaming_executor.py:118 -- Execution plan of Dataset dataset_4_0: InputDataBuffer[Input] -> TaskPoolMapOperator[ReadCSV]\n",
+ "Running 0: 0.00 row [00:00, ? row/s]2026-03-23 15:35:18,707\tINFO streaming_executor.py:231 -- ✔️ Dataset dataset_4_0 execution finished in 0.15 seconds\n",
+ " \n",
+ "✔️ Dataset dataset_4_0 execution finished in 0.15 seconds: 100%|██████████| 408/408 [00:00<00:00, 2.63k row/s]\n",
+ "\n",
+ "- ReadCSV->SplitBlocks(28): Tasks: 0; Actors: 0; Queued blocks: 0; Resources: 0.0 CPU, 10.8KB object store: : 408 row [00:00, 2.62k row/s]\n",
+ "2026-03-23 15:35:18,745\tINFO logging.py:295 -- Registered dataset logger for dataset dataset_10_0\n",
+ "2026-03-23 15:35:18,747\tINFO streaming_executor.py:117 -- Starting execution of Dataset dataset_10_0. Full logs are in /tmp/ray/session_2026-03-23_15-35-05_459775_21356/logs/ray-data\n",
+ "2026-03-23 15:35:18,747\tINFO streaming_executor.py:118 -- Execution plan of Dataset dataset_10_0: InputDataBuffer[Input] -> TaskPoolMapOperator[MapBatches(add_coarse_key)] -> AllToAllOperator[Sort] -> TaskPoolMapOperator[MapBatches(_udf)]\n",
+ "Running 0: 0.00 row [00:00, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A2026-03-23 15:35:19,742\tWARNING streaming_executor_state.py:764 -- Operator produced a RefBundle with a different schema than the previous one. Previous schema: unique_id: string\n",
+ "ds: date32[day]\n",
+ "y: int64\n",
+ "__ray_partition_key__: uint64, new schema: . This may lead to unexpected behavior.\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 3/14 CPU, 768.0MB/1.0GB object store: : 0.00 row [00:01, ? row/s]\n",
+ "\u001b[A\n",
+ "\u001b[A\n",
+ "\u001b[A\n",
+ "\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 3/14 CPU, 768.0MB/1.0GB object store: : 0.00 row [00:02, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 3/14 CPU, 768.0MB/1.0GB object store: : 0.00 row [00:03, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 3/14 CPU, 768.0MB/1.0GB object store: : 0.00 row [00:04, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 3/14 CPU, 768.0MB/1.0GB object store: : 0.00 row [00:05, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 12/14 CPU, 10.9KB/1.0GB object store: : 0.00 row [00:07, ? row/s]\n",
+ "\u001b[A\n",
+ "\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 11/14 CPU, 9.1KB/1.0GB object store: : 0.00 row [00:08, ? row/s] \n",
+ "\u001b[A\n",
+ "\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 11/14 CPU, 9.1KB/1.0GB object store: : 0.00 row [00:09, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 11/14 CPU, 9.1KB/1.0GB object store: : 0.00 row [00:10, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 11/14 CPU, 9.1KB/1.0GB object store: : 0.00 row [00:11, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 11/14 CPU, 9.1KB/1.0GB object store: : 0.00 row [00:12, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 11/14 CPU, 9.1KB/1.0GB object store: : 0.00 row [00:13, ? row/s]\n",
+ "\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 11/14 CPU, 9.1KB/1.0GB object store: : 0.00 row [00:14, ? row/s]\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 1/14 CPU, 6.4KB/1.0GB object store: 0%| | 0.00/39.0 [00:15, ? row/s]\n",
+ "\u001b[A\n",
+ "\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "Running Dataset: dataset_10_0. Active & requested resources: 1/14 CPU, 6.4KB/1.0GB object store: 0%| | 0.00/39.0 [00:16, ? row/s]2026-03-23 15:35:35,595\tWARNING streaming_executor_state.py:764 -- Operator produced a RefBundle with a different schema than the previous one. Previous schema: unique_id: string\n",
+ "ds: date32[day]\n",
+ "SeasonalNaive: double\n",
+ "Chronos: double\n",
+ "-- schema metadata --\n",
+ "pandas: '{\"index_columns\": [], \"column_indexes\": [], \"columns\": [{\"name\":' + 536, new schema: . This may lead to unexpected behavior.\n",
+ "2026-03-23 15:35:35,601\tINFO streaming_executor.py:231 -- ✔️ Dataset dataset_10_0 execution finished in 16.85 seconds\n",
+ " \n",
+ "\u001b[A \n",
+ "\n",
+ "\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "✔️ Dataset dataset_10_0 execution finished in 16.85 seconds: 100%|██████████| 72.0/72.0 [00:16<00:00, 4.27 row/s] \n",
+ "\n",
+ "\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\u001b[A \n",
+ "\n",
+ "\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "- MapBatches(add_coarse_key): Tasks: 0; Actors: 0; Queued blocks: 0; Resources: 0.0 CPU, 0.0B object store: 100%|██████████| 408/408 [00:16<00:00, 24.2 row/s]\n",
+ "\n",
+ "\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\u001b[A\n",
+ "\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "- Sort: Tasks: 0; Actors: 0; Queued blocks: 0; Resources: 0.0 CPU, 0.0B object store; 408 rows output: 100%|██████████| 408/408 [00:16<00:00, 24.2 row/s]\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ " *- Sort Sample: 100%|██████████| 280/280 [00:16<00:00, 16.6 row/s] \n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ " *- Shuffle Map: 100%|██████████| 408/408 [00:16<00:00, 24.2 row/s] \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ " *- Shuffle Reduce: 100%|██████████| 408/408 [00:16<00:00, 24.2 row/s] \n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "- MapBatches(_udf): Tasks: 0; Actors: 0; Queued blocks: 0; Resources: 0.0 CPU, 2.5KB object store: 100%|██████████| 72.0/72.0 [00:16<00:00, 4.26 row/s]\n"
+ ]
+ }
+ ],
+ "source": [
+ "result = tcf.forecast(\n",
+ " df=df,\n",
+ " h=12\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "510bf490",
+ "metadata": {},
+ "source": [
+ "## View Forecast Results"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "id": "da664a68",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " \n",
+ " | \n",
+ " unique_id | \n",
+ " ds | \n",
+ " SeasonalNaive | \n",
+ " Chronos | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " | 0 | \n",
+ " Oktoberfest | \n",
+ " 2025-09-30 | \n",
+ " 181474.0 | \n",
+ " 205939.90625 | \n",
+ "
\n",
+ " \n",
+ " | 1 | \n",
+ " Oktoberfest | \n",
+ " 2025-10-31 | \n",
+ " 142094.0 | \n",
+ " 161174.0625 | \n",
+ "
\n",
+ " \n",
+ " | 2 | \n",
+ " Oktoberfest | \n",
+ " 2025-11-30 | \n",
+ " 36200.0 | \n",
+ " 35124.382812 | \n",
+ "
\n",
+ " \n",
+ " | 3 | \n",
+ " Oktoberfest | \n",
+ " 2025-12-31 | \n",
+ " 26387.0 | \n",
+ " 30676.568359 | \n",
+ "
\n",
+ " \n",
+ " | 4 | \n",
+ " Oktoberfest | \n",
+ " 2026-01-31 | \n",
+ " 27944.0 | \n",
+ " 31133.820312 | \n",
+ "
\n",
+ " \n",
+ " | 5 | \n",
+ " Oktoberfest | \n",
+ " 2026-02-28 | \n",
+ " 25764.0 | \n",
+ " 30749.806641 | \n",
+ "
\n",
+ " \n",
+ " | 6 | \n",
+ " Oktoberfest | \n",
+ " 2026-03-31 | \n",
+ " 30708.0 | \n",
+ " 31413.792969 | \n",
+ "
\n",
+ " \n",
+ " | 7 | \n",
+ " Oktoberfest | \n",
+ " 2026-04-30 | \n",
+ " 31258.0 | \n",
+ " 31530.412109 | \n",
+ "
\n",
+ " \n",
+ " | 8 | \n",
+ " Oktoberfest | \n",
+ " 2026-05-31 | \n",
+ " 36179.0 | \n",
+ " 32509.914062 | \n",
+ "
\n",
+ " \n",
+ " | 9 | \n",
+ " Oktoberfest | \n",
+ " 2026-06-30 | \n",
+ " 33992.0 | \n",
+ " 32520.060547 | \n",
+ "
\n",
+ " \n",
+ " | 10 | \n",
+ " Oktoberfest | \n",
+ " 2026-07-31 | \n",
+ " 42317.0 | \n",
+ " 38071.007812 | \n",
+ "
\n",
+ " \n",
+ " | 11 | \n",
+ " Oktoberfest | \n",
+ " 2026-08-31 | \n",
+ " 74220.0 | \n",
+ " 72824.234375 | \n",
+ "
\n",
+ " \n",
+ " | 12 | \n",
+ " Pride Month | \n",
+ " 2025-09-30 | \n",
+ " 11521.0 | \n",
+ " 13015.404297 | \n",
+ "
\n",
+ " \n",
+ " | 13 | \n",
+ " Pride Month | \n",
+ " 2025-10-31 | \n",
+ " 11757.0 | \n",
+ " 12585.115234 | \n",
+ "
\n",
+ " \n",
+ " | 14 | \n",
+ " Pride Month | \n",
+ " 2025-11-30 | \n",
+ " 10126.0 | \n",
+ " 13316.660156 | \n",
+ "
\n",
+ " \n",
+ " | 15 | \n",
+ " Pride Month | \n",
+ " 2025-12-31 | \n",
+ " 8381.0 | \n",
+ " 12959.761719 | \n",
+ "
\n",
+ " \n",
+ " | 16 | \n",
+ " Pride Month | \n",
+ " 2026-01-31 | \n",
+ " 12556.0 | \n",
+ " 14463.102539 | \n",
+ "
\n",
+ " \n",
+ " | 17 | \n",
+ " Pride Month | \n",
+ " 2026-02-28 | \n",
+ " 19852.0 | \n",
+ " 13103.345703 | \n",
+ "
\n",
+ " \n",
+ " | 18 | \n",
+ " Pride Month | \n",
+ " 2026-03-31 | \n",
+ " 15691.0 | \n",
+ " 12801.917969 | \n",
+ "
\n",
+ " \n",
+ " | 19 | \n",
+ " Pride Month | \n",
+ " 2026-04-30 | \n",
+ " 16590.0 | \n",
+ " 14078.150391 | \n",
+ "
\n",
+ " \n",
+ " | 20 | \n",
+ " Pride Month | \n",
+ " 2026-05-31 | \n",
+ " 45511.0 | \n",
+ " 19983.867188 | \n",
+ "
\n",
+ " \n",
+ " | 21 | \n",
+ " Pride Month | \n",
+ " 2026-06-30 | \n",
+ " 242600.0 | \n",
+ " 70240.101562 | \n",
+ "
\n",
+ " \n",
+ " | 22 | \n",
+ " Pride Month | \n",
+ " 2026-07-31 | \n",
+ " 21471.0 | \n",
+ " 15968.902344 | \n",
+ "
\n",
+ " \n",
+ " | 23 | \n",
+ " Pride Month | \n",
+ " 2026-08-31 | \n",
+ " 13936.0 | \n",
+ " 10876.75 | \n",
+ "
\n",
+ " \n",
+ " | 24 | \n",
+ " Cyber Monday | \n",
+ " 2025-09-30 | \n",
+ " 8519.0 | \n",
+ " 7327.383789 | \n",
+ "
\n",
+ " \n",
+ " | 25 | \n",
+ " Cyber Monday | \n",
+ " 2025-10-31 | \n",
+ " 14608.0 | \n",
+ " 10634.869141 | \n",
+ "
\n",
+ " \n",
+ " | 26 | \n",
+ " Cyber Monday | \n",
+ " 2025-11-30 | \n",
+ " 62796.0 | \n",
+ " 96634.460938 | \n",
+ "
\n",
+ " \n",
+ " | 27 | \n",
+ " Cyber Monday | \n",
+ " 2025-12-31 | \n",
+ " 50174.0 | \n",
+ " 27630.857422 | \n",
+ "
\n",
+ " \n",
+ " | 28 | \n",
+ " Cyber Monday | \n",
+ " 2026-01-31 | \n",
+ " 6861.0 | \n",
+ " 6786.728516 | \n",
+ "
\n",
+ " \n",
+ " | 29 | \n",
+ " Cyber Monday | \n",
+ " 2026-02-28 | \n",
+ " 5517.0 | \n",
+ " 6774.277344 | \n",
+ "
\n",
+ " \n",
+ " | 30 | \n",
+ " Cyber Monday | \n",
+ " 2026-03-31 | \n",
+ " 5748.0 | \n",
+ " 6714.381836 | \n",
+ "
\n",
+ " \n",
+ " | 31 | \n",
+ " Cyber Monday | \n",
+ " 2026-04-30 | \n",
+ " 6329.0 | \n",
+ " 6664.470703 | \n",
+ "
\n",
+ " \n",
+ " | 32 | \n",
+ " Cyber Monday | \n",
+ " 2026-05-31 | \n",
+ " 5379.0 | \n",
+ " 6736.751953 | \n",
+ "
\n",
+ " \n",
+ " | 33 | \n",
+ " Cyber Monday | \n",
+ " 2026-06-30 | \n",
+ " 5032.0 | \n",
+ " 6536.330078 | \n",
+ "
\n",
+ " \n",
+ " | 34 | \n",
+ " Cyber Monday | \n",
+ " 2026-07-31 | \n",
+ " 4618.0 | \n",
+ " 6528.674805 | \n",
+ "
\n",
+ " \n",
+ " | 35 | \n",
+ " Cyber Monday | \n",
+ " 2026-08-31 | \n",
+ " 5582.0 | \n",
+ " 6723.951172 | \n",
+ "
\n",
+ " \n",
+ " | 36 | \n",
+ " Black Friday | \n",
+ " 2025-09-30 | \n",
+ " 2607.0 | \n",
+ " 1740.371094 | \n",
+ "
\n",
+ " \n",
+ " | 37 | \n",
+ " Black Friday | \n",
+ " 2025-10-31 | \n",
+ " 2470.0 | \n",
+ " 2226.007568 | \n",
+ "
\n",
+ " \n",
+ " | 38 | \n",
+ " Black Friday | \n",
+ " 2025-11-30 | \n",
+ " 11058.0 | \n",
+ " 12542.421875 | \n",
+ "
\n",
+ " \n",
+ " | 39 | \n",
+ " Black Friday | \n",
+ " 2025-12-31 | \n",
+ " 3548.0 | \n",
+ " 3203.52417 | \n",
+ "
\n",
+ " \n",
+ " | 40 | \n",
+ " Black Friday | \n",
+ " 2026-01-31 | \n",
+ " 1724.0 | \n",
+ " 1596.014893 | \n",
+ "
\n",
+ " \n",
+ " | 41 | \n",
+ " Black Friday | \n",
+ " 2026-02-28 | \n",
+ " 1730.0 | \n",
+ " 1594.216553 | \n",
+ "
\n",
+ " \n",
+ " | 42 | \n",
+ " Black Friday | \n",
+ " 2026-03-31 | \n",
+ " 1874.0 | \n",
+ " 1539.523438 | \n",
+ "
\n",
+ " \n",
+ " | 43 | \n",
+ " Black Friday | \n",
+ " 2026-04-30 | \n",
+ " 2311.0 | \n",
+ " 1530.233643 | \n",
+ "
\n",
+ " \n",
+ " | 44 | \n",
+ " Black Friday | \n",
+ " 2026-05-31 | \n",
+ " 1332.0 | \n",
+ " 1492.126953 | \n",
+ "
\n",
+ " \n",
+ " | 45 | \n",
+ " Black Friday | \n",
+ " 2026-06-30 | \n",
+ " 1215.0 | \n",
+ " 1452.619141 | \n",
+ "
\n",
+ " \n",
+ " | 46 | \n",
+ " Black Friday | \n",
+ " 2026-07-31 | \n",
+ " 1108.0 | \n",
+ " 1441.932861 | \n",
+ "
\n",
+ " \n",
+ " | 47 | \n",
+ " Black Friday | \n",
+ " 2026-08-31 | \n",
+ " 1690.0 | \n",
+ " 1470.735107 | \n",
+ "
\n",
+ " \n",
+ " | 48 | \n",
+ " Halloween | \n",
+ " 2025-09-30 | \n",
+ " 224365.0 | \n",
+ " 169542.75 | \n",
+ "
\n",
+ " \n",
+ " | 49 | \n",
+ " Halloween | \n",
+ " 2025-10-31 | \n",
+ " 1338038.0 | \n",
+ " 1695037.25 | \n",
+ "
\n",
+ " \n",
+ " | 50 | \n",
+ " Halloween | \n",
+ " 2025-11-30 | \n",
+ " 436338.0 | \n",
+ " 495548.25 | \n",
+ "
\n",
+ " \n",
+ " | 51 | \n",
+ " Halloween | \n",
+ " 2025-12-31 | \n",
+ " 93458.0 | \n",
+ " 90944.203125 | \n",
+ "
\n",
+ " \n",
+ " | 52 | \n",
+ " Halloween | \n",
+ " 2026-01-31 | \n",
+ " 75272.0 | \n",
+ " 82160.71875 | \n",
+ "
\n",
+ " \n",
+ " | 53 | \n",
+ " Halloween | \n",
+ " 2026-02-28 | \n",
+ " 67539.0 | \n",
+ " 87817.421875 | \n",
+ "
\n",
+ " \n",
+ " | 54 | \n",
+ " Halloween | \n",
+ " 2026-03-31 | \n",
+ " 70087.0 | \n",
+ " 87218.734375 | \n",
+ "
\n",
+ " \n",
+ " | 55 | \n",
+ " Halloween | \n",
+ " 2026-04-30 | \n",
+ " 71045.0 | \n",
+ " 84555.140625 | \n",
+ "
\n",
+ " \n",
+ " | 56 | \n",
+ " Halloween | \n",
+ " 2026-05-31 | \n",
+ " 68043.0 | \n",
+ " 85918.15625 | \n",
+ "
\n",
+ " \n",
+ " | 57 | \n",
+ " Halloween | \n",
+ " 2026-06-30 | \n",
+ " 69383.0 | \n",
+ " 89361.453125 | \n",
+ "
\n",
+ " \n",
+ " | 58 | \n",
+ " Halloween | \n",
+ " 2026-07-31 | \n",
+ " 67284.0 | \n",
+ " 80965.171875 | \n",
+ "
\n",
+ " \n",
+ " | 59 | \n",
+ " Halloween | \n",
+ " 2026-08-31 | \n",
+ " 89136.0 | \n",
+ " 86530.6875 | \n",
+ "
\n",
+ " \n",
+ " | 60 | \n",
+ " Thanksgiving | \n",
+ " 2025-09-30 | \n",
+ " 91935.0 | \n",
+ " 78129.5 | \n",
+ "
\n",
+ " \n",
+ " | 61 | \n",
+ " Thanksgiving | \n",
+ " 2025-10-31 | \n",
+ " 225173.0 | \n",
+ " 233912.59375 | \n",
+ "
\n",
+ " \n",
+ " | 62 | \n",
+ " Thanksgiving | \n",
+ " 2025-11-30 | \n",
+ " 1033092.0 | \n",
+ " 1220213.875 | \n",
+ "
\n",
+ " \n",
+ " | 63 | \n",
+ " Thanksgiving | \n",
+ " 2025-12-31 | \n",
+ " 169452.0 | \n",
+ " 171734.734375 | \n",
+ "
\n",
+ " \n",
+ " | 64 | \n",
+ " Thanksgiving | \n",
+ " 2026-01-31 | \n",
+ " 58590.0 | \n",
+ " 64222.875 | \n",
+ "
\n",
+ " \n",
+ " | 65 | \n",
+ " Thanksgiving | \n",
+ " 2026-02-28 | \n",
+ " 43547.0 | \n",
+ " 60092.242188 | \n",
+ "
\n",
+ " \n",
+ " | 66 | \n",
+ " Thanksgiving | \n",
+ " 2026-03-31 | \n",
+ " 58384.0 | \n",
+ " 59032.820312 | \n",
+ "
\n",
+ " \n",
+ " | 67 | \n",
+ " Thanksgiving | \n",
+ " 2026-04-30 | \n",
+ " 47655.0 | \n",
+ " 57731.476562 | \n",
+ "
\n",
+ " \n",
+ " | 68 | \n",
+ " Thanksgiving | \n",
+ " 2026-05-31 | \n",
+ " 48667.0 | \n",
+ " 57141.867188 | \n",
+ "
\n",
+ " \n",
+ " | 69 | \n",
+ " Thanksgiving | \n",
+ " 2026-06-30 | \n",
+ " 56856.0 | \n",
+ " 56497.53125 | \n",
+ "
\n",
+ " \n",
+ " | 70 | \n",
+ " Thanksgiving | \n",
+ " 2026-07-31 | \n",
+ " 38935.0 | \n",
+ " 57184.0 | \n",
+ "
\n",
+ " \n",
+ " | 71 | \n",
+ " Thanksgiving | \n",
+ " 2026-08-31 | \n",
+ " 50620.0 | \n",
+ " 57194.375 | \n",
+ "
\n",
+ " \n",
+ "
"
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from IPython.display import HTML\n",
+ "display(HTML(result.to_pandas().to_html()))"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "timecopilot",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.11.14"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/mkdocs.yml b/mkdocs.yml
index a40294e6..6920ca9b 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -32,6 +32,10 @@ nav:
- examples/cryptocurrency-quickstart.ipynb
- examples/sktime.ipynb
- examples/patchtst-fm.ipynb
+ - examples/pandas_baseline.ipynb
+ - examples/dask.ipynb
+ - examples/pyspark.ipynb
+ - examples/ray.ipynb
- Experiments:
- experiments/gift-eval.md
- experiments/fev.md
diff --git a/pyproject.toml b/pyproject.toml
index d1aaae77..2cf33792 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,8 +4,11 @@ requires = ["hatchling"]
[dependency-groups]
dev = [
+ "fugue[dask,ray,spark]>=0.9.0",
"mktestdocs>=0.2.5",
"pre-commit",
+ "pyspark<4.1",
+ "pytest",
"pytest-asyncio>=1.1.0",
"pytest-cov",
"pytest-mock>=3.15.1",
@@ -83,7 +86,7 @@ dependencies = [
"prophet>=1.1.7",
"pydantic-ai>=0.7.0",
"pytorch-lightning==2.4.0",
- "ray==2.49.2",
+ "ray==2.48",
"scipy<=1.15.3",
"statsforecast>=2.0.2",
"tabpfn-time-series==1.0.3 ; python_full_version < '3.13'",
@@ -108,6 +111,14 @@ readme = "README.md"
requires-python = ">=3.10"
version = "0.0.24"
+[project.optional-dependencies]
+distributed = [
+ "dask<=2024.12.1",
+ "fugue[dask,ray,spark]>=0.9.0",
+ "pyspark<4.1",
+ "ray==2.48",
+]
+
[project.scripts]
timecopilot = "timecopilot._cli:main"
@@ -131,8 +142,9 @@ allow-direct-references = true
disable_error_code = ["no-redef"] # for fasthtml
[tool.pytest.ini_options]
-addopts = "-m 'not docs and not live and not gift_eval' -n auto"
+addopts = "-m 'not docs and not live and not gift_eval and not distributed' -n auto"
markers = [
+ "distributed: marks tests that require distributed backends (spark, dask, ray)",
"docs: marks tests related to documentation",
"flaky: rerun failures (provided by pytest-rerunfailures)",
"gift_eval: marks tests related to gift eval results replication",
diff --git a/tests/models/utils/test_forecaster.py b/tests/models/utils/test_forecaster.py
index 2b96d450..e2a0e512 100644
--- a/tests/models/utils/test_forecaster.py
+++ b/tests/models/utils/test_forecaster.py
@@ -173,7 +173,7 @@ def test_maybe_convert_quantiles_to_level(n_models, level):
check_names=False,
)
else:
- alpha = 1 - lv / 100
+ alpha = round(1 - lv / 100, 2)
q_lo = int((alpha / 2) * 100)
q_hi = int((1 - alpha / 2) * 100)
pd.testing.assert_series_equal(
diff --git a/tests/test_distributed.py b/tests/test_distributed.py
new file mode 100644
index 00000000..63635a08
--- /dev/null
+++ b/tests/test_distributed.py
@@ -0,0 +1,514 @@
+"""Tests for distributed DataFrame support in TimeCopilotForecaster."""
+
+
+import pandas as pd
+import pytest
+from utilsforecast.data import generate_series
+
+from timecopilot.forecaster import TimeCopilotForecaster
+from timecopilot.models import SeasonalNaive, ZeroModel
+from timecopilot.models.foundation.chronos import Chronos
+from timecopilot.models.utils.forecaster import Forecaster
+
+
+class SimpleTestModel(Forecaster):
+ """A simple model for testing that doesn't use any parallel processing."""
+
+ alias = "SimpleTestModel"
+
+ def forecast(
+ self,
+ df: pd.DataFrame,
+ h: int,
+ freq: str | None = None,
+ level: list[int | float] | None = None,
+ quantiles: list[float] | None = None,
+ ) -> pd.DataFrame:
+ """Return zeros for all forecasts."""
+ freq = self._maybe_infer_freq(df, freq)
+ unique_ids = df["unique_id"].unique()
+ results = []
+ for uid in unique_ids:
+ uid_df = df[df["unique_id"] == uid]
+ last_ds = uid_df["ds"].max()
+ future_ds = pd.date_range(start=last_ds, periods=h + 1, freq=freq)[1:]
+ results.append(
+ pd.DataFrame(
+ {
+ "unique_id": [uid] * h,
+ "ds": future_ds,
+ self.alias: [0.0] * h,
+ }
+ )
+ )
+ return pd.concat(results, ignore_index=True)
+
+
+@pytest.fixture
+def models():
+ return [SeasonalNaive(), ZeroModel()]
+
+
+@pytest.fixture
+def simple_models():
+ """
+ Models that don't use parallel processing internally - safe for
+ distributed tests.
+ """
+ return [SimpleTestModel()]
+
+
+@pytest.fixture
+def foundation_model():
+ # return [Chronos(repo_id="autogluon/chronos-bolt-tiny")]
+ return [Chronos(repo_id="autogluon/chronos-t5-tiny")]
+
+
+@pytest.fixture
+def sample_df():
+ df = generate_series(n_series=3, freq="D", min_length=30)
+ df["unique_id"] = df["unique_id"].astype(str)
+ return df
+
+
+@pytest.fixture
+def event_df():
+ return pd.read_csv(
+ "https://timecopilot.s3.amazonaws.com/public/data/events_pageviews.csv",
+ parse_dates=["ds"],
+ )
+
+
+# --- Type detection tests ---
+
+
+def test_is_distributed_df_pandas(sample_df):
+ assert TimeCopilotForecaster._is_distributed_df(sample_df) is False
+
+
+# --- Pandas path tests (baseline) ---
+
+
+def test_forecast_pandas(models, sample_df):
+ forecaster = TimeCopilotForecaster(models=models)
+ result = forecaster.forecast(df=sample_df, h=2, freq="D")
+ assert isinstance(result, pd.DataFrame)
+ assert len(result) == 2 * 3
+ for model in models:
+ assert model.alias in result.columns
+
+
+def test_cross_validation_pandas(models, sample_df):
+ forecaster = TimeCopilotForecaster(models=models)
+ result = forecaster.cross_validation(
+ df=sample_df, h=2, freq="D", n_windows=2, step_size=1
+ )
+ assert isinstance(result, pd.DataFrame)
+ assert "cutoff" in result.columns
+ for model in models:
+ assert model.alias in result.columns
+
+
+def test_detect_anomalies_pandas(models, sample_df):
+ forecaster = TimeCopilotForecaster(models=models)
+ result = forecaster.detect_anomalies(df=sample_df, h=2, freq="D")
+ assert isinstance(result, pd.DataFrame)
+ assert "cutoff" in result.columns
+ for model in models:
+ assert model.alias in result.columns
+ assert f"{model.alias}-anomaly" in result.columns
+
+
+# --- Spark tests ---
+
+
+@pytest.fixture
+def spark_session():
+ pytest.importorskip("pyspark")
+ from pyspark.sql import SparkSession
+
+ spark = (
+ SparkSession.builder.master("local[2]")
+ .appName("timecopilot-test")
+ .config("spark.sql.shuffle.partitions", "2")
+ .getOrCreate()
+ )
+ yield spark
+ spark.stop()
+
+
+@pytest.fixture
+def spark_df(spark_session, sample_df):
+ return spark_session.createDataFrame(sample_df)
+
+
+@pytest.mark.distributed
+def test_is_distributed_df_spark(spark_df):
+ assert TimeCopilotForecaster._is_distributed_df(spark_df) is True
+
+
+@pytest.mark.distributed
+def test_forecast_spark(simple_models, spark_df):
+ forecaster = TimeCopilotForecaster(models=simple_models)
+ result = forecaster.forecast(df=spark_df, h=2, freq="D")
+ # Result should be a Spark DataFrame
+ from pyspark.sql import DataFrame as SparkDataFrame
+
+ assert isinstance(result, SparkDataFrame)
+ result_pd = result.toPandas()
+ assert len(result_pd) == 2 * 3
+ for model in simple_models:
+ assert model.alias in result_pd.columns
+
+
+@pytest.mark.distributed
+def test_cross_validation_spark(simple_models, spark_df):
+ forecaster = TimeCopilotForecaster(models=simple_models)
+ result = forecaster.cross_validation(
+ df=spark_df, h=2, freq="D", n_windows=2, step_size=1
+ )
+ from pyspark.sql import DataFrame as SparkDataFrame
+
+ assert isinstance(result, SparkDataFrame)
+ result_pd = result.toPandas()
+ assert "cutoff" in result_pd.columns
+ for model in simple_models:
+ assert model.alias in result_pd.columns
+
+
+@pytest.mark.distributed
+def test_using_level_spark(foundation_model, spark_session):
+ # level = [0, 20, 40, 60, 80] # corresponds to qs [0.1, 0.2, ..., 0.9]
+ level: list[int | float] = [20, 80]
+ df = generate_series(
+ n_series=2, freq="D", max_length=100, static_as_categorical=False
+ )
+ spark_df = spark_session.createDataFrame(df)
+ tcf = TimeCopilotForecaster(models=foundation_model)
+ excluded_models = [
+ "AutoLGBM",
+ "AutoNHITS",
+ "AutoTFT",
+ "PatchTST-FM",
+ ]
+ if any(m.alias in excluded_models for m in foundation_model):
+ # These models do not support levels yet
+ with pytest.raises(ValueError) as excinfo:
+ tcf.forecast(
+ df=spark_df,
+ h=2,
+ freq="D",
+ level=level,
+ )
+ assert "not supported" in str(excinfo.value)
+ return
+ fcst_df = tcf.forecast(
+ df=spark_df,
+ h=2,
+ freq="D",
+ level=level,
+ )
+ fcst_df_pd = fcst_df.toPandas()
+ exp_lv_cols = []
+ for lv in level:
+ for model in foundation_model:
+ exp_lv_cols.extend([f"{model.alias}-lo-{lv}", f"{model.alias}-hi-{lv}"])
+ assert len(exp_lv_cols) == len(fcst_df_pd.columns) - 3 # 3 is unique_id, ds, point
+ assert all(col in fcst_df_pd.columns for col in exp_lv_cols)
+ assert not any(("-q-" in col) for col in fcst_df_pd.columns)
+ # test monotonicity of levels
+ exp_lv_cols = exp_lv_cols[2:] # skip first level's lo/hi cols (stale: level 0 no longer in list)
+ for c1, c2 in zip(exp_lv_cols[:-1:2], exp_lv_cols[1::2], strict=False):
+ for model in foundation_model:
+ if model.alias == "ZeroModel":
+ # ZeroModel is a constant model, so all levels should be the same
+ assert fcst_df_pd[c1].eq(fcst_df_pd[c2]).all()
+ elif "chronos" in model.alias.lower() or "median" in model.alias.lower():
+ # sometimes it gives this condition
+ assert fcst_df_pd[c1].le(fcst_df_pd[c2]).all()
+ elif "tabpfn" in model.alias.lower():
+ # we are testing the mock mode, so we don't care about monotonicity
+ continue
+ else:
+ assert fcst_df_pd[c1].lt(fcst_df_pd[c2]).all()
+
+
+# --- Dask tests ---
+
+
+@pytest.fixture
+def dask_df(sample_df):
+ pytest.importorskip("dask")
+ import dask.dataframe as dd
+
+ return dd.from_pandas(sample_df, npartitions=2)
+
+
+@pytest.mark.distributed
+def test_is_distributed_df_dask(dask_df):
+ assert TimeCopilotForecaster._is_distributed_df(dask_df) is True
+
+
+@pytest.mark.distributed
+def test_forecast_dask(simple_models, dask_df):
+ import dask.dataframe as dd
+
+ forecaster = TimeCopilotForecaster(models=simple_models)
+ result = forecaster.forecast(df=dask_df, h=2, freq="D")
+ # Result should be a Dask DataFrame
+ assert isinstance(result, dd.DataFrame)
+ result_pd = result.compute()
+ assert len(result_pd) == 2 * 3
+ for model in simple_models:
+ assert model.alias in result_pd.columns
+
+
+@pytest.mark.distributed
+@pytest.mark.parametrize(
+ "n_series,n_partitions",
+ [
+ (1, 1),
+ (3, 1),
+ (1, 2),
+ (3, 2),
+ ],
+)
+def test_forecast_dask_series(n_series, n_partitions, foundation_model):
+ pytest.importorskip("dask")
+ import dask.dataframe as dd
+
+ # foundation_model fixture already returns the list of models
+ models = foundation_model
+ h = 2
+
+ df = generate_series(n_series=n_series, freq="D", min_length=30)
+ df["unique_id"] = df["unique_id"].astype(str)
+
+ dask_df = dd.from_pandas(df, npartitions=n_partitions)
+ forecaster = TimeCopilotForecaster(models=models)
+ result = forecaster.forecast(df=dask_df, h=h, freq="D")
+ result_pd = result.compute()
+ assert len(result_pd) == h * n_series
+ for model in models:
+ assert model.alias in result_pd.columns
+
+
+@pytest.mark.distributed
+def test_cross_validation_dask(simple_models, dask_df):
+ import dask.dataframe as dd
+
+ forecaster = TimeCopilotForecaster(models=simple_models)
+ result = forecaster.cross_validation(
+ df=dask_df, h=2, freq="D", n_windows=2, step_size=1
+ )
+ assert isinstance(result, dd.DataFrame)
+ result_pd = result.compute()
+ assert "cutoff" in result_pd.columns
+ for model in simple_models:
+ assert model.alias in result_pd.columns
+
+
+@pytest.mark.distributed
+def test_using_level_dask(foundation_model):
+ import dask.dataframe as dd
+
+ # level = [0, 20, 40, 60, 80] # corresponds to qs [0.1, 0.2, ..., 0.9]
+ level: list[int | float] = [20, 80]
+ n_partitions = 2
+ df = generate_series(
+ n_series=2, freq="D", max_length=100, static_as_categorical=False
+ )
+ dask_df = dd.from_pandas(df, npartitions=n_partitions)
+ tcf = TimeCopilotForecaster(models=foundation_model)
+ excluded_models = [
+ "AutoLGBM",
+ "AutoNHITS",
+ "AutoTFT",
+ "PatchTST-FM",
+ ]
+ if any(m.alias in excluded_models for m in foundation_model):
+ # These models do not support levels yet
+ with pytest.raises(ValueError) as excinfo:
+ tcf.forecast(
+ df=dask_df,
+ h=2,
+ freq="D",
+ level=level,
+ )
+ assert "not supported" in str(excinfo.value)
+ return
+ fcst_df = tcf.forecast(
+ df=dask_df,
+ h=2,
+ freq="D",
+ level=level,
+ )
+ fcst_df_pd = fcst_df.compute()
+ exp_lv_cols = []
+ for lv in level:
+ for model in foundation_model:
+ exp_lv_cols.extend([f"{model.alias}-lo-{lv}", f"{model.alias}-hi-{lv}"])
+ assert len(exp_lv_cols) == len(fcst_df_pd.columns) - 3 # 3 is unique_id, ds, point
+ assert all(col in fcst_df_pd.columns for col in exp_lv_cols)
+ assert not any(("-q-" in col) for col in fcst_df_pd.columns)
+ # test monotonicity of levels
+ exp_lv_cols = exp_lv_cols[2:] # skip first level's lo/hi cols (stale: level 0 no longer in list)
+ for c1, c2 in zip(exp_lv_cols[:-1:2], exp_lv_cols[1::2], strict=False):
+ for model in foundation_model:
+ if model.alias == "ZeroModel":
+ # ZeroModel is a constant model, so all levels should be the same
+ assert fcst_df_pd[c1].eq(fcst_df_pd[c2]).all()
+ elif "chronos" in model.alias.lower() or "median" in model.alias.lower():
+ # sometimes it gives this condition
+ assert fcst_df_pd[c1].le(fcst_df_pd[c2]).all()
+ elif "tabpfn" in model.alias.lower():
+ # we are testing the mock mode, so we don't care about monotonicity
+ continue
+ else:
+ assert fcst_df_pd[c1].lt(fcst_df_pd[c2]).all()
+
+
+# --- Ray tests ---
+
+
+@pytest.fixture
+def ray_dataset(sample_df):
+ pytest.importorskip("ray")
+ import ray
+
+ if not ray.is_initialized():
+ # Disable working-dir upload (runtime_env) to avoid packaging issues
+ ray.init(
+ ignore_reinit_error=True,
+ num_cpus=2,
+ include_dashboard=False,
+ runtime_env={"working_dir": None},
+ )
+ return ray.data.from_pandas(sample_df)
+
+
+@pytest.mark.distributed
+def test_is_distributed_df_ray(ray_dataset):
+ assert TimeCopilotForecaster._is_distributed_df(ray_dataset) is True
+
+
+@pytest.mark.distributed
+def test_forecast_ray(simple_models, ray_dataset):
+ import ray.data
+
+ forecaster = TimeCopilotForecaster(models=simple_models)
+ result = forecaster.forecast(df=ray_dataset, h=2, freq="D")
+ # Result should be a Ray Dataset
+ assert isinstance(result, ray.data.Dataset)
+ result_pd = result.to_pandas()
+ assert len(result_pd) == 2 * 3
+ for model in simple_models:
+ assert model.alias in result_pd.columns
+
+
+@pytest.mark.distributed
+def test_cross_validation_ray(simple_models, ray_dataset):
+ import ray.data
+
+ forecaster = TimeCopilotForecaster(models=simple_models)
+ result = forecaster.cross_validation(
+ df=ray_dataset, h=2, freq="D", n_windows=2, step_size=1
+ )
+ assert isinstance(result, ray.data.Dataset)
+ result_pd = result.to_pandas()
+ assert "cutoff" in result_pd.columns
+ for model in simple_models:
+ assert model.alias in result_pd.columns
+
+
+@pytest.mark.distributed
+def test_using_level_ray(foundation_model):
+ import ray
+
+ # level = [0, 20, 40, 60, 80] # corresponds to qs [0.1, 0.2, ..., 0.9]
+ level: list[int | float] = [20, 80]
+ df = generate_series(
+ n_series=2, freq="D", max_length=100, static_as_categorical=False
+ )
+ if not ray.is_initialized():
+ # Disable working-dir upload (runtime_env) to avoid packaging issues
+ ray.init(
+ ignore_reinit_error=True,
+ num_cpus=2,
+ include_dashboard=False,
+ runtime_env={"working_dir": None},
+ object_store_memory=78_643_200,
+ )
+ ray_df = ray.data.from_pandas(df)
+ tcf = TimeCopilotForecaster(models=foundation_model)
+ excluded_models = [
+ "AutoLGBM",
+ "AutoNHITS",
+ "AutoTFT",
+ "PatchTST-FM",
+ ]
+ if any(m.alias in excluded_models for m in foundation_model):
+ # These models do not support levels yet
+ with pytest.raises(ValueError) as excinfo:
+ tcf.forecast(
+ df=ray_df,
+ h=2,
+ freq="D",
+ level=level,
+ )
+ assert "not supported" in str(excinfo.value)
+ return
+ fcst_df = tcf.forecast(
+ df=ray_df,
+ h=2,
+ freq="D",
+ level=level,
+ )
+ fcst_df_pd = fcst_df.to_pandas()
+ exp_lv_cols = []
+ for lv in level:
+ for model in foundation_model:
+ exp_lv_cols.extend([f"{model.alias}-lo-{lv}", f"{model.alias}-hi-{lv}"])
+ assert len(exp_lv_cols) == len(fcst_df_pd.columns) - 3 # 3 is unique_id, ds, point
+ assert all(col in fcst_df_pd.columns for col in exp_lv_cols)
+ assert not any(("-q-" in col) for col in fcst_df_pd.columns)
+ # test monotonicity of levels
+ exp_lv_cols = exp_lv_cols[2:] # skip first level's lo/hi cols (stale: level 0 no longer in list)
+ for c1, c2 in zip(exp_lv_cols[:-1:2], exp_lv_cols[1::2], strict=False):
+ for model in foundation_model:
+ if model.alias == "ZeroModel":
+ # ZeroModel is a constant model, so all levels should be the same
+ assert fcst_df_pd[c1].eq(fcst_df_pd[c2]).all()
+ elif "chronos" in model.alias.lower() or "median" in model.alias.lower():
+ # sometimes it gives this condition
+ assert fcst_df_pd[c1].le(fcst_df_pd[c2]).all()
+ elif "tabpfn" in model.alias.lower():
+ # we are testing the mock mode, so we don't care about monotonicity
+ continue
+ else:
+ assert fcst_df_pd[c1].lt(fcst_df_pd[c2]).all()
+
+
+# --- num_partitions parameter tests ---
+
+
+@pytest.mark.distributed
+def test_forecast_spark_with_num_partitions(simple_models, spark_df):
+ forecaster = TimeCopilotForecaster(models=simple_models)
+ result = forecaster.forecast(df=spark_df, h=2, freq="D", num_partitions=4)
+ from pyspark.sql import DataFrame as SparkDataFrame
+
+ assert isinstance(result, SparkDataFrame)
+ result_pd = result.toPandas()
+ assert len(result_pd) == 2 * 3
+
+
+@pytest.mark.distributed
+def test_forecast_dask_with_num_partitions(simple_models, dask_df):
+ import dask.dataframe as dd
+
+ forecaster = TimeCopilotForecaster(models=simple_models)
+ result = forecaster.forecast(df=dask_df, h=2, freq="D", num_partitions=4)
+ assert isinstance(result, dd.DataFrame)
+ result_pd = result.compute()
+ assert len(result_pd) == 2 * 3
diff --git a/timecopilot/forecaster.py b/timecopilot/forecaster.py
index a27ac5f4..1bf8eeca 100644
--- a/timecopilot/forecaster.py
+++ b/timecopilot/forecaster.py
@@ -1,7 +1,33 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, TypeVar
+
import pandas as pd
from .models.utils.forecaster import Forecaster
+if TYPE_CHECKING:
+ from dask.dataframe import DataFrame as DaskDataFrame
+ from pyspark.sql import DataFrame as SparkDataFrame
+ from ray.data import Dataset as RayDataset
+
+# Type variable for any supported DataFrame type
+AnyDataFrame = TypeVar(
+ "AnyDataFrame",
+ pd.DataFrame,
+ "DaskDataFrame",
+ "SparkDataFrame",
+ "RayDataset",
+)
+
+# Type variable for distributed DataFrame types only
+DistributedDataFrame = TypeVar(
+ "DistributedDataFrame",
+ "DaskDataFrame",
+ "SparkDataFrame",
+ "RayDataset",
+)
+
class TimeCopilotForecaster(Forecaster):
"""
@@ -62,6 +88,19 @@ def _validate_unique_aliases(self, models: list[Forecaster]) -> None:
f"same class."
)
+ @staticmethod
+ def _is_distributed_df(df: AnyDataFrame) -> bool:
+ """
+ Check if a DataFrame is a distributed DataFrame type.
+
+ Args:
+ df: DataFrame to check.
+
+ Returns:
+ True if the DataFrame is Spark, Dask, or Ray; False if pandas.
+ """
+ return not isinstance(df, pd.DataFrame)
+
def _call_models(
self,
attr: str,
@@ -118,7 +157,7 @@ def _call_models(
res_df = res_df.merge(res_df_model, on=merge_on, how="left")
return res_df
- def forecast(
+ def _forecast_pandas(
self,
df: pd.DataFrame,
h: int,
@@ -126,6 +165,96 @@ def forecast(
level: list[int | float] | None = None,
quantiles: list[float] | None = None,
) -> pd.DataFrame:
+ """
+ Internal pandas-only forecast implementation.
+
+ This method is called directly for pandas DataFrames or by the
+ distributed wrapper for each partition.
+ """
+ return self._call_models(
+ "forecast",
+ merge_on=["unique_id", "ds"],
+ df=df,
+ h=h,
+ freq=freq,
+ level=level,
+ quantiles=quantiles,
+ )
+
+ def _forecast_distributed(
+ self,
+ df: DistributedDataFrame,
+ h: int,
+ freq: str | None = None,
+ level: list[int | float] | None = None,
+ quantiles: list[float] | None = None,
+ num_partitions: int | None = None,
+ ) -> DistributedDataFrame:
+ """
+ Distributed forecast implementation using Fugue.
+
+ This method handles Spark, Dask, and Ray DataFrames by partitioning
+ the data by unique_id and running the pandas forecast on each partition.
+
+ Args:
+ df: Distributed DataFrame (Spark, Dask, or Ray).
+ h: Forecast horizon.
+ freq: Frequency of the time series.
+ level: Confidence levels for prediction intervals.
+ quantiles: Quantiles to forecast.
+ num_partitions: Number of partitions to use.
+
+ Returns:
+ Distributed DataFrame with forecast results (same type as input).
+ """
+ import fugue.api as fa
+
+ from .utils.distributed import (
+ _distributed_setup,
+ _forecast_wrapper,
+ _maybe_repartition_df,
+ )
+
+ df = _maybe_repartition_df(df)
+
+ schema, partition_config = _distributed_setup(
+ df=df,
+ method="forecast",
+ id_col="unique_id",
+ time_col="ds",
+ target_col="y",
+ level=level,
+ quantiles=quantiles,
+ num_partitions=num_partitions,
+ models=self.models,
+ )
+
+ result_df = fa.transform(
+ df,
+ using=_forecast_wrapper,
+ schema=schema,
+ params={
+ "forecaster": self,
+ "h": h,
+ "freq": freq,
+ "level": level,
+ "quantiles": quantiles,
+ },
+ partition=partition_config,
+ as_fugue=True,
+ )
+
+ return fa.get_native_as_df(result_df)
+
+ def forecast(
+ self,
+ df: AnyDataFrame,
+ h: int,
+ freq: str | None = None,
+ level: list[int | float] | None = None,
+ quantiles: list[float] | None = None,
+ num_partitions: int | None = None,
+ ) -> AnyDataFrame:
"""
Generate forecasts for one or more time series using all models.
@@ -133,10 +262,15 @@ def forecast(
intervals or quantile forecasts. The input DataFrame can contain one
or multiple time series in stacked (long) format.
+ Supports pandas, Spark, Dask, and Ray DataFrames. For distributed
+ DataFrames, the data is partitioned by unique_id and processed in
+ parallel using Fugue.
+
Args:
- df (pd.DataFrame):
- DataFrame containing the time series to forecast. It must
- include as columns:
+ df (AnyDataFrame):
+ DataFrame containing the time series to forecast. Supports
+ pandas DataFrame, Spark DataFrame, Dask DataFrame, or Ray Dataset.
+ It must include as columns:
- "unique_id": an ID column to distinguish multiple series.
- "ds": a time column indicating timestamps or periods.
@@ -161,10 +295,15 @@ def forecast(
provided, the output DataFrame will contain additional columns
named in the format "model-q-{percentile}", where {percentile}
= 100 × quantile value.
+ num_partitions (int, optional):
+ Number of partitions to use for distributed DataFrames. Only
+ used when df is a Spark, Dask, or Ray DataFrame. If not provided,
+ the default partitioning is used.
Returns:
- pd.DataFrame:
- DataFrame containing forecast results. Includes:
+ AnyDataFrame:
+ DataFrame containing forecast results (same type as input).
+ Includes:
- point forecasts for each timestamp, series and model.
- prediction intervals if `level` is specified.
@@ -173,9 +312,19 @@ def forecast(
For multi-series data, the output retains the same unique
identifiers as the input DataFrame.
"""
- return self._call_models(
- "forecast",
- merge_on=["unique_id", "ds"],
+ # Route to distributed implementation for non-pandas DataFrames
+ if self._is_distributed_df(df):
+ return self._forecast_distributed(
+ df=df,
+ h=h,
+ freq=freq,
+ level=level,
+ quantiles=quantiles,
+ num_partitions=num_partitions,
+ )
+
+ # Pandas DataFrame path
+ return self._forecast_pandas(
df=df,
h=h,
freq=freq,
@@ -183,7 +332,7 @@ def forecast(
quantiles=quantiles,
)
- def cross_validation(
+ def _cross_validation_pandas(
self,
df: pd.DataFrame,
h: int,
@@ -193,16 +342,122 @@ def cross_validation(
level: list[int | float] | None = None,
quantiles: list[float] | None = None,
) -> pd.DataFrame:
+ """
+ Internal pandas-only cross-validation implementation.
+
+ This method is called directly for pandas DataFrames or by the
+ distributed wrapper for each partition.
+ """
+ return self._call_models(
+ "cross_validation",
+ merge_on=["unique_id", "ds", "cutoff"],
+ df=df,
+ h=h,
+ freq=freq,
+ n_windows=n_windows,
+ step_size=step_size,
+ level=level,
+ quantiles=quantiles,
+ )
+
+ def _cross_validation_distributed(
+ self,
+ df: DistributedDataFrame,
+ h: int,
+ freq: str | None = None,
+ n_windows: int = 1,
+ step_size: int | None = None,
+ level: list[int | float] | None = None,
+ quantiles: list[float] | None = None,
+ num_partitions: int | None = None,
+ ) -> DistributedDataFrame:
+ """
+ Distributed cross-validation implementation using Fugue.
+
+ This method handles Spark, Dask, and Ray DataFrames by partitioning
+ the data by unique_id and running the pandas cross-validation
+ on each partition.
+
+ Args:
+ df: Distributed DataFrame (Spark, Dask, or Ray).
+ h: Forecast horizon.
+ freq: Frequency of the time series.
+ n_windows: Number of cross-validation windows.
+ step_size: Step size between windows.
+ level: Confidence levels for prediction intervals.
+ quantiles: Quantiles to forecast.
+ num_partitions: Number of partitions to use.
+
+ Returns:
+ Distributed DataFrame with cross-validation results (same type as input).
+ """
+ import fugue.api as fa
+
+ from .utils.distributed import (
+ _cross_validation_wrapper,
+ _distributed_setup,
+ _maybe_repartition_df,
+ )
+
+ df = _maybe_repartition_df(df)
+
+ schema, partition_config = _distributed_setup(
+ df=df,
+ method="cross_validation",
+ id_col="unique_id",
+ time_col="ds",
+ target_col="y",
+ level=level,
+ quantiles=quantiles,
+ num_partitions=num_partitions,
+ models=self.models,
+ )
+
+ result_df = fa.transform(
+ df,
+ using=_cross_validation_wrapper,
+ schema=schema,
+ params={
+ "forecaster": self,
+ "h": h,
+ "freq": freq,
+ "n_windows": n_windows,
+ "step_size": step_size,
+ "level": level,
+ "quantiles": quantiles,
+ },
+ partition=partition_config,
+ as_fugue=True,
+ )
+
+ return fa.get_native_as_df(result_df)
+
+ def cross_validation(
+ self,
+ df: AnyDataFrame,
+ h: int,
+ freq: str | None = None,
+ n_windows: int = 1,
+ step_size: int | None = None,
+ level: list[int | float] | None = None,
+ quantiles: list[float] | None = None,
+ num_partitions: int | None = None,
+ ) -> AnyDataFrame:
"""
This method splits the time series into multiple training and testing
windows and generates forecasts for each window. It enables evaluating
forecast accuracy over different historical periods. Supports point
forecasts and, optionally, prediction intervals or quantile forecasts.
+ Supports pandas, Spark, Dask, and Ray DataFrames. For distributed
+ DataFrames, the data is partitioned by unique_id and processed in
+ parallel using Fugue.
+
Args:
- df (pd.DataFrame):
- DataFrame containing the time series to forecast. It must
- include as columns:
+ df (AnyDataFrame):
+ DataFrame containing the time series to forecast. Supports
+ pandas DataFrame, Spark DataFrame, Dask DataFrame, or Ray Dataset.
+ It must include as columns:
- "unique_id": an ID column to distinguish multiple series.
- "ds": a time column indicating timestamps or periods.
@@ -232,11 +487,15 @@ def cross_validation(
Should not be used simultaneously with `level`. If provided,
additional columns named "model-q-{percentile}" will appear in
the output, where {percentile} is 100 × quantile value.
+ num_partitions (int, optional):
+ Number of partitions to use for distributed DataFrames. Only
+ used when df is a Spark, Dask, or Ray DataFrame. If not provided,
+ the default partitioning is used.
Returns:
- pd.DataFrame:
+ AnyDataFrame:
DataFrame containing the forecasts for each cross-validation
- window. The output includes:
+ window (same type as input). The output includes:
- "unique_id" column to indicate the series.
- "ds" column to indicate the timestamp.
@@ -247,9 +506,21 @@ def cross_validation(
- prediction intervals if `level` is specified.
- quantile forecasts if `quantiles` is specified.
"""
- return self._call_models(
- "cross_validation",
- merge_on=["unique_id", "ds", "cutoff"],
+ # Route to distributed implementation for non-pandas DataFrames
+ if self._is_distributed_df(df):
+ return self._cross_validation_distributed(
+ df=df,
+ h=h,
+ freq=freq,
+ n_windows=n_windows,
+ step_size=step_size,
+ level=level,
+ quantiles=quantiles,
+ num_partitions=num_partitions,
+ )
+
+ # Pandas DataFrame path
+ return self._cross_validation_pandas(
df=df,
h=h,
freq=freq,
@@ -259,7 +530,7 @@ def cross_validation(
quantiles=quantiles,
)
- def detect_anomalies(
+ def _detect_anomalies_pandas(
self,
df: pd.DataFrame,
h: int | None = None,
@@ -267,6 +538,97 @@ def detect_anomalies(
n_windows: int | None = None,
level: int | float = 99,
) -> pd.DataFrame:
+ """Internal pandas-only anomaly detection implementation.
+
+ This method is called directly for pandas DataFrames or by the
+ distributed wrapper for each partition.
+ """
+ return self._call_models(
+ "detect_anomalies",
+ merge_on=["unique_id", "ds", "cutoff"],
+ df=df,
+ h=h, # type: ignore
+ freq=freq,
+ n_windows=n_windows,
+ level=level, # type: ignore
+ quantiles=None,
+ )
+
+ def _detect_anomalies_distributed(
+ self,
+ df: DistributedDataFrame,
+ h: int | None = None,
+ freq: str | None = None,
+ n_windows: int | None = None,
+ level: int | float = 99,
+ num_partitions: int | None = None,
+ ) -> DistributedDataFrame:
+ """
+ Distributed anomaly detection implementation using Fugue.
+
+ This method handles Spark, Dask, and Ray DataFrames by partitioning
+ the data by unique_id and running the pandas anomaly detection
+ on each partition.
+
+ Args:
+ df: Distributed DataFrame (Spark, Dask, or Ray).
+ h: Forecast horizon.
+ freq: Frequency of the time series.
+ n_windows: Number of cross-validation windows.
+ level: Confidence level for anomaly detection.
+ num_partitions: Number of partitions to use.
+
+ Returns:
+ Distributed DataFrame with anomaly detection results (same type as input).
+ """
+ import fugue.api as fa
+
+ from .utils.distributed import (
+ _detect_anomalies_wrapper,
+ _distributed_setup,
+ _maybe_repartition_df,
+ )
+
+ df = _maybe_repartition_df(df)
+
+ schema, partition_config = _distributed_setup(
+ df=df,
+ method="detect_anomalies",
+ id_col="unique_id",
+ time_col="ds",
+ target_col="y",
+ level=level,
+ quantiles=None,
+ num_partitions=num_partitions,
+ models=self.models,
+ )
+
+ result_df = fa.transform(
+ df,
+ using=_detect_anomalies_wrapper,
+ schema=schema,
+ params={
+ "forecaster": self,
+ "h": h,
+ "freq": freq,
+ "n_windows": n_windows,
+ "level": level,
+ },
+ partition=partition_config,
+ as_fugue=True,
+ )
+
+ return fa.get_native_as_df(result_df)
+
+ def detect_anomalies(
+ self,
+ df: AnyDataFrame,
+ h: int | None = None,
+ freq: str | None = None,
+ n_windows: int | None = None,
+ level: int | float = 99,
+ num_partitions: int | None = None,
+ ) -> AnyDataFrame:
"""
Detect anomalies in time-series using a cross-validated z-score test.
@@ -276,9 +638,15 @@ def detect_anomalies(
flags values outside a two-sided prediction interval (with confidence `level`),
and returns a DataFrame with results.
+ Supports pandas, Spark, Dask, and Ray DataFrames. For distributed
+ DataFrames, the data is partitioned by unique_id and processed in
+ parallel using Fugue.
+
Args:
- df (pd.DataFrame):
+ df (AnyDataFrame):
DataFrame containing the time series to detect anomalies.
+ Supports pandas DataFrame, Spark DataFrame, Dask DataFrame,
+ or Ray Dataset.
h (int, optional):
Forecast horizon specifying how many future steps to predict.
In each cross validation window. If not provided, the seasonality
@@ -300,11 +668,15 @@ def detect_anomalies(
level (int | float):
Confidence levels for z-score, expressed as
percentages (e.g. 80, 95). Default is 99.
+ num_partitions (int, optional):
+ Number of partitions to use for distributed DataFrames. Only
+ used when df is a Spark, Dask, or Ray DataFrame. If not provided,
+ the default partitioning is used.
Returns:
- pd.DataFrame:
+ AnyDataFrame:
DataFrame containing the forecasts for each cross-validation
- window. The output includes:
+ window (same type as input). The output includes:
- "unique_id" column to indicate the series.
- "ds" column to indicate the timestamp.
@@ -316,13 +688,22 @@ def detect_anomalies(
an anomaly is defined as a value that is outside of the
prediction interval (True or False).
"""
- return self._call_models(
- "detect_anomalies",
- merge_on=["unique_id", "ds", "cutoff"],
+ # Route to distributed implementation for non-pandas DataFrames
+ if self._is_distributed_df(df):
+ return self._detect_anomalies_distributed(
+ df=df,
+ h=h,
+ freq=freq,
+ n_windows=n_windows,
+ level=level,
+ num_partitions=num_partitions,
+ )
+
+ # Pandas DataFrame path
+ return self._detect_anomalies_pandas(
df=df,
- h=h, # type: ignore
+ h=h,
freq=freq,
n_windows=n_windows,
- level=level, # type: ignore
- quantiles=None,
+ level=level,
)
diff --git a/timecopilot/models/stats.py b/timecopilot/models/stats.py
index e5a7b742..01c6454e 100644
--- a/timecopilot/models/stats.py
+++ b/timecopilot/models/stats.py
@@ -1,4 +1,5 @@
import os
+from multiprocessing import current_process
import pandas as pd
from statsforecast import StatsForecast
@@ -53,10 +54,13 @@ def run_statsforecast_model(
level: list[int | float] | None,
quantiles: list[float] | None,
) -> pd.DataFrame:
+ # Use n_jobs=1 when running in a daemon process (e.g. Dask/Fugue worker)
+ # to avoid "daemonic processes are not allowed to have children"
+ n_jobs = 1 if current_process().daemon else -1
sf = StatsForecast(
models=[model],
freq=freq,
- n_jobs=-1,
+ n_jobs=n_jobs,
fallback_model=_SeasonalNaive(
season_length=get_seasonality(freq),
),
diff --git a/timecopilot/models/utils/forecaster.py b/timecopilot/models/utils/forecaster.py
index 276e7856..21205490 100644
--- a/timecopilot/models/utils/forecaster.py
+++ b/timecopilot/models/utils/forecaster.py
@@ -549,7 +549,8 @@ def _level_to_quantiles(level: int | float) -> tuple[float, float]:
Given a prediction interval level (e.g. 80) return the lower & upper
quantiles that delimit the central interval (e.g. 0.10, 0.90).
"""
- alpha = 1 - level / 100
+    # round to 2 dp to avoid float artifacts, e.g. level == 80 gives 1 - 80/100 == 0.19999999999999996
+ alpha = round(1 - level / 100, 2)
q_lo = alpha / 2
q_hi = 1 - q_lo
return q_lo, q_hi
diff --git a/timecopilot/models/utils/parallel_forecaster.py b/timecopilot/models/utils/parallel_forecaster.py
index 4873f3d5..cfa30356 100644
--- a/timecopilot/models/utils/parallel_forecaster.py
+++ b/timecopilot/models/utils/parallel_forecaster.py
@@ -1,6 +1,6 @@
import os
from collections.abc import Callable
-from multiprocessing import Pool
+from multiprocessing import Pool, current_process
import pandas as pd
@@ -26,6 +26,10 @@ def _apply_parallel(
func: Callable,
**kwargs,
) -> pd.DataFrame:
+ if current_process().daemon:
+ # Running inside a Dask/Fugue worker: do not spawn children
+ results = [self._process_group(df, func, **kwargs) for _, df in df_grouped]
+ return pd.concat(results)
with Pool(max(1, (os.cpu_count() or 1) - 1)) as executor:
futures = [
executor.apply_async(
diff --git a/timecopilot/utils/distributed.py b/timecopilot/utils/distributed.py
new file mode 100644
index 00000000..d34b2f61
--- /dev/null
+++ b/timecopilot/utils/distributed.py
@@ -0,0 +1,348 @@
+"""Distributed DataFrame utilities for TimeCopilot using Fugue."""
+
+from __future__ import annotations
+
+from contextlib import contextmanager, suppress
+from multiprocessing import current_process
+from typing import TYPE_CHECKING, Any
+
+import pandas as pd
+
+from timecopilot.forecaster import AnyDataFrame
+
+if TYPE_CHECKING:
+ from ..models.utils.forecaster import Forecaster
+
+
+def _register_fugue_backends() -> None:
+ """
+ Register Fugue backends for distributed DataFrames.
+
+ This ensures that Fugue can recognize and handle Dask, Spark,
+ and Ray DataFrames properly.
+ """
+ with suppress(ImportError):
+ import fugue_dask # noqa: F401
+
+ with suppress(ImportError):
+ import fugue_spark # noqa: F401
+
+ with suppress(ImportError):
+ import fugue_ray # noqa: F401
+
+
+def _get_schema(
+ df: Any,
+ method: str,
+ id_col: str,
+ time_col: str,
+ target_col: str,
+ level: int | float | list[int | float] | None,
+ quantiles: list[float] | None,
+ models: list[Forecaster],
+):
+ """
+ Build the output schema for distributed operations.
+
+ Args:
+ df: Input DataFrame (any distributed type).
+ method: The method being called ("forecast", "cross_validation", "detect_anomalies").
+ id_col: Name of the ID column.
+ time_col: Name of the time column.
+ target_col: Name of the target column.
+ level: Confidence levels for prediction intervals.
+ quantiles: Quantiles to forecast.
+ models: List of forecaster models.
+
+ Returns:
+ Schema object for the output DataFrame.
+ """ # noqa: E501
+ _register_fugue_backends()
+ import fugue.api as fa
+
+ # Base columns depend on the method
+ base_cols = [id_col, time_col]
+ if method != "forecast":
+ base_cols.append(target_col)
+
+ # Extract base schema from input DataFrame
+ schema = fa.get_schema(df).extract(base_cols).copy()
+
+ # Add model columns
+ for model in models:
+ schema.append(f"{model.alias}:double")
+
+ # Add method-specific columns
+ if method == "detect_anomalies":
+ # Add cutoff column with same type as time_col
+ schema.append(("cutoff", schema[time_col].type))
+ # Add anomaly detection columns for each model
+ for model in models:
+            # level is a scalar (int | float) when method == "detect_anomalies",
+            # so int(level) is safe; the ignore silences the list-union type
+ schema.append(f"{model.alias}-lo-{int(level)}:double") # type: ignore
+ schema.append(f"{model.alias}-hi-{int(level)}:double") # type: ignore
+ schema.append(f"{model.alias}-anomaly:bool")
+ elif method == "cross_validation":
+ # Add cutoff column with same type as time_col
+ schema.append(("cutoff", schema[time_col].type))
+
+ # Add prediction interval columns if level is provided
+ if level is not None and method != "detect_anomalies":
+ if not isinstance(level, list):
+ level = [level]
+ level = sorted(level)
+ for model in models:
+ for lv in reversed(level):
+ schema.append(f"{model.alias}-lo-{lv}:double")
+ for lv in level:
+ schema.append(f"{model.alias}-hi-{lv}:double")
+
+ # Add quantile columns if quantiles are provided
+ if quantiles is not None:
+ quantiles = sorted(quantiles)
+ for model in models:
+ for q in quantiles:
+ schema.append(f"{model.alias}-q-{int(q * 100)}:double")
+
+ return schema
+
+
+def _is_supported_distributed_df(df: Any) -> bool:
+ """Check if the DataFrame is a supported distributed type.
+
+ Args:
+ df: DataFrame to check.
+
+ Returns:
+ True if supported (Spark, Dask, or Ray), False otherwise.
+ """
+ df_module = type(df).__module__
+ df_name = type(df).__name__
+
+ # Check for Dask DataFrame (both old and new dask-expr backend)
+ if "dask.dataframe" in df_module or "dask_expr" in df_module:
+ return True
+
+ # Check for Spark DataFrame
+ if "pyspark.sql" in df_module and "DataFrame" in df_name:
+ return True
+
+ # Check for Ray Dataset
+ if "ray.data" in df_module:
+ return True
+
+ # Try Fugue's inference as fallback
+ try:
+ from fugue.execution import infer_execution_engine
+
+ return infer_execution_engine([df]) is not None
+ except Exception:
+ return False
+
+
+def _maybe_repartition_df(df: AnyDataFrame) -> AnyDataFrame:
+ df_module = type(df).__module__
+ if (
+ "dask.dataframe" in df_module or "dask_expr" in df_module
+ ) and df.npartitions == 1:
+ return df.repartition(npartitions=2)
+ return df
+
+
+def _distributed_setup(
+ df: Any,
+ method: str,
+ id_col: str,
+ time_col: str,
+ target_col: str,
+ level: int | float | list[int | float] | None,
+ quantiles: list[float] | None,
+ num_partitions: int | None,
+ models: list[Forecaster],
+) -> tuple[Any, dict[str, Any]]:
+ """Set up schema and partition configuration for distributed operations.
+
+ Args:
+ df: Input DataFrame (any distributed type).
+ method: The method being called.
+ id_col: Name of the ID column.
+ time_col: Name of the time column.
+ target_col: Name of the target column.
+ level: Confidence levels for prediction intervals.
+ quantiles: Quantiles to forecast.
+ num_partitions: Number of partitions to use.
+ models: List of forecaster models.
+
+ Returns:
+ Tuple of (schema, partition_config).
+
+ Raises:
+ ValueError: If execution engine cannot be inferred from DataFrame type.
+ """
+ if not _is_supported_distributed_df(df):
+ raise ValueError(
+ f"Could not infer execution engine for type {type(df).__name__}. "
+ "Expected a Spark or Dask DataFrame or a Ray Dataset."
+ )
+
+ # Build output schema based on method
+ schema = _get_schema(
+ df=df,
+ method=method,
+ id_col=id_col,
+ time_col=time_col,
+ target_col=target_col,
+ level=level,
+ quantiles=quantiles,
+ models=models,
+ )
+
+ # Configure partitioning: by id_col with coarse algorithm
+ # "coarse" means series with the same unique_id are grouped together
+ partition_config: dict[str, Any] = {"by": id_col, "algo": "coarse"}
+ if num_partitions is not None:
+ partition_config["num"] = num_partitions
+
+ return schema, partition_config
+
+
+@contextmanager
+def _patch_torch_dataloader_for_daemon():
+ """Force PyTorch DataLoader to use num_workers=0 when running in a daemon process.
+
+ Chronos (and other libs) create DataLoaders with num_workers>0, which spawns
+ child processes. Daemon processes cannot have children, so we patch
+ DataLoader to use num_workers=0 for the duration of the context.
+ """
+ if not current_process().daemon:
+ yield
+ return
+ try:
+ import torch.utils.data
+ except ImportError:
+ yield
+ return
+ _DataLoader = torch.utils.data.DataLoader
+ _orig_init = _DataLoader.__init__
+
+ def _patched_init(self, *args, **kwargs):
+ kwargs["num_workers"] = 0
+ _orig_init(self, *args, **kwargs)
+
+ torch.utils.data.DataLoader.__init__ = _patched_init
+ try:
+ yield
+ finally:
+ torch.utils.data.DataLoader.__init__ = _orig_init
+
+
+def _forecast_wrapper(
+ df: pd.DataFrame,
+ forecaster: Any, # TimeCopilotForecaster - using Any to avoid import issues
+ h: int,
+ freq: str | None,
+ level: list[int | float] | None,
+ quantiles: list[float] | None,
+) -> pd.DataFrame:
+ """Wrapper function for distributed forecast.
+
+ This function is called by Fugue's transform for each partition.
+ It receives a pandas DataFrame partition and calls the regular
+ forecast method.
+
+ Args:
+ df: Pandas DataFrame partition (grouped by unique_id).
+ forecaster: The TimeCopilotForecaster instance.
+ h: Forecast horizon.
+ freq: Frequency of the time series.
+ level: Confidence levels for prediction intervals.
+ quantiles: Quantiles to forecast.
+
+ Returns:
+ Pandas DataFrame with forecast results.
+ """
+ with _patch_torch_dataloader_for_daemon():
+ return forecaster._forecast_pandas(
+ df=df,
+ h=h,
+ freq=freq,
+ level=level,
+ quantiles=quantiles,
+ )
+
+
+def _cross_validation_wrapper(
+ df: pd.DataFrame,
+ forecaster: Any, # TimeCopilotForecaster - using Any to avoid import issues
+ h: int,
+ freq: str | None,
+ n_windows: int,
+ step_size: int | None,
+ level: list[int | float] | None,
+ quantiles: list[float] | None,
+) -> pd.DataFrame:
+ """Wrapper function for distributed cross-validation.
+
+ This function is called by Fugue's transform for each partition.
+ It receives a pandas DataFrame partition and calls the regular
+ cross_validation method.
+
+ Args:
+ df: Pandas DataFrame partition (grouped by unique_id).
+ forecaster: The TimeCopilotForecaster instance.
+ h: Forecast horizon.
+ freq: Frequency of the time series.
+ n_windows: Number of cross-validation windows.
+ step_size: Step size between windows.
+ level: Confidence levels for prediction intervals.
+ quantiles: Quantiles to forecast.
+
+ Returns:
+ Pandas DataFrame with cross-validation results.
+ """
+ with _patch_torch_dataloader_for_daemon():
+ return forecaster._cross_validation_pandas(
+ df=df,
+ h=h,
+ freq=freq,
+ n_windows=n_windows,
+ step_size=step_size,
+ level=level,
+ quantiles=quantiles,
+ )
+
+
+def _detect_anomalies_wrapper(
+ df: pd.DataFrame,
+ forecaster: Any, # TimeCopilotForecaster - using Any to avoid import issues
+ h: int | None,
+ freq: str | None,
+ n_windows: int | None,
+ level: int | float,
+) -> pd.DataFrame:
+ """Wrapper function for distributed anomaly detection.
+
+ This function is called by Fugue's transform for each partition.
+ It receives a pandas DataFrame partition and calls the regular
+ detect_anomalies method.
+
+ Args:
+ df: Pandas DataFrame partition (grouped by unique_id).
+ forecaster: The TimeCopilotForecaster instance.
+ h: Forecast horizon.
+ freq: Frequency of the time series.
+ n_windows: Number of cross-validation windows.
+ level: Confidence level for anomaly detection.
+
+ Returns:
+ Pandas DataFrame with anomaly detection results.
+ """
+ with _patch_torch_dataloader_for_daemon():
+ return forecaster._detect_anomalies_pandas(
+ df=df,
+ h=h,
+ freq=freq,
+ n_windows=n_windows,
+ level=level,
+ )
diff --git a/uv.lock b/uv.lock
index aa3dbe4e..76d4b4ec 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1,14 +1,19 @@
requires-python = ">=3.10"
resolution-markers = [
- "python_full_version < '3.11' and sys_platform != 'linux'",
+ "(python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.13.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version >= '3.14' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version < '3.11' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version < '3.11' and sys_platform == 'linux'",
- "python_full_version == '3.11.*' and sys_platform != 'linux'",
+ "python_full_version == '3.11.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.11.*' and sys_platform == 'linux'",
- "python_full_version == '3.12.*' and sys_platform != 'linux'",
+ "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.12.*' and sys_platform == 'linux'",
- "python_full_version == '3.13.*' and sys_platform != 'linux'",
+ "python_full_version == '3.13.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.13.*' and sys_platform == 'linux'",
- "python_full_version >= '3.14' and sys_platform != 'linux'",
+ "python_full_version >= '3.14' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version >= '3.14' and sys_platform == 'linux'",
]
revision = 3
@@ -282,15 +287,6 @@ wheels = [
{hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z", url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl"},
]
-[[package]]
-name = "appdirs"
-sdist = {hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", size = 13470, upload-time = "2020-05-11T07:59:51.037Z", url = "https://files.pythonhosted.org/packages/d7/d8/05696357e0311f5b5c316d7b95f46c669dd9c15aaeecbb48c7d0aeb88c40/appdirs-1.4.4.tar.gz"}
-source = {registry = "https://pypi.org/simple"}
-version = "1.4.4"
-wheels = [
- {hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128", size = 9566, upload-time = "2020-05-11T07:59:49.499Z", url = "https://files.pythonhosted.org/packages/3b/00/2344469e2084fb287c2e0b57b72910309874c3245463acd6cf5e3db69324/appdirs-1.4.4-py2.py3-none-any.whl"},
-]
-
[[package]]
name = "appnope"
sdist = {hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170, upload-time = "2024-02-06T09:43:11.258Z", url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz"}
@@ -804,7 +800,8 @@ dependencies = [
]
name = "contourpy"
resolution-markers = [
- "python_full_version < '3.11' and sys_platform != 'linux'",
+ "(python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version < '3.11' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version < '3.11' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:b6945942715a034c671b7fc54f9588126b0b8bf23db2696e3ca8328f3ff0ab54", size = 13466130, upload-time = "2025-04-15T17:47:53.79Z", url = "https://files.pythonhosted.org/packages/66/54/eb9bfc647b19f2009dd5c7f5ec51c4e6ca831725f1aea7a993034f483147/contourpy-1.3.2.tar.gz"}
@@ -876,13 +873,17 @@ dependencies = [
]
name = "contourpy"
resolution-markers = [
- "python_full_version == '3.11.*' and sys_platform != 'linux'",
+ "(python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.13.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version >= '3.14' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version == '3.11.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.11.*' and sys_platform == 'linux'",
- "python_full_version == '3.12.*' and sys_platform != 'linux'",
+ "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.12.*' and sys_platform == 'linux'",
- "python_full_version == '3.13.*' and sys_platform != 'linux'",
+ "python_full_version == '3.13.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.13.*' and sys_platform == 'linux'",
- "python_full_version >= '3.14' and sys_platform != 'linux'",
+ "python_full_version >= '3.14' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version >= '3.14' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174, upload-time = "2025-07-26T12:03:12.549Z", url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz"}
@@ -1136,16 +1137,63 @@ wheels = [
{hash = "sha256:4c1819588d83bed2cbcf5807daa5c2380c8c85789a6935a733f018f04ad8a6a2", size = 7900661, upload-time = "2025-07-31T19:18:50.679Z", url = "https://files.pythonhosted.org/packages/d3/36/e0010483ca49b9bf6f389631ccea07b3ff6b678d14d8c7a0a4357860c36a/dash-3.2.0-py3-none-any.whl"},
]
+[[package]]
+dependencies = [
+ {marker = "python_full_version < '3.12'", name = "importlib-metadata"},
+ {name = "click"},
+ {name = "cloudpickle"},
+ {name = "fsspec"},
+ {name = "packaging"},
+ {name = "partd"},
+ {name = "pyyaml"},
+ {name = "toolz"},
+]
+name = "dask"
+sdist = {hash = "sha256:bac809af21c2dd7eb06827bccbfc612504f3ee6435580e548af912828f823195", size = 10693689, upload-time = "2024-12-17T20:26:53.546Z", url = "https://files.pythonhosted.org/packages/b6/19/1d1e57c0fa24dfd241bbec46d4b70c37ec15e8071a7e06d43d327c8dafbb/dask-2024.12.1.tar.gz"}
+source = {registry = "https://pypi.org/simple"}
+version = "2024.12.1"
+wheels = [
+ {hash = "sha256:1f32acddf1a6994e3af6734756f0a92467c47050bc29f3555bb9b140420e8e19", size = 1269300, upload-time = "2024-12-17T20:26:41.118Z", url = "https://files.pythonhosted.org/packages/6d/5a/cdc78a77bb1c7290fd1ccfe6001437f99a2af63e28343299abd09336236e/dask-2024.12.1-py3-none-any.whl"},
+]
+
+[package.optional-dependencies]
+dataframe = [
+ {marker = "python_full_version < '3.13'", name = "numpy", source = {registry = "https://pypi.org/simple"}, version = "1.26.4"},
+ {marker = "python_full_version < '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.1.4"},
+ {marker = "python_full_version >= '3.13'", name = "numpy", source = {registry = "https://pypi.org/simple"}, version = "2.1.3"},
+ {marker = "python_full_version >= '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.3.3"},
+ {name = "dask-expr"},
+]
+distributed = [
+ {name = "distributed"},
+]
+
+[[package]]
+dependencies = [
+ {marker = "python_full_version < '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.1.4"},
+ {marker = "python_full_version >= '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.3.3"},
+ {name = "dask"},
+ {name = "pyarrow"},
+]
+name = "dask-expr"
+sdist = {hash = "sha256:eb45de8e6fea1ce2608a431b4e03a484592defb1796665530c91386ffac581d3", size = 223929, upload-time = "2024-12-17T20:26:49.519Z", url = "https://files.pythonhosted.org/packages/e2/d3/50af8a5826231a804b0286704ed7be494d685337e159bf600cb396fcfcf9/dask_expr-1.1.21.tar.gz"}
+source = {registry = "https://pypi.org/simple"}
+version = "1.1.21"
+wheels = [
+ {hash = "sha256:2c2a9a0b0e66b26cf918679988f97e947bc936544f3a106102055adb9a9edeba", size = 244297, upload-time = "2024-12-17T20:26:47.647Z", url = "https://files.pythonhosted.org/packages/a9/99/60c73ccb5a272ff396bc766bfa3c9caa71484424983f0334070263a16581/dask_expr-1.1.21-py3-none-any.whl"},
+]
+
[[package]]
dependencies = [
{extra = ["http"], name = "fsspec"},
+ {marker = "platform_machine != 'x86_64' or sys_platform != 'darwin'", name = "dill", source = {registry = "https://pypi.org/simple"}, version = "0.4.0"},
+ {marker = "platform_machine == 'x86_64' and sys_platform == 'darwin'", name = "dill", source = {registry = "https://pypi.org/simple"}, version = "0.3.8"},
{marker = "python_full_version < '3.13'", name = "numpy", source = {registry = "https://pypi.org/simple"}, version = "1.26.4"},
{marker = "python_full_version < '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.1.4"},
{marker = "python_full_version < '3.13'", name = "xxhash", source = {registry = "https://pypi.org/simple"}, version = "3.5.0"},
{marker = "python_full_version >= '3.13'", name = "numpy", source = {registry = "https://pypi.org/simple"}, version = "2.1.3"},
{marker = "python_full_version >= '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.3.3"},
{marker = "python_full_version >= '3.13'", name = "xxhash", source = {registry = "https://pypi.org/simple"}, version = "3.6.0"},
- {name = "dill"},
{name = "filelock"},
{name = "huggingface-hub"},
{name = "multiprocess"},
@@ -1224,6 +1272,34 @@ wheels = [
[[package]]
name = "dill"
+resolution-markers = [
+ "python_full_version < '3.11' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
+ "python_full_version == '3.11.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
+ "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
+ "python_full_version == '3.13.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
+ "python_full_version >= '3.14' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
+]
+sdist = {hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca", size = 184847, upload-time = "2024-01-27T23:42:16.145Z", url = "https://files.pythonhosted.org/packages/17/4d/ac7ffa80c69ea1df30a8aa11b3578692a5118e7cd1aa157e3ef73b092d15/dill-0.3.8.tar.gz"}
+source = {registry = "https://pypi.org/simple"}
+version = "0.3.8"
+wheels = [
+ {hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7", size = 116252, upload-time = "2024-01-27T23:42:14.239Z", url = "https://files.pythonhosted.org/packages/c9/7a/cef76fd8438a42f96db64ddaa85280485a9c395e7df3db8158cfec1eee34/dill-0.3.8-py3-none-any.whl"},
+]
+
+[[package]]
+name = "dill"
+resolution-markers = [
+ "(python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.13.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version >= '3.14' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version < '3.11' and sys_platform == 'linux'",
+ "python_full_version == '3.11.*' and sys_platform == 'linux'",
+ "python_full_version == '3.12.*' and sys_platform == 'linux'",
+ "python_full_version == '3.13.*' and sys_platform == 'linux'",
+ "python_full_version >= '3.14' and sys_platform == 'linux'",
+]
sdist = {hash = "sha256:0633f1d2df477324f53a895b02c901fb961bdbf65a17122586ea7019292cbcf0", size = 186976, upload-time = "2025-04-16T00:41:48.867Z", url = "https://files.pythonhosted.org/packages/12/80/630b4b88364e9a8c8c5797f4602d0f76ef820909ee32f0bacb9f90654042/dill-0.4.0.tar.gz"}
source = {registry = "https://pypi.org/simple"}
version = "0.4.0"
@@ -1240,6 +1316,32 @@ wheels = [
{hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z", url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl"},
]
+[[package]]
+dependencies = [
+ {name = "click"},
+ {name = "cloudpickle"},
+ {name = "dask"},
+ {name = "jinja2"},
+ {name = "locket"},
+ {name = "msgpack"},
+ {name = "packaging"},
+ {name = "psutil"},
+ {name = "pyyaml"},
+ {name = "sortedcontainers"},
+ {name = "tblib"},
+ {name = "toolz"},
+ {name = "tornado"},
+ {name = "urllib3"},
+ {name = "zict"},
+]
+name = "distributed"
+sdist = {hash = "sha256:438aa3ae48bfac9c2bb2ad03f9d47899286f9cb3db8a627b3b8c0de9e26f53dd", size = 1115786, upload-time = "2024-12-17T20:26:47.227Z", url = "https://files.pythonhosted.org/packages/67/ce/0ca6d4e1da90f5b3af135b3abbf0487b2602d046cc090b793869928880b5/distributed-2024.12.1.tar.gz"}
+source = {registry = "https://pypi.org/simple"}
+version = "2024.12.1"
+wheels = [
+ {hash = "sha256:87e31abaa0ee3dc517b44fec4993d4b5d92257f926a8d2a12d52c005227154e7", size = 1022935, upload-time = "2024-12-17T20:26:42.471Z", url = "https://files.pythonhosted.org/packages/e8/90/82171cc7fe1c6d10bac57587c7ac012be80412ad06ef8c4952c5f067f869/distributed-2024.12.1-py3-none-any.whl"},
+]
+
[[package]]
name = "distro"
sdist = {hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z", url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz"}
@@ -1258,6 +1360,48 @@ wheels = [
{hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z", url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl"},
]
+[[package]]
+name = "duckdb"
+sdist = {hash = "sha256:8bba52fd2acb67668a4615ee17ee51814124223de836d9e2fdcbc4c9021b3d3c", size = 18419763, upload-time = "2026-01-26T11:50:37.68Z", url = "https://files.pythonhosted.org/packages/36/9d/ab66a06e416d71b7bdcb9904cdf8d4db3379ef632bb8e9495646702d9718/duckdb-1.4.4.tar.gz"}
+source = {registry = "https://pypi.org/simple"}
+version = "1.4.4"
+wheels = [
+ {hash = "sha256:0509b39ea7af8cff0198a99d206dca753c62844adab54e545984c2e2c1381616", size = 15350691, upload-time = "2026-01-26T11:49:43.242Z", url = "https://files.pythonhosted.org/packages/9c/9b/3c7c5e48456b69365d952ac201666053de2700f5b0144a699a4dc6854507/duckdb-1.4.4-cp313-cp313-macosx_10_13_x86_64.whl"},
+ {hash = "sha256:0d636ceda422e7babd5e2f7275f6a0d1a3405e6a01873f00d38b72118d30c10b", size = 18422740, upload-time = "2026-01-26T11:49:49.034Z", url = "https://files.pythonhosted.org/packages/09/5b/3e7aa490841784d223de61beb2ae64e82331501bf5a415dc87a0e27b4663/duckdb-1.4.4-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl"},
+ {hash = "sha256:1af6e76fe8bd24875dc56dd8e38300d64dc708cd2e772f67b9fbc635cc3066a3", size = 18426882, upload-time = "2026-01-26T11:50:08.97Z", url = "https://files.pythonhosted.org/packages/2d/94/24364da564b27aeebe44481f15bd0197a0b535ec93f188a6b1b98c22f082/duckdb-1.4.4-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl"},
+ {hash = "sha256:1f8d55843cc940e36261689054f7dfb6ce35b1f5b0953b0d355b6adb654b0d52", size = 13672403, upload-time = "2026-01-26T11:49:26.741Z", url = "https://files.pythonhosted.org/packages/5d/8c/c13d396fd4e9bf970916dc5b4fea410c1b10fe531069aea65f1dcf849a71/duckdb-1.4.4-cp312-cp312-macosx_11_0_arm64.whl"},
+ {hash = "sha256:25874f8b1355e96178079e37312c3ba6d61a2354f51319dae860cf21335c3a20", size = 28909554, upload-time = "2026-01-26T11:50:00.107Z", url = "https://files.pythonhosted.org/packages/97/a6/f19e2864e651b0bd8e4db2b0c455e7e0d71e0d4cd2cd9cc052f518e43eb3/duckdb-1.4.4-cp314-cp314-macosx_10_15_universal2.whl"},
+ {hash = "sha256:337f8b24e89bc2e12dadcfe87b4eb1c00fd920f68ab07bc9b70960d6523b8bc3", size = 28899349, upload-time = "2026-01-26T11:49:40.294Z", url = "https://files.pythonhosted.org/packages/7f/fe/64810fee20030f2bf96ce28b527060564864ce5b934b50888eda2cbf99dd/duckdb-1.4.4-cp313-cp313-macosx_10_13_universal2.whl"},
+ {hash = "sha256:452c5b5d6c349dc5d1154eb2062ee547296fcbd0c20e9df1ed00b5e1809089da", size = 15353804, upload-time = "2026-01-26T11:50:03.382Z", url = "https://files.pythonhosted.org/packages/0e/93/8a24e932c67414fd2c45bed83218e62b73348996bf859eda020c224774b2/duckdb-1.4.4-cp314-cp314-macosx_10_15_x86_64.whl"},
+ {hash = "sha256:453b115f4777467f35103d8081770ac2f223fb5799178db5b06186e3ab51d1f2", size = 20407046, upload-time = "2026-01-26T11:48:55.673Z", url = "https://files.pythonhosted.org/packages/a2/5f/23bd586ecb21273b41b5aa4b16fd88b7fecb53ed48d897273651c0c3d66f/duckdb-1.4.4-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl"},
+ {hash = "sha256:47dd4162da6a2be59a0aef640eb08d6360df1cf83c317dcc127836daaf3b7f7c", size = 13670044, upload-time = "2026-01-26T11:49:06.627Z", url = "https://files.pythonhosted.org/packages/d3/f0/cf4241a040ec4f571859a738007ec773b642fbc27df4cbcf34b0c32ea559/duckdb-1.4.4-cp311-cp311-macosx_11_0_arm64.whl"},
+ {hash = "sha256:49123b579e4a6323e65139210cd72dddc593a72d840211556b60f9703bda8526", size = 15339148, upload-time = "2026-01-26T11:48:45.343Z", url = "https://files.pythonhosted.org/packages/6b/7a/e9277d0567884c21f345ad43cc01aeaa2abe566d5fdf22e35c3861dd44fa/duckdb-1.4.4-cp310-cp310-macosx_10_9_x86_64.whl"},
+ {hash = "sha256:4c25d5b0febda02b7944e94fdae95aecf952797afc8cb920f677b46a7c251955", size = 20411739, upload-time = "2026-01-26T11:49:12.652Z", url = "https://files.pythonhosted.org/packages/79/a2/ac0f5ee16df890d141304bcd48733516b7202c0de34cd3555634d6eb4551/duckdb-1.4.4-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl"},
+ {hash = "sha256:50f2eb173c573811b44aba51176da7a4e5c487113982be6a6a1c37337ec5fa57", size = 13007490, upload-time = "2026-01-26T11:49:37.413Z", url = "https://files.pythonhosted.org/packages/f8/15/37af97f5717818f3d82d57414299c293b321ac83e048c0a90bb8b6a09072/duckdb-1.4.4-cp312-cp312-win_arm64.whl"},
+ {hash = "sha256:53cd6423136ab44383ec9955aefe7599b3fb3dd1fe006161e6396d8167e0e0d4", size = 13458633, upload-time = "2026-01-26T11:50:17.657Z", url = "https://files.pythonhosted.org/packages/dd/2d/13e6024e613679d8a489dd922f199ef4b1d08a456a58eadd96dc2f05171f/duckdb-1.4.4-cp314-cp314-win_arm64.whl"},
+ {hash = "sha256:5536eb952a8aa6ae56469362e344d4e6403cc945a80bc8c5c2ebdd85d85eb64b", size = 15339662, upload-time = "2026-01-26T11:49:04.058Z", url = "https://files.pythonhosted.org/packages/b3/3e/cec70e546c298ab76d80b990109e111068d82cca67942c42328eaa7d6fdb/duckdb-1.4.4-cp311-cp311-macosx_10_9_x86_64.whl"},
+ {hash = "sha256:59c8d76016dde854beab844935b1ec31de358d4053e792988108e995b18c08e7", size = 12762360, upload-time = "2026-01-26T11:50:14.76Z", url = "https://files.pythonhosted.org/packages/d2/b1/fd5c37c53d45efe979f67e9bd49aaceef640147bb18f0699a19edd1874d6/duckdb-1.4.4-cp314-cp314-win_amd64.whl"},
+ {hash = "sha256:5ba684f498d4e924c7e8f30dd157da8da34c8479746c5011b6c0e037e9c60ad2", size = 28883816, upload-time = "2026-01-26T11:49:01.009Z", url = "https://files.pythonhosted.org/packages/04/68/19233412033a2bc5a144a3f531f64e3548d4487251e3f16b56c31411a06f/duckdb-1.4.4-cp311-cp311-macosx_10_9_universal2.whl"},
+ {hash = "sha256:5e1933fac5293fea5926b0ee75a55b8cfe7f516d867310a5b251831ab61fe62b", size = 13668431, upload-time = "2026-01-26T11:48:47.864Z", url = "https://files.pythonhosted.org/packages/4a/96/3a7630d2779d2bae6f3cdf540a088ed45166adefd3c429971e5b85ce8f84/duckdb-1.4.4-cp310-cp310-macosx_11_0_arm64.whl"},
+ {hash = "sha256:6703dd1bb650025b3771552333d305d62ddd7ff182de121483d4e042ea6e2e00", size = 12256972, upload-time = "2026-01-26T11:49:15.468Z", url = "https://files.pythonhosted.org/packages/37/a2/9a3402edeedaecf72de05fe9ff7f0303d701b8dfc136aea4a4be1a5f7eee/duckdb-1.4.4-cp311-cp311-win_amd64.whl"},
+ {hash = "sha256:6792ca647216bd5c4ff16396e4591cfa9b4a72e5ad7cdd312cec6d67e8431a7c", size = 15349716, upload-time = "2026-01-26T11:49:23.989Z", url = "https://files.pythonhosted.org/packages/76/66/82413f386df10467affc87f65bac095b7c88dbd9c767584164d5f4dc4cb8/duckdb-1.4.4-cp312-cp312-macosx_10_13_x86_64.whl"},
+ {hash = "sha256:6cb357cfa3403910e79e2eb46c8e445bb1ee2fd62e9e9588c6b999df4256abc1", size = 18409951, upload-time = "2026-01-26T11:49:09.808Z", url = "https://files.pythonhosted.org/packages/11/64/de2bb4ec1e35ec9ebf6090a95b930fc56934a0ad6f34a24c5972a14a77ef/duckdb-1.4.4-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl"},
+ {hash = "sha256:6fb1225a9ea5877421481d59a6c556a9532c32c16c7ae6ca8d127e2b878c9389", size = 12268083, upload-time = "2026-01-26T11:49:54.615Z", url = "https://files.pythonhosted.org/packages/a4/f0/620323fd87062ea43e527a2d5ed9e55b525e0847c17d3b307094ddab98a2/duckdb-1.4.4-cp313-cp313-win_amd64.whl"},
+ {hash = "sha256:707530f6637e91dc4b8125260595299ec9dd157c09f5d16c4186c5988bfbd09a", size = 18409546, upload-time = "2026-01-26T11:48:51.142Z", url = "https://files.pythonhosted.org/packages/8e/ad/f62a3a65d200e8afc1f75cf0dd3f0aa84ef0dd07c484414a11f2abed810e/duckdb-1.4.4-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl"},
+ {hash = "sha256:7df7351328ffb812a4a289732f500d621e7de9942a3a2c9b6d4afcf4c0e72526", size = 20435578, upload-time = "2026-01-26T11:49:51.946Z", url = "https://files.pythonhosted.org/packages/53/32/256df3dbaa198c58539ad94f9a41e98c2c8ff23f126b8f5f52c7dcd0a738/duckdb-1.4.4-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl"},
+ {hash = "sha256:8e5c2d8a0452df55e092959c0bfc8ab8897ac3ea0f754cb3b0ab3e165cd79aff", size = 13676157, upload-time = "2026-01-26T11:50:06.232Z", url = "https://files.pythonhosted.org/packages/62/13/e5378ff5bb1d4397655d840b34b642b1b23cdd82ae19599e62dc4b9461c9/duckdb-1.4.4-cp314-cp314-macosx_11_0_arm64.whl"},
+ {hash = "sha256:a3c8542db7ffb128aceb7f3b35502ebaddcd4f73f1227569306cc34bad06680c", size = 12256576, upload-time = "2026-01-26T11:48:58.203Z", url = "https://files.pythonhosted.org/packages/8b/d0/4ce78bf341c930d4a22a56cb686bfc2c975eaf25f653a7ac25e3929d98bb/duckdb-1.4.4-cp310-cp310-win_amd64.whl"},
+ {hash = "sha256:b297eff642503fd435a9de5a9cb7db4eccb6f61d61a55b30d2636023f149855f", size = 20437385, upload-time = "2026-01-26T11:49:32.302Z", url = "https://files.pythonhosted.org/packages/2e/a3/97715bba30040572fb15d02c26f36be988d48bc00501e7ac02b1d65ef9d0/duckdb-1.4.4-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl"},
+ {hash = "sha256:bf138201f56e5d6fc276a25138341b3523e2f84733613fc43f02c54465619a95", size = 13006696, upload-time = "2026-01-26T11:49:18.054Z", url = "https://files.pythonhosted.org/packages/f6/e6/052ea6dcdf35b259fd182eff3efd8d75a071de4010c9807556098df137b9/duckdb-1.4.4-cp311-cp311-win_arm64.whl"},
+ {hash = "sha256:c65d15c440c31e06baaebfd2c06d71ce877e132779d309f1edf0a85d23c07e92", size = 18419001, upload-time = "2026-01-26T11:49:29.353Z", url = "https://files.pythonhosted.org/packages/db/77/2446a0b44226bb95217748d911c7ca66a66ca10f6481d5178d9370819631/duckdb-1.4.4-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl"},
+ {hash = "sha256:d0440f59e0cd9936a9ebfcf7a13312eda480c79214ffed3878d75947fc3b7d6d", size = 20435641, upload-time = "2026-01-26T11:50:12.188Z", url = "https://files.pythonhosted.org/packages/26/0a/6ae31b2914b4dc34243279b2301554bcbc5f1a09ccc82600486c49ab71d1/duckdb-1.4.4-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl"},
+ {hash = "sha256:d525de5f282b03aa8be6db86b1abffdceae5f1055113a03d5b50cd2fb8cf2ef8", size = 12267343, upload-time = "2026-01-26T11:49:34.985Z", url = "https://files.pythonhosted.org/packages/8b/0a/18b9167adf528cbe3867ef8a84a5f19f37bedccb606a8a9e59cfea1880c8/duckdb-1.4.4-cp312-cp312-win_amd64.whl"},
+ {hash = "sha256:ddcfd9c6ff234da603a1edd5fd8ae6107f4d042f74951b65f91bc5e2643856b3", size = 28896535, upload-time = "2026-01-26T11:49:21.232Z", url = "https://files.pythonhosted.org/packages/58/33/beadaa69f8458afe466126f2c5ee48c4759cc9d5d784f8703d44e0b52c3c/duckdb-1.4.4-cp312-cp312-macosx_10_13_universal2.whl"},
+ {hash = "sha256:e870a441cb1c41d556205deb665749f26347ed13b3a247b53714f5d589596977", size = 28884338, upload-time = "2026-01-26T11:48:41.591Z", url = "https://files.pythonhosted.org/packages/a2/9f/67a75f1e88f84946909826fa7aadd0c4b0dc067f24956142751fd9d59fe6/duckdb-1.4.4-cp310-cp310-macosx_10_9_universal2.whl"},
+ {hash = "sha256:f28a18cc790217e5b347bb91b2cab27aafc557c58d3d8382e04b4fe55d0c3f66", size = 13006123, upload-time = "2026-01-26T11:49:57.092Z", url = "https://files.pythonhosted.org/packages/e5/07/a397fdb7c95388ba9c055b9a3d38dfee92093f4427bc6946cf9543b1d216/duckdb-1.4.4-cp313-cp313-win_arm64.whl"},
+ {hash = "sha256:fb94de6d023de9d79b7edc1ae07ee1d0b4f5fa8a9dcec799650b5befdf7aafec", size = 13672311, upload-time = "2026-01-26T11:49:46.069Z", url = "https://files.pythonhosted.org/packages/a6/7b/64e68a7b857ed0340045501535a0da99ea5d9d5ea3708fec0afb8663eb27/duckdb-1.4.4-cp313-cp313-macosx_11_0_arm64.whl"},
+]
+
[[package]]
name = "einops"
sdist = {hash = "sha256:b2b04ad6081a3b227080c9bf5e3ace7160357ff03043cd66cc5b2319eb7031d1", size = 58522, upload-time = "2023-10-01T01:13:47.95Z", url = "https://files.pythonhosted.org/packages/74/6b/84ea8ea496b32a9c9331e7ca0529c670e1de0af239f597bf4f783e4a5396/einops-0.7.0.tar.gz"}
@@ -1582,20 +1726,6 @@ wheels = [
{hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z", url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl"},
]
-[[package]]
-dependencies = [
- {name = "appdirs"},
- {name = "setuptools"},
- {name = "six"},
-]
-name = "fs"
-sdist = {hash = "sha256:ae97c7d51213f4b70b6a958292530289090de3a7e15841e108fbe144f069d313", size = 187441, upload-time = "2022-05-02T09:25:54.22Z", url = "https://files.pythonhosted.org/packages/5d/a9/af5bfd5a92592c16cdae5c04f68187a309be8a146b528eac3c6e30edbad2/fs-2.4.16.tar.gz"}
-source = {registry = "https://pypi.org/simple"}
-version = "2.4.16"
-wheels = [
- {hash = "sha256:660064febbccda264ae0b6bace80a8d1be9e089e0a5eb2427b7d517f9a91545c", size = 135261, upload-time = "2022-05-02T09:25:52.363Z", url = "https://files.pythonhosted.org/packages/b9/5c/a3d95dc1ec6cdeb032d789b552ecc76effa3557ea9186e1566df6aac18df/fs-2.4.16-py2.py3-none-any.whl"},
-]
-
[[package]]
name = "fsspec"
sdist = {hash = "sha256:19fd429483d25d28b65ec68f9f4adc16c17ea2c7c7bf54ec61360d478fb19c19", size = 304847, upload-time = "2025-09-02T19:10:49.215Z", url = "https://files.pythonhosted.org/packages/de/e0/bab50af11c2d75c9c4a2a26a5254573c0bd97cea152254401510950486fa/fsspec-2025.9.0.tar.gz"}
@@ -1624,15 +1754,36 @@ wheels = [
[[package]]
dependencies = [
+ {marker = "python_full_version < '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.1.4"},
+ {marker = "python_full_version >= '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.3.3"},
{name = "adagio"},
{name = "triad"},
]
name = "fugue"
-sdist = {hash = "sha256:fb0f9a4780147ac8438be96efc50593e2d771d1cbf528ac56d3bcecd39915b50", size = 224340, upload-time = "2024-06-14T17:03:44.688Z", url = "https://files.pythonhosted.org/packages/91/a1/eca331442c758f8a6f23792dd10a51fb827fad1204805d6c70f02a35ee00/fugue-0.9.1.tar.gz"}
+sdist = {hash = "sha256:01e82574cc580e2217ee34322a76ef702e8da8d40839d1485eb6c741a527ce1f", size = 227046, upload-time = "2026-01-28T02:03:47.708Z", url = "https://files.pythonhosted.org/packages/0f/04/90b03a449a4f0095b07c7605df62bd17f382373710300bd797e8da2b6cee/fugue-0.9.5.tar.gz"}
source = {registry = "https://pypi.org/simple"}
-version = "0.9.1"
+version = "0.9.5"
wheels = [
- {hash = "sha256:5b91e55e6f243af6e2b901dc37914d954d8f0231627b68007850879f8848a3a3", size = 278186, upload-time = "2024-06-14T17:03:41.959Z", url = "https://files.pythonhosted.org/packages/ec/38/46a0ef179f7279207a3263afeb8da4dd73f44d00b6cc999c96a39112d284/fugue-0.9.1-py3-none-any.whl"},
+ {hash = "sha256:6b88038d1a5ea0cc1d4d990d108ce8e2012ad48248b74ccadc5f9bdbb7b869d7", size = 280783, upload-time = "2026-01-28T02:03:45.793Z", url = "https://files.pythonhosted.org/packages/53/ab/6081f256a800f41e181b0afe92bffb5b51df2ced9abf6aaeff53f5c430e0/fugue-0.9.5-py3-none-any.whl"},
+]
+
+[package.optional-dependencies]
+dask = [
+ {extra = ["dataframe", "distributed"], name = "dask"},
+ {marker = "python_full_version < '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.1.4"},
+ {marker = "python_full_version >= '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.3.3"},
+ {name = "pyarrow"},
+]
+ray = [
+ {extra = ["data"], name = "ray"},
+ {marker = "python_full_version < '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.1.4"},
+ {marker = "python_full_version >= '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.3.3"},
+ {name = "duckdb"},
+ {name = "pyarrow"},
+]
+spark = [
+ {name = "pyspark"},
+ {name = "zstandard"},
]
[[package]]
@@ -2003,6 +2154,9 @@ wheels = [
]
[package.optional-dependencies]
+http2 = [
+ {marker = "python_full_version < '3.13'", name = "h2"},
+]
zstd = [
{name = "zstandard"},
]
@@ -2177,7 +2331,8 @@ dependencies = [
]
name = "ipython"
resolution-markers = [
- "python_full_version < '3.11' and sys_platform != 'linux'",
+ "(python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version < '3.11' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version < '3.11' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:ca815841e1a41a1e6b73a0b08f3038af9b2252564d01fc405356d34033012216", size = 5606088, upload-time = "2025-05-31T16:39:09.613Z", url = "https://files.pythonhosted.org/packages/85/31/10ac88f3357fc276dc8a64e8880c82e80e7459326ae1d0a211b40abf6665/ipython-8.37.0.tar.gz"}
@@ -2203,13 +2358,17 @@ dependencies = [
]
name = "ipython"
resolution-markers = [
- "python_full_version == '3.11.*' and sys_platform != 'linux'",
+ "(python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.13.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version >= '3.14' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version == '3.11.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.11.*' and sys_platform == 'linux'",
- "python_full_version == '3.12.*' and sys_platform != 'linux'",
+ "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.12.*' and sys_platform == 'linux'",
- "python_full_version == '3.13.*' and sys_platform != 'linux'",
+ "python_full_version == '3.13.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.13.*' and sys_platform == 'linux'",
- "python_full_version >= '3.14' and sys_platform != 'linux'",
+ "python_full_version >= '3.14' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version >= '3.14' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:5603d6d5d356378be5043e69441a072b50a5b33b4503428c77b04cb8ce7bc731", size = 4396932, upload-time = "2025-09-29T10:55:53.948Z", url = "https://files.pythonhosted.org/packages/2a/34/29b18c62e39ee2f7a6a3bba7efd952729d8aadd45ca17efc34453b717665/ipython-9.6.0.tar.gz"}
@@ -2259,7 +2418,8 @@ dependencies = [
]
name = "jax"
resolution-markers = [
- "python_full_version < '3.11' and sys_platform != 'linux'",
+ "(python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version < '3.11' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version < '3.11' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:a437d29038cbc8300334119692744704ca7941490867b9665406b7f90665cd96", size = 2334091, upload-time = "2025-06-17T23:10:27.186Z", url = "https://files.pythonhosted.org/packages/cf/1e/267f59c8fb7f143c3f778c76cb7ef1389db3fd7e4540f04b9f42ca90764d/jax-0.6.2.tar.gz"}
@@ -2279,9 +2439,11 @@ dependencies = [
]
name = "jax"
resolution-markers = [
- "python_full_version == '3.11.*' and sys_platform != 'linux'",
+ "(python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version == '3.11.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.11.*' and sys_platform == 'linux'",
- "python_full_version == '3.12.*' and sys_platform != 'linux'",
+ "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.12.*' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:118f56338c503361d2791f069d24339d8d44a8db442ed851d2e591222fb7a56d", size = 2428411, upload-time = "2025-08-20T15:55:46.098Z", url = "https://files.pythonhosted.org/packages/bc/e8/b393ee314d3b042bd66b986d38e52f4e6046590399d916381265c20467d3/jax-0.7.1.tar.gz"}
@@ -2301,7 +2463,8 @@ dependencies = [
]
name = "jax"
resolution-markers = [
- "python_full_version == '3.13.*' and sys_platform != 'linux'",
+ "(python_full_version == '3.13.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version == '3.13.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.13.*' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:71a42b964bc6d52e819311429e6c0f5742e2a4650226dab1a1dd26fd986ca70d", size = 2434085, upload-time = "2025-09-16T16:48:53.108Z", url = "https://files.pythonhosted.org/packages/8f/e7/1e8e8af59b7659c83dc07dfa1dc23bc13551e5ef89bdef19ced044a497fc/jax-0.7.2.tar.gz"}
@@ -2319,7 +2482,8 @@ dependencies = [
]
name = "jaxlib"
resolution-markers = [
- "python_full_version < '3.11' and sys_platform != 'linux'",
+ "(python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version < '3.11' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version < '3.11' and sys_platform == 'linux'",
]
source = {registry = "https://pypi.org/simple"}
@@ -2353,9 +2517,11 @@ dependencies = [
]
name = "jaxlib"
resolution-markers = [
- "python_full_version == '3.11.*' and sys_platform != 'linux'",
+ "(python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version == '3.11.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.11.*' and sys_platform == 'linux'",
- "python_full_version == '3.12.*' and sys_platform != 'linux'",
+ "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.12.*' and sys_platform == 'linux'",
]
source = {registry = "https://pypi.org/simple"}
@@ -2393,7 +2559,8 @@ dependencies = [
]
name = "jaxlib"
resolution-markers = [
- "python_full_version == '3.13.*' and sys_platform != 'linux'",
+ "(python_full_version == '3.13.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version == '3.13.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.13.*' and sys_platform == 'linux'",
]
source = {registry = "https://pypi.org/simple"}
@@ -2838,15 +3005,25 @@ wheels = [
{hash = "sha256:f9c272682d91e0d57f2a76c6d9ebdfccc603a01828cdbe3d15273bdca0c3363a", size = 38132232, upload-time = "2025-10-01T18:04:52.181Z", url = "https://files.pythonhosted.org/packages/a4/56/4c0d503fe03bac820ecdeb14590cf9a248e120f483bcd5c009f2534f23f0/llvmlite-0.45.1-cp311-cp311-win_amd64.whl"},
]
+[[package]]
+name = "locket"
+sdist = {hash = "sha256:5c0d4c052a8bbbf750e056a8e65ccd309086f4f0f18a2eac306a8dfa4112a632", size = 4350, upload-time = "2022-04-20T22:04:44.312Z", url = "https://files.pythonhosted.org/packages/2f/83/97b29fe05cb6ae28d2dbd30b81e2e402a3eed5f460c26e9eaa5895ceacf5/locket-1.0.0.tar.gz"}
+source = {registry = "https://pypi.org/simple"}
+version = "1.0.0"
+wheels = [
+ {hash = "sha256:b6c819a722f7b6bd955b80781788e4a66a55628b858d347536b7e81325a3a5e3", size = 4398, upload-time = "2022-04-20T22:04:42.23Z", url = "https://files.pythonhosted.org/packages/db/bc/83e112abc66cd466c6b83f99118035867cecd41802f8d044638aa78a106e/locket-1.0.0-py2.py3-none-any.whl"},
+]
+
[[package]]
dependencies = [
{marker = "python_full_version < '3.11'", name = "tomli"},
+ {marker = "python_full_version < '3.13'", name = "rich", source = {registry = "https://pypi.org/simple"}, version = "14.0.0"},
+ {marker = "python_full_version >= '3.13'", name = "rich", source = {registry = "https://pypi.org/simple"}, version = "14.1.0"},
{name = "executing"},
{name = "opentelemetry-exporter-otlp-proto-http"},
{name = "opentelemetry-instrumentation"},
{name = "opentelemetry-sdk"},
{name = "protobuf"},
- {name = "rich"},
{name = "typing-extensions"},
]
name = "logfire"
@@ -3382,15 +3559,16 @@ wheels = [
[[package]]
dependencies = [
{marker = "python_full_version >= '3.11' and python_full_version < '3.13'", name = "numpy", source = {registry = "https://pypi.org/simple"}, version = "1.26.4"},
+ {marker = "python_full_version >= '3.11' and python_full_version < '3.13'", name = "rich", source = {registry = "https://pypi.org/simple"}, version = "14.0.0"},
{marker = "python_full_version >= '3.11'", name = "dacite"},
{marker = "python_full_version >= '3.11'", name = "einops"},
{marker = "python_full_version >= '3.11'", name = "ipykernel"},
{marker = "python_full_version >= '3.11'", name = "matplotlib"},
{marker = "python_full_version >= '3.11'", name = "omegaconf"},
- {marker = "python_full_version >= '3.11'", name = "rich"},
{marker = "python_full_version >= '3.11'", name = "torch"},
{marker = "python_full_version >= '3.11'", name = "tqdm"},
{marker = "python_full_version >= '3.13'", name = "numpy", source = {registry = "https://pypi.org/simple"}, version = "2.1.3"},
+ {marker = "python_full_version >= '3.13'", name = "rich", source = {registry = "https://pypi.org/simple"}, version = "14.1.0"},
]
name = "mlstm-kernels"
sdist = {hash = "sha256:683c10f5b5108ab21db60ee43a79333fa06757781a80c2c4de7aef2e74c192b4", size = 200316, upload-time = "2025-07-29T05:34:25.573Z", url = "https://files.pythonhosted.org/packages/85/85/e40077464ed57e46cec32a0f988f6bcd986fd1a3cd85055f02688e9df715/mlstm_kernels-2.0.1.tar.gz"}
@@ -3402,12 +3580,13 @@ wheels = [
[[package]]
dependencies = [
+ {marker = "python_full_version < '3.13'", name = "rich", source = {registry = "https://pypi.org/simple"}, version = "14.0.0"},
+ {marker = "python_full_version >= '3.13'", name = "rich", source = {registry = "https://pypi.org/simple"}, version = "14.1.0"},
{name = "aiohttp"},
{name = "certifi"},
{name = "click"},
{name = "grpclib"},
{name = "protobuf"},
- {name = "rich"},
{name = "synchronicity"},
{name = "toml"},
{name = "typer"},
@@ -3621,7 +3800,8 @@ wheels = [
[[package]]
dependencies = [
- {name = "dill"},
+ {marker = "platform_machine != 'x86_64' or sys_platform != 'darwin'", name = "dill", source = {registry = "https://pypi.org/simple"}, version = "0.4.0"},
+ {marker = "platform_machine == 'x86_64' and sys_platform == 'darwin'", name = "dill", source = {registry = "https://pypi.org/simple"}, version = "0.3.8"},
]
name = "multiprocess"
sdist = {hash = "sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1", size = 1772603, upload-time = "2024-01-28T18:52:34.85Z", url = "https://files.pythonhosted.org/packages/b5/ae/04f39c5d0d0def03247c2893d6f2b83c136bf3320a2154d7b8858f2ba72d/multiprocess-0.70.16.tar.gz"}
@@ -3767,7 +3947,8 @@ wheels = [
[[package]]
name = "networkx"
resolution-markers = [
- "python_full_version < '3.11' and sys_platform != 'linux'",
+ "(python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version < '3.11' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version < '3.11' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1", size = 2151368, upload-time = "2024-10-21T12:39:38.695Z", url = "https://files.pythonhosted.org/packages/fd/1d/06475e1cd5264c0b870ea2cc6fdb3e37177c1e565c43f56ff17a10e3937f/networkx-3.4.2.tar.gz"}
@@ -3780,13 +3961,17 @@ wheels = [
[[package]]
name = "networkx"
resolution-markers = [
- "python_full_version == '3.11.*' and sys_platform != 'linux'",
+ "(python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.13.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version >= '3.14' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version == '3.11.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.11.*' and sys_platform == 'linux'",
- "python_full_version == '3.12.*' and sys_platform != 'linux'",
+ "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.12.*' and sys_platform == 'linux'",
- "python_full_version == '3.13.*' and sys_platform != 'linux'",
+ "python_full_version == '3.13.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.13.*' and sys_platform == 'linux'",
- "python_full_version >= '3.14' and sys_platform != 'linux'",
+ "python_full_version >= '3.14' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version >= '3.14' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037", size = 2471065, upload-time = "2025-05-29T11:35:07.804Z", url = "https://files.pythonhosted.org/packages/6c/4f/ccdb8ad3a38e583f214547fd2f7ff1fc160c43a75af88e6aec213404b96a/networkx-3.5.tar.gz"}
@@ -3921,11 +4106,14 @@ wheels = [
[[package]]
name = "numpy"
resolution-markers = [
- "python_full_version < '3.11' and sys_platform != 'linux'",
+ "(python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version < '3.11' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version < '3.11' and sys_platform == 'linux'",
- "python_full_version == '3.11.*' and sys_platform != 'linux'",
+ "python_full_version == '3.11.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.11.*' and sys_platform == 'linux'",
- "python_full_version == '3.12.*' and sys_platform != 'linux'",
+ "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.12.*' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129, upload-time = "2024-02-06T00:26:44.495Z", url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz"}
@@ -3961,9 +4149,11 @@ wheels = [
[[package]]
name = "numpy"
resolution-markers = [
- "python_full_version == '3.13.*' and sys_platform != 'linux'",
+ "(python_full_version == '3.13.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version >= '3.14' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version == '3.13.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.13.*' and sys_platform == 'linux'",
- "python_full_version >= '3.14' and sys_platform != 'linux'",
+ "python_full_version >= '3.14' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version >= '3.14' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:aa08e04e08aaf974d4458def539dece0d28146d866a39da5639596f4921fd761", size = 20166090, upload-time = "2024-11-02T17:48:55.832Z", url = "https://files.pythonhosted.org/packages/25/ca/1166b75c21abd1da445b97bf1fa2f14f423c6cfb4fc7c4ef31dccf9f6a94/numpy-2.1.3.tar.gz"}
@@ -4128,6 +4318,15 @@ wheels = [
{hash = "sha256:f1bb701d6b930d5a7cea44c19ceb973311500847f81b634d802b7b539dc55623", size = 287193691, upload-time = "2025-02-26T00:15:44.104Z", url = "https://files.pythonhosted.org/packages/56/79/12978b96bd44274fe38b5dde5cfb660b1d114f70a65ef962bcbbed99b549/nvidia_cusparselt_cu12-0.7.1-py3-none-manylinux2014_x86_64.whl"},
]
+[[package]]
+name = "nvidia-ml-py"
+sdist = {hash = "sha256:8184d1be52914ac7f0991cd1c0d946c65dc88a840c754cd12c274b77b88760dd", size = 49732, upload-time = "2026-01-22T01:14:56.456Z", url = "https://files.pythonhosted.org/packages/af/a0/f4fc18cf72f06821a9a665085435b901449986855519d5b3843532db35e9/nvidia_ml_py-13.590.48.tar.gz"}
+source = {registry = "https://pypi.org/simple"}
+version = "13.590.48"
+wheels = [
+ {hash = "sha256:fd43d30ee9cd0b7940f5f9f9220b68d42722975e3992b6c21d14144c48760e43", size = 50680, upload-time = "2026-01-22T01:14:55.281Z", url = "https://files.pythonhosted.org/packages/fd/72/fb2af0d259a651affdce65fd6a495f0e07a685a0136baf585c5065204ee7/nvidia_ml_py-13.590.48-py3-none-any.whl"},
+]
+
[[package]]
name = "nvidia-nccl-cu12"
source = {registry = "https://pypi.org/simple"}
@@ -4438,11 +4637,14 @@ dependencies = [
]
name = "pandas"
resolution-markers = [
- "python_full_version < '3.11' and sys_platform != 'linux'",
+ "(python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version < '3.11' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version < '3.11' and sys_platform == 'linux'",
- "python_full_version == '3.11.*' and sys_platform != 'linux'",
+ "python_full_version == '3.11.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.11.*' and sys_platform == 'linux'",
- "python_full_version == '3.12.*' and sys_platform != 'linux'",
+ "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.12.*' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7", size = 4274800, upload-time = "2023-12-08T15:38:29.713Z", url = "https://files.pythonhosted.org/packages/6f/41/eb562668eaf93790762f600536b28c97b45803cba9253cd8e436cda96aef/pandas-2.1.4.tar.gz"}
@@ -4483,9 +4685,11 @@ dependencies = [
]
name = "pandas"
resolution-markers = [
- "python_full_version == '3.13.*' and sys_platform != 'linux'",
+ "(python_full_version == '3.13.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version >= '3.14' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version == '3.13.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.13.*' and sys_platform == 'linux'",
- "python_full_version >= '3.14' and sys_platform != 'linux'",
+ "python_full_version >= '3.14' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version >= '3.14' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z", url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz"}
@@ -4564,6 +4768,19 @@ wheels = [
{hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z", url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl"},
]
+[[package]]
+dependencies = [
+ {name = "locket"},
+ {name = "toolz"},
+]
+name = "partd"
+sdist = {hash = "sha256:d022c33afbdc8405c226621b015e8067888173d85f7f5ecebb3cafed9a20f02c", size = 21029, upload-time = "2024-05-06T19:51:41.945Z", url = "https://files.pythonhosted.org/packages/b2/3a/3f06f34820a31257ddcabdfafc2672c5816be79c7e353b02c1f318daa7d4/partd-1.4.2.tar.gz"}
+source = {registry = "https://pypi.org/simple"}
+version = "1.4.2"
+wheels = [
+ {hash = "sha256:978e4ac767ec4ba5b86c6eaa52e5a2a3bc748a2ca839e8cc798f1cc6ce6efb0f", size = 18905, upload-time = "2024-05-06T19:51:39.271Z", url = "https://files.pythonhosted.org/packages/71/e7/40fb618334dcdf7c5a316c0e7343c5cd82d3d866edc100d98e29bc945ecd/partd-1.4.2-py3-none-any.whl"},
+]
+
[[package]]
dependencies = [
{marker = "python_full_version < '3.13'", name = "six"},
@@ -5024,6 +5241,15 @@ wheels = [
{hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z", url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl"},
]
+[[package]]
+name = "py4j"
+sdist = {hash = "sha256:f694cad19efa5bd1dee4f3e5270eb406613c974394035e5bfc4ec1aba870b879", size = 761089, upload-time = "2025-01-15T03:53:18.624Z", url = "https://files.pythonhosted.org/packages/38/31/0b210511177070c8d5d3059556194352e5753602fa64b85b7ab81ec1a009/py4j-0.10.9.9.tar.gz"}
+source = {registry = "https://pypi.org/simple"}
+version = "0.10.9.9"
+wheels = [
+ {hash = "sha256:c7c26e4158defb37b0bb124933163641a2ff6e3a3913f7811b0ddbe07ed61533", size = 203008, upload-time = "2025-01-15T03:53:15.648Z", url = "https://files.pythonhosted.org/packages/bd/db/ea0203e495be491c85af87b66e37acfd3bf756fd985f87e46fc5e3bf022c/py4j-0.10.9.9-py2.py3-none-any.whl"},
+]
+
[[package]]
name = "pyarrow"
sdist = {hash = "sha256:5051f2dccf0e283ff56335760cbc8622cf52264d67e359d5569541ac11b6d5bc", size = 1133487, upload-time = "2025-07-18T00:57:31.761Z", url = "https://files.pythonhosted.org/packages/ef/c2/ea068b8f00905c06329a3dfcd40d0fcc2b7d0f2e355bdb25b65e0a0e4cd4/pyarrow-21.0.0.tar.gz"}
@@ -5155,10 +5381,11 @@ bedrock = [
{name = "boto3"},
]
cli = [
+ {marker = "python_full_version < '3.13'", name = "rich", source = {registry = "https://pypi.org/simple"}, version = "14.0.0"},
+ {marker = "python_full_version >= '3.13'", name = "rich", source = {registry = "https://pypi.org/simple"}, version = "14.1.0"},
{name = "argcomplete"},
{name = "prompt-toolkit"},
{name = "pyperclip"},
- {name = "rich"},
]
cohere = [
{marker = "sys_platform != 'emscripten'", name = "cohere"},
@@ -5287,12 +5514,13 @@ wheels = [
[[package]]
dependencies = [
+ {marker = "python_full_version < '3.13'", name = "rich", source = {registry = "https://pypi.org/simple"}, version = "14.0.0"},
+ {marker = "python_full_version >= '3.13'", name = "rich", source = {registry = "https://pypi.org/simple"}, version = "14.1.0"},
{name = "anyio"},
{name = "logfire-api"},
{name = "pydantic"},
{name = "pydantic-ai-slim"},
{name = "pyyaml"},
- {name = "rich"},
]
name = "pydantic-evals"
sdist = {hash = "sha256:271fd12233bf6c98e92a3d31f2968c15a36ccf267b205c2f9c31437ed01bee08", size = 45868, upload-time = "2025-10-03T23:49:34.78Z", url = "https://files.pythonhosted.org/packages/81/45/f5dc751d08a2e75b85aa9d99197ee6bd085ccf52b9abbafc8442fbbe1bcf/pydantic_evals-1.0.15.tar.gz"}
@@ -5371,6 +5599,15 @@ wheels = [
{hash = "sha256:299403e9ff44581cb9ba2ffeed69c7aa96a008622ad0c46cb575ca75b5b84273", size = 11063, upload-time = "2025-09-26T14:40:36.069Z", url = "https://files.pythonhosted.org/packages/df/80/fc9d01d5ed37ba4c42ca2b55b4339ae6e200b456be3a1aaddf4a9fa99b8c/pyperclip-1.11.0-py3-none-any.whl"},
]
+[[package]]
+dependencies = [
+ {name = "py4j"},
+]
+name = "pyspark"
+sdist = {hash = "sha256:938b4a1883383374d331ebfcb5d92debfa1891cf3d7a6d730520a1a2d23f1a90", size = 434209940, upload-time = "2026-02-05T19:31:13.6Z", url = "https://files.pythonhosted.org/packages/96/89/408b42c803db71f4a4d8a3f1ab0745a40dfe41aeacdfc453545665a171f4/pyspark-4.0.2.tar.gz"}
+source = {registry = "https://pypi.org/simple"}
+version = "4.0.2"
+
[[package]]
dependencies = [
{marker = "python_full_version < '3.11'", name = "exceptiongroup"},
@@ -5719,30 +5956,38 @@ dependencies = [
]
name = "ray"
source = {registry = "https://pypi.org/simple"}
-version = "2.49.2"
-wheels = [
- {hash = "sha256:08bec467576bc030d8bd0638004e1b8e075588929349112988a4bd4928684e8c", size = 66869076, upload-time = "2025-09-19T19:14:37.371Z", url = "https://files.pythonhosted.org/packages/e4/99/517f224ffd073689c4905bdb185c21d9d8936d75066a96d454878f9e1e47/ray-2.49.2-cp310-cp310-macosx_12_0_arm64.whl"},
- {hash = "sha256:2e2fe20fa90562e73630da9ff7932d3ed6507e73291c4d9bdf566537ae9deddf", size = 66803846, upload-time = "2025-09-19T19:15:56.928Z", url = "https://files.pythonhosted.org/packages/99/dc/a7e569bf7030e0ec50163aed731189e744ca857d74f51b24361ce426697a/ray-2.49.2-cp313-cp313-macosx_12_0_arm64.whl"},
- {hash = "sha256:2ecaaa51f588ccdda2b61563a8be3843bf65dfaaa83a240588a307f4ebb82471", size = 70114942, upload-time = "2025-09-19T19:15:47.536Z", url = "https://files.pythonhosted.org/packages/69/ca/94791be5c3b68ed0df85589a8ca558334818a47bf2978000f85533245aed/ray-2.49.2-cp312-cp312-manylinux2014_x86_64.whl"},
- {hash = "sha256:3e441bf2acd7f368cf45132752066c5c3b83d88cd5f85762e703774bba4f2b6d", size = 69263514, upload-time = "2025-09-19T19:14:45.519Z", url = "https://files.pythonhosted.org/packages/61/c5/c2ceba832fe3f47cfd7e11cd7cc7a1bbc2c028424c5bca70435aa4ca1dec/ray-2.49.2-cp310-cp310-macosx_12_0_x86_64.whl"},
- {hash = "sha256:41e11802ebbc487380e6c21dc041cb405e69fdda717a4eafdfeea294c6c3f9ca", size = 26243798, upload-time = "2025-09-19T19:15:26.405Z", url = "https://files.pythonhosted.org/packages/d7/b5/dfe1240e13d88dc68de03ee7c617f7578ef026e8569a42f7eeeb4729c5e3/ray-2.49.2-cp311-cp311-win_amd64.whl"},
- {hash = "sha256:4fb9f9bf62fd5c92d22da20cd2aacb4ade1fb23033765fa9274f0a0c50bc42f6", size = 66869606, upload-time = "2025-09-19T19:15:05.838Z", url = "https://files.pythonhosted.org/packages/b5/63/27c7fb49513c816b825c809dd33a8570b35d511d1b5e568a4b33b0557997/ray-2.49.2-cp311-cp311-macosx_12_0_arm64.whl"},
- {hash = "sha256:54077dde338c5ffba349a4ab61b72352a3c3be69ea5b4f1b436d98d40b312763", size = 70070382, upload-time = "2025-09-19T19:15:22.048Z", url = "https://files.pythonhosted.org/packages/c1/2b/f2efd0e7bcef06d51422db1af48cc5695a3f9b40a444f9d270a2d4663252/ray-2.49.2-cp311-cp311-manylinux2014_x86_64.whl"},
- {hash = "sha256:6784e076e4418222ef8ee3b6a8bfeb867d8797803b25bcfcce3bf3bc5414bef1", size = 69262599, upload-time = "2025-09-19T19:15:36.732Z", url = "https://files.pythonhosted.org/packages/1a/4c/76f2c7c0946645fdd8d286a3e00e2c42130d676286de206be5d60d271218/ray-2.49.2-cp312-cp312-macosx_12_0_x86_64.whl"},
- {hash = "sha256:74566876af7bf4e48ea4b9b3b75b34db053d1064cc4d4b1670dc4ce78f6894af", size = 69935752, upload-time = "2025-09-19T19:14:56.191Z", url = "https://files.pythonhosted.org/packages/c0/85/a340eba596db3f66d3a338aff43942d8bac32732fb4cf4a20ed4bbbd07eb/ray-2.49.2-cp310-cp310-manylinux2014_x86_64.whl"},
- {hash = "sha256:9ece957a13985f7bbf4077f4ff0204314d7e99a941f95dff2a16b453d5376dc3", size = 69273124, upload-time = "2025-09-19T19:15:11.348Z", url = "https://files.pythonhosted.org/packages/52/9a/9728d1e9dc5473acf0e4f67081dc323d3333c8c87a1e9260ea8878720017/ray-2.49.2-cp311-cp311-macosx_12_0_x86_64.whl"},
- {hash = "sha256:b2f4f0fed936faf688e87ffdcc9356c034513c00259a2f1a8589e345fcfbdbc0", size = 69208426, upload-time = "2025-09-19T19:16:02.085Z", url = "https://files.pythonhosted.org/packages/4e/cf/6667e01f39cd28637f082273e9147f16d5f8fff34e2fb0ca60cc5da76e22/ray-2.49.2-cp313-cp313-macosx_12_0_x86_64.whl"},
- {hash = "sha256:b4c7869688c518e902f7b6288edec2365ab4d28a464291e6d0a7040c7d01b5f7", size = 69198140, upload-time = "2025-09-19T19:16:07.413Z", url = "https://files.pythonhosted.org/packages/c5/84/5361bcdc9c9fb9f4abbf836801803b7df75c76c16a56493413eb154b8a34/ray-2.49.2-cp313-cp313-manylinux2014_aarch64.whl"},
- {hash = "sha256:b7d8214cff86df044fec727eeeabccc3bfc9b0271d28d61ba92c09f0d127d01d", size = 70027331, upload-time = "2025-09-19T19:16:12.968Z", url = "https://files.pythonhosted.org/packages/b0/0c/9e49c3da7502f18483e4deb3273a3104d501c5e9cf1664a136b8ea36df48/ray-2.49.2-cp313-cp313-manylinux2014_x86_64.whl"},
- {hash = "sha256:cba59684f031c9e778c588bc925777967e1b49bab3f00c638e4980bfdab07aec", size = 26223595, upload-time = "2025-09-19T19:15:51.803Z", url = "https://files.pythonhosted.org/packages/e0/22/3f4b77498eefb3152a5946f9f544fcf336e7b9970c5c8af8e2d5eed13f0b/ray-2.49.2-cp312-cp312-win_amd64.whl"},
- {hash = "sha256:d6d612de5c6341b776fc75edeee5b698bb4af7ee84a2ff30552b32a9e6e4a772", size = 66857495, upload-time = "2025-09-19T19:15:31.427Z", url = "https://files.pythonhosted.org/packages/01/66/0d4e518d611486244b357a6cf58a31d7d184f5558e03d5e482c335749616/ray-2.49.2-cp312-cp312-macosx_12_0_arm64.whl"},
- {hash = "sha256:dd0d8d8641d142fafe6d83e87d3c19bd5637d21e34608d3ff69ad71ea3e2f462", size = 69287193, upload-time = "2025-09-19T19:15:42.093Z", url = "https://files.pythonhosted.org/packages/da/99/23b732c0b7b2ee2ffd28bf632257fb98924a03251d251810cb637512fcab/ray-2.49.2-cp312-cp312-manylinux2014_aarch64.whl"},
- {hash = "sha256:e6becc2026d900ca0ba07eff12a130c9d651a91290bb24d43594842b575cc4e5", size = 26246695, upload-time = "2025-09-19T19:15:00.9Z", url = "https://files.pythonhosted.org/packages/ac/e6/809730d87cdf762e76728ea6bb3f96e38fa2dc7ef7d572a49c0d7ebcde95/ray-2.49.2-cp310-cp310-win_amd64.whl"},
- {hash = "sha256:eada9dd89ccda643a3c6c2cba7016b59898432d126e10b38fed52d74165364f4", size = 69266231, upload-time = "2025-09-19T19:15:16.92Z", url = "https://files.pythonhosted.org/packages/38/67/93f0d6d558874a730581059eb6dfa8860991a5410502ea0685dba5e788e4/ray-2.49.2-cp311-cp311-manylinux2014_aarch64.whl"},
- {hash = "sha256:eae07b3fed45f5b041a8bf9795cd26fad2464be5126efd447e4484905a29b677", size = 69125462, upload-time = "2025-09-19T19:14:51.029Z", url = "https://files.pythonhosted.org/packages/63/0e/830df5a0f7e2b582422ee8ad0cdf2a2a9563aa63bb8e60be9ceec494981c/ray-2.49.2-cp310-cp310-manylinux2014_aarch64.whl"},
+version = "2.48.0"
+wheels = [
+ {hash = "sha256:24a70f416ec0be14b975f160044805ccb48cc6bc50de632983eb8f0a8e16682b", size = 69128145, upload-time = "2025-07-18T22:32:51.506Z", url = "https://files.pythonhosted.org/packages/1f/ea/d1f44f5dde662eaf1a61fdfd80b2bac44438506de608c77965be82c2f572/ray-2.48.0-cp311-cp311-manylinux2014_aarch64.whl"},
+ {hash = "sha256:25e4b79fcc8f849d72db1acc4f03f37008c5c0b745df63d8a30cd35676b6545e", size = 70039793, upload-time = "2025-07-18T22:33:54.072Z", url = "https://files.pythonhosted.org/packages/dd/4f/bb511598091f06cc7d781868caf833a0c3459b4f51c0b36cfb75dfaa7e4e/ray-2.48.0-cp313-cp313-manylinux2014_x86_64.whl"},
+ {hash = "sha256:33bda4753ad0acd2b524c9158089d43486cd44cc59fe970466435bc2968fde2d", size = 69823058, upload-time = "2025-07-18T22:32:15.822Z", url = "https://files.pythonhosted.org/packages/38/93/2985888558fa318db805a3797fe4ad1e9a360ddada1ed0557f077f500dcb/ray-2.48.0-cp310-cp310-macosx_12_0_x86_64.whl"},
+ {hash = "sha256:46d4b42a58492dec79caad2d562344689a4f99a828aeea811a0cd2cd653553ef", size = 70079019, upload-time = "2025-07-18T22:32:57.136Z", url = "https://files.pythonhosted.org/packages/5c/46/b376189b9df6b41307754bbc8ed8fe191a86908a8a104b37a602897ec5f0/ray-2.48.0-cp311-cp311-manylinux2014_x86_64.whl"},
+ {hash = "sha256:4b9b92ac29635f555ef341347d9a63dbf02b7d946347239af3c09e364bc45cf8", size = 67315928, upload-time = "2025-07-18T22:32:40.109Z", url = "https://files.pythonhosted.org/packages/bc/79/cd0376eef04d5dabdf0de04c0ae7d71447797c6db4a09a3f71e746018cea/ray-2.48.0-cp311-cp311-macosx_12_0_arm64.whl"},
+ {hash = "sha256:5742b72a514afe5d60f41330200cd508376e16c650f6962e62337aa482d6a0c6", size = 69763475, upload-time = "2025-07-18T22:33:42.297Z", url = "https://files.pythonhosted.org/packages/22/ef/bf5dc762663475fc40680f44df716c553f5d619c6648c8b43ccde00f13ce/ray-2.48.0-cp313-cp313-macosx_12_0_x86_64.whl"},
+ {hash = "sha256:5a6f57126eac9dd3286289e07e91e87b054792f9698b6f7ccab88b624816b542", size = 69823198, upload-time = "2025-07-18T22:33:12.494Z", url = "https://files.pythonhosted.org/packages/df/c5/7de1e9d92a45b1805fe828dcbd18b4c5a1f35ab3cad9134efeb20a3ab3e5/ray-2.48.0-cp312-cp312-macosx_12_0_x86_64.whl"},
+ {hash = "sha256:622e6bcdb78d98040d87bea94e65d0bb6ccc0ae1b43294c6bd69f542bf28e092", size = 69062026, upload-time = "2025-07-18T22:33:48.058Z", url = "https://files.pythonhosted.org/packages/f3/7c/498ceb9684971cb5c9722a2c8400919cd886473b77416c23c23e4e7ddc67/ray-2.48.0-cp313-cp313-manylinux2014_aarch64.whl"},
+ {hash = "sha256:649ed9442dc2d39135c593b6cf0c38e8355170b92672365ab7a3cbc958c42634", size = 69948211, upload-time = "2025-07-18T22:32:29.949Z", url = "https://files.pythonhosted.org/packages/9d/81/cf2a4d6525b31d762bd4582b3d4f01ec87e46c8bd32713f8849cb43fcc8f/ray-2.48.0-cp310-cp310-manylinux2014_x86_64.whl"},
+ {hash = "sha256:6ca2b9ce45ad360cbe2996982fb22691ecfe6553ec8f97a2548295f0f96aac78", size = 67313993, upload-time = "2025-07-18T22:32:08.358Z", url = "https://files.pythonhosted.org/packages/21/7a/9afb1fa75dcc7f0a98d0e613a093dd41baf1b593f0f4e77676e6098bf69d/ray-2.48.0-cp310-cp310-macosx_12_0_arm64.whl"},
+ {hash = "sha256:8de799f3b0896f48d306d5e4a04fc6037a08c495d45f9c79935344e5693e3cf8", size = 67302857, upload-time = "2025-07-18T22:33:06.414Z", url = "https://files.pythonhosted.org/packages/41/53/0d105e1baa6c8c9582f90154ba3f0ca08d58129384ea2707b2e59449b03b/ray-2.48.0-cp312-cp312-macosx_12_0_arm64.whl"},
+ {hash = "sha256:a42ed3b640f4b599a3fc8067c83ee60497c0f03d070d7a7df02a388fa17a546b", size = 70124265, upload-time = "2025-07-18T22:33:25.155Z", url = "https://files.pythonhosted.org/packages/61/02/1894be2ab930b599de0f1f77f785b86c78bda4873c6c2dd65d1de5b40837/ray-2.48.0-cp312-cp312-manylinux2014_x86_64.whl"},
+ {hash = "sha256:a7a6d830d9dc5ae8bb156fcde9a1adab7f4edb004f03918a724d885eceb8264d", size = 67250116, upload-time = "2025-07-18T22:33:36.572Z", url = "https://files.pythonhosted.org/packages/d9/7f/0dc9f5464181ecad93ec2d6f106084d46e5c5ec9a8718c1ba60610ea65fe/ray-2.48.0-cp313-cp313-macosx_12_0_arm64.whl"},
+ {hash = "sha256:b94500fe2d17e491fe2e9bd4a3bf62df217e21a8f2845033c353d4d2ea240f73", size = 69829631, upload-time = "2025-07-18T22:32:45.619Z", url = "https://files.pythonhosted.org/packages/d9/b3/dc73b03bfa75b0668542f77a14d22bee3337754e09af64c7c5c22fdb6649/ray-2.48.0-cp311-cp311-macosx_12_0_x86_64.whl"},
+ {hash = "sha256:be45690565907c4aa035d753d82f6ff892d1e6830057b67399542a035b3682f0", size = 26768461, upload-time = "2025-07-18T22:32:35.131Z", url = "https://files.pythonhosted.org/packages/58/a5/887fc0cd6e11e4a4ad0c7ff69b95c6e823d31aa31d539c9aa8ba5203e22a/ray-2.48.0-cp310-cp310-win_amd64.whl"},
+ {hash = "sha256:cfb48c10371c267fdcf7f4ae359cab706f068178b9c65317ead011972f2c0bf3", size = 26763615, upload-time = "2025-07-18T22:33:01.954Z", url = "https://files.pythonhosted.org/packages/cb/93/98459098f43336ac09c6e5d688468d896f1a791948263727880e1accc7d0/ray-2.48.0-cp311-cp311-win_amd64.whl"},
+ {hash = "sha256:e15fdffa6b60d5729f6025691396b8a01dc3461ba19dc92bba354ec1813ed6b1", size = 26745570, upload-time = "2025-07-18T22:33:31.328Z", url = "https://files.pythonhosted.org/packages/79/8c/d3653d17337fc787af108411d9c9a38333c9fbdf247283ee56dd096d3360/ray-2.48.0-cp312-cp312-win_amd64.whl"},
+ {hash = "sha256:f1cf33d260316f92f77558185f1c36fc35506d76ee7fdfed9f5b70f9c4bdba7f", size = 69151702, upload-time = "2025-07-18T22:33:18.655Z", url = "https://files.pythonhosted.org/packages/b4/a6/e7c969bd371c65b7c233d86f23610489e15164ee7eadb3eb78f9d55eda4d/ray-2.48.0-cp312-cp312-manylinux2014_aarch64.whl"},
+ {hash = "sha256:f820950bc44d7b000c223342f5c800c9c08e7fd89524201125388ea211caad1a", size = 68989098, upload-time = "2025-07-18T22:32:24.097Z", url = "https://files.pythonhosted.org/packages/cc/e1/2d8a0e80a92a8b67933ecc2bac2fe4377557cb6a781c4b1d7b2d7a5fae1c/ray-2.48.0-cp310-cp310-manylinux2014_aarch64.whl"},
]
[package.optional-dependencies]
+data = [
+ {marker = "python_full_version < '3.13'", name = "numpy", source = {registry = "https://pypi.org/simple"}, version = "1.26.4"},
+ {marker = "python_full_version < '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.1.4"},
+ {marker = "python_full_version >= '3.13'", name = "numpy", source = {registry = "https://pypi.org/simple"}, version = "2.1.3"},
+ {marker = "python_full_version >= '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.3.3"},
+ {name = "fsspec"},
+ {name = "pyarrow"},
+]
tune = [
{marker = "python_full_version < '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.1.4"},
{marker = "python_full_version >= '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.3.3"},
@@ -5912,10 +6157,43 @@ wheels = [
[[package]]
dependencies = [
- {name = "markdown-it-py"},
- {name = "pygments"},
+ {marker = "python_full_version < '3.11'", name = "typing-extensions"},
+ {marker = "python_full_version < '3.13'", name = "markdown-it-py"},
+ {marker = "python_full_version < '3.13'", name = "pygments"},
]
name = "rich"
+resolution-markers = [
+ "(python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version < '3.11' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
+ "python_full_version < '3.11' and sys_platform == 'linux'",
+ "python_full_version == '3.11.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
+ "python_full_version == '3.11.*' and sys_platform == 'linux'",
+ "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
+ "python_full_version == '3.12.*' and sys_platform == 'linux'",
+]
+sdist = {hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078, upload-time = "2025-03-30T14:15:14.23Z", url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz"}
+source = {registry = "https://pypi.org/simple"}
+version = "14.0.0"
+wheels = [
+ {hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z", url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl"},
+]
+
+[[package]]
+dependencies = [
+ {marker = "python_full_version >= '3.13'", name = "markdown-it-py"},
+ {marker = "python_full_version >= '3.13'", name = "pygments"},
+]
+name = "rich"
+resolution-markers = [
+ "(python_full_version == '3.13.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version >= '3.14' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version == '3.13.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
+ "python_full_version == '3.13.*' and sys_platform == 'linux'",
+ "python_full_version >= '3.14' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
+ "python_full_version >= '3.14' and sys_platform == 'linux'",
+]
sdist = {hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z", url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz"}
source = {registry = "https://pypi.org/simple"}
version = "14.1.0"
@@ -6175,11 +6453,14 @@ dependencies = [
]
name = "scikit-learn"
resolution-markers = [
- "python_full_version < '3.11' and sys_platform != 'linux'",
+ "(python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version < '3.11' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version < '3.11' and sys_platform == 'linux'",
- "python_full_version == '3.11.*' and sys_platform != 'linux'",
+ "python_full_version == '3.11.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.11.*' and sys_platform == 'linux'",
- "python_full_version == '3.12.*' and sys_platform != 'linux'",
+ "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.12.*' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:b4fc2525eca2c69a59260f583c56a7557c6ccdf8deafdba6e060f94c1c59738e", size = 7068312, upload-time = "2025-01-10T08:07:55.348Z", url = "https://files.pythonhosted.org/packages/9e/a5/4ae3b3a0755f7b35a280ac90b28817d1f380318973cff14075ab41ef50d9/scikit_learn-1.6.1.tar.gz"}
@@ -6221,9 +6502,11 @@ dependencies = [
]
name = "scikit-learn"
resolution-markers = [
- "python_full_version == '3.13.*' and sys_platform != 'linux'",
+ "(python_full_version == '3.13.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version >= '3.14' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version == '3.13.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.13.*' and sys_platform == 'linux'",
- "python_full_version >= '3.14' and sys_platform != 'linux'",
+ "python_full_version >= '3.14' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version >= '3.14' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:20e9e49ecd130598f1ca38a1d85090e1a600147b9c02fa6f15d69cb53d968fda", size = 7193136, upload-time = "2025-09-09T08:21:29.075Z", url = "https://files.pythonhosted.org/packages/98/c2/a7855e41c9d285dfe86dc50b250978105dce513d6e459ea66a6aeb0e1e0c/scikit_learn-1.7.2.tar.gz"}
@@ -6426,6 +6709,15 @@ wheels = [
{hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z", url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl"},
]
+[[package]]
+name = "sortedcontainers"
+sdist = {hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z", url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz"}
+source = {registry = "https://pypi.org/simple"}
+version = "2.4.0"
+wheels = [
+ {hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z", url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl"},
+]
+
[[package]]
name = "soupsieve"
sdist = {hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472, upload-time = "2025-08-27T15:39:51.78Z", url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz"}
@@ -6661,8 +6953,10 @@ wheels = [
dependencies = [
{marker = "python_full_version < '3.13'", name = "einops"},
{marker = "python_full_version < '3.13'", name = "eval-type-backport"},
+ {marker = "python_full_version < '3.13'", name = "filelock"},
{marker = "python_full_version < '3.13'", name = "huggingface-hub"},
{marker = "python_full_version < '3.13'", name = "joblib"},
+ {marker = "python_full_version < '3.13'", name = "numpy", source = {registry = "https://pypi.org/simple"}, version = "1.26.4"},
{marker = "python_full_version < '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.1.4"},
{marker = "python_full_version < '3.13'", name = "pydantic"},
{marker = "python_full_version < '3.13'", name = "pydantic-settings"},
@@ -6673,49 +6967,54 @@ dependencies = [
{marker = "python_full_version < '3.13'", name = "typing-extensions"},
]
name = "tabpfn"
-sdist = {hash = "sha256:706c03c3d3dc478118561761c29fa95727b6e239671922c4c40a8e4b3cc28fbf", size = 200487, upload-time = "2025-09-17T10:42:15.649Z", url = "https://files.pythonhosted.org/packages/e5/8b/ab7e3c71fc5fb6be61295ca1fb880b09df567257e9e4228d77cd79a4e1ec/tabpfn-2.2.1.tar.gz"}
+sdist = {hash = "sha256:339470e3c9d74e678e7eed9422f0cc64838e9080a21402edfb5939248edbbefb", size = 632732, upload-time = "2026-02-19T15:11:15.872Z", url = "https://files.pythonhosted.org/packages/7f/f1/0c16302178ade2e7e258e5c9e4820fa5783e4470a8d713d726b112994f91/tabpfn-6.4.1.tar.gz"}
source = {registry = "https://pypi.org/simple"}
-version = "2.2.1"
+version = "6.4.1"
wheels = [
- {hash = "sha256:9aa3a0df8ecebcae5f426b1fec9bb2e2a690880bc8ad079191e8d959bf0f157b", size = 173723, upload-time = "2025-09-17T10:42:14.435Z", url = "https://files.pythonhosted.org/packages/91/e9/085c60d5e447083ac6705b7b8bfac90773eb4191c00ddc70607f4d8b339b/tabpfn-2.2.1-py3-none-any.whl"},
+ {hash = "sha256:1850a3fd39560a8d4c0b4cab09fbbae85e03320270838b97878eb0b221398e9a", size = 621551, upload-time = "2026-02-19T15:11:13.458Z", url = "https://files.pythonhosted.org/packages/15/9a/34250889004af022b9326d96f3342ce8da84ad414a74b96064fb84e7770c/tabpfn-6.4.1-py3-none-any.whl"},
]
[[package]]
dependencies = [
- {marker = "python_full_version < '3.13'", name = "httpx"},
+ {extra = ["http2"], marker = "python_full_version < '3.13'", name = "httpx"},
+ {marker = "python_full_version < '3.13'", name = "backoff"},
{marker = "python_full_version < '3.13'", name = "omegaconf"},
{marker = "python_full_version < '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.1.4"},
{marker = "python_full_version < '3.13'", name = "password-strength"},
+ {marker = "python_full_version < '3.13'", name = "rich", source = {registry = "https://pypi.org/simple"}, version = "14.0.0"},
{marker = "python_full_version < '3.13'", name = "scikit-learn", source = {registry = "https://pypi.org/simple"}, version = "1.6.1"},
{marker = "python_full_version < '3.13'", name = "sseclient-py"},
{marker = "python_full_version < '3.13'", name = "tqdm"},
+ {marker = "python_full_version < '3.13'", name = "typing-extensions"},
{marker = "python_full_version < '3.13'", name = "xxhash", source = {registry = "https://pypi.org/simple"}, version = "3.5.0"},
]
name = "tabpfn-client"
-sdist = {hash = "sha256:f8b82318ce22240b7622a05bea2422204ddd5374df335a19284eea0e2252924e", size = 17066065, upload-time = "2025-03-18T12:58:03.737Z", url = "https://files.pythonhosted.org/packages/e2/cc/97be739da8d4113c13b8c9fa031908d59210005d38f944e0d678717cf91c/tabpfn_client-0.1.7.tar.gz"}
+sdist = {hash = "sha256:adcf4374f75b0a7a391a4bfa03cad96d46203686187a8cdc769f4c4fe2414706", size = 1888144, upload-time = "2025-11-06T11:51:35.795Z", url = "https://files.pythonhosted.org/packages/66/c5/091234d4bb282c19caf52d23100a2cd91cb6b4399a07fe1986e0dff7b11c/tabpfn_client-0.2.8.tar.gz"}
source = {registry = "https://pypi.org/simple"}
-version = "0.1.7"
+version = "0.2.8"
wheels = [
- {hash = "sha256:84245c893b06aa084795c31c04ec3f58186499dfcd7f6f2b8821f9a0a6028136", size = 1902270, upload-time = "2025-03-18T12:57:52.842Z", url = "https://files.pythonhosted.org/packages/5b/55/ba50a32b222616eb5653958fc94d91f54ab74109417ada52bb2aecb89a15/tabpfn_client-0.1.7-py3-none-any.whl"},
+ {hash = "sha256:27aedf3075258f677983318132e4494b939412dd8ff45e2aef7219b144000026", size = 1904648, upload-time = "2025-11-06T11:51:34.257Z", url = "https://files.pythonhosted.org/packages/c6/7c/8e37e8e15b294cbe41faaab3665c38d40d282f5846d8074514587cc96482/tabpfn_client-0.2.8-py3-none-any.whl"},
]
[[package]]
dependencies = [
{marker = "python_full_version < '3.13'", name = "filelock"},
{marker = "python_full_version < '3.13'", name = "numpy", source = {registry = "https://pypi.org/simple"}, version = "1.26.4"},
+ {marker = "python_full_version < '3.13'", name = "nvidia-ml-py"},
{marker = "python_full_version < '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.1.4"},
{marker = "python_full_version < '3.13'", name = "platformdirs"},
{marker = "python_full_version < '3.13'", name = "posthog"},
{marker = "python_full_version < '3.13'", name = "requests"},
+ {marker = "python_full_version < '3.13'", name = "ruff"},
{marker = "python_full_version < '3.13'", name = "scikit-learn", source = {registry = "https://pypi.org/simple"}, version = "1.6.1"},
{marker = "python_full_version < '3.13'", name = "typing-extensions"},
]
name = "tabpfn-common-utils"
-sdist = {hash = "sha256:d4a3b9acd7d36502ac75bfce549acda26aafbe9c1e7617052fbe1932c211d174", size = 1927582, upload-time = "2025-10-02T11:54:59.704Z", url = "https://files.pythonhosted.org/packages/cf/0a/dae8031fd9a478c0b6b682f1585d2e4e288727f569eb3316af3b981d1ec0/tabpfn_common_utils-0.2.2.tar.gz"}
+sdist = {hash = "sha256:8ce89488c411ea42c6bb6d0e2b5b6bf7c4dad4d34891687d0adae765968afecd", size = 1931260, upload-time = "2026-03-09T18:40:35.456Z", url = "https://files.pythonhosted.org/packages/9f/f2/e8acbf55eae63f711e6fffea9a10cbac63bca42432864e8b6869dd148403/tabpfn_common_utils-0.2.18.tar.gz"}
source = {registry = "https://pypi.org/simple"}
-version = "0.2.2"
+version = "0.2.18"
wheels = [
- {hash = "sha256:9ac650c252be49c34d12d210eceeeb3d9cdc7ac5396df7426e5b86b93ae5429e", size = 32104, upload-time = "2025-10-02T11:54:57.702Z", url = "https://files.pythonhosted.org/packages/7a/be/6307e7f8c55963aa74c72dbbe0e360b62a688a484f73227adf7cc1ddde02/tabpfn_common_utils-0.2.2-py3-none-any.whl"},
+ {hash = "sha256:e1cd000f667eeeb859c5076d18b6c5305c144b6e3e76f4d8df3932263f50e6cc", size = 37186, upload-time = "2026-03-09T18:40:33.834Z", url = "https://files.pythonhosted.org/packages/b3/2f/4b36f10bdda2ab7caf0c944a1daeb63d61df220403ae1900432905acff75/tabpfn_common_utils-0.2.18-py3-none-any.whl"},
]
[[package]]
@@ -6747,6 +7046,15 @@ wheels = [
{hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252, upload-time = "2022-10-06T17:21:44.262Z", url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl"},
]
+[[package]]
+name = "tblib"
+sdist = {hash = "sha256:e9a652692d91bf4f743d4a15bc174c0b76afc750fe8c7b6d195cc1c1d6d2ccec", size = 35046, upload-time = "2025-11-12T12:21:16.572Z", url = "https://files.pythonhosted.org/packages/f4/8a/14c15ae154895cc131174f858c707790d416c444fc69f93918adfd8c4c0b/tblib-3.2.2.tar.gz"}
+source = {registry = "https://pypi.org/simple"}
+version = "3.2.2"
+wheels = [
+ {hash = "sha256:26bdccf339bcce6a88b2b5432c988b266ebbe63a4e593f6b578b1d2e723d2b76", size = 12893, upload-time = "2025-11-12T12:21:14.407Z", url = "https://files.pythonhosted.org/packages/02/be/5d2d47b1fb58943194fb59dcf222f7c4e35122ec0ffe8c36e18b5d728f0b/tblib-3.2.2-py3-none-any.whl"},
+]
+
[[package]]
dependencies = [
{marker = "python_full_version < '3.11'", name = "python-dateutil"},
@@ -6893,8 +7201,10 @@ version = "0.0.24"
[package.dev-dependencies]
dev = [
+ {extra = ["dask", "ray", "spark"], name = "fugue"},
{name = "mktestdocs"},
{name = "pre-commit"},
+ {name = "pyspark"},
{name = "pytest"},
{name = "pytest-asyncio"},
{name = "pytest-cov"},
@@ -6916,9 +7226,14 @@ docs = [
]
[package.metadata]
+provides-extras = ["distributed"]
requires-dist = [
+ {extras = ["dask", "ray", "spark"], marker = "extra == 'distributed'", name = "fugue", specifier = ">=0.9.0"},
{extras = ["plotting"], name = "utilsforecast", specifier = ">=0.2.15"},
{extras = ["torch"], name = "gluonts"},
+ {marker = "extra == 'distributed'", name = "dask", specifier = "<=2024.12.1"},
+ {marker = "extra == 'distributed'", name = "pyspark", specifier = "<4.1"},
+ {marker = "extra == 'distributed'", name = "ray", specifier = "==2.48"},
{marker = "python_full_version < '3.13'", name = "tabpfn-time-series", specifier = "==1.0.3"},
{marker = "python_full_version < '3.13'", name = "transformers", specifier = ">=4.41,<5"},
{marker = "python_full_version < '3.14'", name = "timecopilot-uni2ts", specifier = ">=0.1.2"},
@@ -6951,7 +7266,7 @@ requires-dist = [
{name = "prophet", specifier = ">=1.1.7"},
{name = "pydantic-ai", specifier = ">=0.7.0"},
{name = "pytorch-lightning", specifier = "==2.4.0"},
- {name = "ray", specifier = "==2.49.2"},
+ {name = "ray", specifier = "==2.48"},
{name = "scipy", specifier = "<=1.15.3"},
{name = "statsforecast", specifier = ">=2.0.2"},
{name = "tensorboard", specifier = ">=2.20.0"},
@@ -6966,9 +7281,12 @@ requires-dist = [
[package.metadata.requires-dev]
dev = [
+ {extras = ["dask", "ray", "spark"], name = "fugue", specifier = ">=0.9.0"},
{name = "mktestdocs", specifier = ">=0.2.5"},
{name = "pre-commit"},
+ {name = "pyspark", specifier = "<4.1"},
{name = "pytest", specifier = ">=8.4.2"},
+ {name = "pytest"},
{name = "pytest-asyncio", specifier = ">=1.1.0"},
{name = "pytest-cov"},
{name = "pytest-mock", specifier = ">=3.15.1"},
@@ -6988,6 +7306,14 @@ docs = [
{name = "ruff", specifier = ">=0.12.1"},
]
+[package.optional-dependencies]
+distributed = [
+ {extra = ["dask", "ray", "spark"], name = "fugue"},
+ {name = "dask"},
+ {name = "pyspark"},
+ {name = "ray"},
+]
+
[[package]]
dependencies = [
{marker = "python_full_version < '3.13'", name = "numpy", source = {registry = "https://pypi.org/simple"}, version = "1.26.4"},
@@ -7082,6 +7408,8 @@ wheels = [
[[package]]
dependencies = [
{extra = ["torch"], name = "gluonts"},
+ {marker = "platform_machine != 'x86_64' or sys_platform != 'darwin'", name = "dill", source = {registry = "https://pypi.org/simple"}, version = "0.4.0"},
+ {marker = "platform_machine == 'x86_64' and sys_platform == 'darwin'", name = "dill", source = {registry = "https://pypi.org/simple"}, version = "0.3.8"},
{marker = "python_full_version < '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.1.4"},
{marker = "python_full_version < '3.13'", name = "scikit-learn", source = {registry = "https://pypi.org/simple"}, version = "1.6.1"},
{marker = "python_full_version >= '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.3.3"},
@@ -7090,7 +7418,6 @@ dependencies = [
{name = "black"},
{name = "boto3"},
{name = "datasets"},
- {name = "dill"},
{name = "einops"},
{name = "isort"},
{name = "jaxtyping"},
@@ -7414,17 +7741,16 @@ dependencies = [
{marker = "python_full_version < '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.1.4"},
{marker = "python_full_version >= '3.13'", name = "numpy", source = {registry = "https://pypi.org/simple"}, version = "2.1.3"},
{marker = "python_full_version >= '3.13'", name = "pandas", source = {registry = "https://pypi.org/simple"}, version = "2.3.3"},
- {name = "fs"},
{name = "fsspec"},
{name = "pyarrow"},
{name = "six"},
]
name = "triad"
-sdist = {hash = "sha256:5b67673124891981daf8afbab44b2e6358932ca35ef3ff38a25bc3e0f6f03f17", size = 56086, upload-time = "2024-06-28T06:11:32.537Z", url = "https://files.pythonhosted.org/packages/88/28/fca2981080bfb44e317b3fc6cc4119a0abf14f18e707a612764fcad28790/triad-0.9.8.tar.gz"}
+sdist = {hash = "sha256:0bbaf627dfdee8fa05bafe02d87da03460892abd666944da048de8a467e1519d", size = 54185, upload-time = "2025-10-31T06:03:06.709Z", url = "https://files.pythonhosted.org/packages/71/28/33e3ffaa73264460e2b0ffae3f897e42e3b1b9245578851d714b44d088fc/triad-1.0.0.tar.gz"}
source = {registry = "https://pypi.org/simple"}
-version = "0.9.8"
+version = "1.0.0"
wheels = [
- {hash = "sha256:2c0ba7d83977c6d4e7b59e3cc70727f858014ef7676c62d184aa8e63f7bef5de", size = 62340, upload-time = "2024-06-28T06:11:30.764Z", url = "https://files.pythonhosted.org/packages/4f/c6/4aedce0522bb3c72f2d770e7e4c18b0e1f7716d2c70a865e94c89ebcf7e6/triad-0.9.8-py3-none-any.whl"},
+ {hash = "sha256:0d9b638541e26c24ef6ad8c8f29b5df92eaa436f54cfd02ae1c48b4b0acd6e92", size = 59955, upload-time = "2025-10-31T06:03:05.303Z", url = "https://files.pythonhosted.org/packages/1f/a0/8dd7b5b45b96f30a29382b8d7bcd473cfe5573716ad1f3be760963d8f874/triad-1.0.0-py3-none-any.whl"},
]
[[package]]
@@ -7523,8 +7849,9 @@ wheels = [
[[package]]
dependencies = [
+ {marker = "python_full_version < '3.13'", name = "rich", source = {registry = "https://pypi.org/simple"}, version = "14.0.0"},
+ {marker = "python_full_version >= '3.13'", name = "rich", source = {registry = "https://pypi.org/simple"}, version = "14.1.0"},
{name = "click"},
- {name = "rich"},
{name = "shellingham"},
{name = "typing-extensions"},
]
@@ -8029,6 +8356,7 @@ wheels = [
[[package]]
dependencies = [
{marker = "python_full_version >= '3.11' and python_full_version < '3.13'", name = "numpy", source = {registry = "https://pypi.org/simple"}, version = "1.26.4"},
+ {marker = "python_full_version >= '3.11' and python_full_version < '3.13'", name = "rich", source = {registry = "https://pypi.org/simple"}, version = "14.0.0"},
{marker = "python_full_version >= '3.11'", name = "dacite"},
{marker = "python_full_version >= '3.11'", name = "einops"},
{marker = "python_full_version >= '3.11'", name = "ftfy"},
@@ -8040,13 +8368,13 @@ dependencies = [
{marker = "python_full_version >= '3.11'", name = "omegaconf"},
{marker = "python_full_version >= '3.11'", name = "opt-einsum"},
{marker = "python_full_version >= '3.11'", name = "reportlab"},
- {marker = "python_full_version >= '3.11'", name = "rich"},
{marker = "python_full_version >= '3.11'", name = "seaborn"},
{marker = "python_full_version >= '3.11'", name = "tokenizers"},
{marker = "python_full_version >= '3.11'", name = "torch"},
{marker = "python_full_version >= '3.11'", name = "tqdm"},
{marker = "python_full_version >= '3.11'", name = "transformers"},
{marker = "python_full_version >= '3.13'", name = "numpy", source = {registry = "https://pypi.org/simple"}, version = "2.1.3"},
+ {marker = "python_full_version >= '3.13'", name = "rich", source = {registry = "https://pypi.org/simple"}, version = "14.1.0"},
]
name = "xlstm"
sdist = {hash = "sha256:24a5572be44207fc15ed5dea6b805c4bcd450a8f2728320cf21b8082c535b60e", size = 71129, upload-time = "2025-08-24T14:38:49.493Z", url = "https://files.pythonhosted.org/packages/7f/4d/05efa4c76b8ade8cbd638e2b0329694a767146616186ef786107740e4e89/xlstm-2.0.5.tar.gz"}
@@ -8059,11 +8387,14 @@ wheels = [
[[package]]
name = "xxhash"
resolution-markers = [
- "python_full_version < '3.11' and sys_platform != 'linux'",
+ "(python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version < '3.11' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version < '3.11' and sys_platform == 'linux'",
- "python_full_version == '3.11.*' and sys_platform != 'linux'",
+ "python_full_version == '3.11.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.11.*' and sys_platform == 'linux'",
- "python_full_version == '3.12.*' and sys_platform != 'linux'",
+ "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.12.*' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:84f2caddf951c9cbf8dc2e22a89d4ccf5d86391ac6418fe81e3c67d0cf60b45f", size = 84241, upload-time = "2024-08-17T09:20:38.972Z", url = "https://files.pythonhosted.org/packages/00/5e/d6e5258d69df8b4ed8c83b6664f2b47d30d2dec551a29ad72a6c69eafd31/xxhash-3.5.0.tar.gz"}
@@ -8140,9 +8471,11 @@ wheels = [
[[package]]
name = "xxhash"
resolution-markers = [
- "python_full_version == '3.13.*' and sys_platform != 'linux'",
+ "(python_full_version == '3.13.*' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "(python_full_version >= '3.14' and platform_machine != 'x86_64' and sys_platform == 'darwin') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux')",
+ "python_full_version == '3.13.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version == '3.13.*' and sys_platform == 'linux'",
- "python_full_version >= '3.14' and sys_platform != 'linux'",
+ "python_full_version >= '3.14' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
"python_full_version >= '3.14' and sys_platform == 'linux'",
]
sdist = {hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z", url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz"}
@@ -8387,6 +8720,15 @@ wheels = [
{hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715, upload-time = "2025-10-06T14:11:11.739Z", url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl"},
]
+[[package]]
+name = "zict"
+sdist = {hash = "sha256:e321e263b6a97aafc0790c3cfb3c04656b7066e6738c37fffcca95d803c9fba5", size = 33238, upload-time = "2023-04-17T21:41:16.041Z", url = "https://files.pythonhosted.org/packages/d1/ac/3c494dd7ec5122cff8252c1a209b282c0867af029f805ae9befd73ae37eb/zict-3.0.0.tar.gz"}
+source = {registry = "https://pypi.org/simple"}
+version = "3.0.0"
+wheels = [
+ {hash = "sha256:5796e36bd0e0cc8cf0fbc1ace6a68912611c1dbd74750a3f3026b9b9d6a327ae", size = 43332, upload-time = "2023-04-17T21:41:13.444Z", url = "https://files.pythonhosted.org/packages/80/ab/11a76c1e2126084fde2639514f24e6111b789b0bfa4fc6264a8975c7e1f1/zict-3.0.0-py2.py3-none-any.whl"},
+]
+
[[package]]
name = "zipp"
sdist = {hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z", url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz"}