diff --git a/wavefront/server/modules/plugins_module/plugins_module/controllers/datasource_controller.py b/wavefront/server/modules/plugins_module/plugins_module/controllers/datasource_controller.py index 2483f281..2b706611 100644 --- a/wavefront/server/modules/plugins_module/plugins_module/controllers/datasource_controller.py +++ b/wavefront/server/modules/plugins_module/plugins_module/controllers/datasource_controller.py @@ -1,6 +1,7 @@ from typing import Dict, Any from datasource.bigquery.config import BigQueryConfig from datasource.redshift.config import RedshiftConfig +from datasource.synapse.config import SynapseConfig from dependency_injector.wiring import inject import json from dependency_injector.wiring import Provide @@ -90,6 +91,8 @@ async def add_datasource( config = BigQueryConfig(**config_json) elif add_datasource_payload.type == DataSourceType.AWS_REDSHIFT: config = RedshiftConfig(**config_json) + elif add_datasource_payload.type == DataSourceType.AZURE_SYNAPSE: + config = SynapseConfig(**config_json) else: raise ValueError(f'Invalid datasource type: {add_datasource_payload.type}') @@ -181,6 +184,8 @@ async def update_datasource( config = BigQueryConfig(**payload_config) elif datasource_type == DataSourceType.AWS_REDSHIFT: config = RedshiftConfig(**payload_config) + elif datasource_type == DataSourceType.AZURE_SYNAPSE: + config = SynapseConfig(**payload_config) else: raise ValueError(f'Invalid datasource type: {datasource_type}') diff --git a/wavefront/server/modules/plugins_module/plugins_module/services/datasource_services.py b/wavefront/server/modules/plugins_module/plugins_module/services/datasource_services.py index 61714608..6c8e8b8b 100644 --- a/wavefront/server/modules/plugins_module/plugins_module/services/datasource_services.py +++ b/wavefront/server/modules/plugins_module/plugins_module/services/datasource_services.py @@ -1,5 +1,6 @@ import collections from datasource import DataSourceType, BigQueryConfig, RedshiftConfig +from 
datasource.synapse.config import SynapseConfig from db_repo_module.models.datasource import Datasource from db_repo_module.models.role import Role from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository @@ -17,7 +18,7 @@ async def get_datasource_config( datasource_repository: SQLAlchemyRepository[Datasource] = Depends( Provide(PluginsContainer.datasource_repository) ), -) -> tuple[DataSourceType, BigQueryConfig | RedshiftConfig]: +) -> tuple[DataSourceType, BigQueryConfig | RedshiftConfig | SynapseConfig]: datasource: Datasource | None = await datasource_repository.find_one( id=datasource_id ) @@ -28,6 +29,8 @@ async def get_datasource_config( return DataSourceType.GCP_BIGQUERY, BigQueryConfig(**datasource.config) elif datasource.type == DataSourceType.AWS_REDSHIFT: return DataSourceType.AWS_REDSHIFT, RedshiftConfig(**datasource.config) + elif datasource.type == DataSourceType.AZURE_SYNAPSE: + return DataSourceType.AZURE_SYNAPSE, SynapseConfig(**datasource.config) else: raise ValueError(f'Invalid datasource type: {datasource.type}') diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/azure/__init__.py b/wavefront/server/packages/flo_cloud/flo_cloud/azure/__init__.py index c9ebfe44..ce1864d1 100644 --- a/wavefront/server/packages/flo_cloud/flo_cloud/azure/__init__.py +++ b/wavefront/server/packages/flo_cloud/flo_cloud/azure/__init__.py @@ -3,7 +3,8 @@ from .blob_storage import AzureBlobStorage from .storage_queue import StorageQueue from .key_vault import AzureKMS +from .synapse import SynapseClient logging.getLogger('azure').setLevel(logging.WARNING) -__all__ = ['AzureBlobStorage', 'AzureKMS', 'StorageQueue'] +__all__ = ['AzureBlobStorage', 'AzureKMS', 'StorageQueue', 'SynapseClient'] diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/azure/synapse.py b/wavefront/server/packages/flo_cloud/flo_cloud/azure/synapse.py new file mode 100644 index 00000000..bbc5f2ef --- /dev/null +++ 
class SynapseClient:
    """Azure Synapse Analytics client built on pyodbc.

    Works against both Dedicated and Serverless SQL Pools — the pool type is
    determined solely by the host URL supplied; no extra configuration is
    needed here.

    NOTE(review): identifiers and raw SQL fragments (projection, table names,
    where/join/order/group clauses) are interpolated directly into query text;
    only ``@name`` values go through driver-level parameter binding. Callers
    must not pass untrusted input for the interpolated parts.
    """

    def __init__(
        self,
        host: Optional[str] = None,
        port: int = 1433,
        database: Optional[str] = None,
        user: Optional[str] = None,
        password: Optional[str] = None,
        driver: str = 'ODBC Driver 18 for SQL Server',
        timeout: int = 30,
    ):
        """Resolve connection settings from arguments or AZURE_SYNAPSE_* env vars.

        Args:
            host: Synapse endpoint; falls back to ``AZURE_SYNAPSE_HOST``.
            port: TCP port, defaults to the SQL Server standard 1433.
            database: Database name; falls back to ``AZURE_SYNAPSE_DATABASE``.
            user: Login name; falls back to ``AZURE_SYNAPSE_USER``.
            password: Login password; falls back to ``AZURE_SYNAPSE_PASSWORD``.
            driver: Installed ODBC driver name.
            timeout: Connection timeout in seconds.

        Raises:
            ValueError: if host, database, user or password is still missing
                after falling back to the environment.
        """
        self.host = host or os.getenv('AZURE_SYNAPSE_HOST')
        self.port = port
        self.database = database or os.getenv('AZURE_SYNAPSE_DATABASE')
        self.user = user or os.getenv('AZURE_SYNAPSE_USER')
        self.password = password or os.getenv('AZURE_SYNAPSE_PASSWORD')
        self.driver = driver
        self.timeout = timeout

        # Validate all required settings in one place (messages unchanged).
        for label, value, env_var in (
            ('Synapse host', self.host, 'AZURE_SYNAPSE_HOST'),
            ('Database', self.database, 'AZURE_SYNAPSE_DATABASE'),
            ('User', self.user, 'AZURE_SYNAPSE_USER'),
            ('Password', self.password, 'AZURE_SYNAPSE_PASSWORD'),
        ):
            if not value:
                raise ValueError(
                    f'{label} must be provided via parameter or {env_var} environment variable'
                )

    @staticmethod
    def _escape_conn_value(value: str) -> str:
        """Wrap an ODBC connection-string value in braces, escaping literal '}' as '}}'."""
        return '{' + value.replace('}', '}}') + '}'

    def _build_connection_string(self) -> str:
        """Assemble the ODBC connection string (TLS enforced, server cert validated)."""
        return (
            f'DRIVER={self._escape_conn_value(self.driver)};'
            f'Server={self._escape_conn_value(f"{self.host},{self.port}")};'
            f'Database={self._escape_conn_value(self.database)};'
            f'Uid={self._escape_conn_value(self.user)};'
            f'Pwd={self._escape_conn_value(self.password)};'
            f'Encrypt=yes;'
            f'TrustServerCertificate=no;'
            f'Connection Timeout={self.timeout};'
        )

    @contextmanager
    def get_connection(self):
        """Yield a pyodbc connection, always closing it on exit."""
        conn = None
        try:
            conn = pyodbc.connect(self._build_connection_string())
            yield conn
        except pyodbc.Error as e:
            logger.error(f'Synapse connection error: {e}')
            raise
        except Exception as e:
            logger.error(f'Unexpected error connecting to Synapse: {e}')
            raise
        finally:
            if conn:
                conn.close()

    @contextmanager
    def get_cursor(self):
        """Yield a cursor on a fresh connection; cursor and connection are both closed on exit."""
        with self.get_connection() as conn:
            cursor = conn.cursor()
            try:
                yield cursor
            finally:
                cursor.close()

    def _convert_named_params(
        self, query: str, params: Optional[Dict[str, Any]]
    ) -> Tuple[str, list]:
        """Convert ``@name`` named params to ``?`` positional params for pyodbc.

        Each occurrence of a known ``@name`` appends its value once, so a
        parameter used twice is bound twice — required for positional markers.
        Unknown ``@name`` tokens are left untouched (e.g. T-SQL built-ins).
        """
        if not params:
            return query, []

        ordered_values = []

        def replace_param(match: re.Match) -> str:
            param_name = match.group(1)
            if param_name in params:
                ordered_values.append(params[param_name])
                return '?'
            return match.group(0)

        converted_query = re.sub(r'@(\w+)', replace_param, query)
        return converted_query, ordered_values

    def execute_query_as_dict(
        self, query: str, params: Optional[Dict[str, Any]] = None
    ) -> List[Dict[str, Any]]:
        """Execute *query* and return rows as dicts keyed by column name.

        Raises:
            ValueError: if the result set repeats a column name — dict rows
                would silently drop the earlier value.
            pyodbc.Error: on any driver-level failure (logged, then re-raised).
        """
        converted_query, values = self._convert_named_params(query, params)
        with self.get_cursor() as cursor:
            try:
                cursor.execute(converted_query, values)
                columns = [desc[0] for desc in cursor.description]
                # Reject duplicate column names up front: zip/dict would keep
                # only the last value for a repeated name.
                seen: set = set()
                duplicates: set = set()
                for column in columns:
                    if column in seen:
                        duplicates.add(column)
                    seen.add(column)
                if duplicates:
                    raise ValueError(
                        f'Duplicate column names {sorted(duplicates)!r} in query result. '
                        f'Query: {converted_query!r}'
                    )
                return [dict(zip(columns, row)) for row in cursor.fetchall()]
            except pyodbc.Error as e:
                logger.error(f'Synapse query execution error: {e}')
                raise

    def execute_query_to_dict(
        self,
        projection: str = '*',
        table_prefix: str = '',
        table_names: Optional[List[str]] = None,
        where_clause: str = '1=1',
        join_query: Optional[str] = None,
        params: Optional[Dict[str, Any]] = None,
        limit: int = 10,
        offset: int = 0,
        order_by: Optional[str] = None,
        group_by: Optional[str] = None,
    ) -> List[Dict[str, Any]]:
        """Build and run a paginated SELECT over one or more tables.

        The first table gets alias ``a``; joined tables are aliased via
        :meth:`__get_join_query`. Pagination uses T-SQL OFFSET/FETCH, which
        requires an ORDER BY — ``ORDER BY (SELECT NULL)`` is used when no
        explicit ordering is requested.

        Raises:
            ValueError: if *table_names* is empty or omitted.
        """
        # `table_names` defaulted to a mutable [] before; None avoids the
        # shared-default pitfall while keeping the empty-input error identical.
        if not table_names:
            raise ValueError('At least one table name must be provided')

        if join_query:
            query = self.__get_join_query(
                join_query,
                table_names,
                table_prefix,
                projection,
                where_clause,
                limit,
                offset,
                order_by,
                group_by=group_by,
            )
        else:
            # Clauses are only needed on this branch; the join branch builds
            # its own inside __get_join_query.
            base_table = f'{table_prefix}{table_names[0]}'
            group_by_clause = f'GROUP BY {group_by}' if group_by else ''
            # SQL Server requires ORDER BY when using OFFSET/FETCH
            order_by_clause = (
                f'ORDER BY {order_by}' if order_by else 'ORDER BY (SELECT NULL)'
            )
            query = (
                f'SELECT {projection} FROM {base_table} AS a '
                f'WHERE {where_clause} {group_by_clause} '
                f'{order_by_clause} '
                f'OFFSET {offset} ROWS FETCH NEXT {limit} ROWS ONLY'
            )

        try:
            logger.debug(f'Executing query: {query}')
            return self.execute_query_as_dict(query, params)
        except pyodbc.Error as e:
            logger.error(f'Synapse query execution error: {e}')
            raise
        except Exception as e:
            logger.error(f'Unexpected error executing Synapse query: {e}')
            raise

    def __get_join_query(
        self,
        join_query: str,
        table_names: List[str],
        table_prefix: str,
        projection: str,
        where_clause: str,
        limit: int,
        offset: int,
        order_by: Optional[str] = None,
        group_by: Optional[str] = None,
    ) -> str:
        """Rewrite a caller-supplied join fragment with short table aliases.

        Tables are aliased a, b, c, ... in *table_names* order, and
        ``table.`` prefixes in the join/where fragments are rewritten to the
        alias. NOTE(review): rewriting is plain substring replacement — a
        table name that is a substring of another identifier would be
        rewritten too; verify against callers before relying on it.
        """
        aliases = list(string.ascii_lowercase)
        processed_join = join_query
        processed_where = where_clause
        for i, table_name in enumerate(table_names):
            alias = aliases[i]
            qualified = f'{table_prefix}{table_name}'
            processed_join = processed_join.replace(f'{table_name}.', f'{alias}.')
            processed_where = processed_where.replace(f'{table_name}.', f'{alias}.')
            processed_join = processed_join.replace(
                f'JOIN {table_name}',
                f'JOIN {qualified} AS {alias}',
            )

        group_by_clause = f'GROUP BY {group_by}' if group_by else ''
        order_by_clause = (
            f'ORDER BY {order_by}' if order_by else 'ORDER BY (SELECT NULL)'
        )
        base_table = f'{table_prefix}{table_names[0]}'
        return (
            f'SELECT {projection} FROM {base_table} AS {aliases[0]} '
            f'{processed_join} WHERE {processed_where} '
            f'{group_by_clause} {order_by_clause} '
            f'OFFSET {offset} ROWS FETCH NEXT {limit} ROWS ONLY'
        )

    def get_table_info(self, schema: str = 'dbo') -> List[Dict[str, Any]]:
        """Return one row per column from INFORMATION_SCHEMA.COLUMNS for *schema*."""
        query = """
        SELECT
            c.TABLE_NAME,
            c.COLUMN_NAME,
            c.DATA_TYPE,
            c.CHARACTER_MAXIMUM_LENGTH,
            c.NUMERIC_PRECISION,
            c.NUMERIC_SCALE,
            c.IS_NULLABLE,
            c.COLUMN_DEFAULT,
            c.ORDINAL_POSITION
        FROM INFORMATION_SCHEMA.COLUMNS c
        WHERE c.TABLE_SCHEMA = @schema
        ORDER BY c.TABLE_NAME, c.ORDINAL_POSITION
        """
        return self.execute_query_as_dict(query, {'schema': schema})

    def list_tables(self, schema: str = 'dbo') -> List[str]:
        """Return the names of base tables (no views) in *schema*, sorted."""
        query = (
            'SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES '
            "WHERE TABLE_SCHEMA = ? AND TABLE_TYPE = 'BASE TABLE' "
            'ORDER BY TABLE_NAME'
        )
        with self.get_cursor() as cursor:
            cursor.execute(query, [schema])
            return [row[0] for row in cursor.fetchall()]

    def test_connection(self) -> bool:
        """Probe the server with ``SELECT 1``; return True on success, False on any failure."""
        try:
            with self.get_cursor() as cursor:
                cursor.execute('SELECT 1')
                result = cursor.fetchone()
                success = result is not None and result[0] == 1
                if success:
                    logger.info('Synapse connection test successful')
                return success
        except Exception as e:
            logger.error(f'Synapse connection test failed: {e}')
            return False
import asyncio
import re
from typing import Any, Dict, List, Optional

from ..types import DataSourceABC
from flo_cloud.azure.synapse import SynapseClient as AzureSynapseClient
from .config import SynapseConfig

# Matches an ORDER BY clause regardless of the whitespace between the two
# keywords. The previous check was the substring ' order by ', which missed
# e.g. a query ending in '...\norder by x' and then appended a second
# ORDER BY — a T-SQL syntax error. (Like the old check, this also matches
# ORDER BY inside OVER() windows; behavior there is unchanged.)
_ORDER_BY_RE = re.compile(r'\border\s+by\b', re.IGNORECASE)


class SynapsePlugin(DataSourceABC):
    """DataSourceABC implementation backed by Azure Synapse Analytics.

    Thin orchestration layer over flo_cloud's SynapseClient: it normalises
    orchestrator-supplied SQL fragments into T-SQL and runs the blocking
    pyodbc calls on worker threads via ``asyncio.to_thread``.
    """

    def __init__(self, config: SynapseConfig):
        self.config = config
        self.client = AzureSynapseClient(
            host=config.host,
            port=config.port,
            database=config.database,
            user=config.user,
            password=config.password,
        )
        # The configured schema doubles as the table-name prefix for
        # generated queries (see fetch_data).
        self.db_name = config.schema

    async def test_connection(self) -> bool:
        """Run the client's SELECT 1 probe off the event loop."""
        return await asyncio.to_thread(self.client.test_connection)

    def get_schema(self) -> List[Dict[str, Any]]:
        """Return one row per column from INFORMATION_SCHEMA for the configured schema.

        Annotation fixed: the client returns a list of dicts, not a dict.
        """
        return self.client.get_table_info(schema=self.db_name)

    def get_table_names(self, **kwargs) -> list[str]:
        """List base tables, defaulting to the configured schema (override with schema=...)."""
        return self.client.list_tables(kwargs.get('schema', self.db_name))

    def fetch_data(
        self,
        table_names: List[str],
        projection: str = '*',
        where_clause: Optional[str] = '1=1',
        join_query: Optional[str] = None,
        params: Optional[Dict[str, Any]] = None,
        offset: int = 0,
        limit: int = 10,
        order_by: Optional[str] = None,
        group_by: Optional[str] = None,
    ) -> List[Dict[str, Any]]:
        """Fetch a page of rows from one or more schema-qualified tables."""
        # T-SQL has no boolean literal TRUE — normalise any coming from the orchestrator
        normalized_where_clause = (where_clause or '1=1').strip()
        if normalized_where_clause.lower() == 'true':
            normalized_where_clause = '1=1'
        return self.client.execute_query_to_dict(
            projection=projection,
            table_prefix=f'{self.db_name}.',
            table_names=table_names,
            where_clause=normalized_where_clause,
            join_query=join_query,
            params=params,
            limit=limit,
            offset=offset,
            order_by=order_by,
            group_by=group_by,
        )

    def insert_rows_json(self, table_name: str, data: List[Dict[str, Any]]) -> None:
        """No-op: bulk insert into Synapse is not implemented for this plugin yet."""
        pass

    async def execute_query(
        self, query: str, use_legacy_sql: bool = False, dry_run: bool = False, **kwargs
    ) -> Any:
        """Run a raw query on a worker thread; ``use_legacy_sql``/``dry_run`` are
        accepted for interface parity and ignored here."""
        params = kwargs.get('params')
        return await asyncio.to_thread(self.client.execute_query_as_dict, query, params)

    async def execute_dynamic_query(
        self,
        query: List[Dict[str, Any]],
        odata_filter: Optional[str] = None,
        odata_params: Optional[Dict[str, Any]] = None,
        odata_data_filter: Optional[str] = None,
        odata_data_params: Optional[Dict[str, Any]] = None,
        offset: Optional[int] = 0,
        limit: Optional[int] = 100,
        params: Optional[Dict[str, Any]] = None,
    ):
        """Execute a batch of templated queries concurrently.

        Each entry in *query* carries ``id``, ``query`` (with ``{{rls}}`` and
        ``{{filters}}`` placeholders) and declared ``parameters``. All queries
        run in parallel worker threads; per-query failures are captured in the
        result map instead of aborting the batch.

        Returns:
            dict keyed by query id with status/error/description/result.

        Raises:
            ValueError: if an entry has no id, or a declared parameter is
                missing from *params*.
        """
        results = {}
        prepared_queries = []

        for query_obj in query:
            query_to_execute = query_obj.get('query', '')
            query_params = query_obj.get('parameters', {})
            query_id = query_obj.get('id')
            if not query_id:
                raise ValueError('Query ID is required')

            params_key = [p['name'] for p in query_params]
            params_to_execute: Dict[str, Any] = {}

            if params is None:
                params = {}

            # Every declared parameter must be supplied by the caller.
            for key in params_key:
                if key not in params:
                    raise ValueError(f'Missing parameter: {key} for query {query_id}')
                params_to_execute[key] = params[key]

            if odata_params:
                params_to_execute.update(odata_params)
            if odata_data_params:
                params_to_execute.update(odata_data_params)

            # Substitute RLS / OData filter placeholders; '1=1' keeps the
            # surrounding WHERE clause valid when no filter applies.
            query_to_execute = query_to_execute.replace(
                '{{rls}}', f'{odata_data_filter}' if odata_data_filter else '1=1'
            )
            query_to_execute = query_to_execute.replace(
                '{{filters}}', f'{odata_filter}' if odata_filter else '1=1'
            )
            # Strip a trailing semicolon so pagination can be appended.
            query_to_execute = query_to_execute.rstrip().rstrip(';')
            # OFFSET/FETCH requires an ORDER BY in T-SQL; add a no-op one
            # when the query does not already order its result.
            if _ORDER_BY_RE.search(query_to_execute):
                query_to_execute += (
                    f' OFFSET {offset} ROWS FETCH NEXT {limit} ROWS ONLY'
                )
            else:
                query_to_execute += (
                    f' ORDER BY (SELECT NULL) OFFSET {offset} ROWS FETCH NEXT {limit} ROWS ONLY'
                )

            prepared_queries.append((query_id, query_to_execute, params_to_execute))

        # Launch every query on its own worker thread, then gather results.
        tasks = [
            (
                query_id,
                asyncio.create_task(
                    asyncio.to_thread(
                        self.client.execute_query_as_dict,
                        query_to_execute,
                        params_to_execute,
                    )
                ),
            )
            for query_id, query_to_execute, params_to_execute in prepared_queries
        ]

        for query_id, task in tasks:
            try:
                formatted_result = await task
                results[query_id] = {
                    'status': 'success',
                    'error': None,
                    'description': f'Query {query_id} executed successfully',
                    'result': formatted_result,
                }
            except Exception as e:
                # Per-query failure: report it but keep the rest of the batch.
                results[query_id] = {
                    'status': 'error',
                    'error': str(e),
                    'description': f'Error executing query {query_id}',
                    'result': [],
                }

        return results


__all__ = ['SynapsePlugin', 'SynapseConfig']
'dbo' diff --git a/wavefront/server/uv.lock b/wavefront/server/uv.lock index f7bf7aa7..0c24816d 100644 --- a/wavefront/server/uv.lock +++ b/wavefront/server/uv.lock @@ -1443,6 +1443,7 @@ dependencies = [ { name = "google-cloud-kms" }, { name = "google-cloud-pubsub" }, { name = "google-cloud-storage" }, + { name = "pyodbc" }, { name = "redshift-connector" }, ] @@ -1458,6 +1459,7 @@ requires-dist = [ { name = "google-cloud-kms", specifier = ">=3.5.1" }, { name = "google-cloud-pubsub", specifier = ">=2.28.0" }, { name = "google-cloud-storage", specifier = "<3.0.0" }, + { name = "pyodbc", specifier = ">=5.0.0" }, { name = "redshift-connector", specifier = ">=2.1.7" }, ] @@ -4598,6 +4600,59 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ae/af/1576ecfc8a62d31c0c8b34b856e52f6b05f1d76546dbac0e1d037f044a9e/pymupdf4llm-0.0.17-py3-none-any.whl", hash = "sha256:26de9996945f15e3ca507908f80dc18a959f5b5214bb2e302c7f7034089665a0", size = 26190, upload-time = "2024-09-21T18:40:03.097Z" }, ] +[[package]] +name = "pyodbc" +version = "5.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8f/85/44b10070a769a56bd910009bb185c0c0a82daff8d567cd1a116d7d730c7d/pyodbc-5.3.0.tar.gz", hash = "sha256:2fe0e063d8fb66efd0ac6dc39236c4de1a45f17c33eaded0d553d21c199f4d05", size = 121770, upload-time = "2025-10-17T18:04:09.43Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/c7/534986d97a26cb8f40ef456dfcf00d8483161eade6d53fa45fcf2d5c2b87/pyodbc-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ebc3be93f61ea0553db88589e683ace12bf975baa954af4834ab89f5ee7bf8ae", size = 71958, upload-time = "2025-10-17T18:03:10.163Z" }, + { url = "https://files.pythonhosted.org/packages/69/3c/6fe3e9eae6db1c34d6616a452f9b954b0d5516c430f3dd959c9d8d725f2a/pyodbc-5.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9b987a25a384f31e373903005554230f5a6d59af78bce62954386736a902a4b3", size = 71843, upload-time = 
"2025-10-17T18:03:11.058Z" }, + { url = "https://files.pythonhosted.org/packages/44/0e/81a0315d0bf7e57be24338dbed616f806131ab706d87c70f363506dc13d5/pyodbc-5.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:676031723aac7dcbbd2813bddda0e8abf171b20ec218ab8dfb21d64a193430ea", size = 327191, upload-time = "2025-10-17T18:03:11.93Z" }, + { url = "https://files.pythonhosted.org/packages/43/ae/b95bb2068f911950322a97172c68675c85a3e87dc04a98448c339fcbef21/pyodbc-5.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5c30c5cd40b751f77bbc73edd32c4498630939bcd4e72ee7e6c9a4b982cc5ca", size = 332228, upload-time = "2025-10-17T18:03:13.096Z" }, + { url = "https://files.pythonhosted.org/packages/dc/21/2433625f7d5922ee9a34e3805805fa0f1355d01d55206c337bb23ec869bf/pyodbc-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2035c7dfb71677cd5be64d3a3eb0779560279f0a8dc6e33673499498caa88937", size = 1296469, upload-time = "2025-10-17T18:03:14.61Z" }, + { url = "https://files.pythonhosted.org/packages/3a/f4/c760caf7bb9b3ab988975d84bd3e7ebda739fe0075c82f476d04ee97324c/pyodbc-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5cbe4d753723c8a8f65020b7a259183ef5f14307587165ce37e8c7e251951852", size = 1353163, upload-time = "2025-10-17T18:03:16.272Z" }, + { url = "https://files.pythonhosted.org/packages/14/ad/f9ca1e9e44fd91058f6e35b233b1bb6213d590185bfcc2a2c4f1033266e7/pyodbc-5.3.0-cp311-cp311-win32.whl", hash = "sha256:d255f6b117d05cfc046a5201fdf39535264045352ea536c35777cf66d321fbb8", size = 62925, upload-time = "2025-10-17T18:03:17.649Z" }, + { url = "https://files.pythonhosted.org/packages/e6/cf/52b9b94efd8cfd11890ae04f31f50561710128d735e4e38a8fbb964cd2c2/pyodbc-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:f1ad0e93612a6201621853fc661209d82ff2a35892b7d590106fe8f97d9f1f2a", size = 69329, upload-time = "2025-10-17T18:03:18.474Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/6f/bf5433bb345007f93003fa062e045890afb42e4e9fc6bd66acc2c3bd12ca/pyodbc-5.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:0df7ff47fab91ea05548095b00e5eb87ed88ddf4648c58c67b4db95ea4913e23", size = 64447, upload-time = "2025-10-17T18:03:19.691Z" }, + { url = "https://files.pythonhosted.org/packages/f5/0c/7ecf8077f4b932a5d25896699ff5c394ffc2a880a9c2c284d6a3e6ea5949/pyodbc-5.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5ebf6b5d989395efe722b02b010cb9815698a4d681921bf5db1c0e1195ac1bde", size = 72994, upload-time = "2025-10-17T18:03:20.551Z" }, + { url = "https://files.pythonhosted.org/packages/03/78/9fbde156055d88c1ef3487534281a5b1479ee7a2f958a7e90714968749ac/pyodbc-5.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:197bb6ddafe356a916b8ee1b8752009057fce58e216e887e2174b24c7ab99269", size = 72535, upload-time = "2025-10-17T18:03:21.423Z" }, + { url = "https://files.pythonhosted.org/packages/9f/f9/8c106dcd6946e95fee0da0f1ba58cd90eb872eebe8968996a2ea1f7ac3c1/pyodbc-5.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c6ccb5315ec9e081f5cbd66f36acbc820ad172b8fa3736cf7f993cdf69bd8a96", size = 333565, upload-time = "2025-10-17T18:03:22.695Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/2c70f47a76a4fafa308d148f786aeb35a4d67a01d41002f1065b465d9994/pyodbc-5.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5dd3d5e469f89a3112cf8b0658c43108a4712fad65e576071e4dd44d2bd763c7", size = 340283, upload-time = "2025-10-17T18:03:23.691Z" }, + { url = "https://files.pythonhosted.org/packages/7d/b2/0631d84731606bfe40d3b03a436b80cbd16b63b022c7b13444fb30761ca8/pyodbc-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b180bc5e49b74fd40a24ef5b0fe143d0c234ac1506febe810d7434bf47cb925b", size = 1302767, upload-time = "2025-10-17T18:03:25.311Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/b9/707c5314cca9401081b3757301241c167a94ba91b4bd55c8fa591bf35a4a/pyodbc-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e3c39de3005fff3ae79246f952720d44affc6756b4b85398da4c5ea76bf8f506", size = 1361251, upload-time = "2025-10-17T18:03:26.538Z" }, + { url = "https://files.pythonhosted.org/packages/97/7c/893036c8b0c8d359082a56efdaa64358a38dda993124162c3faa35d1924d/pyodbc-5.3.0-cp312-cp312-win32.whl", hash = "sha256:d32c3259762bef440707098010035bbc83d1c73d81a434018ab8c688158bd3bb", size = 63413, upload-time = "2025-10-17T18:03:27.903Z" }, + { url = "https://files.pythonhosted.org/packages/c0/70/5e61b216cc13c7f833ef87f4cdeab253a7873f8709253f5076e9bb16c1b3/pyodbc-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe77eb9dcca5fc1300c9121f81040cc9011d28cff383e2c35416e9ec06d4bc95", size = 70133, upload-time = "2025-10-17T18:03:28.746Z" }, + { url = "https://files.pythonhosted.org/packages/aa/85/e7d0629c9714a85eb4f85d21602ce6d8a1ec0f313fde8017990cf913e3b4/pyodbc-5.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:afe7c4ac555a8d10a36234788fc6cfc22a86ce37fc5ba88a1f75b3e6696665dc", size = 64700, upload-time = "2025-10-17T18:03:29.638Z" }, + { url = "https://files.pythonhosted.org/packages/0c/1d/9e74cbcc1d4878553eadfd59138364b38656369eb58f7e5b42fb344c0ce7/pyodbc-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7e9ab0b91de28a5ab838ac4db0253d7cc8ce2452efe4ad92ee6a57b922bf0c24", size = 72975, upload-time = "2025-10-17T18:03:30.466Z" }, + { url = "https://files.pythonhosted.org/packages/37/c7/27d83f91b3144d3e275b5b387f0564b161ddbc4ce1b72bb3b3653e7f4f7a/pyodbc-5.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6132554ffbd7910524d643f13ce17f4a72f3a6824b0adef4e9a7f66efac96350", size = 72541, upload-time = "2025-10-17T18:03:31.348Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/33/2bb24e7fc95e98a7b11ea5ad1f256412de35d2e9cc339be198258c1d9a76/pyodbc-5.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1629af4706e9228d79dabb4863c11cceb22a6dab90700db0ef449074f0150c0d", size = 343287, upload-time = "2025-10-17T18:03:32.287Z" }, + { url = "https://files.pythonhosted.org/packages/fa/24/88cde8b6dc07a93a92b6c15520a947db24f55db7bd8b09e85956642b7cf3/pyodbc-5.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ceaed87ba2ea848c11223f66f629ef121f6ebe621f605cde9cfdee4fd9f4b68", size = 350094, upload-time = "2025-10-17T18:03:33.336Z" }, + { url = "https://files.pythonhosted.org/packages/c2/99/53c08562bc171a618fa1699297164f8885e66cde38c3b30f454730d0c488/pyodbc-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3cc472c8ae2feea5b4512e23b56e2b093d64f7cbc4b970af51da488429ff7818", size = 1301029, upload-time = "2025-10-17T18:03:34.561Z" }, + { url = "https://files.pythonhosted.org/packages/d8/10/68a0b5549876d4b53ba4c46eed2a7aca32d589624ed60beef5bd7382619e/pyodbc-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c79df54bbc25bce9f2d87094e7b39089c28428df5443d1902b0cc5f43fd2da6f", size = 1361420, upload-time = "2025-10-17T18:03:35.958Z" }, + { url = "https://files.pythonhosted.org/packages/41/0f/9dfe4987283ffcb981c49a002f0339d669215eb4a3fe4ee4e14537c52852/pyodbc-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c2eb0b08e24fe5c40c7ebe9240c5d3bd2f18cd5617229acee4b0a0484dc226f2", size = 63399, upload-time = "2025-10-17T18:03:36.931Z" }, + { url = "https://files.pythonhosted.org/packages/56/03/15dcefe549d3888b649652af7cca36eda97c12b6196d92937ca6d11306e9/pyodbc-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:01166162149adf2b8a6dc21a212718f205cabbbdff4047dc0c415af3fd85867e", size = 70133, upload-time = "2025-10-17T18:03:38.47Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/c1/c8b128ae59a14ecc8510e9b499208e342795aecc3af4c3874805c720b8db/pyodbc-5.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:363311bd40320b4a61454bebf7c38b243cd67c762ed0f8a5219de3ec90c96353", size = 64683, upload-time = "2025-10-17T18:03:39.68Z" }, + { url = "https://files.pythonhosted.org/packages/ab/f2/c26d82a7ce1e90b8bbb8731d3d53de73814e2f6606b9db9d978303aa8d5f/pyodbc-5.3.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:3f1bdb3ce6480a17afaaef4b5242b356d4997a872f39e96f015cabef00613797", size = 73513, upload-time = "2025-10-17T18:03:40.536Z" }, + { url = "https://files.pythonhosted.org/packages/82/d5/1ab1b7c4708cbd701990a8f7183c5bb5e0712d5e8479b919934e46dadab4/pyodbc-5.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7713c740a10f33df3cb08f49a023b7e1e25de0c7c99650876bbe717bc95ee780", size = 72631, upload-time = "2025-10-17T18:03:41.713Z" }, + { url = "https://files.pythonhosted.org/packages/b1/f1/7e3831eeac2b09b31a77e6b3495491ce162035ff2903d7261b49d35aa3c2/pyodbc-5.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cf18797a12e70474e1b7f5027deeeccea816372497e3ff2d46b15bec2d18a0cc", size = 344580, upload-time = "2025-10-17T18:03:42.67Z" }, + { url = "https://files.pythonhosted.org/packages/a2/a6/71d26d626a3c45951620b7ff356ec920e420f0e09b0a924123682aa5e4ab/pyodbc-5.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:08b2439500e212625471d32f8fde418075a5ddec556e095e5a4ba56d61df2dc6", size = 350224, upload-time = "2025-10-17T18:03:43.731Z" }, + { url = "https://files.pythonhosted.org/packages/93/14/f702c5e8c2d595776266934498505f11b7f1545baf21ffec1d32c258e9d3/pyodbc-5.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:729c535341bb09c476f219d6f7ab194bcb683c4a0a368010f1cb821a35136f05", size = 1301503, upload-time = "2025-10-17T18:03:45.013Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/b2/ad92ebdd1b5c7fec36b065e586d1d34b57881e17ba5beec5c705f1031058/pyodbc-5.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c67e7f2ce649155ea89beb54d3b42d83770488f025cf3b6f39ca82e9c598a02e", size = 1361050, upload-time = "2025-10-17T18:03:46.298Z" }, + { url = "https://files.pythonhosted.org/packages/19/40/dc84e232da07056cb5aaaf5f759ba4c874bc12f37569f7f1670fc71e7ae1/pyodbc-5.3.0-cp314-cp314-win32.whl", hash = "sha256:a48d731432abaee5256ed6a19a3e1528b8881f9cb25cb9cf72d8318146ea991b", size = 65670, upload-time = "2025-10-17T18:03:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/b8/79/c48be07e8634f764662d7a279ac204f93d64172162dbf90f215e2398b0bd/pyodbc-5.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:58635a1cc859d5af3f878c85910e5d7228fe5c406d4571bffcdd281375a54b39", size = 72177, upload-time = "2025-10-17T18:03:57.296Z" }, + { url = "https://files.pythonhosted.org/packages/fc/79/e304574446b2263f428ce14df590ba52c2e0e0205e8d34b235b582b7d57e/pyodbc-5.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:754d052030d00c3ac38da09ceb9f3e240e8dd1c11da8906f482d5419c65b9ef5", size = 66668, upload-time = "2025-10-17T18:03:58.174Z" }, + { url = "https://files.pythonhosted.org/packages/43/17/f4eabf443b838a2728773554017d08eee3aca353102934a7e3ba96fb0e31/pyodbc-5.3.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:f927b440c38ade1668f0da64047ffd20ec34e32d817f9a60d07553301324b364", size = 75780, upload-time = "2025-10-17T18:03:47.273Z" }, + { url = "https://files.pythonhosted.org/packages/59/ea/e79e168c3d38c27d59d5d96273fd9e3c3ba55937cc944c4e60618f51de90/pyodbc-5.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:25c4cfb2c08e77bc6e82f666d7acd52f0e52a0401b1876e60f03c73c3b8aedc0", size = 75503, upload-time = "2025-10-17T18:03:48.171Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/81/d1d7c125ec4a20e83fdc28e119b8321192b2bd694f432cf63e1199b2b929/pyodbc-5.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc834567c2990584b9726cba365834d039380c9dbbcef3030ddeb00c6541b943", size = 398356, upload-time = "2025-10-17T18:03:49.131Z" }, + { url = "https://files.pythonhosted.org/packages/5e/fc/f6be4b3cc3910f8c2aba37aa41671121fd6f37b402ae0fefe53a70ac7cd5/pyodbc-5.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8339d3094858893c1a68ee1af93efc4dff18b8b65de54d99104b99af6306320d", size = 397291, upload-time = "2025-10-17T18:03:50.18Z" }, + { url = "https://files.pythonhosted.org/packages/03/2e/0610b1ed05a5625528d52f6cece9610e84617d35f475c89c2a52f66d13f7/pyodbc-5.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74528fe148980d0c735c0ebb4a4dc74643ac4574337c43c1006ac4d09593f92d", size = 1353900, upload-time = "2025-10-17T18:03:51.339Z" }, + { url = "https://files.pythonhosted.org/packages/1d/f1/43497e1d37f9f71b43b2b3172e7b1bdf50851e278390c3fb6b46a3630c53/pyodbc-5.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d89a7f2e24227150c13be8164774b7e1f9678321a4248f1356a465b9cc17d31e", size = 1406062, upload-time = "2025-10-17T18:03:52.546Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/88a1277c2f7d9ab1cec0a71e074ba24fd4a1710a43974682546da90a1343/pyodbc-5.3.0-cp314-cp314t-win32.whl", hash = "sha256:af4d8c9842fc4a6360c31c35508d6594d5a3b39922f61b282c2b4c9d9da99514", size = 70132, upload-time = "2025-10-17T18:03:53.715Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c7/ee98c62050de4aa8bafb6eb1e11b95e0b0c898bd5930137c6dc776e06a9b/pyodbc-5.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bfeb3e34795d53b7d37e66dd54891d4f9c13a3889a8f5fe9640e56a82d770955", size = 79452, upload-time = "2025-10-17T18:03:54.664Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/8f/d8889efd96bbe8e5d43ff9701f6b1565a8e09c3e1f58c388d550724f777b/pyodbc-5.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:13656184faa3f2d5c6f19b701b8f247342ed581484f58bf39af7315c054e69db", size = 70142, upload-time = "2025-10-17T18:03:55.551Z" }, +] + [[package]] name = "pyparsing" version = "3.2.5"