Skip to content

Commit 5ca1140

Browse files
authored
Merge pull request #26 from IEE-TUGraz/feature/TechnicalRepresentation
Refactor kmedoids aggregation into composable functions; add comprehensive SQLite database export features (objective decomposition, dual values, solver stats); fix multiple bugs in data handling and method return values; enhance output (and security) by escaping user input in printer methods.
2 parents 5df1978 + 4d772b2 commit 5ca1140

7 files changed

Lines changed: 649 additions & 170 deletions

File tree

CaseStudy.py

Lines changed: 181 additions & 62 deletions
Large diffs are not rendered by default.

ExcelReader.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -332,6 +332,11 @@ def get_Power_Network(excel_file_path: str, keep_excluded_entries: bool = False,
332332
"""
333333
dPower_Network = __read_non_pivoted_file(excel_file_path, "v0.1.2", ["i", "j", "c"], True, keep_excluded_entries, fail_on_wrong_version)
334334

335+
# Check that all values in column pEnableInvest are either 0 or 1
336+
if not dPower_Network['pEnableInvest'].isin([0, 1]).all():
337+
invalid_values = dPower_Network.loc[~dPower_Network['pEnableInvest'].isin([0, 1]), 'pEnableInvest']
338+
raise ValueError(f"dPower_Network: Found invalid values in 'pEnableInvest' column. Only 0 and 1 are allowed, but found: {invalid_values}")
339+
335340
return dPower_Network
336341

337342

ExcelWriter.py

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,11 @@
1+
from __future__ import annotations
2+
13
import os
24
import time
35
import xml.etree.ElementTree as ET
46
from copy import copy, deepcopy
57
from pathlib import Path
8+
from typing import TYPE_CHECKING
69

710
import numpy as np
811
import openpyxl
@@ -12,7 +15,9 @@
1215

1316
import ExcelReader
1417
import TableDefinition
15-
from CaseStudy import CaseStudy
18+
19+
if TYPE_CHECKING:
20+
from CaseStudy import CaseStudy
1621
from TableDefinition import CellStyle, Alignment, Font, Color, Text, Column, NumberFormat, TableDefinition
1722
from printer import Printer
1823

@@ -261,6 +266,8 @@ def write_caseStudy(self, cs: CaseStudy, folder_path: str | Path) -> None:
261266
self.write_Power_VRES(cs.dPower_VRES, folder_path)
262267
if hasattr(cs, "dPower_VRESProfiles"):
263268
self.write_Power_VRESProfiles(cs.dPower_VRESProfiles, folder_path)
269+
if hasattr(cs, "dPower_ImportExport") and cs.dPower_ImportExport is not None:
270+
self.write_Power_ImportExport(cs.dPower_ImportExport, folder_path)
264271
self.write_Power_WeightsK(cs.dPower_WeightsK, folder_path)
265272
self.write_Power_WeightsRP(cs.dPower_WeightsRP, folder_path)
266273

SQLiteWriter.py

Lines changed: 239 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,8 @@
1313
def model_to_sqlite(model: pyo.base.Model, filename: str) -> None:
1414
"""
1515
Save the model to a SQLite database.
16+
Automatically includes objective decomposition and dual values.
17+
1618
:param model: Pyomo model to save
1719
:param filename: Path to the SQLite database file
1820
:return: None
@@ -40,10 +42,247 @@ def model_to_sqlite(model: pyo.base.Model, filename: str) -> None:
4042
df = pd.DataFrame([pyo.value(o)], columns=['values'])
4143
case pyomo.core.base.constraint.ConstraintList | pyomo.core.base.constraint.IndexedConstraint | pyomo.core.base.expression.IndexedExpression: # Those will not be saved on purpose
4244
continue
45+
case pyomo.core.base.suffix.Suffix:
46+
if str(o) in ["_relaxed_integer_vars", "dual"]:
47+
continue # Not saved on purpose
48+
else:
49+
printer.warning(f"Pyomo-Type {type(o)} not implemented, {o.name} will not be saved to SQLite")
50+
continue
4351
case _:
4452
printer.warning(f"Pyomo-Type {type(o)} not implemented, {o.name} will not be saved to SQLite")
4553
continue
4654
df.to_sql(o.name, cnx, if_exists='replace')
4755
cnx.commit()
4856
cnx.close()
57+
58+
# Automatically add objective decomposition and dual values
59+
add_objective_decomposition_to_sqlite(filename, model)
60+
add_dual_values_to_sqlite(filename, model)
4961
pass
62+
63+
64+
def add_solver_statistics_to_sqlite(filename: str, results, work_units=None) -> None:
    """
    Add solver statistics (like Gurobi work-units) to an existing SQLite database.

    Best-effort extraction: every statistic is optional and missing fields are
    skipped, so an unusual results object never raises out of this function.
    Writes a single-row table 'solver_statistics' (replaced if it exists).

    :param filename: Path to the SQLite database file
    :param results: Pyomo solver results object
    :param work_units: Optional work units value (from Gurobi solver)
    :return: None
    """
    cnx = sqlite3.connect(filename)

    # Collected statistics; written as one row at the end.
    stats = {}

    try:
        # Add work units if provided
        if work_units is not None:
            stats['work_units'] = float(work_units)

        # Get basic solver info from solver[0]
        if hasattr(results, 'solver') and len(results.solver) > 0:
            solver_info = results.solver[0]

            # Status and termination
            if hasattr(solver_info, 'status'):
                stats['solver_status'] = str(solver_info.status)
            if hasattr(solver_info, 'termination_condition'):
                stats['termination_condition'] = str(solver_info.termination_condition)
            if hasattr(solver_info, 'time'):
                try:
                    time_val = solver_info.time
                    # Fix: compare the type NAME instead of the full repr string.
                    # The old check compared str(type(...)) against the hard-coded
                    # "<class 'pyomo.opt.results.container.UndefinedData'>", which
                    # silently breaks whenever pyomo moves/renames that class.
                    if time_val is not None and type(time_val).__name__ != 'UndefinedData':
                        stats['solver_time'] = float(time_val)
                except Exception:
                    pass  # solver time is optional; ignore conversion problems

        # Get problem statistics (bounds and model size, when reported)
        if hasattr(results, 'problem'):
            problem = results.problem
            for attr in ['lower_bound', 'upper_bound', 'number_of_constraints',
                         'number_of_variables', 'number_of_nonzeros']:
                if hasattr(problem, attr):
                    value = getattr(problem, attr)
                    if value is not None:
                        stats[attr] = float(value) if isinstance(value, (int, float)) else str(value)

        # Persist everything we found as a single-row table
        if stats:
            df = pd.DataFrame([stats])
            df.to_sql('solver_statistics', cnx, if_exists='replace', index=False)
            cnx.commit()
            work_units_str = f"{stats['work_units']:.2f}" if 'work_units' in stats else 'N/A'
            printer.information(f"Added solver statistics to SQLite database (work_units: {work_units_str})")
        else:
            printer.warning("No solver statistics found in results object")

    except Exception as e:
        # Statistics are informational: report but never propagate.
        printer.error(f"Failed to add solver statistics: {e}")
        import traceback
        traceback.print_exc()
    finally:
        cnx.close()
125+
126+
127+
def add_run_parameters_to_sqlite(filename: str, **parameters) -> None:
    """
    Store run parameters in an existing SQLite database.

    A single-row table named 'run_parameters' is (re)created with one column
    per parameter. Numeric values are stored as floats, None as the string
    'None', and anything else as its string representation.

    :param filename: Path to the SQLite database file
    :param parameters: Keyword arguments containing parameter names and values
    :return: None

    Example:
        add_run_parameters_to_sqlite('model.sqlite',
                                     zoi='R1',
                                     dc_buffer=2,
                                     tp_buffer=1,
                                     scale_demand=1.3,
                                     scale_pmax=1.0)
    """
    connection = sqlite3.connect(filename)

    try:
        def _coerce(raw):
            # Normalize one value into something SQLite can hold.
            if raw is None:
                return 'None'
            if isinstance(raw, (int, float)):
                return float(raw)
            return str(raw)

        normalized = {name: _coerce(raw) for name, raw in parameters.items()}

        if not normalized:
            printer.warning("No run parameters provided")
        else:
            pd.DataFrame([normalized]).to_sql('run_parameters', connection, if_exists='replace', index=False)
            connection.commit()
            summary = ', '.join(f'{k}={v}' for k, v in normalized.items())
            printer.information(f"Added run parameters to SQLite database: {summary}")

    except Exception as e:
        # Run parameters are metadata: report but never propagate.
        printer.error(f"Failed to add run parameters: {e}")
        import traceback
        traceback.print_exc()
    finally:
        connection.close()
172+
173+
174+
def add_objective_decomposition_to_sqlite(filename: str, model: pyo.ConcreteModel) -> None:
    """
    Add objective function decomposition to SQLite database.
    This enables recalculation of ZOI objectives without the full model.

    The objective is decomposed into:
    - objective_constant: Single row with the constant term
    - objective_terms: Variable names, indices, and their coefficients

    :param filename: Path to the SQLite database file
    :param model: Pyomo model with objective
    :return: None
    """
    from pyomo.repn import generate_standard_repn

    cnx = sqlite3.connect(filename)

    try:
        # Generalization: prefer the component literally named 'objective'
        # (previous behavior), but fall back to the first active Objective so
        # models with a differently-named objective are also supported.
        if hasattr(model, 'objective'):
            objective = model.objective
        else:
            objective = next(model.component_objects(pyo.Objective, active=True), None)
            if objective is None:
                printer.warning("No active objective found on model - skipping objective decomposition")
                return

        # Decompose objective into linear representation
        repn = generate_standard_repn(objective.expr, quadratic=False)

        # Store objective decomposition as separate tables
        # 1. Constant term (explicit None-check: a constant of 0.0 is valid)
        constant = repn.constant if repn.constant is not None else 0.0
        df_constant = pd.DataFrame([{'constant': constant}])
        df_constant.to_sql('objective_constant', cnx, if_exists='replace', index=False)

        # 2. Variable names, indices, and coefficients
        var_names = [var.parent_component().name for var in repn.linear_vars]
        var_indices = [str(var.index()) for var in repn.linear_vars]
        coefs = list(repn.linear_coefs)
        df_terms = pd.DataFrame({
            'var_name': var_names,
            'var_index': var_indices,
            'coefficient': coefs
        })
        df_terms.to_sql('objective_terms', cnx, if_exists='replace', index=False)

        cnx.commit()
        printer.information(f"Added objective decomposition to SQLite ({len(var_indices)} terms)")

    except Exception as e:
        # Decomposition is an optional export: report but never propagate.
        printer.error(f"Failed to add objective decomposition: {e}")
        import traceback
        traceback.print_exc()
    finally:
        cnx.close()
220+
221+
222+
def add_dual_values_to_sqlite(filename: str, model: pyo.ConcreteModel) -> None:
    """
    Add dual values (shadow prices) from model constraints to SQLite database.

    Dual values are stored in tables named 'dual_<constraint_name>' with:
    - Index columns for the constraint
    - 'dual_value' column containing the dual/shadow price

    :param filename: Path to the SQLite database file
    :param model: Solved Pyomo model with dual suffix
    :return: None
    """
    cnx = sqlite3.connect(filename)

    try:
        # Without a dual suffix there is nothing to export.
        if not hasattr(model, 'dual'):
            printer.warning("Model does not have dual suffix - no dual values to save")
            return

        n_duals = 0
        n_constraints = 0

        # One table per constraint family that actually carries duals.
        for con in model.component_objects(pyo.Constraint, active=True):
            rows = []

            for idx in con:
                try:
                    dual = model.dual[con[idx]]
                    if dual is None:
                        continue
                    if isinstance(idx, tuple):
                        # Multi-indexed constraint: one column per index position
                        row = {str(pos): part for pos, part in enumerate(idx)}
                    else:
                        # Single-indexed or scalar constraint
                        row = {'0': idx}
                    row['dual_value'] = float(dual)
                except (KeyError, AttributeError):
                    # No dual recorded for this constraint member
                    continue
                rows.append(row)
                n_duals += 1

            n_constraints += 1

            if rows:
                pd.DataFrame(rows).to_sql(f'dual_{con.name}', cnx, if_exists='replace', index=False)

        cnx.commit()

        if n_duals > 0:
            printer.information(f"Added dual values to SQLite ({n_duals} duals from {n_constraints} constraints)")
        else:
            printer.warning(f"No dual values found in model (checked {n_constraints} constraints)")

    except Exception as e:
        # Dual export is best-effort: report but never propagate.
        printer.error(f"Failed to add dual values: {e}")
        import traceback
        traceback.print_exc()
    finally:
        cnx.close()

0 commit comments

Comments
 (0)