Skip to content

Commit d688bb6

Browse files
committed
Merge remote-tracking branch 'origin/main' into feature/movingWindow
2 parents 9a8ab5f + 91e8229 commit d688bb6

7 files changed

Lines changed: 863 additions & 176 deletions

File tree

CaseStudy.py

Lines changed: 184 additions & 61 deletions
Large diffs are not rendered by default.

ExcelReader.py

Lines changed: 13 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,6 @@ def check_LEGOExcel_version(xls: pd.ExcelFile, sheet_name: str, version_specifie
3636
f"Excel file '{excel_file_path}' (sheet '{sheet_name}') does not have the correct version specifier. "
3737
f"Expected '{version_specifier}' but got '{version_cell}'.")
3838
printer.error(f"Trying to work with it any way, but this can have unintended consequences!")
39-
pass
4039

4140

4241
def __read_non_pivoted_file(excel_file_path: str, version_specifier: str, indices: list[str], has_excl_column: bool,
@@ -348,6 +347,11 @@ def get_Power_Network(excel_file_path: str, keep_excluded_entries: bool = False,
348347
"""
349348
dPower_Network = __read_non_pivoted_file(excel_file_path, "v0.1.2", ["i", "j", "c"], True, keep_excluded_entries, fail_on_wrong_version)
350349

350+
# Check that all values in column pEnableInvest are either 0 or 1
351+
if not dPower_Network['pEnableInvest'].isin([0, 1]).all():
352+
invalid_values = dPower_Network.loc[~dPower_Network['pEnableInvest'].isin([0, 1]), 'pEnableInvest']
353+
raise ValueError(f"dPower_Network: Found invalid values in 'pEnableInvest' column. Only 0 and 1 are allowed, but found: {invalid_values}")
354+
351355
return dPower_Network
352356

353357

@@ -519,8 +523,10 @@ def compare_Excels(source_path: str, target_path: str, dont_check_formatting: bo
519523
for k, v in source_cell.font.__dict__.items():
520524
if k == "color" and v is not None:
521525
for k2, v2 in v.__dict__.items():
522-
if v2 != getattr(target_cell.font.color, k2):
523-
printer.error(f"Mismatch in font color at {sheet}/{source_cell.coordinate}: {v2} != {getattr(target_cell.font.color, k2)}")
526+
if ((v2 is None and target_cell.font.color is not None) or
527+
(v2 is not None and target_cell.font.color is None) or
528+
(v2 != getattr(target_cell.font.color, k2))):
529+
printer.error(f"Mismatch in font color at {sheet}/{source_cell.coordinate}: {v2} != {getattr(target_cell.font.color, k2) if target_cell.font.color is not None else None}")
524530
equal = False
525531
elif getattr(target_cell.font, k) != v:
526532
printer.error(f"Mismatch in font property '{k}' at {sheet}/{source_cell.coordinate}: {getattr(target_cell.font, k)} != {v}")
@@ -530,8 +536,10 @@ def compare_Excels(source_path: str, target_path: str, dont_check_formatting: bo
530536
for k, v in source_cell.fill.__dict__.items():
531537
if k == "color" and v is not None:
532538
for k2, v2 in v.__dict__.items():
533-
if v2 != getattr(target_cell.fill.color, k2):
534-
printer.error(f"Mismatch in fill color at {sheet}/{source_cell.coordinate}: {v2} != {getattr(target_cell.fill.color, k2)}")
539+
if ((v2 is None and target_cell.fill.color is not None) or
540+
(v2 is not None and target_cell.fill.color is None) or
541+
(v2 != getattr(target_cell.fill.color, k2))):
542+
printer.error(f"Mismatch in fill color at {sheet}/{source_cell.coordinate}: {v2} != {getattr(target_cell.fill.color, k2) if target_cell.fill.color is not None else None}")
535543
equal = False
536544
elif getattr(target_cell.fill, k) != v:
537545
printer.error(f"Mismatch in fill property '{k}' at {sheet}/{source_cell.coordinate}: {getattr(target_cell.fill, k)} != {v}")

ExcelWriter.py

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,11 @@
1+
from __future__ import annotations
2+
13
import os
24
import time
35
import xml.etree.ElementTree as ET
46
from copy import copy, deepcopy
57
from pathlib import Path
8+
from typing import TYPE_CHECKING
69

710
import numpy as np
811
import openpyxl
@@ -12,7 +15,9 @@
1215

1316
import ExcelReader
1417
import TableDefinition
15-
from CaseStudy import CaseStudy
18+
19+
if TYPE_CHECKING:
20+
from CaseStudy import CaseStudy
1621
from TableDefinition import CellStyle, Alignment, Font, Color, Text, Column, NumberFormat, TableDefinition
1722
from printer import Printer
1823

@@ -262,6 +267,8 @@ def write_caseStudy(self, cs: CaseStudy, folder_path: str | Path) -> None:
262267
self.write_Power_VRES(cs.dPower_VRES, folder_path)
263268
if hasattr(cs, "dPower_VRESProfiles"):
264269
self.write_Power_VRESProfiles(cs.dPower_VRESProfiles, folder_path)
270+
if hasattr(cs, "dPower_ImportExport") and cs.dPower_ImportExport is not None:
271+
self.write_Power_ImportExport(cs.dPower_ImportExport, folder_path)
265272
self.write_Power_WeightsK(cs.dPower_WeightsK, folder_path)
266273
self.write_Power_WeightsRP(cs.dPower_WeightsRP, folder_path)
267274

@@ -597,3 +604,6 @@ def model_to_excel(model: pyomo.core.Model, target_path: str) -> None:
597604
for excel_definition_id, file_path in files_different:
598605
printer.error(f"'{excel_definition_id}' (file: '{file_path}')")
599606
exit(1)
607+
else:
608+
printer.success("All Excel files were equal")
609+
exit(0)

SQLiteWriter.py

Lines changed: 239 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -289,10 +289,247 @@ def model_to_sqlite(model: pyo.base.Model, filename: str, use_moving_window: boo
289289
df = pd.DataFrame([pyo.value(o)], columns=['values'])
290290
case pyomo.core.base.constraint.ConstraintList | pyomo.core.base.constraint.IndexedConstraint | pyomo.core.base.expression.IndexedExpression: # Those will not be saved on purpose
291291
continue
292+
case pyomo.core.base.suffix.Suffix:
293+
if str(o) in ["_relaxed_integer_vars", "dual"]:
294+
continue # Not saved on purpose
295+
else:
296+
printer.warning(f"Pyomo-Type {type(o)} not implemented, {o.name} will not be saved to SQLite")
297+
continue
292298
case _:
293299
printer.warning(f"Pyomo-Type {type(o)} not implemented, {o.name} will not be saved to SQLite")
294300
continue
295-
df.to_sql(o.name, cnx, if_exists='replace')
301+
df.to_sql(o.name, cnx, if_exists='replace')
302+
cnx.commit()
303+
cnx.close()
304+
305+
# Automatically add objective decomposition and dual values
306+
add_objective_decomposition_to_sqlite(filename, model)
307+
add_dual_values_to_sqlite(filename, model)
308+
pass
309+
310+
311+
def add_solver_statistics_to_sqlite(filename: str, results, work_units=None) -> None:
312+
"""
313+
Add solver statistics (like Gurobi work-units) to an existing SQLite database.
314+
:param filename: Path to the SQLite database file
315+
:param results: Pyomo solver results object
316+
:param work_units: Optional work units value (from Gurobi solver)
317+
:return: None
318+
"""
319+
cnx = sqlite3.connect(filename)
320+
321+
# Extract solver statistics
322+
stats = {}
323+
324+
try:
325+
# Add work units if provided
326+
if work_units is not None:
327+
stats['work_units'] = float(work_units)
328+
329+
# Get basic solver info from solver[0]
330+
if hasattr(results, 'solver') and len(results.solver) > 0:
331+
solver_info = results.solver[0]
332+
333+
# Status and termination
334+
if hasattr(solver_info, 'status'):
335+
stats['solver_status'] = str(solver_info.status)
336+
if hasattr(solver_info, 'termination_condition'):
337+
stats['termination_condition'] = str(solver_info.termination_condition)
338+
if hasattr(solver_info, 'time'):
339+
try:
340+
time_val = solver_info.time
341+
if time_val is not None and str(type(time_val)) != "<class 'pyomo.opt.results.container.UndefinedData'>":
342+
stats['solver_time'] = float(time_val)
343+
except Exception:
344+
pass
345+
346+
# Get problem statistics
347+
if hasattr(results, 'problem'):
348+
problem = results.problem
349+
for attr in ['lower_bound', 'upper_bound', 'number_of_constraints',
350+
'number_of_variables', 'number_of_nonzeros']:
351+
if hasattr(problem, attr):
352+
value = getattr(problem, attr)
353+
if value is not None:
354+
stats[attr] = float(value) if isinstance(value, (int, float)) else str(value)
355+
356+
# Create a DataFrame with solver statistics
357+
if stats:
358+
df = pd.DataFrame([stats])
359+
df.to_sql('solver_statistics', cnx, if_exists='replace', index=False)
296360
cnx.commit()
361+
work_units_str = f"{stats['work_units']:.2f}" if 'work_units' in stats else 'N/A'
362+
printer.information(f"Added solver statistics to SQLite database (work_units: {work_units_str})")
363+
else:
364+
printer.warning("No solver statistics found in results object")
365+
366+
except Exception as e:
367+
printer.error(f"Failed to add solver statistics: {e}")
368+
import traceback
369+
traceback.print_exc()
370+
finally:
371+
cnx.close()
372+
def add_run_parameters_to_sqlite(filename: str, **parameters) -> None:
    """
    Store the run parameters of this execution in an existing SQLite database.

    All keyword arguments are written as a single row into a table named
    'run_parameters' (replacing any previous content). Numeric values are
    stored as floats, None as the string 'None', and everything else as text.
    Failures are logged but never raised.

    :param filename: Path to the SQLite database file
    :param parameters: Keyword arguments containing parameter names and values
    :return: None

    Example:
        add_run_parameters_to_sqlite('model.sqlite',
                                     zoi='R1',
                                     dc_buffer=2,
                                     tp_buffer=1,
                                     scale_demand=1.3,
                                     scale_pmax=1.0)
    """
    def _storable(raw):
        # Normalize one parameter value into a SQLite-friendly representation.
        if raw is None:
            return 'None'
        if isinstance(raw, (int, float)):
            return float(raw)
        return str(raw)

    cnx = sqlite3.connect(filename)
    try:
        params = {name: _storable(raw) for name, raw in parameters.items()}

        if params:
            pd.DataFrame([params]).to_sql('run_parameters', cnx, if_exists='replace', index=False)
            cnx.commit()
            printer.information(f"Added run parameters to SQLite database: {', '.join([f'{k}={v}' for k, v in params.items()])}")
        else:
            printer.warning("No run parameters provided")

    except Exception as e:
        printer.error(f"Failed to add run parameters: {e}")
        import traceback
        traceback.print_exc()
    finally:
        cnx.close()
def add_objective_decomposition_to_sqlite(filename: str, model: pyo.ConcreteModel) -> None:
    """
    Persist a linear decomposition of the model objective into the SQLite database.

    Two tables are (re)written so that (partial) objective values can later be
    recomputed without the full Pyomo model:
    - 'objective_constant': a single row holding the constant term
    - 'objective_terms': one row per linear term (variable name, index, coefficient)

    Failures are logged but never raised.

    :param filename: Path to the SQLite database file
    :param model: Pyomo model with objective
    :return: None
    """
    from pyomo.repn import generate_standard_repn

    cnx = sqlite3.connect(filename)

    try:
        # Linear standard representation of the objective expression.
        repn = generate_standard_repn(model.objective.expr, quadratic=False)

        # 1. Constant term (stored as 0.0 when absent)
        constant_value = repn.constant if repn.constant else 0.0
        pd.DataFrame([{'constant': constant_value}]).to_sql(
            'objective_constant', cnx, if_exists='replace', index=False)

        # 2. One row per linear term: component name, index within it, coefficient
        term_columns = {
            'var_name': [v.parent_component().name for v in repn.linear_vars],
            'var_index': [str(v.index()) for v in repn.linear_vars],
            'coefficient': list(repn.linear_coefs),
        }
        pd.DataFrame(term_columns).to_sql('objective_terms', cnx, if_exists='replace', index=False)

        cnx.commit()
        printer.information(f"Added objective decomposition to SQLite ({len(term_columns['var_index'])} terms)")

    except Exception as e:
        printer.error(f"Failed to add objective decomposition: {e}")
        import traceback
        traceback.print_exc()
    finally:
        cnx.close()
def add_dual_values_to_sqlite(filename: str, model: pyo.ConcreteModel) -> None:
    """
    Add dual values (shadow prices) from model constraints to SQLite database.

    Dual values are stored in tables named 'dual_<constraint_name>' with:
    - Index columns for the constraint
    - 'dual_value' column containing the dual/shadow price

    Constraints without any available dual values get no table. Errors are
    logged but never raised.

    :param filename: Path to the SQLite database file
    :param model: Solved Pyomo model with dual suffix
    :return: None
    """
    cnx = sqlite3.connect(filename)

    try:
        # Without a 'dual' suffix on the model there is nothing to export.
        if not hasattr(model, 'dual'):
            printer.warning("Model does not have dual suffix - no dual values to save")
            return  # finally-block below still closes the connection

        total_duals = 0
        total_constraints = 0

        # Iterate through all constraints and save their dual values
        for constraint in model.component_objects(pyo.Constraint, active=True):
            constraint_name = constraint.name
            dual_data = []

            # Get dual values for this constraint, one row per constraint index
            for index in constraint:
                try:
                    dual_value = model.dual[constraint[index]]
                    if dual_value is not None:
                        # Store index and dual value
                        if isinstance(index, tuple):
                            # Multi-indexed constraint: one column per index
                            # position, named '0', '1', ... by position.
                            row_data = {str(i): val for i, val in enumerate(index)}
                            row_data['dual_value'] = float(dual_value)
                        else:
                            # Single-indexed or scalar constraint
                            row_data = {'0': index, 'dual_value': float(dual_value)}
                        dual_data.append(row_data)
                        total_duals += 1
                except (KeyError, AttributeError):
                    # Dual value not available for this constraint index;
                    # skip it rather than aborting the export.
                    pass

            total_constraints += 1

            # Save to database if we have dual values for this constraint
            if dual_data:
                df = pd.DataFrame(dual_data)
                table_name = f'dual_{constraint_name}'
                df.to_sql(table_name, cnx, if_exists='replace', index=False)

        cnx.commit()

        if total_duals > 0:
            printer.information(f"Added dual values to SQLite ({total_duals} duals from {total_constraints} constraints)")
        else:
            printer.warning(f"No dual values found in model (checked {total_constraints} constraints)")

    except Exception as e:
        # Best-effort: log the failure instead of propagating it to the caller.
        printer.error(f"Failed to add dual values: {e}")
        import traceback
        traceback.print_exc()
    finally:
        cnx.close()
298-
pass

0 commit comments

Comments
 (0)