|
13 | 13 | def model_to_sqlite(model: pyo.base.Model, filename: str) -> None: |
14 | 14 | """ |
15 | 15 | Save the model to a SQLite database. |
| 16 | + Automatically includes objective decomposition and dual values. |
| 17 | +
|
16 | 18 | :param model: Pyomo model to save |
17 | 19 | :param filename: Path to the SQLite database file |
18 | 20 | :return: None |
@@ -40,10 +42,247 @@ def model_to_sqlite(model: pyo.base.Model, filename: str) -> None: |
40 | 42 | df = pd.DataFrame([pyo.value(o)], columns=['values']) |
41 | 43 | case pyomo.core.base.constraint.ConstraintList | pyomo.core.base.constraint.IndexedConstraint | pyomo.core.base.expression.IndexedExpression: # Those will not be saved on purpose |
42 | 44 | continue |
| 45 | + case pyomo.core.base.suffix.Suffix: |
| 46 | + if str(o) in ["_relaxed_integer_vars", "dual"]: |
| 47 | + continue # Not saved on purpose |
| 48 | + else: |
| 49 | + printer.warning(f"Pyomo-Type {type(o)} not implemented, {o.name} will not be saved to SQLite") |
| 50 | + continue |
43 | 51 | case _: |
44 | 52 | printer.warning(f"Pyomo-Type {type(o)} not implemented, {o.name} will not be saved to SQLite") |
45 | 53 | continue |
46 | 54 | df.to_sql(o.name, cnx, if_exists='replace') |
47 | 55 | cnx.commit() |
48 | 56 | cnx.close() |
| 57 | + |
| 58 | + # Automatically add objective decomposition and dual values |
| 59 | + add_objective_decomposition_to_sqlite(filename, model) |
| 60 | + add_dual_values_to_sqlite(filename, model) |
49 | 61 | pass |
| 62 | + |
| 63 | + |
def add_solver_statistics_to_sqlite(filename: str, results, work_units=None) -> None:
    """
    Add solver statistics (like Gurobi work-units) to an existing SQLite database.

    Writes a single-row table 'solver_statistics'. Extraction is best-effort:
    attributes missing from the results object are omitted rather than raising,
    and any unexpected failure is logged instead of propagated.

    :param filename: Path to the SQLite database file
    :param results: Pyomo solver results object
    :param work_units: Optional work units value (from Gurobi solver)
    :return: None
    """
    import traceback

    cnx = sqlite3.connect(filename)

    # Collected statistics; only keys that could actually be extracted end up here.
    stats = {}

    try:
        # Add work units if provided
        if work_units is not None:
            stats['work_units'] = float(work_units)

        # Basic solver info lives in the first entry of results.solver
        if hasattr(results, 'solver') and len(results.solver) > 0:
            solver_info = results.solver[0]

            # Status and termination
            if hasattr(solver_info, 'status'):
                stats['solver_status'] = str(solver_info.status)
            if hasattr(solver_info, 'termination_condition'):
                stats['termination_condition'] = str(solver_info.termination_condition)
            if hasattr(solver_info, 'time'):
                try:
                    time_val = solver_info.time
                    # Pyomo reports an UndefinedData sentinel when no solve time was
                    # recorded. Compare by class name rather than the full
                    # str(type(...)) repr, which breaks if pyomo relocates the class.
                    if time_val is not None and type(time_val).__name__ != 'UndefinedData':
                        stats['solver_time'] = float(time_val)
                except Exception:
                    pass  # best-effort: a non-numeric time is simply skipped

        # Problem-level statistics (bounds and model size)
        if hasattr(results, 'problem'):
            problem = results.problem
            for attr in ('lower_bound', 'upper_bound', 'number_of_constraints',
                         'number_of_variables', 'number_of_nonzeros'):
                if hasattr(problem, attr):
                    value = getattr(problem, attr)
                    if value is not None:
                        stats[attr] = float(value) if isinstance(value, (int, float)) else str(value)

        if stats:
            df = pd.DataFrame([stats])
            df.to_sql('solver_statistics', cnx, if_exists='replace', index=False)
            cnx.commit()
            work_units_str = f"{stats['work_units']:.2f}" if 'work_units' in stats else 'N/A'
            printer.information(f"Added solver statistics to SQLite database (work_units: {work_units_str})")
        else:
            printer.warning("No solver statistics found in results object")

    except Exception as e:
        printer.error(f"Failed to add solver statistics: {e}")
        traceback.print_exc()
    finally:
        cnx.close()
| 125 | + |
| 126 | + |
def add_run_parameters_to_sqlite(filename: str, **parameters) -> None:
    """
    Add run parameters to an existing SQLite database.
    Creates a table 'run_parameters' with parameter names and values.

    Values are normalised for storage: None is stored as the string 'None',
    booleans as 'True'/'False', other numbers as floats, and everything else
    as its string representation.

    :param filename: Path to the SQLite database file
    :param parameters: Keyword arguments containing parameter names and values
    :return: None

    Example:
        add_run_parameters_to_sqlite('model.sqlite',
                                     zoi='R1',
                                     dc_buffer=2,
                                     tp_buffer=1,
                                     scale_demand=1.3,
                                     scale_pmax=1.0)
    """
    import traceback

    cnx = sqlite3.connect(filename)

    try:
        # Normalise parameters into SQLite-friendly values
        params = {}
        for key, value in parameters.items():
            if value is None:
                # Convert None to string 'None' for storage
                params[key] = 'None'
            elif isinstance(value, bool):
                # bool is a subclass of int: without this branch True/False
                # would silently be stored as 1.0/0.0
                params[key] = str(value)
            elif isinstance(value, (int, float)):
                params[key] = float(value)
            else:
                params[key] = str(value)

        if params:
            df = pd.DataFrame([params])
            df.to_sql('run_parameters', cnx, if_exists='replace', index=False)
            cnx.commit()
            printer.information(f"Added run parameters to SQLite database: {', '.join([f'{k}={v}' for k, v in params.items()])}")
        else:
            printer.warning("No run parameters provided")

    except Exception as e:
        printer.error(f"Failed to add run parameters: {e}")
        traceback.print_exc()
    finally:
        cnx.close()
| 172 | + |
| 173 | + |
def add_objective_decomposition_to_sqlite(filename: str, model: pyo.ConcreteModel) -> None:
    """
    Add objective function decomposition to SQLite database.
    This enables recalculation of ZOI objectives without the full model.

    The objective is decomposed into:
    - objective_constant: Single row with the constant term
    - objective_terms: Variable names, indices, and their coefficients

    Only the linear part of the objective is stored; if the objective has a
    nonlinear remainder a warning is emitted because the stored tables would
    not reproduce the full objective value.

    :param filename: Path to the SQLite database file
    :param model: Pyomo model with objective
    :return: None
    """
    import traceback
    from pyomo.repn import generate_standard_repn

    cnx = sqlite3.connect(filename)

    try:
        # Decompose objective into constant + linear terms
        repn = generate_standard_repn(model.objective.expr, quadratic=False)

        # Any nonlinear remainder cannot be represented in the tables below —
        # warn so the dropped part is not silently lost.
        if getattr(repn, 'nonlinear_expr', None) is not None:
            printer.warning("Objective has a nonlinear part that is not saved in the decomposition tables")

        # 1. Constant term (explicit None check; float() keeps the stored dtype
        #    consistent even when the constant is an integer zero)
        constant = float(repn.constant) if repn.constant is not None else 0.0
        df_constant = pd.DataFrame([{'constant': constant}])
        df_constant.to_sql('objective_constant', cnx, if_exists='replace', index=False)

        # 2. One row per linear term: variable name, index, coefficient
        df_terms = pd.DataFrame({
            'var_name': [var.parent_component().name for var in repn.linear_vars],
            'var_index': [str(var.index()) for var in repn.linear_vars],
            'coefficient': list(repn.linear_coefs),
        })
        df_terms.to_sql('objective_terms', cnx, if_exists='replace', index=False)

        cnx.commit()
        printer.information(f"Added objective decomposition to SQLite ({len(repn.linear_vars)} terms)")

    except Exception as e:
        printer.error(f"Failed to add objective decomposition: {e}")
        traceback.print_exc()
    finally:
        cnx.close()
| 220 | + |
| 221 | + |
def add_dual_values_to_sqlite(filename: str, model: pyo.ConcreteModel) -> None:
    """
    Add dual values (shadow prices) from model constraints to SQLite database.

    Dual values are stored in tables named 'dual_<constraint_name>' with:
    - Index columns for the constraint
    - 'dual_value' column containing the dual/shadow price

    :param filename: Path to the SQLite database file
    :param model: Solved Pyomo model with dual suffix
    :return: None
    """
    cnx = sqlite3.connect(filename)

    try:
        if not hasattr(model, 'dual'):
            printer.warning("Model does not have dual suffix - no dual values to save")
            return

        n_duals = 0
        n_constraints = 0

        # One table per constraint component that has at least one dual value.
        for con in model.component_objects(pyo.Constraint, active=True):
            rows = []

            # A scalar constraint iterates with the single index None,
            # so this loop covers indexed and scalar constraints alike.
            for idx in con:
                try:
                    dual = model.dual[con[idx]]
                except (KeyError, AttributeError):
                    continue  # no dual recorded for this constraint member
                if dual is None:
                    continue

                # Spread tuple indices over numbered columns; scalar or
                # single indices go into column '0'.
                if isinstance(idx, tuple):
                    row = {str(pos): part for pos, part in enumerate(idx)}
                else:
                    row = {'0': idx}
                row['dual_value'] = float(dual)
                rows.append(row)

            n_constraints += 1
            n_duals += len(rows)

            if rows:
                pd.DataFrame(rows).to_sql(f'dual_{con.name}', cnx, if_exists='replace', index=False)

        cnx.commit()

        if n_duals > 0:
            printer.information(f"Added dual values to SQLite ({n_duals} duals from {n_constraints} constraints)")
        else:
            printer.warning(f"No dual values found in model (checked {n_constraints} constraints)")

    except Exception as e:
        printer.error(f"Failed to add dual values: {e}")
        import traceback
        traceback.print_exc()
    finally:
        cnx.close()
0 commit comments