Skip to content

Commit 77808df

Browse files
committed
Changes in response to review
1 parent f0feabc commit 77808df

File tree

6 files changed

+62
-91
lines changed

6 files changed

+62
-91
lines changed

docs/advanced/mode.md

+4-7
Original file line numberDiff line numberDiff line change
@@ -74,13 +74,10 @@ As an example, if you wanted to generate 10 SPORES, all of which are within 10%
7474

7575
```yaml
7676
config.build.mode: spores
77-
config.solve:
78-
spores:
79-
# The number of SPORES to generate:
80-
number: 10
81-
parameters:
82-
# The fraction above the cost-optimal cost to set the maximum cost during SPORES:
83-
spores_slack: 0.1
77+
# The number of SPORES to generate:
78+
config.solve.spores.number: 10
79+
# The fraction above the cost-optimal cost to set the maximum cost during SPORES:
80+
parameters.spores_slack: 0.1
8481
```
8582

8683
You will now also need a `spores_score` cost class in your model.

src/calliope/backend/backend_model.py

+2-17
Original file line numberDiff line numberDiff line change
@@ -937,33 +937,18 @@ def has_integer_or_binary_variables(self) -> bool:
937937
"""
938938

939939
@abstractmethod
940-
def _solve(
941-
self,
942-
solver: str,
943-
solver_io: str | None = None,
944-
solver_options: dict | None = None,
945-
save_logs: str | None = None,
946-
warmstart: bool = False,
947-
**solve_config,
948-
) -> xr.Dataset:
940+
def _solve(self, solve_config: config.Solve, warmstart: bool = False) -> xr.Dataset:
949941
"""Optimise built model.
950942
951943
If solution is optimal, interface objects (decision variables, global
952944
expressions, constraints, objective) can be successfully evaluated for their
953945
values at optimality.
954946
955947
Args:
956-
solver (str): Name of solver to optimise with.
957-
solver_io (str | None, optional): If chosen solver has a python interface, set to "python" for potential
958-
performance gains, otherwise should be left as None. Defaults to None.
959-
solver_options (dict | None, optional): Solver options/parameters to pass directly to solver.
960-
See solver documentation for available parameters that can be influenced. Defaults to None.
961-
save_logs (str | None, optional): If given, solver logs and built LP file will be saved to this filepath.
962-
Defaults to None.
948+
solve_config (config.Solve): Calliope Solve configuration object.
963949
warmstart (bool, optional): If True, and the chosen solver is capable of implementing it, an existing
964950
optimal solution will be used to warmstart the next solve run.
965951
Defaults to False.
966-
**solve_config: solve configuration overrides.
967952
968953
Returns:
969954
xr.Dataset: Dataset of decision variable values if the solution was optimal/feasible,

src/calliope/backend/gurobi_backend_model.py

+5-13
Original file line numberDiff line numberDiff line change
@@ -240,26 +240,18 @@ def get_global_expression( # noqa: D102, override
240240
else:
241241
return global_expression
242242

243-
def _solve(
244-
self,
245-
solver: str,
246-
solver_io: str | None = None,
247-
solver_options: dict | None = None,
248-
save_logs: str | None = None,
249-
warmstart: bool = False,
250-
**solve_config,
251-
) -> xr.Dataset:
243+
def _solve(self, solve_config: config.Solve, warmstart: bool = False) -> xr.Dataset:
252244
self._instance.resetParams()
253245

254-
if solver_options is not None:
255-
for k, v in solver_options.items():
246+
if solve_config.solver_options is not None:
247+
for k, v in solve_config.solver_options.items():
256248
self._instance.setParam(k, v)
257249

258250
if not warmstart:
259251
self._instance.setParam("LPWarmStart", 0)
260252

261-
if save_logs is not None:
262-
logdir = Path(save_logs)
253+
if solve_config.save_logs is not None:
254+
logdir = Path(solve_config.save_logs)
263255
self._instance.setParam("LogFile", (logdir / "gurobi.log").as_posix())
264256

265257
self._instance.update()

src/calliope/backend/pyomo_backend_model.py

+15-23
Original file line numberDiff line numberDiff line change
@@ -204,14 +204,12 @@ def _objective_setter(
204204
self._add_component(name, objective_dict, _objective_setter, "objectives")
205205

206206
def set_objective(self, name: str) -> None: # noqa: D102, override
207-
for obj_name, obj in self.objectives.items():
208-
if obj.item().active and obj_name != name:
209-
self.log("objectives", obj_name, "Objective deactivated.", level="info")
210-
obj.item().deactivate()
211-
if obj_name == name:
212-
obj.item().activate()
213-
self.log("objectives", obj_name, "Objective activated.", level="info")
207+
self.objectives[self.objective].item().deactivate()
208+
self.log("objectives", self.objective, "Objective deactivated.", level="info")
209+
210+
self.objectives[name].item().activate()
214211
self.objective = name
212+
self.log("objectives", name, "Objective activated.", level="info")
215213

216214
def get_parameter( # noqa: D102, override
217215
self, name: str, as_backend_objs: bool = True
@@ -285,36 +283,30 @@ def get_global_expression( # noqa: D102, override
285283
return global_expression
286284

287285
def _solve( # noqa: D102, override
288-
self,
289-
solver: str,
290-
solver_io: str | None = None,
291-
solver_options: dict | None = None,
292-
save_logs: str | None = None,
293-
warmstart: bool = False,
294-
**solve_config,
286+
self, solve_config: config.Solve, warmstart: bool = False
295287
) -> xr.Dataset:
296-
if solver == "cbc" and self.shadow_prices.is_active:
288+
if solve_config.solver == "cbc" and self.shadow_prices.is_active:
297289
model_warn(
298290
"Switching off shadow price tracker as constraint duals cannot be accessed from the CBC solver"
299291
)
300292
self.shadow_prices.deactivate()
301-
opt = SolverFactory(solver, solver_io=solver_io)
293+
opt = SolverFactory(solve_config.solver, solver_io=solve_config.solver_io)
302294

303-
if solver_options:
304-
for k, v in solver_options.items():
295+
if solve_config.solver_options:
296+
for k, v in solve_config.solver_options.items():
305297
opt.options[k] = v
306298

307299
solve_kwargs = {}
308-
if save_logs is not None:
300+
if solve_config.save_logs is not None:
309301
solve_kwargs.update({"symbolic_solver_labels": True, "keepfiles": True})
310-
logdir = Path(save_logs)
302+
logdir = Path(solve_config.save_logs)
311303
logdir.mkdir(parents=True, exist_ok=True)
312304
TempfileManager.tempdir = logdir # Sets log output dir
313305

314-
if warmstart and solver in ["glpk", "cbc"]:
306+
if warmstart and solve_config.solver in ["glpk", "cbc"]:
315307
model_warn(
316-
f"The chosen solver, {solver}, does not support warmstart, which may "
317-
"impact performance."
308+
f"The chosen solver, {solve_config.solver}, does not support warmstart, "
309+
"which may impact performance."
318310
)
319311
warmstart = False
320312

src/calliope/config.py

+7
Original file line numberDiff line numberDiff line change
@@ -215,6 +215,13 @@ class SolveSpores(ConfigBaseModel):
215215
tracking_parameter: str | None = None
216216
"""If given, an input parameter name with which to filter technologies for consideration in SPORES scoring."""
217217

218+
score_iteration_threshold_relative: float = Field(default=0.1, ge=0)
219+
"""A factor to apply to flow capacities above which they will increment the SPORES score.
220+
E.g., if the previous iteration flow capacity was `100` then, with a threshold value of 0.1,
221+
only capacities above `10` in the current iteration will cause the SPORES score to increase for that technology at that node.
222+
If, say, the current iteration's capacity is `8` then the SPORES score will not change for that technology (as if it had no deployed capacity).
223+
"""
224+
218225

219226
class Solve(ConfigBaseModel):
220227
"""Base configuration options used when solving a Calliope optimisation problem (`calliope.Model.solve`)."""

src/calliope/model.py

+29-31
Original file line numberDiff line numberDiff line change
@@ -357,9 +357,7 @@ def solve(self, force: bool = False, warmstart: bool = False, **kwargs) -> None:
357357
elif mode == "spores":
358358
results = self._solve_spores(self.config.solve)
359359
else:
360-
results = self.backend._solve(
361-
warmstart=warmstart, **self.config.solve.model_dump()
362-
)
360+
results = self.backend._solve(self.config.solve, warmstart=warmstart)
363361

364362
log_time(
365363
LOGGER,
@@ -510,6 +508,9 @@ def _solve_operate(self, solver_config: config.Solve) -> xr.Dataset:
510508
Optimisation is undertaken iteratively for slices of the timeseries, with
511509
some data being passed between slices.
512510
511+
Args:
512+
solver_config (config.Solve): Calliope Solver configuration object.
513+
513514
Returns:
514515
xr.Dataset: Results dataset.
515516
"""
@@ -519,9 +520,7 @@ def _solve_operate(self, solver_config: config.Solve) -> xr.Dataset:
519520

520521
LOGGER.info("Optimisation model | Running first time window.")
521522

522-
step_results = self.backend._solve(
523-
warmstart=False, **solver_config.model_dump()
524-
)
523+
iteration_results = self.backend._solve(solver_config, warmstart=False)
525524

526525
results_list = []
527526

@@ -531,15 +530,17 @@ def _solve_operate(self, solver_config: config.Solve) -> xr.Dataset:
531530
f"Optimisation model | Running time window starting at {windowstep_as_string}."
532531
)
533532
results_list.append(
534-
step_results.sel(timesteps=slice(None, windowstep - self._TS_OFFSET))
533+
iteration_results.sel(
534+
timesteps=slice(None, windowstep - self._TS_OFFSET)
535+
)
535536
)
536-
previous_step_results = results_list[-1]
537+
previous_iteration_results = results_list[-1]
537538
horizonstep = self._model_data.horizonsteps.sel(windowsteps=windowstep)
538539
new_inputs = self.inputs.sel(
539540
timesteps=slice(windowstep, horizonstep)
540541
).drop_vars(["horizonsteps", "windowsteps"], errors="ignore")
541542

542-
if len(new_inputs.timesteps) != len(step_results.timesteps):
543+
if len(new_inputs.timesteps) != len(iteration_results.timesteps):
543544
LOGGER.info(
544545
"Optimisation model | Reaching the end of the timeseries. "
545546
"Re-building model with shorter time horizon."
@@ -554,18 +555,16 @@ def _solve_operate(self, solver_config: config.Solve) -> xr.Dataset:
554555
self.backend.update_parameter(param_name, param_data)
555556
self.backend.inputs[param_name] = param_data
556557

557-
if "storage" in step_results:
558+
if "storage" in iteration_results:
558559
self.backend.update_parameter(
559560
"storage_initial",
560-
self._recalculate_storage_initial(previous_step_results),
561+
self._recalculate_storage_initial(previous_iteration_results),
561562
)
562563

563-
step_results = self.backend._solve(
564-
warmstart=False, **solver_config.model_dump()
565-
)
564+
iteration_results = self.backend._solve(solver_config, warmstart=False)
566565

567566
self._start_window_idx = 0
568-
results_list.append(step_results.sel(timesteps=slice(windowstep, None)))
567+
results_list.append(iteration_results.sel(timesteps=slice(windowstep, None)))
569568
results = xr.concat(results_list, dim="timesteps", combine_attrs="no_conflicts")
570569
results.attrs["termination_condition"] = ",".join(
571570
set(result.attrs["termination_condition"] for result in results_list)
@@ -599,21 +598,18 @@ def _solve_spores(self, solver_config: config.Solve) -> xr.Dataset:
599598
xr.Dataset: Results dataset.
600599
"""
601600
LOGGER.info("Optimisation model | Resetting SPORES parameters.")
602-
self.backend.update_parameter(
603-
"spores_score", self.inputs.get("spores_score", xr.DataArray(0))
604-
)
605-
self.backend.update_parameter(
606-
"spores_baseline_cost",
607-
self.inputs.get("spores_baseline_cost", xr.DataArray(np.inf)),
608-
)
601+
for init_param in ["spores_score", "spores_baseline_cost"]:
602+
default = xr.DataArray(self.inputs.attrs["defaults"][init_param])
603+
self.backend.update_parameter(
604+
init_param, self.inputs.get(init_param, default)
605+
)
606+
609607
self.backend.set_objective(self.config.build.objective)
610608

611609
spores_config: config.SolveSpores = solver_config.spores
612610
if not spores_config.skip_baseline_run:
613611
LOGGER.info("Optimisation model | Running baseline model.")
614-
baseline_results = self.backend._solve(
615-
warmstart=False, **solver_config.model_dump()
616-
)
612+
baseline_results = self.backend._solve(solver_config, warmstart=False)
617613
else:
618614
LOGGER.info("Optimisation model | Using existing baseline model results.")
619615
baseline_results = self.results.copy()
@@ -625,20 +621,19 @@ def _solve_spores(self, solver_config: config.Solve) -> xr.Dataset:
625621
spores_config.save_per_spore_path / "baseline.nc"
626622
)
627623

624+
# We store the results from each iteration in the `results_list` to later concatenate into a single dataset.
628625
results_list: list[xr.Dataset] = [baseline_results]
629626
spore_range = range(1, spores_config.number + 1)
630627
for spore in spore_range:
631628
LOGGER.info(f"Optimisation model | Running SPORE {spore}.")
632629
self._spores_update_model(baseline_results, results_list[-1], spores_config)
633630

634-
step_results = self.backend._solve(
635-
warmstart=False, **solver_config.model_dump()
636-
)
637-
results_list.append(step_results)
631+
iteration_results = self.backend._solve(solver_config, warmstart=False)
632+
results_list.append(iteration_results)
638633

639634
if spores_config.save_per_spore_path is not None:
640635
LOGGER.info(f"Optimisation model | Saving SPORE {spore} to file.")
641-
step_results.assign_coords(spores=spore).to_netcdf(
636+
iteration_results.assign_coords(spores=spore).to_netcdf(
642637
spores_config.save_per_spore_path / f"spore_{spore}.nc"
643638
)
644639

@@ -672,7 +667,10 @@ def _spores_update_model(
672667
previous_cap = previous_results["flow_cap"].where(spores_techs)
673668

674669
# Make sure that penalties are applied only to non-negligible deployments of capacity
675-
min_relevant_size = 0.1 * previous_cap.max(["nodes", "techs"])
670+
min_relevant_size = (
671+
spores_config.score_iteration_threshold_relative
672+
* previous_cap.max(["nodes", "techs"])
673+
)
676674

677675
new_score = (
678676
# Where capacity was deployed more than the minimal relevant size, assign an integer penalty (score)

0 commit comments

Comments
 (0)