Skip to content
Merged
Show file tree
Hide file tree
Changes from 16 commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions config/config.default.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -1310,6 +1310,16 @@ solving:
AggFill: 0
PreDual: 0
GURO_PAR_BARDENSETHRESH: 200
"gurobi-simplex":
threads: 32
method: 1
NodeMethod: 1
Seed: 123
AggFill: 0
PreDual: 0
FeasibilityTol: 1.0e-05
OptimalityTol: 1.0e-05
ScaleFlag: 0
"gurobi-numeric-focus":
NumericFocus: 3
method: 2
Expand Down Expand Up @@ -1597,6 +1607,11 @@ cba:
msv_extraction:
resolution: false
resample_method: ffill
solving:
solver:
name: highs
options: "highs-simplex"
solver_options: {}
solving:
options:
horizon: 168
Expand Down
5 changes: 5 additions & 0 deletions config/config.tyndp.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -453,6 +453,11 @@ cba:
resolution: false
# Resampling method: ffill (forward fill), interpolate (linear)
resample_method: ffill
# Solve options for MSV extraction
solving:
solver:
name: highs
options: highs-simplex

# Rolling horizon solving settings
solving:
Expand Down
80 changes: 80 additions & 0 deletions config/schema.default.json
Original file line number Diff line number Diff line change
Expand Up @@ -580,6 +580,26 @@
"interpolate"
],
"type": "string"
},
"solving": {
"description": "Configuration for `cba.msv_extraction.solving` settings.",
"properties": {
"solver": {
"additionalProperties": {
"type": "string"
},
"description": "Solver configuration for MSV extraction.",
"type": "object"
},
"solver_options": {
"additionalProperties": {
"additionalProperties": true,
"type": "object"
},
"description": "Solver-specific options for MSV extraction.",
"type": "object"
}
}
}
}
},
Expand Down Expand Up @@ -6451,6 +6471,46 @@
"interpolate"
],
"type": "string"
},
"solving": {
"description": "Configuration for `cba.msv_extraction.solving` settings.",
"properties": {
"solver": {
"additionalProperties": {
"type": "string"
},
"description": "Solver configuration for MSV extraction.",
"type": "object"
},
"solver_options": {
"additionalProperties": {
"additionalProperties": true,
"type": "object"
},
"description": "Solver-specific options for MSV extraction.",
"type": "object"
}
}
}
}
},
"_CbaMsvSolvingConfig": {
"description": "Configuration for `cba.msv_extraction.solving` settings.",
"properties": {
"solver": {
"additionalProperties": {
"type": "string"
},
"description": "Solver configuration for MSV extraction.",
"type": "object"
},
"solver_options": {
"additionalProperties": {
"additionalProperties": true,
"type": "object"
},
"description": "Solver-specific options for MSV extraction.",
"type": "object"
}
}
},
Expand Down Expand Up @@ -15166,6 +15226,26 @@
"interpolate"
],
"type": "string"
},
"solving": {
"description": "Configuration for `cba.msv_extraction.solving` settings.",
"properties": {
"solver": {
"additionalProperties": {
"type": "string"
},
"description": "Solver configuration for MSV extraction.",
"type": "object"
},
"solver_options": {
"additionalProperties": {
"additionalProperties": true,
"type": "object"
},
"description": "Solver-specific options for MSV extraction.",
"type": "object"
}
}
}
}
},
Expand Down
5 changes: 5 additions & 0 deletions config/test/config.cyears.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -464,6 +464,11 @@ cba:
resolution: false
# Resampling method: ffill (forward fill), interpolate (linear)
resample_method: ffill
# Solve options for MSV extraction
solving:
solver:
name: highs
options: "highs-simplex"

# Rolling horizon solving settings
solving:
Expand Down
5 changes: 5 additions & 0 deletions config/test/config.tyndp.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -461,6 +461,11 @@ cba:
resolution: false
# Resampling method: ffill (forward fill), interpolate (linear)
resample_method: ffill
# Solve options for MSV extraction
solving:
solver:
name: highs
options: "highs-simplex"

# Rolling horizon solving settings
solving:
Expand Down
2 changes: 2 additions & 0 deletions doc/release_notes.rst
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,8 @@ Upcoming Open-TYNDP Release

* Disable OCGT as an extendable carrier and add load shedding for H2 and AC (https://github.com/open-energy-transition/open-tyndp/pull/547).

* Add separate solver option for CBA MSV extraction (`cba.msv_extraction.solving`), move CBA solve logs to `results/cba/logs/`, and add `gurobi-simplex` as a solver option (https://github.com/open-energy-transition/open-tyndp/pull/627).

**Bugfixes and Compatibility**

* Improve EU-wide prices with Pan-European values (https://github.com/open-energy-transition/open-tyndp/pull/607).
Expand Down
35 changes: 13 additions & 22 deletions rules/cba.smk
Original file line number Diff line number Diff line change
Expand Up @@ -318,7 +318,7 @@ def input_msv_snapshot_weightings(w):
rule solve_cba_msv_extraction:
params:
solving=config_provider("solving"),
cba_solving=config_provider("cba", "solving"),
cba_solving=config_provider("cba", "msv_extraction", "solving"),
msv_resolution=config_provider("cba", "msv_extraction", "resolution"),
cyclic_carriers=config_provider("cba", "storage", "cyclic_carriers"),
input:
Expand All @@ -327,9 +327,9 @@ rule solve_cba_msv_extraction:
output:
network=resources("cba/networks/msv_{planning_horizons}.nc"),
log:
solver=logs("cba/solve_cba_msv_extraction/{planning_horizons}_solver.log"),
memory=logs("cba/solve_cba_msv_extraction/{planning_horizons}_memory.log"),
python=logs("cba/solve_cba_msv_extraction/{planning_horizons}_python.log"),
solver=RESULTS + "logs/cba/msv/{planning_horizons}_solver.log",
memory=RESULTS + "logs/cba/msv/{planning_horizons}_memory.log",
python=RESULTS + "logs/cba/msv/{planning_horizons}_python.log",
threads: 1
script:
"../scripts/cba/solve_cba_msv_extraction.py"
Expand Down Expand Up @@ -382,15 +382,9 @@ rule solve_cba_reference_network:
output:
network=RESULTS + "cba/networks/reference_{planning_horizons}.nc",
log:
solver=logs(
"cba/solve_cba_reference_network/reference_{planning_horizons}_solver.log"
),
memory=logs(
"cba/solve_cba_reference_network/reference_{planning_horizons}_memory.log"
),
python=logs(
"cba/solve_cba_reference_network/reference_{planning_horizons}_python.log"
),
solver=RESULTS + "logs/cba/reference/reference_{planning_horizons}_solver.log",
memory=RESULTS + "logs/cba/reference/reference_{planning_horizons}_memory.log",
python=RESULTS + "logs/cba/reference/reference_{planning_horizons}_python.log",
threads: 1
script:
scripts("cba/solve_cba_network.py")
Expand All @@ -409,15 +403,12 @@ rule solve_cba_network:
output:
network=RESULTS + "cba/networks/project_{cba_project}_{planning_horizons}.nc",
log:
solver=logs(
"cba/solve_cba_network/project_{cba_project}_{planning_horizons}_solver.log"
),
memory=logs(
"cba/solve_cba_network/project_{cba_project}_{planning_horizons}_memory.log"
),
python=logs(
"cba/solve_cba_network/project_{cba_project}_{planning_horizons}_python.log"
),
solver=RESULTS
+ "logs/cba/projects/project_{cba_project}_{planning_horizons}_solver.log",
memory=RESULTS
+ "logs/cba/projects/project_{cba_project}_{planning_horizons}_memory.log",
python=RESULTS
+ "logs/cba/projects/project_{cba_project}_{planning_horizons}_python.log",
threads: 1
script:
scripts("cba/solve_cba_network.py")
Expand Down
47 changes: 39 additions & 8 deletions scripts/cba/solve_cba_msv_extraction.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,13 @@
- ``resources/cba/networks/msv_{planning_horizons}.nc``: Network with marginal storage values in stores_t.mu_energy_balance
"""

import copy
import logging

import pypsa
from snakemake.utils import update_config

from scripts._benchmark import memory_logger
from scripts._helpers import (
configure_logging,
set_scenario_config,
Expand Down Expand Up @@ -60,11 +62,13 @@
n = set_temporal_aggregation(n, msv_resolution, snapshot_weightings)

# Merge CBA-specific solving overrides into the global solving config
solving = snakemake.params.get("solving", {})
solving = copy.deepcopy(snakemake.params.get("solving", {}))
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think one could drop the deepcopy, as we are not reading the original snakemake.params.solving again, but this is more robust, so let's keep it. The memory overhead is also negligible.

update_config(solving, snakemake.params.get("cba_solving", {}))

solver_name = solving.get("solver", {}).get("name", "highs")
solver_options_key = solving.get("solver", {}).get("options", "highs-default")
solver_options = solving.get("solver_options", {}).get(solver_options_key, {})
solver_log = getattr(snakemake.log, "solver", None)

# Prepare network (e.g., load shedding setup)
solve_opts = solving.get("options", {})
Expand All @@ -81,22 +85,49 @@
limit_max_growth=None,
)

# Solve with perfect foresight (full year, single optimization)
# assign_all_duals=True ensures we get mu_energy_balance
status, termination_condition = n.optimize(
solver_name=solver_name,
solver_options=solving.get("solver_options", {}).get(solver_options_key, {}),
assign_all_duals=True,
)
if solver_log:
with open(solver_log, "a") as f:
print(
f"Starting MSV extraction solve with solver={solver_name} "
f"options={solver_options_key}",
file=f,
)

# Solve with perfect foresight (full year, single optimization).
# Assign_all_duals=True ensures we get mu_energy_balance.
# Use solve_model with log_fn so detailed solver output lands in *_solver.log.
with memory_logger(
filename=getattr(snakemake.log, "memory", None), interval=30
) as mem:
n.optimize.create_model()
status, termination_condition = n.optimize.solve_model(
solver_name=solver_name,
solver_options=solver_options,
assign_all_duals=True,
log_fn=solver_log,
)

if status != "ok":
logger.error(f"Extraction solve failed: {termination_condition}")
if solver_log:
with open(solver_log, "a") as f:
print(
f"MSV extraction solve failed: {status} / {termination_condition}",
file=f,
)
# if the solver is gurobi, print infeasibilities using n.model.print_infeasibilities()
if solving.get("solver", {}).get("name", "") == "gurobi":
n.model.print_infeasibilities()
raise RuntimeError(f"Extraction solve failed: {termination_condition}")

logger.info(f"Extraction solve completed: {termination_condition}")
logger.info(f"Maximum memory usage: {mem.mem_usage}")
if solver_log:
with open(solver_log, "a") as f:
print(
f"MSV extraction solve completed: {status} / {termination_condition}",
file=f,
)

# Save network with marginal storage values
n.export_to_netcdf(snakemake.output.network)
3 changes: 2 additions & 1 deletion scripts/cba/solve_cba_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
they apply to the dispatch.
"""

import copy
import importlib
import logging
import os
Expand Down Expand Up @@ -327,7 +328,7 @@ def solve_network(
set_scenario_config(snakemake)
update_config_from_wildcards(snakemake.config, snakemake.wildcards)

solving = snakemake.params.solving
solving = copy.deepcopy(snakemake.params.solving)
update_config(solving, snakemake.params.cba_solving)

np.random.seed(solving["options"].get("seed", 123))
Expand Down
17 changes: 17 additions & 0 deletions scripts/lib/validation/config/cba.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,19 @@ class _CbaStorageConfig(ConfigModel):
)


class _CbaMsvSolvingConfig(ConfigModel):
    """Configuration for `cba.msv_extraction.solving` settings.

    These values are merged on top of the global ``solving`` config (via
    ``update_config``) before the MSV extraction solve, so only the keys
    set here override the global defaults.
    """

    # Expected keys: "name" (solver name, e.g. "highs") and "options"
    # (the name of a preset looked up in `solver_options` / the global
    # `solving.solver_options` presets).
    solver: dict[str, str] = Field(
        default_factory=lambda: {"name": "highs", "options": "highs-simplex"},
        description="Solver configuration for MSV extraction.",
    )
    # Mapping of preset name -> dict of raw solver parameters; empty by
    # default, in which case the global presets are used unchanged.
    solver_options: dict[str, dict[str, Any]] = Field(
        default_factory=dict,
        description="Solver-specific options for MSV extraction.",
    )


class _CbaMsvExtractionConfig(ConfigModel):
"""Configuration for `cba.msv_extraction` settings."""

Expand All @@ -39,6 +52,10 @@ class _CbaMsvExtractionConfig(ConfigModel):
default="ffill",
description="Method for resampling marginal storage value to target network resolution.",
)
solving: _CbaMsvSolvingConfig = Field(
default_factory=_CbaMsvSolvingConfig,
description="Solver configuration overrides for the MSV extraction solve.",
)


class _CbaSolvingConfig(ConfigModel):
Expand Down
11 changes: 11 additions & 0 deletions scripts/lib/validation/config/solving.py
Original file line number Diff line number Diff line change
Expand Up @@ -348,6 +348,17 @@ class SolvingConfig(BaseModel):
"PreDual": 0,
"GURO_PAR_BARDENSETHRESH": 200,
},
"gurobi-simplex": {
"threads": 32,
"method": 1, # dual simplex
"NodeMethod": 1, # if MIP
"Seed": 123,
"AggFill": 0,
"PreDual": 0,
"FeasibilityTol": 1.0e-05,
"OptimalityTol": 1.0e-05,
"ScaleFlag": 0, # 0 to turn off scaling (faster)
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

While your tests have shown a slight speed-up with ScaleFlag 0, I am contemplating whether our best option wouldn't be to let Gurobi decide automatically how to scale the model (i.e. ScaleFlag -1). It could be that the speed-up we are seeing with 0 here is only due to the small size of the test model (1 week). The settings will be used for MSV on a whole year, will they not?

},
"gurobi-numeric-focus": {
"NumericFocus": 3,
"method": 2,
Expand Down
Loading