Skip to content

Commit

Permalink
feat(thermal): add new matrices for v8.7
Browse files Browse the repository at this point in the history
  • Loading branch information
MartinBelthle committed Apr 5, 2024
1 parent 9633b03 commit b9d71e0
Show file tree
Hide file tree
Showing 9 changed files with 191 additions and 6 deletions.
40 changes: 39 additions & 1 deletion antarest/study/business/areas/thermal_management.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,15 @@
import json
import typing as t
from pathlib import Path

from pydantic import validator

from antarest.core.exceptions import DuplicateThermalCluster, ThermalClusterConfigNotFound, ThermalClusterNotFound
from antarest.core.exceptions import (
DuplicateThermalCluster,
IncoherenceBetweenMatricesLength,
ThermalClusterConfigNotFound,
ThermalClusterNotFound,
)
from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands
from antarest.study.model import Study
from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id
Expand Down Expand Up @@ -338,6 +344,11 @@ def duplicate_cluster(
f"input/thermal/prepro/{area_id}/{lower_new_id}/modulation",
f"input/thermal/prepro/{area_id}/{lower_new_id}/data",
]
if int(study.version) >= 870:
source_paths.append(f"input/thermal/series/{area_id}/{lower_source_id}/CO2Cost")
source_paths.append(f"input/thermal/series/{area_id}/{lower_source_id}/fuelCost")
new_paths.append(f"input/thermal/series/{area_id}/{lower_new_id}/CO2Cost")
new_paths.append(f"input/thermal/series/{area_id}/{lower_new_id}/fuelCost")

# Prepare and execute commands
commands: t.List[t.Union[CreateCluster, ReplaceMatrix]] = [create_cluster_cmd]
Expand All @@ -351,3 +362,30 @@ def duplicate_cluster(
execute_or_add_commands(study, self._get_file_study(study), commands, self.storage_service)

return ThermalClusterOutput(**new_config.dict(by_alias=False))

def validate_series(self, study: "Study", area_id: str, cluster_id: str) -> bool:
    """
    Validate the consistency of the time-series matrices of a thermal cluster.

    Each matrix must have either 0 or 8760 rows, and all non-empty,
    multi-column matrices must share the same number of columns
    (empty and single-column matrices are always considered compatible).

    Args:
        study: The study containing the cluster.
        area_id: The ID of the area the cluster belongs to.
        cluster_id: The ID of the thermal cluster (matched case-insensitively).

    Returns:
        True if all series are consistent.

    Raises:
        IncoherenceBetweenMatricesLength: If a matrix has a row count other
            than 0 or 8760, or if the matrices' column counts mismatch.
    """
    lower_cluster_id = cluster_id.lower()
    matrices_path = [f"input/thermal/series/{area_id}/{lower_cluster_id}/series"]
    if int(study.version) >= 870:
        # Starting with study version 8.7, thermal clusters also carry
        # hourly CO2 and fuel cost matrices that must be checked.
        matrices_path.append(f"input/thermal/series/{area_id}/{lower_cluster_id}/CO2Cost")
        matrices_path.append(f"input/thermal/series/{area_id}/{lower_cluster_id}/fuelCost")

    storage = self.storage_service.get_storage(study)  # loop-invariant lookup
    matrices_width = []
    for matrix_path in matrices_path:
        matrix = storage.get(study, matrix_path)
        matrix_data = matrix["data"]
        matrix_length = len(matrix_data)
        # An empty matrix (0 rows) is allowed; any other row count must be 8760.
        if matrix_length > 0 and matrix_length != 8760:
            raise IncoherenceBetweenMatricesLength(
                f"The matrix {Path(matrix_path).name} should have 8760 rows, currently: {matrix_length}"
            )
        # Guard against empty matrices: `matrix_data[0]` would raise IndexError.
        # Width 0 is discarded below, so empty matrices stay compatible.
        matrices_width.append(len(matrix_data[0]) if matrix_data else 0)
    # Widths 0 (empty) and 1 (constant column) are compatible with anything.
    comparison_set = set(matrices_width)
    comparison_set.discard(0)
    comparison_set.discard(1)
    if len(comparison_set) > 1:
        raise IncoherenceBetweenMatricesLength(
            f"Matrix columns mismatch in thermal cluster '{cluster_id}' series. Columns size are {matrices_width}"
        )

    return True
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
from antarest.study.storage.rawstudy.model.filesystem.inode import TREE
from antarest.study.storage.rawstudy.model.filesystem.matrix.constants import default_scenario_hourly
from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix
from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import MatrixFrequency


class InputThermalSeriesAreaThermal(FolderNode):
Expand All @@ -13,4 +14,17 @@ def build(self) -> TREE:
default_empty=default_scenario_hourly,
),
}
if self.config.version >= 870:
children["CO2Cost"] = InputSeriesMatrix(
self.context,
self.config.next_file("CO2Cost.txt"),
freq=MatrixFrequency.HOURLY,
default_empty=default_scenario_hourly,
)
children["fuelCost"] = InputSeriesMatrix(
self.context,
self.config.next_file("fuelCost.txt"),
freq=MatrixFrequency.HOURLY,
default_empty=default_scenario_hourly,
)
return children
13 changes: 9 additions & 4 deletions antarest/study/storage/study_upgrader/upgrader_870.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,10 +50,15 @@ def upgrade_870(study_path: Path) -> None:

# Add properties for thermal clusters in .ini file
ini_files = study_path.glob("input/thermal/clusters/*/list.ini")
thermal_path = study_path / Path("input/thermal/series")
for ini_file_path in ini_files:
data = IniReader().read(ini_file_path)
for section in data:
data[section]["costgeneration"] = "SetManually"
data[section]["efficiency"] = 100
data[section]["variableomcost"] = 0
area_id = ini_file_path.parent.name
for cluster in data.keys():
new_thermal_path = thermal_path / area_id / cluster.lower()
(new_thermal_path / "CO2Cost.txt").touch()
(new_thermal_path / "fuelCost.txt").touch()
data[cluster]["costgeneration"] = "SetManually"
data[cluster]["efficiency"] = 100
data[cluster]["variableomcost"] = 0
IniWriter().write(data, ini_file_path)
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,13 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
}
}
}
if study_data.config.version >= 870:
new_cluster_data["input"]["thermal"]["series"][self.area_id][series_id][
"CO2Cost"
] = self.command_context.generator_matrix_constants.get_null_matrix()
new_cluster_data["input"]["thermal"]["series"][self.area_id][series_id][
"fuelCost"
] = self.command_context.generator_matrix_constants.get_null_matrix()
study_data.tree.save(new_cluster_data)

return output
Expand Down
31 changes: 31 additions & 0 deletions antarest/study/web/study_data_blueprint.py
Original file line number Diff line number Diff line change
Expand Up @@ -1894,6 +1894,37 @@ def redirect_update_thermal_cluster(
# We cannot perform redirection, because we have a PUT, where a PATCH is required.
return update_thermal_cluster(uuid, area_id, cluster_id, cluster_data, current_user=current_user)

@bp.get(
    path="/studies/{uuid}/areas/{area_id}/clusters/thermal/{cluster_id}/validate",
    tags=[APITag.study_data],
    summary="Validates the thermal cluster series",
    response_model=None,
)
def validate_cluster_series(
    uuid: str,
    area_id: str,
    cluster_id: str,
    current_user: JWTUser = Depends(auth.get_current_user),
) -> bool:
    """
    Check that all time series of the given thermal cluster are consistent.

    Args:
    - `uuid`: The UUID of the study.
    - `area_id`: the area ID.
    - `cluster_id`: the ID of the thermal cluster.

    Permissions:
    - User must have READ permission on the study.
    """
    logger.info(
        f"Validating thermal series values for study {uuid} and cluster {cluster_id}",
        extra={"user": current_user.id},
    )
    # READ access is enough for a pure validation check.
    study = study_service.check_study_access(
        uuid, StudyPermissionType.READ, RequestParameters(user=current_user)
    )
    return study_service.thermal_manager.validate_series(study, area_id, cluster_id)

@bp.delete(
path="/studies/{uuid}/areas/{area_id}/clusters/thermal",
tags=[APITag.study_data],
Expand Down
89 changes: 88 additions & 1 deletion tests/integration/study_data_blueprint/test_thermal.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,13 @@
* delete a cluster (or several clusters)
* validate the consistency of the matrices (and properties)
"""

import io
import json
import re
import typing as t

import numpy as np
import pandas as pd
import pytest
from starlette.testclient import TestClient

Expand Down Expand Up @@ -265,6 +266,21 @@
]


def _upload_matrix(
    client: TestClient, user_access_token: str, study_id: str, matrix_path: str, df: pd.DataFrame
) -> None:
    """Serialize *df* as a headerless TSV matrix and upload it to the raw study endpoint."""
    # Render the matrix as tab-separated values, without header row or index column.
    content = df.to_csv(sep="\t", index=False, header=False)
    buffer = io.BytesIO(content.encode("utf-8"))
    response = client.put(
        f"/v1/studies/{study_id}/raw",
        params={"path": matrix_path},
        headers={"Authorization": f"Bearer {user_access_token}"},
        files={"file": buffer},
    )
    response.raise_for_status()


@pytest.mark.unit_test
class TestThermal:
@pytest.mark.parametrize(
Expand Down Expand Up @@ -527,6 +543,77 @@ def test_lifecycle(
assert res.status_code == 200
assert res.json()["data"] == matrix

# =============================
# THERMAL CLUSTER VALIDATION
# =============================

# Everything is fine at the beginning
res = client.get(
f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate",
headers={"Authorization": f"Bearer {user_access_token}"},
)
assert res.status_code == 200
assert res.json() is True

# Modifies series matrix with wrong length (!= 8760)
_upload_matrix(
client,
user_access_token,
study_id,
f"input/thermal/series/{area_id}/{fr_gas_conventional_id.lower()}/series",
pd.DataFrame(np.random.randint(0, 10, size=(4, 1))),
)

# Validation should fail
res = client.get(
f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate",
headers={"Authorization": f"Bearer {user_access_token}"},
)
assert res.status_code == 422
obj = res.json()
assert obj["exception"] == "IncoherenceBetweenMatricesLength"
assert obj["description"] == "The matrix series should have 8760 rows, currently: 4"

# Update with the right length
_upload_matrix(
client,
user_access_token,
study_id,
f"input/thermal/series/{area_id}/{fr_gas_conventional_id.lower()}/series",
pd.DataFrame(np.random.randint(0, 10, size=(8760, 4))),
)

# Validation should succeed again
res = client.get(
f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate",
headers={"Authorization": f"Bearer {user_access_token}"},
)
assert res.status_code == 200
assert res.json() is True

if version >= 870:
# Adds a CO2Cost matrix with different columns size
_upload_matrix(
client,
user_access_token,
study_id,
f"input/thermal/series/{area_id}/{fr_gas_conventional_id.lower()}/CO2Cost",
pd.DataFrame(np.random.randint(0, 10, size=(8760, 3))),
)

# Validation should fail
res = client.get(
f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate",
headers={"Authorization": f"Bearer {user_access_token}"},
)
assert res.status_code == 422
obj = res.json()
assert obj["exception"] == "IncoherenceBetweenMatricesLength"
assert (
obj["description"]
== "Matrix columns mismatch in thermal cluster 'FR_Gas conventional' series. Columns size are [4, 3, 1]"
)

# =============================
# THERMAL CLUSTER DELETION
# =============================
Expand Down
3 changes: 3 additions & 0 deletions tests/storage/business/test_study_version_upgrader.py
Original file line number Diff line number Diff line change
Expand Up @@ -211,8 +211,11 @@ def assert_inputs_are_updated(tmp_path: Path, old_area_values: dict, old_binding
# thermal cluster part
for area in list_areas:
reader = IniReader(DUPLICATE_KEYS)
thermal_series_path = tmp_path / "input" / "thermal" / "series" / area
thermal_cluster_list = reader.read(tmp_path / "input" / "thermal" / "clusters" / area / "list.ini")
for cluster in thermal_cluster_list:
assert (thermal_series_path / cluster.lower() / "fuelCost.txt").exists()
assert (thermal_series_path / cluster.lower() / "CO2Cost.txt").exists()
assert thermal_cluster_list[cluster]["costgeneration"] == "SetManually"
assert thermal_cluster_list[cluster]["efficiency"] == 100
assert thermal_cluster_list[cluster]["variableomcost"] == 0
Expand Down
Binary file not shown.
Binary file not shown.

0 comments on commit b9d71e0

Please sign in to comment.