diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py
index 985123003d..e4b5125379 100644
--- a/antarest/study/business/binding_constraint_management.py
+++ b/antarest/study/business/binding_constraint_management.py
@@ -11,6 +11,7 @@
 # This file is part of the Antares project.
 
 import collections
+import copy
 import logging
 import typing as t
 
@@ -65,9 +66,17 @@
     CreateBindingConstraint,
     OptionalProperties,
     TermMatrices,
+    create_binding_constraint_config,
 )
+from antarest.study.storage.variantstudy.model.command.icommand import ICommand
 from antarest.study.storage.variantstudy.model.command.remove_binding_constraint import RemoveBindingConstraint
-from antarest.study.storage.variantstudy.model.command.update_binding_constraint import UpdateBindingConstraint
+from antarest.study.storage.variantstudy.model.command.replace_matrix import ReplaceMatrix
+from antarest.study.storage.variantstudy.model.command.update_binding_constraint import (
+    UpdateBindingConstraint,
+    update_matrices_names,
+)
+from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig
+from antarest.study.storage.variantstudy.model.command_context import CommandContext
 from antarest.study.storage.variantstudy.model.dbmodel import VariantStudy
 
 logger = logging.getLogger(__name__)
@@ -338,6 +347,13 @@ class ConstraintOutput870(ConstraintOutput830):
 # the type of the output constraint in the FastAPI endpoint.
 ConstraintOutput = t.Union[ConstraintOutputBase, ConstraintOutput830, ConstraintOutput870]
 
+OPERATOR_MATRIX_FILE_MAP = {
+    BindingConstraintOperator.EQUAL: ["{bc_id}_eq"],
+    BindingConstraintOperator.GREATER: ["{bc_id}_gt"],
+    BindingConstraintOperator.LESS: ["{bc_id}_lt"],
+    BindingConstraintOperator.BOTH: ["{bc_id}_lt", "{bc_id}_gt"],
+}
+
 
 def _get_references_by_widths(
     file_study: FileStudy, bcs: t.Sequence[ConstraintOutput]
@@ -351,17 +367,13 @@
     The height of the matrices may vary depending on the time step,
     but the width should be consistent within a group of binding constraints.
""" - operator_matrix_file_map = { - BindingConstraintOperator.EQUAL: ["{bc_id}_eq"], - BindingConstraintOperator.GREATER: ["{bc_id}_gt"], - BindingConstraintOperator.LESS: ["{bc_id}_lt"], - BindingConstraintOperator.BOTH: ["{bc_id}_lt", "{bc_id}_gt"], - } references_by_width: t.Dict[int, t.List[t.Tuple[str, str]]] = {} _total = len(bcs) for _index, bc in enumerate(bcs): - matrices_name = operator_matrix_file_map[bc.operator] if file_study.config.version >= 870 else ["{bc_id}"] + matrices_name = ( + OPERATOR_MATRIX_FILE_MAP[bc.operator] if file_study.config.version >= STUDY_VERSION_8_7 else ["{bc_id}"] + ) for matrix_name in matrices_name: matrix_id = matrix_name.format(bc_id=bc.id) logger.info(f"⏲ Validating BC '{bc.id}': {matrix_id=} [{_index+1}/{_total}]") @@ -384,6 +396,40 @@ def _get_references_by_widths( return references_by_width +def _generate_replace_matrix_commands( + bc_id: str, + study_version: StudyVersion, + value: ConstraintInput, + operator: BindingConstraintOperator, + command_context: CommandContext, +) -> t.List[ICommand]: + commands: t.List[ICommand] = [] + if study_version < STUDY_VERSION_8_7: + matrix = { + BindingConstraintFrequency.HOURLY.value: default_bc_hourly_86, + BindingConstraintFrequency.DAILY.value: default_bc_weekly_daily_86, + BindingConstraintFrequency.WEEKLY.value: default_bc_weekly_daily_86, + }[value.time_step].tolist() + command = ReplaceMatrix( + target=f"input/bindingconstraints/{bc_id}", matrix=matrix, command_context=command_context + ) + commands.append(command) + else: + matrix = { + BindingConstraintFrequency.HOURLY.value: default_bc_hourly_87, + BindingConstraintFrequency.DAILY.value: default_bc_weekly_daily_87, + BindingConstraintFrequency.WEEKLY.value: default_bc_weekly_daily_87, + }[value.time_step].tolist() + matrices_to_replace = OPERATOR_MATRIX_FILE_MAP[operator] + for matrix_name in matrices_to_replace: + matrix_id = matrix_name.format(bc_id=bc_id) + command = ReplaceMatrix( + target=f"input/bindingconstraints/{matrix_id}", matrix=matrix, command_context=command_context + ) + commands.append(command) + return commands + + def _validate_binding_constraints(file_study: FileStudy, bcs: t.Sequence[ConstraintOutput]) -> bool: """ Validates the binding constraints within a group. @@ -747,9 +793,10 @@ def update_binding_constraint( study: Study, binding_constraint_id: str, data: ConstraintInput, + existing_constraint: t.Optional[ConstraintOutput] = None, ) -> ConstraintOutput: file_study = self.storage_service.get_storage(study).get_raw(study) - existing_constraint = self.get_binding_constraint(study, binding_constraint_id) + existing_constraint = existing_constraint or self.get_binding_constraint(study, binding_constraint_id) study_version = StudyVersion.parse(study.version) check_attributes_coherence(data, study_version, data.operator or existing_constraint.operator) @@ -774,10 +821,11 @@ def update_binding_constraint( # Validates the matrices. 
         if isinstance(study, VariantStudy):
             updated_matrices = [term for term in [m.value for m in TermMatrices] if getattr(data, term)]
-            time_step = data.time_step or existing_constraint.time_step
-            command.validates_and_fills_matrices(
-                time_step=time_step, specific_matrices=updated_matrices, version=study_version, create=False  # type: ignore
-            )
+            if updated_matrices:
+                time_step = data.time_step or existing_constraint.time_step
+                command.validates_and_fills_matrices(
+                    time_step=time_step, specific_matrices=updated_matrices, version=study_version, create=False  # type: ignore
+                )
 
         execute_or_add_commands(study, file_study, [command], self.storage_service)
 
@@ -807,15 +855,69 @@ def update_binding_constraints(
             study: The study from which to update the constraints.
             bcs_by_ids: A mapping of binding constraint IDs to their updated configurations.
 
+            If more than 50 BCs are updated at the same time, the 'update_binding_constraint' command takes more than 1 second.
+            For thousands of BCs updated at the same time, it takes several minutes.
+            This is mainly because we open and close the 'bindingconstraints.ini' file several times for each constraint.
+            To avoid this, in such a case we use the 'update_config' command to write all the data at once.
+            However, this command is less readable, so we do not use it on variants with fewer than 50 updated BCs.
+
         Returns:
             A dictionary of the updated binding constraints, indexed by their IDs.
 
         Raises:
             BindingConstraintNotFound: If any of the specified binding constraint IDs are not found.
         """
+
+        # Variant study with fewer than 50 updated constraints
         updated_constraints = {}
-        for bc_id, data in bcs_by_ids.items():
-            updated_constraints[bc_id] = self.update_binding_constraint(study, bc_id, data)
+        if len(bcs_by_ids) < 50 and isinstance(study, VariantStudy):
+            existing_constraints = {bc.id: bc for bc in self.get_binding_constraints(study)}
+            for bc_id, data in bcs_by_ids.items():
+                updated_constraints[bc_id] = self.update_binding_constraint(
+                    study, bc_id, data, existing_constraints[bc_id]
+                )
+            return updated_constraints
+
+        # More efficient approach, but it relies on less readable commands.
+        study_version = StudyVersion.parse(study.version)
+        commands = []
+        command_context = self.storage_service.variant_study_service.command_factory.command_context
+
+        file_study = self.storage_service.get_storage(study).get_raw(study)
+        config = file_study.tree.get(["input", "bindingconstraints", "bindingconstraints"])
+        dict_config = {value["id"]: key for (key, value) in config.items()}
+        for bc_id, value in bcs_by_ids.items():
+            if bc_id not in dict_config:
+                raise BindingConstraintNotFound(f"Binding constraint '{bc_id}' not found")
+
+            props = create_binding_constraint_config(study_version, **value.dict())
+            new_values = props.model_dump(mode="json", by_alias=True, exclude_unset=True)
+            upd_obj = config[dict_config[bc_id]]
+            current_value = copy.deepcopy(upd_obj)
+            upd_obj.update(new_values)
+            output = self.constraint_model_adapter(upd_obj, study_version)
+            updated_constraints[bc_id] = output
+
+            if value.time_step and value.time_step != BindingConstraintFrequency(current_value["type"]):
+                # The user changed the time step, so we need to update the matrices accordingly
+                replace_matrix_commands = _generate_replace_matrix_commands(
+                    bc_id, study_version, value, output.operator, command_context
+                )
+                commands.extend(replace_matrix_commands)
+
+            if value.operator and study_version >= STUDY_VERSION_8_7:
+                # The user changed the operator, so we have to rename the matrices accordingly
+                existing_operator = BindingConstraintOperator(current_value["operator"])
+                update_matrices_names(file_study, bc_id, existing_operator, value.operator)
+
+        # Update the file only once with all the information
+        command = UpdateConfig(
+            target="input/bindingconstraints/bindingconstraints",
+            data=config,
+            command_context=command_context,
+        )
+        commands.append(command)
+        execute_or_add_commands(study, file_study, commands, self.storage_service)
         return updated_constraints
 
     def remove_binding_constraint(self, study: Study, binding_constraint_id: str) -> None:
diff --git a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py
index a57f51a666..e84564de57 100644
--- a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py
+++ b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py
@@ -499,6 +499,9 @@ def remove_bc_from_scenario_builder(study_data: FileStudy, removed_groups: t.Set
 
     NOTE: this update can be very long if the scenario builder configuration is large.
""" + if not removed_groups: + return + rulesets = study_data.tree.get(["settings", "scenariobuilder"]) for ruleset in rulesets.values(): diff --git a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py index b5a847f4a1..bc582d0036 100644 --- a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py @@ -33,7 +33,7 @@ from antarest.study.storage.variantstudy.model.model import CommandDTO -def _update_matrices_names( +def update_matrices_names( file_study: FileStudy, bc_id: str, existing_operator: BindingConstraintOperator, @@ -170,7 +170,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: if self.operator and study_version >= STUDY_VERSION_8_7: existing_operator = BindingConstraintOperator(actual_cfg.get("operator")) new_operator = self.operator - _update_matrices_names(study_data, self.id, existing_operator, new_operator) + update_matrices_names(study_data, self.id, existing_operator, new_operator) self._apply_config(study_data.config) diff --git a/tests/integration/study_data_blueprint/test_binding_constraints.py b/tests/integration/study_data_blueprint/test_binding_constraints.py index 42b7a9cc57..c5e8d17b5b 100644 --- a/tests/integration/study_data_blueprint/test_binding_constraints.py +++ b/tests/integration/study_data_blueprint/test_binding_constraints.py @@ -11,6 +11,7 @@ # This file is part of the Antares project. import re +import time import numpy as np import pandas as pd @@ -23,7 +24,6 @@ MATRIX_SIZES = {"hourly": 8784, "daily": 366, "weekly": 366} - REQUIRED_MATRICES = { "less": {"lt"}, "equal": {"eq"}, @@ -97,9 +97,85 @@ class TestBindingConstraints: Test the end points related to binding constraints. """ + def test_update_multiple_binding_constraints(self, client: TestClient, user_access_token: str) -> None: + client.headers = {"Authorization": f"Bearer {user_access_token}"} + preparer = PreparerProxy(client, user_access_token) + study_id = preparer.create_study("foo", version=880) + body = {} + # Creates 50 BCs + for k in range(50): + bc_id = f"bc_{k}" + client.post( + f"/v1/studies/{study_id}/commands", + json=[{"action": "create_binding_constraint", "args": {"name": bc_id}}], + ) + body[bc_id] = {"filterSynthesis": "hourly"} + # Modify all of them with the table-mode endpoints + start = time.time() + res = client.put(f"/v1/studies/{study_id}/table-mode/binding-constraints", json=body) + assert res.status_code in {200, 201} + end = time.time() + duration = end - start + # due to new code this should be extremely fast. + assert duration < 0.2 + # asserts the changes are effective. 
+        res = client.get(f"/v1/studies/{study_id}/bindingconstraints")
+        assert res.status_code == 200
+        for bc in res.json():
+            assert bc["filterSynthesis"] == "hourly"
+        # create a variant from the study
+        study_id = preparer.create_variant(study_id, name="var_1")
+        # Update 10 BCs
+        body = {}
+        for k in range(10):
+            body[f"bc_{k}"] = {"enabled": False}
+        res = client.put(f"/v1/studies/{study_id}/table-mode/binding-constraints", json=body)
+        assert res.status_code in {200, 201}
+        # asserts changes are effective
+        res = client.get(f"/v1/studies/{study_id}/bindingconstraints")
+        assert res.status_code == 200
+        for bc in res.json():
+            bc_id = bc["id"]
+            if int(bc_id[3:]) < 10:
+                assert not bc["enabled"]
+            else:
+                assert bc["enabled"]
+        # asserts commands used are update_binding_constraint
+        res = client.get(f"/v1/studies/{study_id}/commands")
+        assert res.status_code == 200
+        json_result = res.json()
+        assert len(json_result) == 10
+        for cmd in json_result:
+            assert cmd["action"] == "update_binding_constraint"
+        # create another variant on top of the previous one
+        study_id = preparer.create_variant(study_id, name="var_1")
+        # Update 50 BCs
+        body = {}
+        for k in range(49):
+            body[f"bc_{k}"] = {"comments": "New comment !"}
+        body["bc_49"] = {"time_step": "daily"}
+        res = client.put(f"/v1/studies/{study_id}/table-mode/binding-constraints", json=body)
+        assert res.status_code in {200, 201}
+        # asserts changes are effective
+        res = client.get(f"/v1/studies/{study_id}/bindingconstraints")
+        assert res.status_code == 200
+        for bc in res.json():
+            bc_id = bc["id"]
+            if int(bc_id[3:]) < 49:
+                assert bc["comments"] == "New comment !"
+            else:
+                assert bc["timeStep"] == "daily"
+        # asserts commands used are update_config and replace_matrix
+        res = client.get(f"/v1/studies/{study_id}/commands")
+        assert res.status_code == 200
+        json_result = res.json()
+        assert len(json_result) == 2
+        assert json_result[0]["action"] == "replace_matrix"
+        assert json_result[1]["action"] == "update_config"
+
     @pytest.mark.parametrize("study_type", ["raw", "variant"])
     def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, study_type: str) -> None:
-        client.headers = {"Authorization": f"Bearer {user_access_token}"}  # type: ignore
+        client.headers = {"Authorization": f"Bearer {user_access_token}"}
 
         # =============================
         # STUDY PREPARATION
diff --git a/tests/variantstudy/model/command/test_manage_binding_constraints.py b/tests/variantstudy/model/command/test_manage_binding_constraints.py
index 746790ef3d..c62cdc8e27 100644
--- a/tests/variantstudy/model/command/test_manage_binding_constraints.py
+++ b/tests/variantstudy/model/command/test_manage_binding_constraints.py
@@ -40,7 +40,7 @@
 from antarest.study.storage.variantstudy.model.command.remove_link import RemoveLink
 from antarest.study.storage.variantstudy.model.command.update_binding_constraint import (
     UpdateBindingConstraint,
-    _update_matrices_names,
+    update_matrices_names,
 )
 from antarest.study.storage.variantstudy.model.command.update_scenario_builder import UpdateScenarioBuilder
 from antarest.study.storage.variantstudy.model.command_context import CommandContext
@@ -599,7 +599,7 @@ def test__update_matrices_names(
         assert not link_path.exists(), f"Superfluous matrix link: {matrix_link!r}"
 
     # update matrices names
-    _update_matrices_names(
+    update_matrices_names(
        file_study=empty_study,
        bc_id="bd_rename_matrices",
        existing_operator=existing_operator,
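Note: the sketch below is illustrative only and is not part of the patch. It restates, outside of AntaREST, the operator-to-matrix-file dispatch that both OPERATOR_MATRIX_FILE_MAP and _generate_replace_matrix_commands rely on: before v8.7 a binding constraint has a single matrix, while from v8.7 onwards there is one matrix file per operator side (two for BOTH). The resolve_matrix_ids helper and the sample bc_id are hypothetical names used only for this illustration.

from enum import Enum
from typing import List


class BindingConstraintOperator(str, Enum):
    # Mirrors the operators used in the patch above.
    EQUAL = "equal"
    GREATER = "greater"
    LESS = "less"
    BOTH = "both"


# Same shape as OPERATOR_MATRIX_FILE_MAP in the patch: each operator maps to the
# matrix file name templates that exist for it in studies >= 8.7.
OPERATOR_MATRIX_FILE_MAP = {
    BindingConstraintOperator.EQUAL: ["{bc_id}_eq"],
    BindingConstraintOperator.GREATER: ["{bc_id}_gt"],
    BindingConstraintOperator.LESS: ["{bc_id}_lt"],
    BindingConstraintOperator.BOTH: ["{bc_id}_lt", "{bc_id}_gt"],
}


def resolve_matrix_ids(bc_id: str, operator: BindingConstraintOperator, study_version: int) -> List[str]:
    """Hypothetical helper: return the matrix identifiers to replace for one constraint."""
    if study_version < 870:
        # Before v8.7 there is a single matrix per binding constraint.
        return [bc_id]
    # From v8.7 onwards, one matrix file per operator side (two for BOTH).
    return [template.format(bc_id=bc_id) for template in OPERATOR_MATRIX_FILE_MAP[operator]]


if __name__ == "__main__":
    print(resolve_matrix_ids("bc_49", BindingConstraintOperator.BOTH, 880))
    # ['bc_49_lt', 'bc_49_gt']
    print(resolve_matrix_ids("bc_49", BindingConstraintOperator.LESS, 860))
    # ['bc_49']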