diff --git a/.github/workflows/compatibility.yml b/.github/workflows/compatibility.yml
index 58c44c5cc4..9131425f79 100644
--- a/.github/workflows/compatibility.yml
+++ b/.github/workflows/compatibility.yml
@@ -14,10 +14,8 @@ jobs:
python-version: [ 3.8 ]
steps:
- - name: Checkout github repo (+ download lfs dependencies)
+ - name: Checkout github repo
uses: actions/checkout@v2
- with:
- submodules: recursive
- name: Set up Python
uses: actions/setup-python@v1
with:
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index 4f920c083e..0d0687ea8a 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -17,7 +17,6 @@ jobs:
- name: Checkout github repo (+ download lfs dependencies)
uses: actions/checkout@v3
with:
- submodules: recursive
fetch-depth: 0
- name: Install wget for windows
if: matrix.os == 'windows-latest'
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index e5061506b1..d0f8c4c7ac 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -10,8 +10,6 @@ jobs:
steps:
- name: Checkout github repo (+ download lfs dependencies)
uses: actions/checkout@v2
- with:
- submodules: recursive
- name: Set up Python
uses: actions/setup-python@v1
with:
@@ -41,8 +39,6 @@ jobs:
steps:
- name: Checkout github repo (+ download lfs dependencies)
uses: actions/checkout@v2
- with:
- submodules: recursive
- name: Set up Python
uses: actions/setup-python@v1
with:
diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index 50629e9bb7..0000000000
--- a/.gitmodules
+++ /dev/null
@@ -1,3 +0,0 @@
-[submodule "antares-launcher"]
- path = antares-launcher
- url = https://github.com/AntaresSimulatorTeam/antares-launcher.git
diff --git a/Dockerfile b/Dockerfile
index f99b8d7559..631ca6c336 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -13,16 +13,6 @@ COPY ./scripts /scripts
COPY ./alembic /alembic
COPY ./alembic.ini /alembic.ini
-# > IMPORTANT: The `antares-launcher` project (source files) is no longer needed,
-# > because the `Antares-Launcher` Python library is now declared as a dependency
-# > in the `requirements.txt` file.
-# > In other words, we can dispense with the creation of the symbolic link.
-
-# COPY ./antares-launcher /antares-launcher
-# RUN ln -s /antares-launcher/antareslauncher /antareslauncher
-# RUN mkdir /conf/antares-launcher
-# RUN cp /antares-launcher/requirements.txt /conf/antares-launcher/requirements.txt
-
RUN ./scripts/install-debug.sh
RUN pip3 install --upgrade pip \
diff --git a/README.md b/README.md
index a9a3db1298..98748de7a6 100644
--- a/README.md
+++ b/README.md
@@ -17,8 +17,6 @@ First clone the project:
```shell script
git clone https://github.com/AntaresSimulatorTeam/AntaREST.git
cd AntaREST
-git submodule init
-git submodule update
```
Install back-end dependencies
diff --git a/antares-launcher b/antares-launcher
deleted file mode 160000
index ba92020341..0000000000
--- a/antares-launcher
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit ba92020341f85c526f067aa0b6658b1ec4cef893
diff --git a/antarest/__init__.py b/antarest/__init__.py
index 4533cd61d1..3731beda17 100644
--- a/antarest/__init__.py
+++ b/antarest/__init__.py
@@ -7,9 +7,9 @@
# Standard project metadata
-__version__ = "2.13.2"
+__version__ = "2.14.0"
__author__ = "RTE, Antares Web Team"
-__date__ = "2023-04-25"
+__date__ = "2023-05-12"
# noinspection SpellCheckingInspection
__credits__ = "(c) Réseau de Transport de l’Électricité (RTE)"
diff --git a/antarest/core/core_blueprint.py b/antarest/core/core_blueprint.py
index 40a2e4dba4..73fd56fce9 100644
--- a/antarest/core/core_blueprint.py
+++ b/antarest/core/core_blueprint.py
@@ -5,7 +5,11 @@
from antarest.core.jwt import JWTUser
from antarest.core.requests import UserHasNotPermissionError
from antarest.core.utils.web import APITag
-from antarest.core.version_info import VersionInfoDTO, get_commit_id
+from antarest.core.version_info import (
+ VersionInfoDTO,
+ get_commit_id,
+ get_dependencies,
+)
from antarest.login.auth import Auth
from fastapi import APIRouter, Depends
from pydantic import BaseModel
@@ -39,18 +43,18 @@ def version_info() -> Any:
"""
Returns the current version of the application, along with relevant dependency information.
+ - `name`: The name of the application.
- `version`: The current version of the application.
- `gitcommit`: The commit ID of the current version's Git repository.
- `dependencies`: A dictionary of dependencies, where the key is
the dependency name and the value is its version number.
"""
- from antareslauncher import __version__ as antares_launcher_version
from antarest import __version__ as antarest_version
return VersionInfoDTO(
version=antarest_version,
gitcommit=get_commit_id(config.resources_path),
- dependencies={"Antares_Launcher": antares_launcher_version},
+ dependencies=get_dependencies(),
)
@bp.get("/kill", include_in_schema=False)
diff --git a/antarest/core/exceptions.py b/antarest/core/exceptions.py
index 5a0476751f..e57856d243 100644
--- a/antarest/core/exceptions.py
+++ b/antarest/core/exceptions.py
@@ -1,7 +1,7 @@
from http import HTTPStatus
from typing import Optional
-from fastapi import HTTPException
+from fastapi.exceptions import HTTPException
class ShouldNotHappenException(Exception):
@@ -177,14 +177,26 @@ class StudyOutputNotFoundError(Exception):
pass
+class AllocationDataNotFound(HTTPException):
+ def __init__(self, *area_ids: str) -> None:
+ count = len(area_ids)
+ ids = ", ".join(f"'{a}'" for a in area_ids)
+ msg = {
+ 0: "Allocation data is found",
+ 1: f"Allocation data for area {area_ids} is not found",
+ 2: f"Allocation data for areas {area_ids} is not found",
+ }[min(count, 2)]
+ super().__init__(HTTPStatus.NOT_FOUND, msg)
+
+
class AreaNotFound(HTTPException):
def __init__(self, *area_ids: str) -> None:
count = len(area_ids)
ids = ", ".join(f"'{a}'" for a in area_ids)
msg = {
0: "All areas are found",
- 1: f"{count} area is not found: {ids}",
- 2: f"{count} areas are not found: {ids}",
+ 1: f"Area is not found: {ids}",
+ 2: f"Areas are not found: {ids}",
}[min(count, 2)]
super().__init__(HTTPStatus.NOT_FOUND, msg)
diff --git a/antarest/core/logging/utils.py b/antarest/core/logging/utils.py
index 56a6e00fc3..b1e295f072 100644
--- a/antarest/core/logging/utils.py
+++ b/antarest/core/logging/utils.py
@@ -22,7 +22,27 @@
class CustomDefaultFormatter(logging.Formatter):
+ """
+ A custom logging formatter that ensures all fields specified
+ in the format string are available in the log record.
+
+ This formatter uses a regular expression pattern to extract
+ field names from the format string, and adds any missing
+ fields to the log record with a value of `None`.
+ """
+
def format(self, record: logging.LogRecord) -> str:
+ """
+ Formats the specified log record using the custom formatter,
+ ensuring all fields specified in the format string are available
+ in the record. Returns the formatted string.
+
+ Args:
+ record: The logging record to format.
+
+ Returns:
+ The formatted message.
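+
+ Example (a minimal sketch): a format string that references an
+ extra `ip` field absent from the record is rendered with `None`
+ instead of raising a `KeyError`:
+
+ fmt = CustomDefaultFormatter("%(asctime)s %(ip)s %(message)s")
+ rec = logging.LogRecord("app", logging.INFO, __file__, 1, "hi", None, None)
+ fmt.format(rec) # "... None hi"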
+ """
arg_pattern = re.compile(r"%\((\w+)\)")
arg_names = [x.group(1) for x in arg_pattern.finditer(self._fmt or "")]
for field in arg_names:
@@ -31,7 +51,17 @@ def format(self, record: logging.LogRecord) -> str:
return super().format(record)
-def configure_logger(config: Config) -> None:
+def configure_logger(
+ config: Config, handler_cls: str = "logging.FileHandler"
+) -> None:
+ """
+ Set up the logging configuration based on the input `config` object
+ and an optional `handler_cls` argument.
+
+ Args:
+ config: A `Config` object that contains the logging configuration parameters.
+ handler_cls: A string representing the class of the logging handler.
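+
+ Example (a minimal sketch, assuming a valid `Config` instance):
+
+ configure_logger(config, handler_cls="logging.handlers.TimedRotatingFileHandler")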
+ """
logging_config: Dict[str, Any] = {
"version": 1,
"disable_existing_loggers": False,
@@ -91,13 +121,32 @@ def configure_logger(config: Config) -> None:
},
}
if config.logging.logfile is not None:
- logging_config["handlers"]["default"] = {
- "class": "logging.FileHandler",
- "formatter": "console",
- "level": "INFO",
- "filename": config.logging.logfile,
- "filters": ["context"],
- }
+ if handler_cls == "logging.FileHandler":
+ logging_config["handlers"]["default"] = {
+ "class": handler_cls,
+ "formatter": "console",
+ "level": "INFO",
+ "filename": config.logging.logfile,
+ "filters": ["context"],
+ }
+ elif handler_cls == "logging.handlers.TimedRotatingFileHandler":
+ logging_config["handlers"]["default"] = {
+ "class": handler_cls,
+ "filename": config.logging.logfile,
+ "when": "D", # D = day
+ "interval": 90, # 90 days = 3 months
+ "backupCount": 1, # keep only 1 backup (0 means keep all)
+ "encoding": "utf-8",
+ "delay": False,
+ "utc": False,
+ "atTime": None,
+ "formatter": "console",
+ "level": "INFO",
+ "filters": ["context"],
+ }
+ else: # pragma: no cover
+ raise NotImplementedError(handler_cls)
+
if config.logging.level is not None and config.logging.level in [
"INFO",
"WARNING",
diff --git a/antarest/core/version_info.py b/antarest/core/version_info.py
index 88572704a3..f6532b9cdb 100644
--- a/antarest/core/version_info.py
+++ b/antarest/core/version_info.py
@@ -3,7 +3,7 @@
"""
import subprocess
from pathlib import Path
-from typing import Dict, Optional
+from typing import Dict
from pydantic import BaseModel
@@ -14,6 +14,22 @@ class VersionInfoDTO(BaseModel):
gitcommit: str
dependencies: Dict[str, str]
+ class Config:
+ schema_extra = {
+ "example": {
+ "name": "AntaREST",
+ "version": "2.13.2",
+ "gitcommit": "879d9d641fc2e7e30e626084b431ce014de63532",
+ "dependencies": {
+ "click": "8.0.4",
+ "Deprecated": "1.2.13",
+ "fastapi": "0.73.0",
+ "Flask": "2.1.3",
+ "gunicorn": "20.1.0",
+ },
+ }
+ }
+
def get_commit_id(resources_dir: Path) -> str:
"""
@@ -36,10 +52,42 @@ def get_commit_id(resources_dir: Path) -> str:
try:
return path_commit_id.read_text(encoding="utf-8").strip()
except FileNotFoundError:
- command = "git log -1 HEAD --format=%H"
- try:
- return subprocess.check_output(
- command, encoding="utf-8", shell=True
- ).strip()
- except (subprocess.CalledProcessError, FileNotFoundError):
- return ""
+ return get_last_commit_from_git()
+
+
+def get_last_commit_from_git() -> str:
+ """Returns the commit ID of the current Git HEAD, or ""."""
+ command = "git log -1 HEAD --format=%H"
+ try:
+ return subprocess.check_output(
+ command, encoding="utf-8", shell=True
+ ).strip()
+ except (subprocess.CalledProcessError, FileNotFoundError):
+ return ""
+
+
+def get_dependencies() -> Dict[str, str]:
+ """
+ Retrieve the list of installed dependencies and their versions.
+
+ Returns:
+ A dictionary containing the package names and their corresponding versions installed in the
+ current Python environment. The dictionary keys are the package names (as strings), and the
+ values are the corresponding version numbers (also as strings).
+
+ Raises:
+ subprocess.CalledProcessError:
+ If the `pip freeze` command fails for some reason.
+ """
+ # fmt: off
+ output = subprocess.check_output("pip freeze", encoding="utf-8", shell=True)
+ lines = (
+ line
+ for line in output.splitlines(keepends=False)
+ if "==" in line
+ )
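+ # e.g. (illustrative) "fastapi==0.73.0" -> {"fastapi": "0.73.0"};
+ # editable or VCS lines without "==" are skipped by the filter above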
+ # noinspection PyTypeChecker
+ packages = dict(line.split("==", 1) for line in lines)
+ # AntaREST is not a dependency of AntaREST
+ return {k: v for k, v in packages.items() if k.lower() != "antarest"}
+ # fmt: on
diff --git a/antarest/main.py b/antarest/main.py
index 10d452c59a..1383f6a143 100644
--- a/antarest/main.py
+++ b/antarest/main.py
@@ -8,6 +8,8 @@
import uvicorn # type: ignore
import uvicorn.config # type: ignore
from fastapi import FastAPI, HTTPException
+from fastapi.encoders import jsonable_encoder
+from fastapi.exceptions import RequestValidationError
from fastapi_jwt_auth import AuthJWT # type: ignore
from ratelimit import RateLimitMiddleware # type: ignore
from ratelimit.backends.redis import RedisBackend # type: ignore
@@ -283,6 +285,22 @@ def handle_http_exception(request: Request, exc: HTTPException) -> Any:
status_code=exc.status_code,
)
+ @application.exception_handler(RequestValidationError)
+ async def handle_validation_exception(
+ request: Request, exc: RequestValidationError
+ ) -> Any:
+ error_message = exc.errors()[0]["msg"]
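+ # e.g. for a malformed body, `error_message` could be a pydantic
+ # message such as "value is not a valid integer" (illustrative)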
+ return JSONResponse(
+ status_code=422,
+ content=jsonable_encoder(
+ {
+ "description": error_message,
+ "exception": "RequestValidationError",
+ "body": exc.body,
+ }
+ ),
+ )
+
@application.exception_handler(Exception)
def handle_all_exception(request: Request, exc: Exception) -> Any:
"""Return JSON instead of HTML for HTTP errors."""
diff --git a/antarest/study/business/adequacy_patch_management.py b/antarest/study/business/adequacy_patch_management.py
new file mode 100644
index 0000000000..836d6804fe
--- /dev/null
+++ b/antarest/study/business/adequacy_patch_management.py
@@ -0,0 +1,147 @@
+from enum import Enum
+from typing import Optional, List, Any, Dict
+
+from pydantic.types import StrictBool, confloat
+
+from antarest.study.business.utils import (
+ FormFieldsBaseModel,
+ execute_or_add_commands,
+ FieldInfo,
+ GENERAL_DATA_PATH,
+)
+from antarest.study.model import Study
+from antarest.study.storage.storage_service import StudyStorageService
+from antarest.study.storage.variantstudy.model.command.update_config import (
+ UpdateConfig,
+)
+
+
+class PriceTakingOrder(str, Enum):
+ DENS = "DENS"
+ LOAD = "Load"
+
+
+ThresholdType = confloat(ge=0)
+
+
+class AdequacyPatchFormFields(FormFieldsBaseModel):
+ # version 830
+ enable_adequacy_patch: Optional[StrictBool]
+ ntc_from_physical_areas_out_to_physical_areas_in_adequacy_patch: Optional[
+ StrictBool
+ ]
+ ntc_between_physical_areas_out_adequacy_patch: Optional[StrictBool]
+ # version 850
+ price_taking_order: Optional[PriceTakingOrder]
+ include_hurdle_cost_csr: Optional[StrictBool]
+ check_csr_cost_function: Optional[StrictBool]
+ threshold_initiate_curtailment_sharing_rule: Optional[ThresholdType] # type: ignore
+ threshold_display_local_matching_rule_violations: Optional[ThresholdType] # type: ignore
+ threshold_csr_variable_bounds_relaxation: Optional[ThresholdType] # type: ignore
+
+
+ADEQUACY_PATCH_PATH = f"{GENERAL_DATA_PATH}/adequacy patch"
+
+
+FIELDS_INFO: Dict[str, FieldInfo] = {
+ "enable_adequacy_patch": {
+ "path": f"{ADEQUACY_PATCH_PATH}/include-adq-patch",
+ "default_value": False,
+ "start_version": 830,
+ },
+ "ntc_from_physical_areas_out_to_physical_areas_in_adequacy_patch": {
+ "path": f"{ADEQUACY_PATCH_PATH}/set-to-null-ntc-from-physical-out-to-physical-in-for-first-step",
+ "default_value": True,
+ "start_version": 830,
+ },
+ "ntc_between_physical_areas_out_adequacy_patch": {
+ "path": f"{ADEQUACY_PATCH_PATH}/set-to-null-ntc-between-physical-out-for-first-step",
+ "default_value": True,
+ "start_version": 830,
+ },
+ "price_taking_order": {
+ "path": f"{ADEQUACY_PATCH_PATH}/price-taking-order",
+ "default_value": PriceTakingOrder.DENS.value,
+ "start_version": 850,
+ },
+ "include_hurdle_cost_csr": {
+ "path": f"{ADEQUACY_PATCH_PATH}/include-hurdle-cost-csr",
+ "default_value": False,
+ "start_version": 850,
+ },
+ "check_csr_cost_function": {
+ "path": f"{ADEQUACY_PATCH_PATH}/check-csr-cost-function",
+ "default_value": False,
+ "start_version": 850,
+ },
+ "threshold_initiate_curtailment_sharing_rule": {
+ "path": f"{ADEQUACY_PATCH_PATH}/threshold-initiate-curtailment-sharing-rule",
+ "default_value": 0.0,
+ "start_version": 850,
+ },
+ "threshold_display_local_matching_rule_violations": {
+ "path": f"{ADEQUACY_PATCH_PATH}/threshold-display-local-matching-rule-violations",
+ "default_value": 0.0,
+ "start_version": 850,
+ },
+ "threshold_csr_variable_bounds_relaxation": {
+ "path": f"{ADEQUACY_PATCH_PATH}/threshold-csr-variable-bounds-relaxation",
+ "default_value": 3,
+ "start_version": 850,
+ },
+}
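+
+# For example (illustrative): a v8.3 study (version 830) exposes
+# `enable_adequacy_patch`, while v8.5 fields such as
+# `price_taking_order` (start_version 850) are returned as None
+# for older studies.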
+
+
+class AdequacyPatchManager:
+ def __init__(self, storage_service: StudyStorageService) -> None:
+ self.storage_service = storage_service
+
+ def get_field_values(self, study: Study) -> AdequacyPatchFormFields:
+ """
+ Get adequacy patch field values for the webapp form
+ """
+ file_study = self.storage_service.get_storage(study).get_raw(study)
+ general_data = file_study.tree.get(GENERAL_DATA_PATH.split("/"))
+ parent = general_data.get("adequacy patch", {})
+
+ def get_value(field_info: FieldInfo) -> Any:
+ path = field_info["path"]
+ start_version = field_info.get("start_version", -1)
+ target_name = path.split("/")[-1]
+ is_in_version = file_study.config.version >= start_version # type: ignore
+
+ return (
+ parent.get(target_name, field_info["default_value"])
+ if is_in_version
+ else None
+ )
+
+ return AdequacyPatchFormFields.construct(
+ **{name: get_value(info) for name, info in FIELDS_INFO.items()}
+ )
+
+ def set_field_values(
+ self, study: Study, field_values: AdequacyPatchFormFields
+ ) -> None:
+ """
+ Set adequacy patch config from the webapp form
+ """
+ commands: List[UpdateConfig] = []
+
+ for field_name, value in field_values.__iter__():
+ if value is not None:
+ info = FIELDS_INFO[field_name]
+
+ commands.append(
+ UpdateConfig(
+ target=info["path"],
+ data=value,
+ command_context=self.storage_service.variant_study_service.command_factory.command_context,
+ )
+ )
+
+ if commands:
+ file_study = self.storage_service.get_storage(study).get_raw(study)
+ execute_or_add_commands(
+ study, file_study, commands, self.storage_service
+ )
diff --git a/antarest/study/business/allocation_management.py b/antarest/study/business/allocation_management.py
new file mode 100644
index 0000000000..e3c0046bff
--- /dev/null
+++ b/antarest/study/business/allocation_management.py
@@ -0,0 +1,284 @@
+from typing import List, Dict
+
+import numpy
+import numpy as np
+from pydantic import root_validator
+from pydantic import validator, conlist
+
+from antarest.core.exceptions import (
+ AllocationDataNotFound,
+ AreaNotFound,
+)
+from antarest.study.business.area_management import AreaInfoDTO
+from antarest.study.business.utils import (
+ FormFieldsBaseModel,
+ execute_or_add_commands,
+)
+from antarest.study.model import Study
+from antarest.study.storage.storage_service import StudyStorageService
+from antarest.study.storage.variantstudy.model.command.update_config import (
+ UpdateConfig,
+)
+
+
+class AllocationField(FormFieldsBaseModel):
+ """Model for consumption coefficients of a given area."""
+
+ area_id: str
+ coefficient: float
+
+
+class AllocationFormFields(FormFieldsBaseModel):
+ """Model for a list of consumption coefficients for each area."""
+
+ allocation: List[AllocationField]
+
+ @root_validator
+ def check_allocation(
+ cls, values: Dict[str, List[AllocationField]]
+ ) -> Dict[str, List[AllocationField]]:
+ allocation = values.get("allocation", [])
+
+ if not allocation:
+ raise ValueError("allocation must not be empty")
+
+ if len(allocation) != len({a.area_id for a in allocation}):
+ raise ValueError("allocation must not contain duplicate area IDs")
+
+ for a in allocation:
+ if a.coefficient < 0:
+ raise ValueError(
+ "allocation must not contain negative coefficients"
+ )
+
+ if numpy.isnan(a.coefficient):
+ raise ValueError(
+ "allocation must not contain NaN coefficients"
+ )
+
+ if sum(a.coefficient for a in allocation) <= 0:
+ raise ValueError("sum of allocation coefficients must be positive")
+
+ return values
+
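+# A valid payload, for illustration (field aliases are camelCase,
+# e.g. `areaId`, assuming FormFieldsBaseModel's alias generator):
+# AllocationFormFields(allocation=[
+# AllocationField(areaId="north", coefficient=0.4),
+# AllocationField(areaId="south", coefficient=0.6),
+# ])
+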
+
+class AllocationMatrix(FormFieldsBaseModel):
+ """
+ Hydraulic allocation matrix.
+
+ Attributes:
+ index: A list of all study areas.
+ columns: A list of selected production areas.
+ data: A 2D-array matrix of consumption coefficients.
+ """
+
+ index: conlist(str, min_items=1) # type: ignore
+ columns: conlist(str, min_items=1) # type: ignore
+ data: List[List[float]] # NonNegativeFloat not necessary
+
+ # noinspection PyMethodParameters
+ @validator("data")
+ def validate_hydro_allocation_matrix(
+ cls, data: List[List[float]], values: Dict[str, List[str]]
+ ) -> List[List[float]]:
+ """
+ Validate the hydraulic allocation matrix.
+
+ Args:
+ data: the allocation matrix to validate.
+ values: the allocation matrix fields.
+
+ Raises:
+ ValueError:
+ If the coefficient columns are empty or contain no non-null values.
+
+ Returns:
+ The validated allocation matrix.
+ """
+
+ array = np.array(data)
+ rows = len(values.get("index", []))
+ cols = len(values.get("columns", []))
+
+ if array.size == 0:
+ raise ValueError("allocation matrix must not be empty")
+ if array.shape != (rows, cols):
+ raise ValueError("allocation matrix must have square shape")
+ if np.any(array < 0):
+ raise ValueError(
+ "allocation matrix must not contain negative coefficients"
+ )
+ if np.any(np.isnan(array)):
+ raise ValueError(
+ "allocation matrix must not contain NaN coefficients"
+ )
+ if np.all(array == 0):
+ raise ValueError(
+ "allocation matrix must not contain only null values"
+ )
+
+ return data
+
+
+class AllocationManager:
+ """
+ Manage hydraulic allocation coefficients.
+ """
+
+ def __init__(self, storage_service: StudyStorageService) -> None:
+ self.storage_service = storage_service
+
+ def get_allocation_data(
+ self, study: Study, area_id: str
+ ) -> Dict[str, List[AllocationField]]:
+ """
+ Get hydraulic allocation data.
+
+ Args:
+ study: study to get the allocation data from.
+ area_id: area to get the allocation data from.
+
+ Returns:
+ The allocation data.
+
+ Raises:
+ AllocationDataNotFound: if the allocation data is not found.
+ """
+ # sourcery skip: reintroduce-else, swap-if-else-branches, use-named-expression
+
+ file_study = self.storage_service.get_storage(study).get_raw(study)
+ allocation_data = file_study.tree.get(
+ f"input/hydro/allocation/{area_id}".split("/"), depth=2
+ )
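+ # e.g. (illustrative) {"[allocation]": {"east": 0.5, "west": 0.5}}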
+
+ if not allocation_data:
+ raise AllocationDataNotFound(area_id)
+
+ return allocation_data.get("[allocation]", {})
+
+ def get_allocation_form_fields(
+ self, all_areas: List[AreaInfoDTO], study: Study, area_id: str
+ ) -> AllocationFormFields:
+ """
+ Get hydraulic allocation coefficients.
+
+ Args:
+ all_areas: list of all areas in the study.
+ study: study to get the allocation coefficients from.
+ area_id: area to get the allocation coefficients from.
+
+ Returns:
+ The allocation coefficients.
+
+ Raises:
+ AllocationDataNotFound: if the allocation data is not found.
+ """
+
+ areas_ids = {area.id for area in all_areas}
+ allocations = self.get_allocation_data(study, area_id)
+
+ filtered_allocations = {
+ area: value
+ for area, value in allocations.items()
+ if area in areas_ids
+ }
+
+ return AllocationFormFields.construct(
+ allocation=[
+ AllocationField.construct(area_id=area, coefficient=value)
+ for area, value in filtered_allocations.items()
+ ]
+ )
+
+ def set_allocation_form_fields(
+ self,
+ all_areas: List[AreaInfoDTO],
+ study: Study,
+ area_id: str,
+ data: AllocationFormFields,
+ ) -> AllocationFormFields:
+ """
+ Set hydraulic allocation coefficients.
+
+ Args:
+ all_areas: list of all areas in the study.
+ study: study to set the allocation coefficients to.
+ area_id: area to set the allocation coefficients to.
+ data: allocation coefficients to set.
+
+ Raises:
+ AreaNotFound: if the area is not found.
+ """
+
+ allocation_ids = {field.area_id for field in data.allocation}
+ areas_ids = {area.id for area in all_areas}
+
+ if invalid_ids := allocation_ids - areas_ids:
+ # sort for deterministic error message and testing
+ raise AreaNotFound(*sorted(invalid_ids))
+
+ filtered_allocations = [
+ f
+ for f in data.allocation
+ if f.coefficient > 0 and f.area_id in areas_ids
+ ]
+
+ command_context = (
+ self.storage_service.variant_study_service.command_factory.command_context
+ )
+ command = UpdateConfig(
+ target=f"input/hydro/allocation/{area_id}/[allocation]",
+ data={f.area_id: f.coefficient for f in filtered_allocations},
+ command_context=command_context,
+ )
+
+ file_study = self.storage_service.get_storage(study).get_raw(study)
+
+ execute_or_add_commands(
+ study, file_study, [command], self.storage_service
+ )
+
+ updated_allocations = self.get_allocation_data(study, area_id)
+
+ return AllocationFormFields.construct(
+ allocation=[
+ AllocationField.construct(area_id=area, coefficient=value)
+ for area, value in updated_allocations.items()
+ ]
+ )
+
+ def get_allocation_matrix(
+ self, study: Study, all_areas: List[AreaInfoDTO]
+ ) -> AllocationMatrix:
+ """
+ Get the hydraulic allocation matrix for all areas in the study.
+
+ Args:
+ study: study to get the allocation matrix from.
+ all_areas: list of all areas in the study.
+
+ Returns:
+ The allocation matrix.
+
+ Raises:
+ AllocationDataNotFound: if the allocation data is not found.
+ """
+
+ file_study = self.storage_service.get_storage(study).get_raw(study)
+ allocation_cfg = file_study.tree.get(
+ "input/hydro/allocation/*".split("/"), depth=2
+ )
+
+ if not allocation_cfg:
+ areas_ids = {area.id for area in all_areas}
+ raise AllocationDataNotFound(*areas_ids)
+
+ rows = [area.id for area in all_areas]
+ columns = [area.id for area in all_areas if area.id in allocation_cfg]
+ array = np.zeros((len(rows), len(columns)), dtype=np.float64)
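+ # rows cover every study area, columns only the areas that define an
+ # allocation; e.g. (illustrative) 3 areas with 2 allocation sections
+ # yield a 3×2 matrix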
+
+ for prod_area, allocation_dict in allocation_cfg.items():
+ allocations = allocation_dict["[allocation]"]
+ for cons_area, coefficient in allocations.items():
+ row_idx = rows.index(cons_area)
+ col_idx = columns.index(prod_area)
+ array[row_idx][col_idx] = coefficient
+
+ return AllocationMatrix.construct(
+ index=rows, columns=columns, data=array.tolist()
+ )
diff --git a/antarest/study/business/correlation_management.py b/antarest/study/business/correlation_management.py
new file mode 100644
index 0000000000..02f3e8ac8b
--- /dev/null
+++ b/antarest/study/business/correlation_management.py
@@ -0,0 +1,383 @@
+"""
+Management of spatial correlations between the different generators.
+The generators are of the same category and can be hydraulic, wind, load or solar.
+"""
+import collections
+from typing import Dict, List, Sequence
+
+import numpy as np
+import numpy.typing as npt
+from antarest.core.exceptions import AreaNotFound
+from antarest.study.business.area_management import AreaInfoDTO
+from antarest.study.business.utils import (
+ FormFieldsBaseModel,
+ execute_or_add_commands,
+)
+from antarest.study.model import Study
+from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
+from antarest.study.storage.storage_service import StudyStorageService
+from antarest.study.storage.variantstudy.model.command.update_config import (
+ UpdateConfig,
+)
+from pydantic import conlist, validator
+
+
+class AreaCoefficientItem(FormFieldsBaseModel):
+ """
+ Model for correlation coefficients of a given area.
+
+ Attributes:
+ area_id: Area identifier.
+ coefficient: correlation coefficient as a percentage (-100 <= coefficient <= 100).
+ """
+
+ class Config:
+ allow_population_by_field_name = True
+
+ area_id: str
+ coefficient: float
+
+
+class CorrelationFormFields(FormFieldsBaseModel):
+ """
+ Model for a list of correlation coefficients for each area.
+
+ Attributes:
+ correlation: A list of non-null correlation coefficients in percentage.
+ """
+
+ correlation: List[AreaCoefficientItem]
+
+ # noinspection PyMethodParameters
+ @validator("correlation")
+ def check_correlation(
+ cls, correlation: List[AreaCoefficientItem]
+ ) -> List[AreaCoefficientItem]:
+ if not correlation:
+ raise ValueError("correlation must not be empty")
+ counter = collections.Counter(field.area_id for field in correlation)
+ if duplicates := {id_ for id_, count in counter.items() if count > 1}:
+ raise ValueError(
+ f"correlation must not contain duplicate area IDs: {duplicates}"
+ )
+ # fmt: off
+ array = np.array([a.coefficient for a in correlation], dtype=np.float64)
+ if np.any((array < -100) | (array > 100)):
+ raise ValueError("percentage must be between -100 and 100")
+ if np.any(np.isnan(array)):
+ raise ValueError("correlation matrix must not contain NaN coefficients")
+ # fmt: on
+ return correlation
+
+
+class CorrelationMatrix(FormFieldsBaseModel):
+ """
+ Correlation matrix for hydraulic, wind, load, or solar generators.
+
+ Attributes:
+ index: A list of all study areas.
+ columns: A list of selected production areas.
+ data: A 2D-array matrix of correlation coefficients.
+ """
+
+ index: conlist(str, min_items=1) # type: ignore
+ columns: conlist(str, min_items=1) # type: ignore
+ data: List[List[float]] # NonNegativeFloat not necessary
+
+ # noinspection PyMethodParameters
+ @validator("data")
+ def validate_correlation_matrix(
+ cls, data: List[List[float]], values: Dict[str, List[str]]
+ ) -> List[List[float]]:
+ """
+ Validates the correlation matrix by checking its shape and range of coefficients.
+
+ Args:
+ cls: The `CorrelationMatrix` class.
+ data: The correlation matrix to validate.
+ values: A dictionary containing the values of `index` and `columns`.
+
+ Returns:
+ List[List[float]]: The validated correlation matrix.
+
+ Raises:
+ ValueError:
+ If the correlation matrix is empty,
+ has an incorrect shape,
+ is squared but not symmetric,
+ or contains coefficients outside the range of -1 to 1
+ or NaN coefficients.
+ """
+
+ array = np.array(data)
+ rows = len(values.get("index", []))
+ cols = len(values.get("columns", []))
+
+ # fmt: off
+ if array.size == 0:
+ raise ValueError("correlation matrix must not be empty")
+ if array.shape != (rows, cols):
+ raise ValueError(f"correlation matrix must have shape ({rows}×{cols})")
+ if np.any((array < -1) | (array > 1)):
+ raise ValueError("coefficients must be between -1 and 1")
+ if np.any(np.isnan(array)):
+ raise ValueError("correlation matrix must not contain NaN coefficients")
+ if (
+ array.shape[0] == array.shape[1]
+ and not np.array_equal(array, array.T)
+ ):
+ raise ValueError("correlation matrix is not symmetric")
+ # fmt: on
+ return data
+
+ class Config:
+ schema_extra = {
+ "example": {
+ "columns": ["north", "east", "south", "west"],
+ "data": [
+ [0.0, 0.0, 0.25, 0.0],
+ [0.0, 0.0, 0.75, 0.12],
+ [0.25, 0.75, 0.0, 0.75],
+ [0.0, 0.12, 0.75, 0.0],
+ ],
+ "index": ["north", "east", "south", "west"],
+ }
+ }
+
+
+def _config_to_array(
+ area_ids: Sequence[str],
+ correlation_cfg: Dict[str, str],
+) -> npt.NDArray[np.float64]:
+ array = np.identity(len(area_ids), dtype=np.float64)
+ for key, value in correlation_cfg.items():
+ a1, a2 = key.split("%")
+ i = area_ids.index(a1)
+ j = area_ids.index(a2)
+ if i == j:
+ # ignored: values from the diagonal are always == 1.0
+ continue
+ coefficient = value
+ array[i][j] = coefficient
+ array[j][i] = coefficient
+ return array
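+
+# Illustrative example: with area_ids=["north", "east"] and
+# correlation_cfg={"north%east": 0.25}, the result is
+# [[1.0, 0.25], [0.25, 1.0]] (identity diagonal, symmetric fill).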
+
+
+def _array_to_config(
+ area_ids: Sequence[str],
+ array: npt.NDArray[np.float64],
+) -> Dict[str, str]:
+ correlation_cfg: Dict[str, str] = {}
+ count = len(area_ids)
+ for i in range(count):
+ # not saved: values from the diagonal are always == 1.0
+ for j in range(i + 1, count):
+ coefficient = array[i][j]
+ if not coefficient:
+ # null values are not saved
+ continue
+ a1 = area_ids[i]
+ a2 = area_ids[j]
+ correlation_cfg[f"{a1}%{a2}"] = coefficient
+ return correlation_cfg
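+
+# Illustrative example: the inverse mapping; [[1.0, 0.25], [0.25, 1.0]]
+# with area_ids=["north", "east"] serializes back to
+# {"north%east": 0.25} (diagonal and null coefficients are not saved).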
+
+
+class CorrelationManager:
+ """
+ This manager allows you to read and write the hydraulic, wind, load or solar
+ correlation matrices of a raw study or a variant.
+ """
+
+ # Today, only the 'hydro' category is fully supported. We could also
+ # manage the 'load', 'solar' and 'wind' categories, but their usage
+ # is deprecated.
+ url = ["input", "hydro", "prepro", "correlation", "annual"]
+
+ def __init__(self, storage_service: StudyStorageService) -> None:
+ self.storage_service = storage_service
+
+ def _get_array(
+ self,
+ file_study: FileStudy,
+ area_ids: Sequence[str],
+ ) -> npt.NDArray[np.float64]:
+ correlation_cfg = file_study.tree.get(self.url, depth=3)
+ return _config_to_array(area_ids, correlation_cfg)
+
+ def _set_array(
+ self,
+ study: Study,
+ file_study: FileStudy,
+ area_ids: Sequence[str],
+ array: npt.NDArray[np.float64],
+ ) -> None:
+ correlation_cfg = _array_to_config(area_ids, array)
+ command_context = (
+ self.storage_service.variant_study_service.command_factory.command_context
+ )
+ command = UpdateConfig(
+ target="/".join(self.url),
+ data=correlation_cfg,
+ command_context=command_context,
+ )
+ execute_or_add_commands(
+ study, file_study, [command], self.storage_service
+ )
+
+ def get_correlation_form_fields(
+ self, all_areas: List[AreaInfoDTO], study: Study, area_id: str
+ ) -> CorrelationFormFields:
+ """
+ Get the correlation form fields (percentage values) for a given area.
+
+ Args:
+ all_areas: list of all areas in the study.
+ study: study to get the correlation coefficients from.
+ area_id: area to get the correlation coefficients from.
+
+ Returns:
+ The correlation coefficients.
+ """
+ file_study = self.storage_service.get_storage(study).get_raw(study)
+
+ area_ids = [area.id for area in all_areas]
+ array = self._get_array(file_study, area_ids)
+ column = array[:, area_ids.index(area_id)] * 100
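+ # stored coefficients lie in [-1, 1]; the form uses percentages in [-100, 100]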
+
+ correlation_field = [
+ AreaCoefficientItem.construct(area_id=a, coefficient=c)
+ for a, c in zip(area_ids, column)
+ if a != area_id and c
+ ]
+
+ current_area_coefficient = column[area_ids.index(area_id)]
+ correlation_field.insert(
+ 0,
+ AreaCoefficientItem.construct(
+ area_id=area_id, coefficient=current_area_coefficient
+ ),
+ )
+
+ return CorrelationFormFields.construct(correlation=correlation_field)
+
+ def set_correlation_form_fields(
+ self,
+ all_areas: List[AreaInfoDTO],
+ study: Study,
+ area_id: str,
+ data: CorrelationFormFields,
+ ) -> CorrelationFormFields:
+ """
+ Set the correlation coefficients of a given area from the form fields (percentage values).
+
+ Args:
+ all_areas: list of all areas in the study.
+ study: study to set the correlation coefficients to.
+ area_id: area to set the correlation coefficients to.
+ data: correlation coefficients to set.
+
+ Raises:
+ AreaNotFound: if the area is not found or invalid.
+
+ Returns:
+ The updated correlation coefficients.
+ """
+ area_ids = [area.id for area in all_areas]
+ correlation_values = collections.OrderedDict.fromkeys(area_ids, 0.0)
+ correlation_values.update(
+ {field.area_id: field.coefficient for field in data.correlation}
+ )
+
+ if invalid_ids := set(correlation_values) - set(area_ids):
+ # sort for deterministic error message and testing
+ raise AreaNotFound(*sorted(invalid_ids))
+
+ file_study = self.storage_service.get_storage(study).get_raw(study)
+ array = self._get_array(file_study, area_ids)
+ j = area_ids.index(area_id)
+ for i, coefficient in enumerate(correlation_values.values()):
+ array[i][j] = coefficient / 100
+ array[j][i] = coefficient / 100
+ self._set_array(study, file_study, area_ids, array)
+
+ column = array[:, area_ids.index(area_id)] * 100
+ return CorrelationFormFields.construct(
+ correlation=[
+ AreaCoefficientItem.construct(area_id=a, coefficient=c)
+ for a, c in zip(area_ids, column)
+ if c
+ ]
+ )
+
+ def get_correlation_matrix(
+ self, all_areas: List[AreaInfoDTO], study: Study, columns: List[str]
+ ) -> CorrelationMatrix:
+ """
+ Read the correlation coefficients and get the correlation matrix (values in the range -1 to 1).
+
+ Args:
+ all_areas: list of all areas in the study.
+ study: study to get the correlation matrix from.
+ columns: areas to get the correlation matrix from.
+
+ Returns:
+ The correlation matrix.
+ """
+ file_study = self.storage_service.get_storage(study).get_raw(study)
+ area_ids = [area.id for area in all_areas]
+ columns = (
+ [a for a in area_ids if a in columns] if columns else area_ids
+ )
+ array = self._get_array(file_study, area_ids)
+ # noinspection PyTypeChecker
+ data = [
+ [c for i, c in enumerate(row) if area_ids[i] in columns]
+ for row in array.tolist()
+ ]
+
+ return CorrelationMatrix.construct(
+ index=area_ids, columns=columns, data=data
+ )
+
+ def set_correlation_matrix(
+ self,
+ all_areas: List[AreaInfoDTO],
+ study: Study,
+ matrix: CorrelationMatrix,
+ ) -> CorrelationMatrix:
+ """
+ Set the correlation coefficients from the coefficient matrix (values in the range -1 to 1).
+
+ Args:
+ all_areas: list of all areas in the study.
+ study: study to set the correlation matrix on.
+ matrix: correlation matrix to set.
+
+ Returns:
+ The updated correlation matrix.
+ """
+ file_study = self.storage_service.get_storage(study).get_raw(study)
+ area_ids = [area.id for area in all_areas]
+
+ array = self._get_array(file_study, area_ids)
+
+ for row, a1 in zip(matrix.data, matrix.index):
+ for coefficient, a2 in zip(row, matrix.columns):
+ if missing := {a1, a2} - set(area_ids):
+ raise AreaNotFound(*missing)
+ i = area_ids.index(a1)
+ j = area_ids.index(a2)
+ array[i][j] = coefficient
+ array[j][i] = coefficient
+
+ self._set_array(study, file_study, area_ids, array)
+
+ # noinspection PyTypeChecker
+ data = [
+ [c for i, c in enumerate(row) if area_ids[i] in matrix.columns]
+ for row in array.tolist()
+ ]
+
+ return CorrelationMatrix.construct(
+ index=area_ids, columns=matrix.columns, data=data
+ )
diff --git a/antarest/study/business/general_management.py b/antarest/study/business/general_management.py
index a7c2797f82..e0e1544cf3 100644
--- a/antarest/study/business/general_management.py
+++ b/antarest/study/business/general_management.py
@@ -1,7 +1,7 @@
from enum import Enum
-from typing import Optional, Dict, Any, List
+from typing import Optional, Dict, Any, List, cast
-from pydantic import StrictBool, conint, PositiveInt
+from pydantic import StrictBool, conint, PositiveInt, root_validator
from antarest.study.business.utils import (
FormFieldsBaseModel,
@@ -57,7 +57,7 @@ class BuildingMode(str, Enum):
DERATED = "Derated"
-DayNumberType = conint(ge=1, le=365)
+DayNumberType = conint(ge=1, le=366)
class GeneralFormFields(FormFieldsBaseModel):
@@ -82,6 +82,39 @@ class GeneralFormFields(FormFieldsBaseModel):
geographic_trimming: Optional[StrictBool]
thematic_trimming: Optional[StrictBool]
+ @root_validator
+ def day_fields_validation(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+ first_day = values.get("first_day")
+ last_day = values.get("last_day")
+ leap_year = values.get("leap_year")
+ day_fields = [first_day, last_day, leap_year]
+
+ if all(v is None for v in day_fields):
+ # The user wishes to update a field other than these three;
+ # there is nothing to validate:
+ return values
+
+ if any(v is None for v in day_fields):
+ raise ValueError(
+ "First day, last day and leap year fields must be defined together"
+ )
+
+ first_day = cast(int, first_day)
+ last_day = cast(int, last_day)
+ leap_year = cast(bool, leap_year)
+ num_days_in_year = 366 if leap_year else 365
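+ # e.g. first_day=1, last_day=366 is only valid when leap_year is True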
+
+ if first_day > last_day:
+ raise ValueError(
+ "Last day must be greater than or equal to the first day"
+ )
+ if last_day > num_days_in_year:
+ raise ValueError(
+ f"Last day cannot be greater than {num_days_in_year}"
+ )
+
+ return values
+
GENERAL = "general"
OUTPUT = "output"
@@ -142,7 +175,7 @@ class GeneralFormFields(FormFieldsBaseModel):
"filtering": {
"path": f"{GENERAL_PATH}/filtering",
"default_value": False,
- "end_version": 700,
+ "end_version": 710,
},
"geographic_trimming": {
"path": f"{GENERAL_PATH}/geographic-trimming",
@@ -184,10 +217,10 @@ def get_value(field_name: str, field_info: FieldInfo) -> Any:
path = field_info["path"]
study_ver = file_study.config.version
- start_ver = field_info.get("start_version", -1)
- end_ver = field_info.get("end_version", study_ver)
+ start_ver = cast(int, field_info.get("start_version", -1))
+ end_ver = cast(int, field_info.get("end_version", study_ver + 1))
target_name = path.split("/")[-1]
- is_in_version = start_ver <= study_ver <= end_ver # type: ignore
+ is_in_version = start_ver <= study_ver < end_ver
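+ # `end_version` is exclusive: e.g. "filtering" (end_version=710)
+ # only applies to studies strictly below version 710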
parent = general if GENERAL_PATH in path else output
return (
@@ -233,7 +266,7 @@ def set_field_values(
)
)
- if len(commands) > 0:
+ if commands:
execute_or_add_commands(
study, file_study, commands, self.storage_service
)
diff --git a/antarest/study/business/optimization_management.py b/antarest/study/business/optimization_management.py
index 6c6daa85bf..3339347081 100644
--- a/antarest/study/business/optimization_management.py
+++ b/antarest/study/business/optimization_management.py
@@ -1,7 +1,7 @@
from enum import Enum
-from typing import Optional, Union, List, Any, Dict
+from typing import Optional, Union, List, Any, Dict, cast
-from pydantic.types import StrictBool, StrictFloat, StrictInt
+from pydantic.types import StrictBool
from antarest.study.business.utils import (
FormFieldsBaseModel,
@@ -16,11 +16,6 @@
)
-class LinkType(str, Enum):
- LOCAL = "local"
- AC = "ac"
-
-
class LegacyTransmissionCapacities(str, Enum):
INFINITE = "infinite"
@@ -45,11 +40,6 @@ class SimplexOptimizationRange(str, Enum):
WEEK = "week"
-class PriceTakingOrder(str, Enum):
- DENS = "DENS"
- LOAD = "Load"
-
-
class OptimizationFormFields(FormFieldsBaseModel):
binding_constraints: Optional[StrictBool]
hurdle_costs: Optional[StrictBool]
@@ -59,7 +49,6 @@ class OptimizationFormFields(FormFieldsBaseModel):
Union[LegacyTransmissionCapacities, TransmissionCapacities],
]
]
- link_type: Optional[LinkType]
thermal_clusters_min_stable_power: Optional[StrictBool]
thermal_clusters_min_ud_time: Optional[StrictBool]
day_ahead_reserve: Optional[StrictBool]
@@ -69,24 +58,9 @@ class OptimizationFormFields(FormFieldsBaseModel):
export_mps: Optional[Union[bool, str]]
unfeasible_problem_behavior: Optional[UnfeasibleProblemBehavior]
simplex_optimization_range: Optional[SimplexOptimizationRange]
- # version 830
- split_exported_mps: Optional[StrictBool]
- enable_adequacy_patch: Optional[StrictBool]
- ntc_from_physical_areas_out_to_physical_areas_in_adequacy_patch: Optional[
- StrictBool
- ]
- ntc_between_physical_areas_out_adequacy_patch: Optional[StrictBool]
- # version 850
- price_taking_order: Optional[PriceTakingOrder]
- include_hurdle_cost_csr: Optional[StrictBool]
- check_csr_cost_function: Optional[StrictBool]
- threshold_initiate_curtailment_sharing_rule: Optional[StrictFloat]
- threshold_display_local_matching_rule_violations: Optional[StrictFloat]
- threshold_csr_variable_bounds_relaxation: Optional[StrictInt]
OPTIMIZATION_PATH = f"{GENERAL_DATA_PATH}/optimization"
-ADEQUACY_PATCH_PATH = f"{GENERAL_DATA_PATH}/adequacy patch"
FIELDS_INFO: Dict[str, FieldInfo] = {
@@ -102,10 +76,6 @@ class OptimizationFormFields(FormFieldsBaseModel):
"path": f"{OPTIMIZATION_PATH}/transmission-capacities",
"default_value": True,
},
- "link_type": {
- "path": f"{OPTIMIZATION_PATH}/link-type",
- "default_value": LinkType.LOCAL,
- },
"thermal_clusters_min_stable_power": {
"path": f"{OPTIMIZATION_PATH}/include-tc-minstablepower",
"default_value": True,
@@ -136,62 +106,11 @@ class OptimizationFormFields(FormFieldsBaseModel):
},
"unfeasible_problem_behavior": {
"path": f"{OPTIMIZATION_PATH}/include-unfeasible-problem-behavior",
- "default_value": UnfeasibleProblemBehavior.ERROR_VERBOSE,
+ "default_value": UnfeasibleProblemBehavior.ERROR_VERBOSE.value,
},
"simplex_optimization_range": {
"path": f"{OPTIMIZATION_PATH}/simplex-range",
- "default_value": SimplexOptimizationRange.WEEK,
- },
- "split_exported_mps": {
- "path": f"{OPTIMIZATION_PATH}/include-split-exported-mps",
- "default_value": False,
- "start_version": 830,
- "end_version": 840,
- },
- "enable_adequacy_patch": {
- "path": f"{ADEQUACY_PATCH_PATH}/include-adq-patch",
- "default_value": False,
- "start_version": 830,
- },
- "ntc_from_physical_areas_out_to_physical_areas_in_adequacy_patch": {
- "path": f"{ADEQUACY_PATCH_PATH}/set-to-null-ntc-from-physical-out-to-physical-in-for-first-step",
- "default_value": True,
- "start_version": 830,
- },
- "ntc_between_physical_areas_out_adequacy_patch": {
- "path": f"{ADEQUACY_PATCH_PATH}/set-to-null-ntc-between-physical-out-for-first-step",
- "default_value": True,
- "start_version": 830,
- },
- "price_taking_order": {
- "path": f"{ADEQUACY_PATCH_PATH}/price-taking-order",
- "default_value": "DENS",
- "start_version": 850,
- },
- "include_hurdle_cost_csr": {
- "path": f"{ADEQUACY_PATCH_PATH}/include-hurdle-cost-csr",
- "default_value": False,
- "start_version": 850,
- },
- "check_csr_cost_function": {
- "path": f"{ADEQUACY_PATCH_PATH}/check-csr-cost-function",
- "default_value": False,
- "start_version": 850,
- },
- "threshold_initiate_curtailment_sharing_rule": {
- "path": f"{ADEQUACY_PATCH_PATH}/threshold-initiate-curtailment-sharing-rule",
- "default_value": 0.0,
- "start_version": 850,
- },
- "threshold_display_local_matching_rule_violations": {
- "path": f"{ADEQUACY_PATCH_PATH}/threshold-display-local-matching-rule-violations",
- "default_value": 0.0,
- "start_version": 850,
- },
- "threshold_csr_variable_bounds_relaxation": {
- "path": f"{ADEQUACY_PATCH_PATH}/threshold-csr-variable-bounds-relaxation",
- "default_value": 3,
- "start_version": 850,
+ "default_value": SimplexOptimizationRange.WEEK.value,
},
}
@@ -202,21 +121,19 @@ def __init__(self, storage_service: StudyStorageService) -> None:
def get_field_values(self, study: Study) -> OptimizationFormFields:
"""
- Get Optimization field values for the webapp form
+ Get optimization field values for the webapp form
"""
file_study = self.storage_service.get_storage(study).get_raw(study)
general_data = file_study.tree.get(GENERAL_DATA_PATH.split("/"))
- optimization = general_data.get("optimization", {})
- adequacy_patch = general_data.get("adequacy patch", {})
+ parent = general_data.get("optimization", {})
def get_value(field_info: FieldInfo) -> Any:
path = field_info["path"]
- start_version = field_info.get("start_version", -1)
+ study_ver = file_study.config.version
+ start_ver = cast(int, field_info.get("start_version", -1))
target_name = path.split("/")[-1]
- is_in_version = file_study.config.version >= start_version # type: ignore
- parent = (
- optimization if OPTIMIZATION_PATH in path else adequacy_patch
- )
+ is_in_version = start_ver <= study_ver
+
return (
parent.get(target_name, field_info["default_value"])
if is_in_version
@@ -231,7 +148,7 @@ def set_field_values(
self, study: Study, field_values: OptimizationFormFields
) -> None:
"""
- Set Optimization config from the webapp form
+ Set optimization config from the webapp form
"""
commands: List[UpdateConfig] = []
diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py
index 1f7bf3a348..41e217e39d 100644
--- a/antarest/study/business/table_mode_management.py
+++ b/antarest/study/business/table_mode_management.py
@@ -379,7 +379,7 @@ class PathVars(TypedDict, total=False):
},
}
-COLUMN_MODELS_BY_TYPE = {
+COLUMNS_MODELS_BY_TYPE = {
TableTemplateType.AREA: AreaColumns,
TableTemplateType.LINK: LinkColumns,
TableTemplateType.CLUSTER: ClusterColumns,
@@ -387,7 +387,7 @@ class PathVars(TypedDict, total=False):
TableTemplateType.BINDING_CONSTRAINT: BindingConstraintColumns,
}
-ColumnModelTypes = Union[
+ColumnsModelTypes = Union[
AreaColumns,
LinkColumns,
ClusterColumns,
@@ -405,9 +405,9 @@ def get_table_data(
study: RawStudy,
table_type: TableTemplateType,
columns: List[str],
- ) -> Dict[str, ColumnModelTypes]:
+ ) -> Dict[str, ColumnsModelTypes]:
file_study = self.storage_service.get_storage(study).get_raw(study)
- column_model = COLUMN_MODELS_BY_TYPE[table_type]
+ columns_model = COLUMNS_MODELS_BY_TYPE[table_type]
fields_info = FIELDS_INFO_BY_TYPE[table_type]
glob_object = TableModeManager.__get_glob_object(
file_study, table_type
@@ -426,7 +426,7 @@ def get_column_value(col: str, data: Dict[str, Any]) -> Any:
if table_type == TableTemplateType.AREA:
return {
- area_id: column_model.construct(
+ area_id: columns_model.construct(
**{col: get_column_value(col, data) for col in columns}
) # type: ignore
for area_id, data in glob_object.items()
@@ -434,7 +434,7 @@ def get_column_value(col: str, data: Dict[str, Any]) -> Any:
if table_type == TableTemplateType.BINDING_CONSTRAINT:
return {
- data["id"]: column_model.construct(
+ data["id"]: columns_model.construct(
**{col: get_column_value(col, data) for col in columns}
) # type: ignore
for data in glob_object.values()
@@ -443,7 +443,7 @@ def get_column_value(col: str, data: Dict[str, Any]) -> Any:
obj: Dict[str, Any] = {}
for id_1, value_1 in glob_object.items():
for id_2, value_2 in value_1.items():
- obj[f"{id_1} / {id_2}"] = column_model.construct(
+ obj[f"{id_1} / {id_2}"] = columns_model.construct(
**{col: get_column_value(col, value_2) for col in columns}
)
@@ -453,7 +453,7 @@ def set_table_data(
self,
study: RawStudy,
table_type: TableTemplateType,
- data: Dict[str, ColumnModelTypes],
+ data: Dict[str, ColumnsModelTypes],
) -> None:
commands: List[ICommand] = []
bindings_by_id = None
diff --git a/antarest/study/business/thematic_trimming_management.py b/antarest/study/business/thematic_trimming_management.py
index 691ef35f01..5a455dbc11 100644
--- a/antarest/study/business/thematic_trimming_management.py
+++ b/antarest/study/business/thematic_trimming_management.py
@@ -1,4 +1,4 @@
-from typing import Optional, Dict, Any, List
+from typing import Optional, Dict, Any, List, cast
from pydantic.types import StrictBool
@@ -235,10 +235,9 @@ def set_field_values(
keys_by_bool: Dict[bool, List[Any]] = {True: [], False: []}
for name, info in FIELDS_INFO.items():
- start_ver = info.get("start_version", 0)
- end_ver = info.get("end_version", study_ver)
+ start_ver = cast(int, info.get("start_version", 0))
- if start_ver <= study_ver <= end_ver: # type: ignore
+ if start_ver <= study_ver:
keys_by_bool[field_values_dict[name]].append(info["path"])
config_data: Dict[str, Any]
diff --git a/antarest/study/service.py b/antarest/study/service.py
index d4bf5144cf..147e6c75a8 100644
--- a/antarest/study/service.py
+++ b/antarest/study/service.py
@@ -53,9 +53,13 @@
MatrixEditInstructionDTO,
)
from antarest.matrixstore.utils import parse_tsv_matrix
+from antarest.study.business.adequacy_patch_management import (
+ AdequacyPatchManager,
+)
from antarest.study.business.advanced_parameters_management import (
AdvancedParamsManager,
)
+from antarest.study.business.allocation_management import AllocationManager
from antarest.study.business.area_management import (
AreaCreationDTO,
AreaInfoDTO,
@@ -303,10 +307,14 @@ def __init__(
self.storage_service
)
self.optimization_manager = OptimizationManager(self.storage_service)
+ self.adequacy_patch_manager = AdequacyPatchManager(
+ self.storage_service
+ )
self.advanced_parameters_manager = AdvancedParamsManager(
self.storage_service
)
self.hydro_manager = HydroManager(self.storage_service)
+ self.allocation_manager = AllocationManager(self.storage_service)
self.renewable_manager = RenewableManager(self.storage_service)
self.thermal_manager = ThermalManager(self.storage_service)
self.ts_config_manager = TimeSeriesConfigManager(self.storage_service)
diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py
index 528185b56f..224c1c4d4f 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py
@@ -1,6 +1,6 @@
import re
from abc import ABC, abstractmethod
-from typing import Tuple, List
+from typing import List, Tuple
import pandas as pd # type: ignore
from pandas import DataFrame
diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py
index f28d0cc66b..00f496fd4e 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py
@@ -1,6 +1,6 @@
import logging
from pathlib import Path
-from typing import List, Optional, Any, Union, cast, Dict
+from typing import Any, Dict, List, Optional, Union, cast
import pandas as pd # type: ignore
from pandas.errors import EmptyDataError # type: ignore
@@ -14,6 +14,7 @@
ContextServer,
)
from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import (
+ MatrixFrequency,
MatrixNode,
)
@@ -29,8 +30,8 @@ def __init__(
self,
context: ContextServer,
config: FileStudyTreeConfig,
+ freq: MatrixFrequency = MatrixFrequency.HOURLY,
nb_columns: Optional[int] = None,
- freq: str = "hourly",
default_empty: Optional[List[List[float]]] = None,
):
super().__init__(context=context, config=config, freq=freq)
@@ -45,6 +46,7 @@ def parse(
) -> Union[JSON, pd.DataFrame]:
file_path = file_path or self.config.path
try:
+ # sourcery skip: extract-method
stopwatch = StopWatch()
if self.get_link_path().exists():
link = self.get_link_path().read_text()
@@ -79,22 +81,7 @@ def parse(
except EmptyDataError:
logger.warning(f"Empty file found when parsing {file_path}")
default = self._format_default_matrix()
- if return_dataframe:
- return pd.DataFrame(default)
- return default
-
- def _dump_json(self, data: JSON) -> None:
- df = pd.DataFrame(**data)
- if not df.empty:
- df.to_csv(
- self.config.path,
- sep="\t",
- header=False,
- index=False,
- float_format="%.6f",
- )
- else:
- self.config.path.write_bytes(b"")
+ return pd.DataFrame(default) if return_dataframe else default
def check_errors(
self,
@@ -123,8 +110,8 @@ def _format_default_matrix(self) -> Dict[str, Any]:
if column_count > 0:
logger.info("Using preset default matrix")
return {
- "index": list(range(0, index_count)),
- "columns": list(range(0, column_count)),
+ "index": list(range(index_count)),
+ "columns": list(range(column_count)),
"data": self.default_empty,
}
return {}
diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/matrix.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/matrix.py
index 914d8a4464..77326ac3ad 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/matrix/matrix.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/matrix.py
@@ -1,7 +1,8 @@
import logging
from abc import ABC, abstractmethod
+from enum import Enum
from pathlib import Path
-from typing import List, Optional, Union, Any
+from typing import Any, List, Optional, Union
import pandas as pd # type: ignore
@@ -15,19 +16,31 @@
from antarest.study.storage.rawstudy.model.filesystem.exceptions import (
DenormalizationException,
)
-from antarest.study.storage.rawstudy.model.filesystem.lazy_node import (
- LazyNode,
-)
+from antarest.study.storage.rawstudy.model.filesystem.lazy_node import LazyNode
logger = logging.getLogger(__name__)
+class MatrixFrequency(str, Enum):
+ """
+ An enumeration of matrix frequencies.
+
+ Each frequency corresponds to a specific time interval for a matrix's data.
+ """
+
+ ANNUAL = "annual"
+ MONTHLY = "monthly"
+ WEEKLY = "weekly"
+ DAILY = "daily"
+ HOURLY = "hourly"
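+
+# For illustration: MatrixFrequency.DAILY.value == "daily", matching the
+# frequency strings previously passed around as plain `freq: str` values.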
+
+
class MatrixNode(LazyNode[Union[bytes, JSON], Union[bytes, JSON], JSON], ABC):
def __init__(
self,
context: ContextServer,
config: FileStudyTreeConfig,
- freq: str,
+ freq: MatrixFrequency,
) -> None:
LazyNode.__init__(self, context, config)
self.freq = freq
@@ -99,26 +112,35 @@ def parse(
"""
raise NotImplementedError()
- @abstractmethod
- def _dump_json(self, data: JSON) -> None:
+ def dump(
+ self,
+ data: Union[bytes, JSON],
+ url: Optional[List[str]] = None,
+ ) -> None:
"""
- Store data on tree.
-
- Args:
- data: new data to save
- url: data path to change
+ Write matrix data to file.
- Returns:
+ If the input data is of type bytes, write the data to file as is.
+ Otherwise, convert the data to a Pandas DataFrame and write it to file as a tab-separated CSV.
+ If the resulting DataFrame is empty, write an empty bytes object to file.
+ Args:
+ data: The data to write to file. If data is bytes, it will be written directly to file,
+ otherwise it will be converted to a Pandas DataFrame and then written to file.
+ url: node URL (not used here).
"""
-
- raise NotImplementedError()
-
- def dump(
- self, data: Union[bytes, JSON], url: Optional[List[str]] = None
- ) -> None:
+ self.config.path.parent.mkdir(exist_ok=True, parents=True)
if isinstance(data, bytes):
- self.config.path.parent.mkdir(exist_ok=True, parents=True)
self.config.path.write_bytes(data)
else:
- self._dump_json(data)
+ df = pd.DataFrame(**data)
+ if df.empty:
+ self.config.path.write_bytes(b"")
+ else:
+ df.to_csv(
+ self.config.path,
+ sep="\t",
+ header=False,
+ index=False,
+ float_format="%.6f",
+ )
diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/output_series_matrix.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/output_series_matrix.py
index c9f420ef88..0c0750b702 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/matrix/output_series_matrix.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/output_series_matrix.py
@@ -1,6 +1,6 @@
import logging
from pathlib import Path
-from typing import List, Optional, cast, Union, Any
+from typing import Any, List, Optional, Union, cast
import pandas as pd # type: ignore
from pandas import DataFrame
@@ -17,14 +17,17 @@
)
from antarest.study.storage.rawstudy.model.filesystem.lazy_node import LazyNode
from antarest.study.storage.rawstudy.model.filesystem.matrix.date_serializer import (
- IDateMatrixSerializer,
FactoryDateSerializer,
+ IDateMatrixSerializer,
rename_unnamed,
)
from antarest.study.storage.rawstudy.model.filesystem.matrix.head_writer import (
+ AreaHeadWriter,
HeadWriter,
LinkHeadWriter,
- AreaHeadWriter,
+)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import (
+ MatrixFrequency,
)
logger = logging.getLogger(__name__)
@@ -42,9 +45,9 @@ def __init__(
self,
context: ContextServer,
config: FileStudyTreeConfig,
+ freq: MatrixFrequency,
date_serializer: IDateMatrixSerializer,
head_writer: HeadWriter,
- freq: str,
):
super().__init__(context=context, config=config)
self.date_serializer = date_serializer
@@ -152,12 +155,12 @@ def load(
return b""
if not file_path.exists():
- raise KeyError
+ raise FileNotFoundError(file_path)
return self.parse(file_path, tmp_dir)
- except KeyError:
+ except FileNotFoundError as e:
raise ChildNotFoundError(
f"Output file {self.config.path.name} not found in study {self.config.study_id}"
- )
+ ) from e
def dump(
self, data: Union[bytes, JSON], url: Optional[List[str]] = None
@@ -180,16 +183,16 @@ def __init__(
self,
context: ContextServer,
config: FileStudyTreeConfig,
- freq: str,
+ freq: MatrixFrequency,
src: str,
dest: str,
):
super(LinkOutputSeriesMatrix, self).__init__(
context=context,
config=config,
+ freq=freq,
date_serializer=FactoryDateSerializer.create(freq, src),
head_writer=LinkHeadWriter(src, dest, freq),
- freq=freq,
)
@@ -198,15 +201,15 @@ def __init__(
self,
context: ContextServer,
config: FileStudyTreeConfig,
- freq: str,
+ freq: MatrixFrequency,
area: str,
):
super(AreaOutputSeriesMatrix, self).__init__(
context,
config=config,
+ freq=freq,
date_serializer=FactoryDateSerializer.create(freq, area),
head_writer=AreaHeadWriter(area, config.path.name[:2], freq),
- freq=freq,
)
@@ -215,12 +218,12 @@ def __init__(
self,
context: ContextServer,
config: FileStudyTreeConfig,
- freq: str,
+ freq: MatrixFrequency,
):
super(BindingConstraintOutputSeriesMatrix, self).__init__(
context,
config=config,
+ freq=freq,
date_serializer=FactoryDateSerializer.create(freq, "system"),
head_writer=AreaHeadWriter("system", config.path.name[:2], freq),
- freq=freq,
)
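
Reviewer's note: the `raise ... from e` change above chains the domain error to the low-level one, so the original traceback survives as `__cause__`. A self-contained sketch of the pattern; `ChildNotFoundError` is redefined here as a stand-in so the snippet runs on its own:

```python
from pathlib import Path


class ChildNotFoundError(Exception):
    """Stand-in for the project's exception, for illustration only."""


def load(file_path: Path) -> bytes:
    try:
        if not file_path.exists():
            raise FileNotFoundError(file_path)
        return file_path.read_bytes()
    except FileNotFoundError as e:
        # `from e` keeps the low-level error as `__cause__` in the traceback.
        raise ChildNotFoundError(f"Output file {file_path.name} not found") from e


try:
    load(Path("values-hourly.txt"))
except ChildNotFoundError as exc:
    print(exc, "| caused by:", repr(exc.__cause__))
```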
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/common/capacity/capacity.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/common/capacity/capacity.py
index d5e872192a..b671c20d38 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/common/capacity/capacity.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/common/capacity/capacity.py
@@ -1,6 +1,5 @@
-from antarest.study.storage.rawstudy.model.filesystem.config.model import (
- FileStudyTreeConfig,
-)
+from typing import List, TypedDict
+
from antarest.study.storage.rawstudy.model.filesystem.folder_node import (
FolderNode,
)
@@ -8,23 +7,57 @@
from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import (
InputSeriesMatrix,
)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import (
+ MatrixFrequency,
+)
+
+
+class MatrixInfo(TypedDict, total=False):
+ name: str
+ freq: MatrixFrequency
+ start_version: int
+
+
+# noinspection SpellCheckingInspection
+MATRICES_INFO: List[MatrixInfo] = [
+ {
+ "name": "maxpower",
+ "freq": MatrixFrequency.HOURLY,
+ "start_version": 0,
+ },
+ {
+ "name": "reservoir",
+ "freq": MatrixFrequency.DAILY,
+ "start_version": 0,
+ },
+ {
+ "name": "inflowPattern",
+ "freq": MatrixFrequency.HOURLY,
+ "start_version": 650,
+ },
+ {
+ "name": "creditmodulations",
+ "freq": MatrixFrequency.HOURLY,
+ "start_version": 650,
+ },
+ {
+ "name": "waterValues",
+ "freq": MatrixFrequency.DAILY,
+ "start_version": 650,
+ },
+]
class InputHydroCommonCapacity(FolderNode):
def build(self) -> TREE:
- children: TREE = dict()
- for area in self.config.area_names():
- config_filenames = [
- "maxpower",
- "reservoir",
- ]
- if self.config.version >= 650:
- config_filenames.append("inflowPattern")
- config_filenames.append("creditmodulations")
- config_filenames.append("waterValues")
- for file in config_filenames:
- name = f"{file}_{area}"
- children[name] = InputSeriesMatrix(
- self.context, self.config.next_file(f"{name}.txt")
- )
+ children: TREE = {}
+ for info in MATRICES_INFO:
+ if self.config.version >= info["start_version"]:
+ for area in self.config.area_names():
+ name = f"{info['name']}_{area}"
+ children[name] = InputSeriesMatrix(
+ self.context,
+ self.config.next_file(f"{name}.txt"),
+ freq=info["freq"],
+ )
return children
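
Reviewer's note: the refactoring replaces hard-coded file-name lists with data-driven descriptors. A runnable sketch of the same pattern, using plain strings for frequencies and a hypothetical `matrix_names` helper to show the version gating:

```python
from typing import List, TypedDict  # TypedDict is in `typing` since Python 3.8


class MatrixInfo(TypedDict, total=False):
    name: str
    freq: str
    start_version: int


MATRICES_INFO: List[MatrixInfo] = [
    {"name": "maxpower", "freq": "hourly", "start_version": 0},
    {"name": "waterValues", "freq": "daily", "start_version": 650},
]


def matrix_names(version: int, areas: List[str]) -> List[str]:
    # Only matrices whose `start_version` is reached exist in this study version.
    return [
        f"{info['name']}_{area}"
        for info in MATRICES_INFO
        if version >= info["start_version"]
        for area in areas
    ]


print(matrix_names(600, ["east"]))  # ['maxpower_east']
print(matrix_names(650, ["east"]))  # ['maxpower_east', 'waterValues_east']
```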
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/series/area/area.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/series/area/area.py
index 1dad2ba5c3..00710cff1b 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/series/area/area.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/series/area/area.py
@@ -1,35 +1,44 @@
-from antarest.study.storage.rawstudy.model.filesystem.config.model import (
- FileStudyTreeConfig,
-)
from antarest.study.storage.rawstudy.model.filesystem.folder_node import (
FolderNode,
)
from antarest.study.storage.rawstudy.model.filesystem.inode import TREE
from antarest.study.storage.rawstudy.model.filesystem.matrix.constants import (
- default_scenario_hourly,
default_scenario_daily,
+ default_scenario_hourly,
default_scenario_monthly,
)
from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import (
InputSeriesMatrix,
)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import (
+ MatrixFrequency,
+)
class InputHydroSeriesArea(FolderNode):
def build(self) -> TREE:
- children: TREE = {
- # TODO mod is monthly on version < 650, then daily afterward
+ freq = (
+ MatrixFrequency.DAILY
+ if self.config.version >= 650
+ else MatrixFrequency.MONTHLY
+ )
+ default_empty = (
+ default_scenario_daily
+ if self.config.version >= 650
+ else default_scenario_monthly
+ )
+ return {
"mod": InputSeriesMatrix(
self.context,
self.config.next_file("mod.txt"),
- default_empty=default_scenario_daily
- if self.config.version >= 650
- else default_scenario_monthly,
+ freq=freq,
+ default_empty=default_empty,
),
+ # Run of River
"ror": InputSeriesMatrix(
self.context,
self.config.next_file("ror.txt"),
+ freq=MatrixFrequency.HOURLY,
default_empty=default_scenario_hourly,
),
}
- return children
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/area.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/area.py
index 65dd649249..149ebf38c9 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/area.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/area.py
@@ -1,8 +1,5 @@
-from typing import cast
-
from antarest.study.storage.rawstudy.model.filesystem.config.model import (
FileStudyTreeConfig,
- ENR_MODELLING,
)
from antarest.study.storage.rawstudy.model.filesystem.context import (
ContextServer,
@@ -11,6 +8,9 @@
FolderNode,
)
from antarest.study.storage.rawstudy.model.filesystem.inode import TREE
+from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import (
+ MatrixFrequency,
+)
from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import (
AreaOutputSeriesMatrix,
)
@@ -29,13 +29,13 @@ def __init__(
self.mc_all = mc_all
def build(self) -> TREE:
- children: TREE = dict()
+ children: TREE = {}
# filters = self.config.get_filters_synthesis(self.area)
# todo get the config related to this output (now this may fail if input has changed since the launch)
- filters = ["hourly", "daily", "weekly", "monthly", "annual"]
- for freq in filters:
+ freq: MatrixFrequency
+ for freq in MatrixFrequency:
if self.mc_all:
children[f"id-{freq}"] = AreaOutputSeriesMatrix(
self.context,
@@ -63,7 +63,8 @@ def build(self) -> TREE:
self.area,
)
- # has_enr_clusters = self.config.enr_modelling == ENR_MODELLING.CLUSTERS.value and len(self.config.get_renewable_names(self.area, only_enabled=True)) > 0
+ # has_enr_clusters = self.config.enr_modelling == ENR_MODELLING.CLUSTERS.value and
+ # len(self.config.get_renewable_names(self.area, only_enabled=True)) > 0
# todo get the config related to this output (now this may fail if input has changed since the launch)
has_enr_clusters = True
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/binding_const.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/binding_const.py
index 6eeec6ea5e..5bf1498e80 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/binding_const.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/binding_const.py
@@ -1,32 +1,29 @@
-from typing import cast
-
from antarest.study.storage.rawstudy.model.filesystem.folder_node import (
FolderNode,
)
from antarest.study.storage.rawstudy.model.filesystem.inode import TREE
+from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import (
+ MatrixFrequency,
+)
from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import (
- LinkOutputSeriesMatrix,
BindingConstraintOutputSeriesMatrix,
)
class OutputSimulationBindingConstraintItem(FolderNode):
def build(self) -> TREE:
- children: TREE = {}
-
# filters = self.config.get_filters_synthesis(self.area, self.link)
# todo get the config related to this output (now this may fail if input has changed since the launch)
- filters = ["hourly", "daily", "weekly", "monthly", "annual"]
- for timing in filters:
- children[
- f"binding-constraints-{timing}"
- ] = BindingConstraintOutputSeriesMatrix(
+ freq: MatrixFrequency
+ children: TREE = {
+ f"binding-constraints-{freq}": BindingConstraintOutputSeriesMatrix(
self.context,
- self.config.next_file(f"binding-constraints-{timing}.txt"),
- timing,
+ self.config.next_file(f"binding-constraints-{freq}.txt"),
+ freq,
)
-
+ for freq in MatrixFrequency
+ }
return {
child: children[child]
for child in children
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/link.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/link.py
index 64b691163b..55c50d079f 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/link.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/link.py
@@ -1,5 +1,3 @@
-from typing import cast
-
from antarest.study.storage.rawstudy.model.filesystem.config.model import (
FileStudyTreeConfig,
)
@@ -10,6 +8,9 @@
FolderNode,
)
from antarest.study.storage.rawstudy.model.filesystem.inode import TREE
+from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import (
+ MatrixFrequency,
+)
from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import (
LinkOutputSeriesMatrix,
)
@@ -34,21 +35,21 @@ def build(self) -> TREE:
# filters = self.config.get_filters_synthesis(self.area, self.link)
# todo get the config related to this output (now this may fail if input has changed since the launch)
- filters = ["hourly", "daily", "weekly", "monthly", "annual"]
- for timing in filters:
- children[f"values-{timing}"] = LinkOutputSeriesMatrix(
+ freq: MatrixFrequency
+ for freq in MatrixFrequency:
+ children[f"values-{freq}"] = LinkOutputSeriesMatrix(
self.context,
- self.config.next_file(f"values-{timing}.txt"),
- timing,
+ self.config.next_file(f"values-{freq}.txt"),
+ freq,
self.area,
self.link,
)
if self.mc_all:
- children[f"id-{timing}"] = LinkOutputSeriesMatrix(
+ children[f"id-{freq}"] = LinkOutputSeriesMatrix(
self.context,
- self.config.next_file(f"id-{timing}.txt"),
- timing,
+ self.config.next_file(f"id-{freq}.txt"),
+ freq,
self.area,
self.link,
)
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/links.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/links.py
index ddf11fde9c..3f459b4d1a 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/links.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/links.py
@@ -8,7 +8,6 @@
FolderNode,
)
from antarest.study.storage.rawstudy.model.filesystem.inode import TREE
-
from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.common.link import (
OutputSimulationLinkItem,
)
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/set.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/set.py
index 7decb9d1da..4070031963 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/set.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/set.py
@@ -1,5 +1,3 @@
-from typing import cast
-
from antarest.study.storage.rawstudy.model.filesystem.config.model import (
FileStudyTreeConfig,
)
@@ -10,6 +8,9 @@
FolderNode,
)
from antarest.study.storage.rawstudy.model.filesystem.inode import TREE
+from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import (
+ MatrixFrequency,
+)
from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import (
AreaOutputSeriesMatrix,
)
@@ -28,25 +29,25 @@ def __init__(
self.mc_all = mc_all
def build(self) -> TREE:
- children: TREE = dict()
+ children: TREE = {}
# filters = self.config.get_filters_synthesis(self.set)
# todo get the config related to this output (now this may fail if input has changed since the launch)
- filters = ["hourly", "daily", "weekly", "monthly", "annual"]
- for timing in filters:
+ freq: MatrixFrequency
+ for freq in MatrixFrequency:
if self.mc_all:
- children[f"id-{timing}"] = AreaOutputSeriesMatrix(
+ children[f"id-{freq.value}"] = AreaOutputSeriesMatrix(
self.context,
- self.config.next_file(f"id-{timing}.txt"),
- timing,
+ self.config.next_file(f"id-{freq.value}.txt"),
+ freq,
self.set,
)
- children[f"values-{timing}"] = AreaOutputSeriesMatrix(
+ children[f"values-{freq.value}"] = AreaOutputSeriesMatrix(
self.context,
- self.config.next_file(f"values-{timing}.txt"),
- timing,
+ self.config.next_file(f"values-{freq.value}.txt"),
+ freq,
self.set,
)
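
Reviewer's note: `set.py` formats members as `freq.value` while `link.py` uses `freq` directly. On the project's Python 3.8 target both render the same, because `format()` on a `str`-mixin enum delegates to the string value; this changed in Python 3.11, where `.value` becomes the safer spelling. A quick check:

```python
from enum import Enum


class MatrixFrequency(str, Enum):
    ANNUAL = "annual"
    HOURLY = "hourly"


freq = MatrixFrequency.HOURLY
# On Python <= 3.10, a str-mixin enum formats as its value...
print(f"values-{freq}")        # values-hourly
# ...while `.value` is explicit and version-independent.
print(f"values-{freq.value}")  # values-hourly
```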
diff --git a/antarest/study/storage/variantstudy/model/command/create_area.py b/antarest/study/storage/variantstudy/model/command/create_area.py
index 2cc832c612..cd029cbed1 100644
--- a/antarest/study/storage/variantstudy/model/command/create_area.py
+++ b/antarest/study/storage/variantstudy/model/command/create_area.py
@@ -91,16 +91,12 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
area_id = data["area_id"]
version = study_data.config.version
+ # fmt: off
hydro_config = study_data.tree.get(["input", "hydro", "hydro"])
- get_or_create_section(hydro_config, "inter-daily-breakdown")[
- area_id
- ] = 1
- get_or_create_section(hydro_config, "intra-daily-modulation")[
- area_id
- ] = 24
- get_or_create_section(hydro_config, "inter-monthly-breakdown")[
- area_id
- ] = 1
+ get_or_create_section(hydro_config, "inter-daily-breakdown")[area_id] = 1
+ get_or_create_section(hydro_config, "intra-daily-modulation")[area_id] = 24
+ get_or_create_section(hydro_config, "inter-monthly-breakdown")[area_id] = 1
+ # fmt: on
new_area_data: JSON = {
"input": {
@@ -229,30 +225,22 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
}
if version > 650:
- get_or_create_section(hydro_config, "initialize reservoir date")[
- area_id
- ] = 0
+ # fmt: off
+ get_or_create_section(hydro_config, "initialize reservoir date")[area_id] = 0
get_or_create_section(hydro_config, "leeway low")[area_id] = 1
get_or_create_section(hydro_config, "leeway up")[area_id] = 1
- get_or_create_section(hydro_config, "pumping efficiency")[
- area_id
- ] = 1
+ get_or_create_section(hydro_config, "pumping efficiency")[area_id] = 1
- new_area_data["input"]["hydro"]["common"]["capacity"][
- f"creditmodulations_{area_id}"
- ] = (
+ new_area_data["input"]["hydro"]["common"]["capacity"][f"creditmodulations_{area_id}"] = (
self.command_context.generator_matrix_constants.get_hydro_credit_modulations()
)
- new_area_data["input"]["hydro"]["common"]["capacity"][
- f"inflowPattern_{area_id}"
- ] = (
+ new_area_data["input"]["hydro"]["common"]["capacity"][f"inflowPattern_{area_id}"] = (
self.command_context.generator_matrix_constants.get_hydro_inflow_pattern()
)
- new_area_data["input"]["hydro"]["common"]["capacity"][
- f"waterValues_{area_id}"
- ] = (
+ new_area_data["input"]["hydro"]["common"]["capacity"][f"waterValues_{area_id}"] = (
self.command_context.generator_matrix_constants.get_null_matrix()
)
+ # fmt: on
if version >= 830:
new_area_data["input"]["areas"][area_id]["adequacy_patch"] = {
@@ -261,6 +249,15 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
new_area_data["input"]["hydro"]["hydro"] = hydro_config
+ # NOTE regarding the following configurations:
+ # - ["input", "hydro", "prepro", "correlation"]
+ # - ["input", "load", "prepro", "correlation"]
+ # - ["input", "solar", "prepro", "correlation"]
+ # - ["input", "wind", "prepro", "correlation"]
+ # When creating a new area, we should not add a new correlation
+ # value to the configuration because it does not store the values
+ # of the diagonal (always equal to 1).
+
study_data.tree.save(new_area_data)
return output
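
Reviewer's note: the `# fmt: off` / `# fmt: on` markers only disable Black's formatting for the long assignments. `get_or_create_section` itself is not part of this diff; judging from the call sites it behaves like `dict.setdefault`, so a plausible (hypothetical) stand-in is:

```python
from typing import Any, Dict

JSON = Dict[str, Any]


def get_or_create_section(json_ini: JSON, section: str) -> JSON:
    """Return the section of the INI-like mapping, creating it if needed.

    Hypothetical reimplementation inferred from the call sites in this diff.
    """
    return json_ini.setdefault(section, {})


hydro_config: JSON = {}
get_or_create_section(hydro_config, "inter-daily-breakdown")["east"] = 1
print(hydro_config)  # {'inter-daily-breakdown': {'east': 1}}
```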
diff --git a/antarest/study/storage/variantstudy/model/command/remove_area.py b/antarest/study/storage/variantstudy/model/command/remove_area.py
index eda840d148..ca633b0d2e 100644
--- a/antarest/study/storage/variantstudy/model/command/remove_area.py
+++ b/antarest/study/storage/variantstudy/model/command/remove_area.py
@@ -1,5 +1,6 @@
+import contextlib
import logging
-from typing import Any, List, Tuple, Dict
+from typing import Any, Dict, List, Tuple
from antarest.core.model import JSON
from antarest.study.storage.rawstudy.model.filesystem.config.model import (
@@ -13,12 +14,12 @@
remove_area_cluster_from_binding_constraints,
)
from antarest.study.storage.variantstudy.model.command.common import (
- CommandOutput,
CommandName,
+ CommandOutput,
)
from antarest.study.storage.variantstudy.model.command.icommand import (
- ICommand,
MATCH_SIGNATURE_SEPARATOR,
+ ICommand,
)
from antarest.study.storage.variantstudy.model.model import CommandDTO
@@ -36,11 +37,12 @@ def __init__(self, **data: Any) -> None:
def _remove_area_from_links_in_config(
self, study_data_config: FileStudyTreeConfig
) -> None:
- link_to_remove = []
- for area_name, area in study_data_config.areas.items():
- for link in area.links.keys():
- if link == self.id:
- link_to_remove.append((area_name, link))
+ link_to_remove = [
+ (area_name, link)
+ for area_name, area in study_data_config.areas.items()
+ for link in area.links
+ if link == self.id
+ ]
for area_name, link in link_to_remove:
del study_data_config.areas[area_name].links[link]
@@ -49,11 +51,9 @@ def _remove_area_from_sets_in_config(
) -> None:
for id, set in study_data_config.sets.items():
if set.areas and self.id in set.areas:
- try:
+ with contextlib.suppress(ValueError):
set.areas.remove(self.id)
study_data_config.sets[id] = set
- except ValueError:
- pass
def _apply_config(
self, study_data_config: FileStudyTreeConfig
@@ -69,98 +69,113 @@ def _apply_config(
return (
CommandOutput(status=True, message=f"Area '{self.id}' deleted"),
- dict(),
+ {},
)
def _remove_area_from_links(self, study_data: FileStudy) -> None:
for area_name, area in study_data.config.areas.items():
- for link in area.links.keys():
+ for link in area.links:
if link == self.id:
study_data.tree.delete(
["input", "links", area_name, "properties", self.id]
)
try:
+ # fmt: off
if study_data.config.version < 820:
- study_data.tree.delete(
- ["input", "links", area_name, self.id]
- )
+ study_data.tree.delete(["input", "links", area_name, self.id])
else:
- study_data.tree.delete(
- [
- "input",
- "links",
- area_name,
- f"{self.id}_parameters",
- ]
- )
- study_data.tree.delete(
- [
- "input",
- "links",
- area_name,
- "capacities",
- f"{self.id}_indirect",
- ]
- )
- study_data.tree.delete(
- [
- "input",
- "links",
- area_name,
- "capacities",
- f"{self.id}_direct",
- ]
- )
+ study_data.tree.delete(["input", "links", area_name, f"{self.id}_parameters"])
+ study_data.tree.delete(["input", "links", area_name, "capacities", f"{self.id}_indirect"])
+ study_data.tree.delete(["input", "links", area_name, "capacities", f"{self.id}_direct"])
+ # fmt: on
except ChildNotFoundError as e:
logger.warning(
- f"Failed to clean link data when deleting area {self.id} in study {study_data.config.study_id}",
+ f"Failed to clean link data when deleting area {self.id}"
+ f" in study {study_data.config.study_id}",
exc_info=e,
)
def _remove_area_from_binding_constraints(
self, study_data: FileStudy
) -> None:
- binding_constraints = study_data.tree.get(
- ["input", "bindingconstraints", "bindingconstraints"]
- )
-
- id_to_remove = set()
-
- for id, bc in binding_constraints.items():
- for key in bc.keys():
- if self.id in key:
- id_to_remove.add(id)
+ # fmt: off
+ binding_constraints = study_data.tree.get(["input", "bindingconstraints", "bindingconstraints"])
+
+ id_to_remove = {
+ bc_id
+ for bc_id, bc in binding_constraints.items()
+ for key in bc
+ if self.id in key
+ }
- for id in id_to_remove:
+ for bc_id in id_to_remove:
study_data.tree.delete(
- [
- "input",
- "bindingconstraints",
- binding_constraints[id]["id"],
- ]
+ ["input", "bindingconstraints", binding_constraints[bc_id]["id"]]
)
- del binding_constraints[id]
+ del binding_constraints[bc_id]
- study_data.tree.save(
- binding_constraints,
- ["input", "bindingconstraints", "bindingconstraints"],
+ study_data.tree.save(binding_constraints, ["input", "bindingconstraints", "bindingconstraints"])
+ # fmt: on
+
+ def _remove_area_from_hydro_allocation(
+ self, study_data: FileStudy
+ ) -> None:
+ """
+    Delete the column of the hydraulic production area
+    and update the rows of the other areas.
+
+ Args:
+ study_data: file study
+ """
+ study_data.tree.delete(["input", "hydro", "allocation", self.id])
+ allocation_cfg = study_data.tree.get(
+ ["input", "hydro", "allocation", "*"]
)
+ if len(allocation_cfg) == 1:
+        # IMPORTANT: when only one element is left, the function returns
+        # the element's allocation instead of the dictionary keyed by area
+ allocation_cfg = {self.id: allocation_cfg}
+ allocation_cfg.pop(self.id, None) # ensure allocation is removed
+ for prod_area, allocation_dict in allocation_cfg.items():
+ for name, allocations in allocation_dict.items():
+ allocations.pop(self.id, None)
+ study_data.tree.save(allocation_cfg, ["input", "hydro", "allocation"])
+
+ def _remove_area_from_correlation_matrices(
+ self, study_data: FileStudy
+ ) -> None:
+ """
+ Removes the values from the correlation matrix that match the current area.
+
+ This function can update the following configurations:
+ - ["input", "hydro", "prepro", "correlation"]
+
+ Args:
+            study_data: file study to update.
+ """
+        # Today, only the 'hydro' category is fully supported. We could
+        # also manage the 'load', 'solar' and 'wind' categories,
+        # but their usage is deprecated.
+ url = ["input", "hydro", "prepro", "correlation"]
+ correlation_cfg = study_data.tree.get(url)
+ for section, correlation in correlation_cfg.items():
+ if section == "general":
+ continue
+ for key in list(correlation):
+ a1, a2 = key.split("%")
+ if a1 == self.id or a2 == self.id:
+ del correlation[key]
+ study_data.tree.save(correlation_cfg, url)
def _remove_area_from_districts(self, study_data: FileStudy) -> None:
districts = study_data.tree.get(["input", "areas", "sets"])
- for id, district in districts.items():
+ for district in districts.values():
if district.get("+", None):
- try:
+ with contextlib.suppress(ValueError):
district["+"].remove(self.id)
- except ValueError:
- pass
elif district.get("-", None):
- try:
+ with contextlib.suppress(ValueError):
district["-"].remove(self.id)
- except ValueError:
- pass
-
- districts[id] = district
study_data.tree.save(districts, ["input", "areas", "sets"])
@@ -170,113 +185,46 @@ def _remove_area_from_cluster(self, study_data: FileStudy) -> None:
def _remove_area_from_time_series(self, study_data: FileStudy) -> None:
study_data.tree.delete(["input", "thermal", "series", self.id])
+ # noinspection SpellCheckingInspection
def _apply(self, study_data: FileStudy) -> CommandOutput:
+ # fmt: off
study_data.tree.delete(["input", "areas", self.id])
-
- study_data.tree.delete(["input", "hydro", "allocation", self.id])
- study_data.tree.delete(
- ["input", "hydro", "common", "capacity", f"maxpower_{self.id}"]
- )
- study_data.tree.delete(
- ["input", "hydro", "common", "capacity", f"reservoir_{self.id}"]
- )
+ study_data.tree.delete(["input", "hydro", "common", "capacity", f"maxpower_{self.id}"])
+ study_data.tree.delete(["input", "hydro", "common", "capacity", f"reservoir_{self.id}"])
study_data.tree.delete(["input", "hydro", "prepro", self.id])
study_data.tree.delete(["input", "hydro", "series", self.id])
- study_data.tree.delete(
- ["input", "hydro", "hydro", "inter-daily-breakdown", self.id]
- )
- study_data.tree.delete(
- ["input", "hydro", "hydro", "intra-daily-modulation", self.id]
- )
- study_data.tree.delete(
- ["input", "hydro", "hydro", "inter-monthly-breakdown", self.id]
- )
+ study_data.tree.delete(["input", "hydro", "hydro", "inter-daily-breakdown", self.id])
+ study_data.tree.delete(["input", "hydro", "hydro", "intra-daily-modulation", self.id])
+ study_data.tree.delete(["input", "hydro", "hydro", "inter-monthly-breakdown", self.id])
study_data.tree.delete(["input", "load", "prepro", self.id])
study_data.tree.delete(["input", "load", "series", f"load_{self.id}"])
study_data.tree.delete(["input", "misc-gen", f"miscgen-{self.id}"])
study_data.tree.delete(["input", "reserves", self.id])
study_data.tree.delete(["input", "solar", "prepro", self.id])
- study_data.tree.delete(
- ["input", "solar", "series", f"solar_{self.id}"]
- )
+ study_data.tree.delete(["input", "solar", "series", f"solar_{self.id}"])
study_data.tree.delete(["input", "thermal", "clusters", self.id])
- study_data.tree.delete(
- ["input", "thermal", "areas", "unserverdenergycost", self.id]
- )
- study_data.tree.delete(
- ["input", "thermal", "areas", "spilledenergycost", self.id]
- )
+ study_data.tree.delete(["input", "thermal", "areas", "unserverdenergycost", self.id])
+ study_data.tree.delete(["input", "thermal", "areas", "spilledenergycost", self.id])
study_data.tree.delete(["input", "wind", "prepro", self.id])
study_data.tree.delete(["input", "wind", "series", f"wind_{self.id}"])
study_data.tree.delete(["input", "links", self.id])
+ # fmt: on
if study_data.config.version > 650:
- study_data.tree.delete(
- [
- "input",
- "hydro",
- "hydro",
- "initialize reservoir date",
- self.id,
- ]
- )
- study_data.tree.delete(
- [
- "input",
- "hydro",
- "hydro",
- "leeway low",
- self.id,
- ]
- )
- study_data.tree.delete(
- [
- "input",
- "hydro",
- "hydro",
- "leeway up",
- self.id,
- ]
- )
- study_data.tree.delete(
- [
- "input",
- "hydro",
- "hydro",
- "pumping efficiency",
- self.id,
- ]
- )
- study_data.tree.delete(
- [
- "input",
- "hydro",
- "common",
- "capacity",
- f"creditmodulations_{self.id}",
- ]
- )
- study_data.tree.delete(
- [
- "input",
- "hydro",
- "common",
- "capacity",
- f"inflowPattern_{self.id}",
- ]
- )
- study_data.tree.delete(
- [
- "input",
- "hydro",
- "common",
- "capacity",
- f"waterValues_{self.id}",
- ]
- )
+ # fmt: off
+ study_data.tree.delete(["input", "hydro", "hydro", "initialize reservoir date", self.id])
+ study_data.tree.delete(["input", "hydro", "hydro", "leeway low", self.id])
+ study_data.tree.delete(["input", "hydro", "hydro", "leeway up", self.id])
+ study_data.tree.delete(["input", "hydro", "hydro", "pumping efficiency", self.id])
+ study_data.tree.delete(["input", "hydro", "common", "capacity", f"creditmodulations_{self.id}"])
+ study_data.tree.delete(["input", "hydro", "common", "capacity", f"inflowPattern_{self.id}"])
+ study_data.tree.delete(["input", "hydro", "common", "capacity", f"waterValues_{self.id}"])
+ # fmt: on
self._remove_area_from_links(study_data)
self._remove_area_from_binding_constraints(study_data)
+ self._remove_area_from_correlation_matrices(study_data)
+ self._remove_area_from_hydro_allocation(study_data)
self._remove_area_from_districts(study_data)
self._remove_area_from_cluster(study_data)
self._remove_area_from_time_series(study_data)
@@ -310,9 +258,7 @@ def match_signature(self) -> str:
)
def match(self, other: ICommand, equal: bool = False) -> bool:
- if not isinstance(other, RemoveArea):
- return False
- return self.id == other.id
+ return isinstance(other, RemoveArea) and self.id == other.id
def _create_diff(self, other: "ICommand") -> List["ICommand"]:
return []
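
Reviewer's note: two of the refactorings above condensed into one runnable sketch: `contextlib.suppress(ValueError)` replaces the `try/except/pass` blocks, and the new correlation cleanup drops every `area1%area2` key mentioning the removed area (sample data is illustrative):

```python
import contextlib

removed = "east"

# try/except ValueError: pass  ->  contextlib.suppress(ValueError)
areas = ["north", "south"]
with contextlib.suppress(ValueError):
    areas.remove(removed)  # no error even though "east" is absent

# Prune correlation keys of the form "area1%area2" mentioning the removed area.
correlation_cfg = {
    "general": {"mode": "annual"},
    "annual": {"north%east": 0.25, "north%south": 0.75},
}
for section, correlation in correlation_cfg.items():
    if section == "general":
        continue
    for key in list(correlation):  # copy: we delete while iterating
        a1, a2 = key.split("%")
        if removed in (a1, a2):
            del correlation[key]

print(correlation_cfg)  # {'general': {'mode': 'annual'}, 'annual': {'north%south': 0.75}}
```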
diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py
index 83ce875852..239deab0ee 100644
--- a/antarest/study/web/study_data_blueprint.py
+++ b/antarest/study/web/study_data_blueprint.py
@@ -1,7 +1,6 @@
import logging
-from typing import Any, Dict, List, Optional, Union
-
-from fastapi import APIRouter, Body, Depends
+from http import HTTPStatus
+from typing import Any, Dict, List, Optional, Union, cast
from antarest.core.config import Config
from antarest.core.jwt import JWTUser
@@ -12,9 +11,16 @@
from antarest.matrixstore.business.matrix_editor import (
MatrixEditInstructionDTO,
)
+from antarest.study.business.adequacy_patch_management import (
+ AdequacyPatchFormFields,
+)
from antarest.study.business.advanced_parameters_management import (
AdvancedParamsFormFields,
)
+from antarest.study.business.allocation_management import (
+ AllocationFormFields,
+ AllocationMatrix,
+)
from antarest.study.business.area_management import (
AreaCreationDTO,
AreaInfoDTO,
@@ -26,6 +32,11 @@
ConstraintTermDTO,
UpdateBindingConstProps,
)
+from antarest.study.business.correlation_management import (
+ CorrelationFormFields,
+ CorrelationManager,
+ CorrelationMatrix,
+)
from antarest.study.business.district_manager import (
DistrictCreationDTO,
DistrictInfoDTO,
@@ -42,7 +53,7 @@
from antarest.study.business.playlist_management import PlaylistColumns
from antarest.study.business.renewable_management import RenewableFormFields
from antarest.study.business.table_mode_management import (
- ColumnModelTypes,
+ ColumnsModelTypes,
TableTemplateType,
)
from antarest.study.business.thematic_trimming_management import (
@@ -52,6 +63,8 @@
from antarest.study.business.timeseries_config_management import TSFormFields
from antarest.study.model import PatchArea, PatchCluster
from antarest.study.service import StudyService
+from fastapi import APIRouter, Body, Depends
+from fastapi.params import Body, Query
logger = logging.getLogger(__name__)
@@ -61,12 +74,13 @@ def create_study_data_routes(
) -> APIRouter:
"""
Endpoint implementation for studies area management
+
Args:
study_service: study service facade to handle request
config: main server configuration
Returns:
-
+        The FastAPI router for study data management.
"""
bp = APIRouter(prefix="/v1")
auth = Auth(config)
@@ -683,7 +697,7 @@ def set_general_form_values(
@bp.get(
path="/studies/{uuid}/config/optimization/form",
tags=[APITag.study_data],
- summary="Get Optimization config values for form",
+ summary="Get optimization config values for form",
response_model=OptimizationFormFields,
response_model_exclude_none=True,
)
@@ -692,7 +706,7 @@ def get_optimization_form_values(
current_user: JWTUser = Depends(auth.get_current_user),
) -> OptimizationFormFields:
logger.info(
- msg=f"Getting Optimization management config for study {uuid}",
+ msg=f"Getting optimization config for study {uuid}",
extra={"user": current_user.id},
)
params = RequestParameters(user=current_user)
@@ -705,7 +719,7 @@ def get_optimization_form_values(
@bp.put(
path="/studies/{uuid}/config/optimization/form",
tags=[APITag.study_data],
- summary="Set Optimization config with values from form",
+ summary="Set optimization config with values from form",
)
def set_optimization_form_values(
uuid: str,
@@ -713,7 +727,7 @@ def set_optimization_form_values(
current_user: JWTUser = Depends(auth.get_current_user),
) -> None:
logger.info(
- f"Updating Optimization management config for study {uuid}",
+ f"Updating optimization config for study {uuid}",
extra={"user": current_user.id},
)
params = RequestParameters(user=current_user)
@@ -725,6 +739,51 @@ def set_optimization_form_values(
study, field_values
)
+ @bp.get(
+ path="/studies/{uuid}/config/adequacypatch/form",
+ tags=[APITag.study_data],
+ summary="Get adequacy patch config values for form",
+ response_model=AdequacyPatchFormFields,
+ response_model_exclude_none=True,
+ )
+ def get_adequacy_patch_form_values(
+ uuid: str,
+ current_user: JWTUser = Depends(auth.get_current_user),
+ ) -> AdequacyPatchFormFields:
+ logger.info(
+ msg=f"Getting adequacy patch config for study {uuid}",
+ extra={"user": current_user.id},
+ )
+ params = RequestParameters(user=current_user)
+ study = study_service.check_study_access(
+ uuid, StudyPermissionType.READ, params
+ )
+
+ return study_service.adequacy_patch_manager.get_field_values(study)
+
+ @bp.put(
+ path="/studies/{uuid}/config/adequacypatch/form",
+ tags=[APITag.study_data],
+ summary="Set adequacy patch config with values from form",
+ )
+ def set_adequacy_patch_form_values(
+ uuid: str,
+ field_values: AdequacyPatchFormFields,
+ current_user: JWTUser = Depends(auth.get_current_user),
+ ) -> None:
+ logger.info(
+ f"Updating adequacy patch config for study {uuid}",
+ extra={"user": current_user.id},
+ )
+ params = RequestParameters(user=current_user)
+ study = study_service.check_study_access(
+ uuid, StudyPermissionType.WRITE, params
+ )
+
+ study_service.adequacy_patch_manager.set_field_values(
+ study, field_values
+ )
+
@bp.get(
path="/studies/{uuid}/config/timeseries/form",
tags=[APITag.study_data],
@@ -781,7 +840,7 @@ def get_table_data(
table_type: TableTemplateType,
columns: str,
current_user: JWTUser = Depends(auth.get_current_user),
- ) -> Dict[str, ColumnModelTypes]:
+ ) -> Dict[str, ColumnsModelTypes]:
logger.info(
f"Getting template table data for study {uuid}",
extra={"user": current_user.id},
@@ -803,7 +862,7 @@ def get_table_data(
def set_table_data(
uuid: str,
table_type: TableTemplateType,
- data: Dict[str, ColumnModelTypes],
+ data: Dict[str, ColumnsModelTypes],
current_user: JWTUser = Depends(auth.get_current_user),
) -> None:
logger.info(
@@ -972,6 +1031,304 @@ def remove_constraint_term(
study, binding_constraint_id, term_id
)
+ @bp.get(
+ path="/studies/{uuid}/areas/hydro/allocation/matrix",
+ tags=[APITag.study_data],
+ summary="Get the hydraulic allocation matrix for all areas",
+ response_model=AllocationMatrix,
+ )
+ def get_allocation_matrix(
+ uuid: str,
+ current_user: JWTUser = Depends(auth.get_current_user),
+ ) -> AllocationMatrix:
+ """
+ Get the hydraulic allocation matrix for all areas.
+
+ Parameters:
+ - `uuid`: the study UUID.
+
+ Returns the data frame matrix, where:
+ - the rows are the areas,
+ - the columns are the hydraulic structures,
+ - the values are the allocation factors.
+ """
+ params = RequestParameters(user=current_user)
+ study = study_service.check_study_access(
+ uuid, StudyPermissionType.READ, params
+ )
+ all_areas = cast(
+ List[AreaInfoDTO], # because `ui=False`
+ study_service.get_all_areas(
+ uuid, area_type=AreaType.AREA, ui=False, params=params
+ ),
+ )
+ return study_service.allocation_manager.get_allocation_matrix(
+ study, all_areas
+ )
+
+ @bp.get(
+ path="/studies/{uuid}/areas/{area_id}/hydro/allocation/form",
+ tags=[APITag.study_data],
+ summary="Get the form fields used for the allocation form",
+ response_model=AllocationFormFields,
+ )
+ def get_allocation_form_fields(
+ uuid: str,
+ area_id: str,
+ current_user: JWTUser = Depends(auth.get_current_user),
+ ) -> AllocationFormFields:
+ """
+ Get the form fields used for the allocation form.
+
+ Parameters:
+ - `uuid`: the study UUID,
+ - `area_id`: the area ID.
+
+ Returns the allocation form fields.
+ """
+ params = RequestParameters(user=current_user)
+ study = study_service.check_study_access(
+ uuid, StudyPermissionType.READ, params
+ )
+ all_areas = cast(
+ List[AreaInfoDTO], # because `ui=False`
+ study_service.get_all_areas(
+ uuid, area_type=AreaType.AREA, ui=False, params=params
+ ),
+ )
+ return study_service.allocation_manager.get_allocation_form_fields(
+ all_areas, study, area_id
+ )
+
+ @bp.put(
+ path="/studies/{uuid}/areas/{area_id}/hydro/allocation/form",
+ tags=[APITag.study_data],
+ summary="Update the form fields used for the allocation form",
+ status_code=HTTPStatus.OK,
+ response_model=AllocationFormFields,
+ )
+ def set_allocation_form_fields(
+ uuid: str,
+ area_id: str,
+ data: AllocationFormFields = Body(
+ ...,
+ example=AllocationFormFields(
+ allocation=[
+ {"areaId": "EAST", "coefficient": 1},
+ {"areaId": "NORTH", "coefficient": 0.20},
+ ]
+ ),
+ ),
+ current_user: JWTUser = Depends(auth.get_current_user),
+ ) -> AllocationFormFields:
+ """
+ Update the hydraulic allocation of a given area.
+
+ Parameters:
+ - `uuid`: the study UUID,
+ - `area_id`: the area ID.
+
+ Returns the updated allocation form fields.
+ """
+ params = RequestParameters(user=current_user)
+ study = study_service.check_study_access(
+ uuid, StudyPermissionType.WRITE, params
+ )
+ all_areas = cast(
+ List[AreaInfoDTO], # because `ui=False`
+ study_service.get_all_areas(
+ uuid, area_type=AreaType.AREA, ui=False, params=params
+ ),
+ )
+ return study_service.allocation_manager.set_allocation_form_fields(
+ all_areas, study, area_id, data
+ )
+
+ @bp.get(
+ path="/studies/{uuid}/areas/hydro/correlation/matrix",
+ tags=[APITag.study_data],
+ summary="Get the hydraulic/load/solar/wind correlation matrix of a study",
+ response_model=CorrelationMatrix,
+ )
+ def get_correlation_matrix(
+ uuid: str,
+ columns: Optional[str] = Query(
+ None,
+ examples={
+ "all areas": {
+ "description": "get the correlation matrix for all areas (by default)",
+ "value": "",
+ },
+ "single area": {
+ "description": "get the correlation column for a single area",
+ "value": "north",
+ },
+ "selected areas": {
+ "description": "get the correlation columns for a selected list of areas",
+ "value": "north,east",
+ },
+ },
+ ), # type: ignore
+ current_user: JWTUser = Depends(auth.get_current_user),
+ ) -> CorrelationMatrix:
+ """
+ Get the hydraulic/load/solar/wind correlation matrix of a study.
+
+ Parameters:
+ - `uuid`: The UUID of the study.
+ - `columns`: A filter on the area identifiers:
+ - Use no parameter to select all areas.
+ - Use an area identifier to select a single area.
+ - Use a comma-separated list of areas to select those areas.
+
+ Returns the hydraulic/load/solar/wind correlation matrix with the following attributes:
+ - `index`: A list of all study areas.
+ - `columns`: A list of selected production areas.
+ - `data`: A 2D-array matrix of correlation coefficients with values in the range of -1 to 1.
+ """
+ params = RequestParameters(user=current_user)
+ study = study_service.check_study_access(
+ uuid, StudyPermissionType.READ, params
+ )
+ all_areas = cast(
+ List[AreaInfoDTO], # because `ui=False`
+ study_service.get_all_areas(
+ uuid, area_type=AreaType.AREA, ui=False, params=params
+ ),
+ )
+ manager = CorrelationManager(study_service.storage_service)
+ return manager.get_correlation_matrix(
+ all_areas,
+ study,
+ columns.split(",") if columns else [],
+ )
+
+ @bp.put(
+ path="/studies/{uuid}/areas/hydro/correlation/matrix",
+ tags=[APITag.study_data],
+ summary="Set the hydraulic/load/solar/wind correlation matrix of a study",
+ status_code=HTTPStatus.OK,
+ response_model=CorrelationMatrix,
+ )
+ def set_correlation_matrix(
+ uuid: str,
+ matrix: CorrelationMatrix = Body(
+ ...,
+ example={
+ "columns": ["north", "east", "south", "west"],
+ "data": [
+ [0.0, 0.0, 0.25, 0.0],
+ [0.0, 0.0, 0.75, 0.12],
+ [0.25, 0.75, 0.0, 0.75],
+ [0.0, 0.12, 0.75, 0.0],
+ ],
+ "index": ["north", "east", "south", "west"],
+ },
+ ),
+ current_user: JWTUser = Depends(auth.get_current_user),
+ ) -> CorrelationMatrix:
+ """
+ Set the hydraulic/load/solar/wind correlation matrix of a study.
+
+ Parameters:
+ - `uuid`: The UUID of the study.
+ - `index`: A list of all study areas.
+ - `columns`: A list of selected production areas.
+ - `data`: A 2D-array matrix of correlation coefficients with values in the range of -1 to 1.
+
+    Returns the updated hydraulic/load/solar/wind correlation matrix.
+ """
+ params = RequestParameters(user=current_user)
+ study = study_service.check_study_access(
+ uuid, StudyPermissionType.WRITE, params
+ )
+ all_areas = cast(
+ List[AreaInfoDTO], # because `ui=False`
+ study_service.get_all_areas(
+ uuid, area_type=AreaType.AREA, ui=False, params=params
+ ),
+ )
+ manager = CorrelationManager(study_service.storage_service)
+ return manager.set_correlation_matrix(all_areas, study, matrix)
+
+ @bp.get(
+ path="/studies/{uuid}/areas/{area_id}/hydro/correlation/form",
+ tags=[APITag.study_data],
+ summary="Get the form fields used for the correlation form",
+ response_model=CorrelationFormFields,
+ )
+ def get_correlation_form_fields(
+ uuid: str,
+ area_id: str,
+ current_user: JWTUser = Depends(auth.get_current_user),
+ ) -> CorrelationFormFields:
+ """
+ Get the form fields used for the correlation form.
+
+ Parameters:
+ - `uuid`: The UUID of the study.
+ - `area_id`: the area ID.
+
+ Returns the correlation form fields in percentage.
+ """
+ params = RequestParameters(user=current_user)
+ study = study_service.check_study_access(
+ uuid, StudyPermissionType.READ, params
+ )
+ all_areas = cast(
+ List[AreaInfoDTO], # because `ui=False`
+ study_service.get_all_areas(
+ uuid, area_type=AreaType.AREA, ui=False, params=params
+ ),
+ )
+ manager = CorrelationManager(study_service.storage_service)
+ return manager.get_correlation_form_fields(all_areas, study, area_id)
+
+ @bp.put(
+ path="/studies/{uuid}/areas/{area_id}/hydro/correlation/form",
+ tags=[APITag.study_data],
+ summary="Set the form fields used for the correlation form",
+ status_code=HTTPStatus.OK,
+ response_model=CorrelationFormFields,
+ )
+ def set_correlation_form_fields(
+ uuid: str,
+ area_id: str,
+ data: CorrelationFormFields = Body(
+ ...,
+ example=CorrelationFormFields(
+ correlation=[
+ {"areaId": "east", "coefficient": 80},
+ {"areaId": "north", "coefficient": 20},
+ ]
+ ),
+ ),
+ current_user: JWTUser = Depends(auth.get_current_user),
+ ) -> CorrelationFormFields:
+ """
+ Update the hydraulic/load/solar/wind correlation of a given area.
+
+ Parameters:
+ - `uuid`: The UUID of the study.
+ - `area_id`: the area ID.
+
+ Returns the correlation form fields in percentage.
+ """
+ params = RequestParameters(user=current_user)
+ study = study_service.check_study_access(
+ uuid, StudyPermissionType.WRITE, params
+ )
+ all_areas = cast(
+ List[AreaInfoDTO], # because `ui=False`
+ study_service.get_all_areas(
+ uuid, area_type=AreaType.AREA, ui=False, params=params
+ ),
+ )
+ manager = CorrelationManager(study_service.storage_service)
+ return manager.set_correlation_form_fields(
+ all_areas, study, area_id, data
+ )
+
@bp.get(
path="/studies/{uuid}/config/advancedparameters/form",
tags=[APITag.study_data],
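
Reviewer's note: a client-side sketch of the new hydro allocation endpoints, assuming a locally running server; the base URL, token, and study UUID are placeholders, not values from this patch:

```python
import requests

BASE = "http://localhost:8080/v1"  # placeholder server address
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder credentials
uuid, area_id = "<study-uuid>", "north"  # placeholder identifiers

# Read the study-wide hydraulic allocation matrix.
res = requests.get(
    f"{BASE}/studies/{uuid}/areas/hydro/allocation/matrix", headers=HEADERS
)
res.raise_for_status()
print(res.json())  # {"index": [...], "columns": [...], "data": [[...]]}

# Update the allocation of one area through the form endpoint.
fields = {
    "allocation": [
        {"areaId": "east", "coefficient": 1},
        {"areaId": "north", "coefficient": 0.2},
    ]
}
res = requests.put(
    f"{BASE}/studies/{uuid}/areas/{area_id}/hydro/allocation/form",
    headers=HEADERS,
    json=fields,
)
res.raise_for_status()
```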
diff --git a/antarest/worker/archive_worker_service.py b/antarest/worker/archive_worker_service.py
index fd044239e7..197bbd235e 100644
--- a/antarest/worker/archive_worker_service.py
+++ b/antarest/worker/archive_worker_service.py
@@ -1,30 +1,36 @@
import argparse
-import sys
+import logging
from pathlib import Path
+from typing import Optional, Sequence
+from antarest import __version__
from antarest.core.config import Config
from antarest.core.logging.utils import configure_logger
from antarest.core.utils.utils import get_local_path
-from antarest import __version__
from antarest.utils import create_archive_worker
+# use the real module name instead of `__name__` (because `__name__ == "__main__"`)
+logger = logging.getLogger("antarest.worker.archive_worker_service")
+
+ArgsType = Optional[Sequence[str]]
-def parse_arguments() -> argparse.Namespace:
+
+def parse_arguments(args: ArgsType = None) -> argparse.Namespace:
+ version = f"%(prog)s {__version__}"
parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "-v",
+ "--version",
+ help="Display worker version and exit",
+ action="version",
+ version=version,
+ )
parser.add_argument(
"-c",
"--config",
dest="config_file",
help="path to the config file",
)
- parser.add_argument(
- "-v",
- "--version",
- dest="version",
- help="Worker version",
- action="store_true",
- required=False,
- )
parser.add_argument(
"-w",
"--workspace",
@@ -34,24 +40,31 @@ def parse_arguments() -> argparse.Namespace:
)
parser.add_argument(
"-l",
+ "--local-root",
"--local_root",
dest="local_root",
help="Define the local root path",
required=False,
)
- return parser.parse_args()
+ return parser.parse_args(args)
-if __name__ == "__main__":
+def run_archive_worker(args: ArgsType = None) -> None:
res = get_local_path() / "resources"
- args = parse_arguments()
- if args.version:
- print(__version__)
- sys.exit()
- config_file = Path(args.config_file)
- local_root = Path(args.local_root or "/")
- workspace = args.workspace
+ namespace = parse_arguments(args)
+ config_file = Path(namespace.config_file)
+ local_root = Path(namespace.local_root or "/")
+ workspace = namespace.workspace
config = Config.from_yaml_file(res=res, file=config_file)
- configure_logger(config)
+ # Handler for logging to a file, rotating the log file at certain timed intervals.
+ configure_logger(
+ config, handler_cls="logging.handlers.TimedRotatingFileHandler"
+ )
+ logger.info(f"Starting Archive Worker for {namespace}...")
worker = create_archive_worker(config, workspace, Path(local_root))
worker.start(threaded=False)
+ logger.info("Archive Worker task is done, bye.")
+
+
+if __name__ == "__main__":
+ run_archive_worker()
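
Reviewer's note: passing an explicit `args` sequence to `parse_arguments` makes the CLI testable without patching `sys.argv`. A minimal standalone illustration of the same pattern (the parser here is a trimmed stand-in, not the worker's full parser):

```python
import argparse
from typing import Optional, Sequence

ArgsType = Optional[Sequence[str]]


def parse_arguments(args: ArgsType = None) -> argparse.Namespace:
    parser = argparse.ArgumentParser(prog="archive_worker")
    # action="version" prints the version string and exits, like the patch above.
    parser.add_argument("-v", "--version", action="version", version="%(prog)s 2.14.0")
    parser.add_argument("-c", "--config", dest="config_file")
    return parser.parse_args(args)  # `None` falls back to sys.argv[1:]


# In a test, pass the arguments explicitly instead of patching sys.argv:
ns = parse_arguments(["-c", "application.yaml"])
assert ns.config_file == "application.yaml"
```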
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index b7e038c0f2..79c79672a8 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -1,6 +1,57 @@
Antares Web Changelog
=====================
+v2.14.0 (2023-05-12)
+--------------------
+
+### Features
+
+* **api-hydro:** add allocation form endpoints ([b2bee0e](https://github.com/AntaresSimulatorTeam/AntaREST/commit/b2bee0ed8e9817da2ed642474504fb25a95a8360))
+* **api:** update optimization form endpoint and add adequacy patch form endpoint ([dfa1b27](https://github.com/AntaresSimulatorTeam/AntaREST/commit/dfa1b2729ddb3e46f3b7f65a4a0079211da2c69c))
+* **ui-config:** update optimization form and add adequacy patch form ([f68c54b](https://github.com/AntaresSimulatorTeam/AntaREST/commit/f68c54b9b846d32e65d32c14c8931c625a6bd498))
+* **ui-hydro:** add allocation form ([5dbb85f](https://github.com/AntaresSimulatorTeam/AntaREST/commit/5dbb85fdc733731c5fc16a258666869486b5cddf))
+* **ui-hydro:** add inflow structure tab ([a466e34](https://github.com/AntaresSimulatorTeam/AntaREST/commit/a466e3459e25ece8f2d80c8eb501ba05c717d5fa))
+* **ui-hydro:** add row names ([94dc38c](https://github.com/AntaresSimulatorTeam/AntaREST/commit/94dc38c1fe2f5163f6b44dc31cc3639e63cd2131))
+* **ui-hydro:** display area name instead of ID ([0df0b21](https://github.com/AntaresSimulatorTeam/AntaREST/commit/0df0b2121e761a91946452874d70bc80dbe07647))
+* **ui-hydro:** update allocation form styles ([ac470c1](https://github.com/AntaresSimulatorTeam/AntaREST/commit/ac470c19410bf2d13b57ecc0bab650b24b77c495))
+* **ui-matrix:** update "Time" column and add index row headers ([3d50bf9](https://github.com/AntaresSimulatorTeam/AntaREST/commit/3d50bf9617367fe8d1fcd21e6a9835834456a10f))
+* **ui:** add @total-typescript/ts-reset lib and tsUtils (#1408) ([aa5e3e8](https://github.com/AntaresSimulatorTeam/AntaREST/commit/aa5e3e87d95b8b5061030025e89443e1fc71823d))
+* **ui:** update react-hook-form lib and use the new API (#1444) ([1d129d9](https://github.com/AntaresSimulatorTeam/AntaREST/commit/1d129d9d6bac97deee9ebc98d3334117fe837444))
+
+
+### Bug Fixes
+
+* **common:** field array change doesn't trigger on auto submit (#1439) ([910db64](https://github.com/AntaresSimulatorTeam/AntaREST/commit/910db64ca872468a1f01ced99083962022daa05c))
+* **matrix:** correct the frequency of some matrices (#1384) ([2644416](https://github.com/AntaresSimulatorTeam/AntaREST/commit/26444169b9ab60f54e8ee7a2d16fb10dbc4d537e))
+* **ui-common:** add matrices float handling ([99ba81f](https://github.com/AntaresSimulatorTeam/AntaREST/commit/99ba81fce26bbd99340990d0207761463558d4a7))
+* **ui-hydro:** correct column names ([e529a79](https://github.com/AntaresSimulatorTeam/AntaREST/commit/e529a799071e9c5485e2cba35eb5a7c2c18c25e7))
+* **ui-hydro:** update hydro matrices columns ([56641d7](https://github.com/AntaresSimulatorTeam/AntaREST/commit/56641d7ad995d8b7dd6755b13f1689b32b6296d8))
+* **ui:** fix typo on error page (#1390) ([da00131](https://github.com/AntaresSimulatorTeam/AntaREST/commit/da0013190d7e31e1afe9d8f5c3b03c378ca41507))
+* **ui:** size issue with HandsonTable ([f63edda](https://github.com/AntaresSimulatorTeam/AntaREST/commit/f63edda65345bf9848fb44a8a067a885ca5fbd83))
+
+
+### Styles
+
+* **api-tablemode:** fix typo ([5e5e4e7](https://github.com/AntaresSimulatorTeam/AntaREST/commit/5e5e4e7efcfc93e4682825a9c514417679fba89b))
+* **ui:** fix filename ([ad9f9c0](https://github.com/AntaresSimulatorTeam/AntaREST/commit/ad9f9c055713ef81a94b8c7bb01caae783ab8de9))
+
+
+### Documentation
+
+* **api:** add API documentation for the hydraulic allocation (and fix minor awkwardness) ([08680af](https://github.com/AntaresSimulatorTeam/AntaREST/commit/08680af4344b7dd9aa365267a0deb8d9094f0294))
+* **study-upgrade:** add the "How to upgrade a study?" topic in the documentation (#1400) ([2d03bef](https://github.com/AntaresSimulatorTeam/AntaREST/commit/2d03befe999e558c989e1cce1f51186beff5502b))
+
+> IMPORTANT: The `antares-launcher` Git submodule is dropped.
+
+
+### Contributors
+
+hdinia,
+skamril,
+flomnes,
+laurent-laporte-pro
+
+
v2.13.2 (2023-04-25)
--------------------
diff --git a/docs/assets/media/how-to/studies-upgrade-dialog_box.png b/docs/assets/media/how-to/studies-upgrade-dialog_box.png
new file mode 100755
index 0000000000..c623c6daff
Binary files /dev/null and b/docs/assets/media/how-to/studies-upgrade-dialog_box.png differ
diff --git a/docs/assets/media/how-to/studies-upgrade-done.png b/docs/assets/media/how-to/studies-upgrade-done.png
new file mode 100755
index 0000000000..dd00aff4f8
Binary files /dev/null and b/docs/assets/media/how-to/studies-upgrade-done.png differ
diff --git a/docs/assets/media/how-to/studies-upgrade-menu_open.png b/docs/assets/media/how-to/studies-upgrade-menu_open.png
new file mode 100755
index 0000000000..120678c9ae
Binary files /dev/null and b/docs/assets/media/how-to/studies-upgrade-menu_open.png differ
diff --git a/docs/assets/media/how-to/sudies-upgrade-menu_version.png b/docs/assets/media/how-to/sudies-upgrade-menu_version.png
new file mode 100755
index 0000000000..9cb8b65ef2
Binary files /dev/null and b/docs/assets/media/how-to/sudies-upgrade-menu_version.png differ
diff --git a/docs/how-to/studies-upgrade.md b/docs/how-to/studies-upgrade.md
new file mode 100644
index 0000000000..6c538fc59b
--- /dev/null
+++ b/docs/how-to/studies-upgrade.md
@@ -0,0 +1,60 @@
+---
+title: How to upgrade a study?
+author: Laurent LAPORTE
+date: 2023-03-10
+tags:
+
+- upgrade
+- version
+
+---
+
+# Introduction
+
+Upgrading versioned studies is an important step to ensure compatibility of your studies with the latest versions of
+Antares Web and Antares Simulator. This upgrade is necessary because some earlier versions may be deprecated and no
+longer supported.
+
+The upgrade process involves updating your study to the latest available version, using an automated process that
+ensures consistency of results across versions. This upgrade may include minor or major changes to the structure of the
+study. Generally, the changes concern the configuration, the creation of new folders for new functionalities
+(adequacy patch, xpansion, etc.), and the upgrade of matrices.
+
+> Please note that upgrading your study to the latest version of Antares Simulator does not automatically enable new
+> features, and may require manual adjustments to your study to take full advantage of the new capabilities.
+
+We strongly recommend upgrading your studies to the latest version to take advantage of all the new features and
+improvements in Antares Web and Antares Simulator. If you encounter any difficulties during the upgrade, please do not
+hesitate to contact our support team for assistance.
+
+# Upgrading
+
+To upgrade your study to the latest version of Antares Web and Antares Simulator, you can follow these steps:
+
+On the main page of the study, you can find the version number at the top of the menu bar:
+
+![](../assets/media/how-to/sudies-upgrade-menu_version.png)
+
+To upgrade the study, click on the menu and select "Upgrade Study".
+
+![studies-upgrade-menu_open.png](../assets/media/how-to/studies-upgrade-menu_open.png)
+
+The confirmation dialog box will appear, click "Yes" to start the upgrade:
+
+![studies-upgrade-dialog_box.png](../assets/media/how-to/studies-upgrade-dialog_box.png)
+
+The upgrade task is launched in the task manager.
+
+> **NOTE:** Most upgrades are instantaneous, but some can take time, especially when there are matrix
+> transformations.
+
+When the upgrade is done, you can see the version number updated:
+
+![](../assets/media/how-to/studies-upgrade-done.png)
+
+Once the upgrade is complete, you can open your study and make any remaining manual adjustments in the configuration.
+
+# See also
+
+- Create a new study in the latest version
+- Run a study in the latest version
diff --git a/docs/install/0-INSTALL.md b/docs/install/0-INSTALL.md
index 6703b7e3d8..a51c624450 100644
--- a/docs/install/0-INSTALL.md
+++ b/docs/install/0-INSTALL.md
@@ -14,8 +14,6 @@ Requirements :
```
git clone https://github.com/AntaresSimulatorTeam/AntaREST.git
cd AntaREST
-git submodule init
-git submodule update
```
2. Install back dependencies
diff --git a/docs/install/2-DEPLOY.md b/docs/install/2-DEPLOY.md
index 701cd77464..8db71603c7 100644
--- a/docs/install/2-DEPLOY.md
+++ b/docs/install/2-DEPLOY.md
@@ -28,8 +28,8 @@ Requirements:
These steps should work on any linux system with docker and docker-compose installed.
-1. First, the steps 1 and 3 of the [quick start build](0-INSTALL.md#quick-start) must have been done. So this guide will assume that you have previously cloned the [code repository](https://github.com/AntaresSimulatorTeam/AntaREST)
- (don't forget the git submodule), the frontend built and that your working directory is at the root of the project.
+1. First, steps 1 and 3 of the [quick start build](0-INSTALL.md#quick-start) must have been completed. This guide therefore assumes that you have previously cloned the [code repository](https://github.com/AntaresSimulatorTeam/AntaREST),
+   built the frontend, and that your working directory is at the root of the project.
2. Then download and unzip AntaresSimulator binaries:
diff --git a/examples/studies/STA-mini.zip b/examples/studies/STA-mini.zip
index c033602313..0de2e32809 100644
Binary files a/examples/studies/STA-mini.zip and b/examples/studies/STA-mini.zip differ
diff --git a/mkdocs.yml b/mkdocs.yml
index a1918c08c1..b46a354a8f 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -30,18 +30,20 @@ theme:
nav:
- Home: index.md
- 'User guide':
- - 'Introduction': 'user-guide/0-introduction.md'
- - 'User interface': 'user-guide/1-interface.md'
- - 'Variant manager': 'user-guide/2-variant_manager.md'
+ - 'Introduction': 'user-guide/0-introduction.md'
+ - 'User interface': 'user-guide/1-interface.md'
+ - 'Variant manager': 'user-guide/2-variant_manager.md'
+ - 'How to':
+ - 'Upgrade a study': 'how-to/studies-upgrade.md'
- 'Build':
- - 'Introduction': 'install/0-INSTALL.md'
- - 'Configuration': 'install/1-CONFIG.md'
- - 'Deployment': 'install/2-DEPLOY.md'
+ - 'Introduction': 'install/0-INSTALL.md'
+ - 'Configuration': 'install/1-CONFIG.md'
+ - 'Deployment': 'install/2-DEPLOY.md'
- 'Develop':
- - 'Introduction': 'architecture/0-introduction.md'
- - 'Database management': 'architecture/1-database.md'
- - 'Roadmap': 'architecture/5-roadmap.md'
- - 'Antares ecosystem' : 'https://antares-doc.readthedocs.io'
+ - 'Introduction': 'architecture/0-introduction.md'
+ - 'Database management': 'architecture/1-database.md'
+ - 'Roadmap': 'architecture/5-roadmap.md'
+ - 'Antares ecosystem': 'https://antares-doc.readthedocs.io'
- 'Changelog': 'CHANGELOG.md'
extra:
diff --git a/scripts/package_antares_web.sh b/scripts/package_antares_web.sh
index 54f6d14c9a..ee8865f5f7 100755
--- a/scripts/package_antares_web.sh
+++ b/scripts/package_antares_web.sh
@@ -46,8 +46,6 @@ fi
echo "Creating shortcuts"
if [[ "$OSTYPE" == "msys"* ]]; then
cp ../resources/AntaresWebServerShortcut.lnk ../dist/
-else
- ln -s ../dist/AntaresWeb/AntaresWebServer ../dist/AntaresWebServer
fi
echo "Unzipping example study"
diff --git a/setup.py b/setup.py
index 22f85d9bbf..4db7d9b83b 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@
setuptools.setup(
name="AntaREST",
- version="2.13.2",
+ version="2.14.0",
description="Antares Server",
long_description=long_description,
long_description_content_type="text/markdown",
diff --git a/sonar-project.properties b/sonar-project.properties
index 48f76a506c..efb731da29 100644
--- a/sonar-project.properties
+++ b/sonar-project.properties
@@ -6,5 +6,5 @@ sonar.exclusions=antarest/gui.py,antarest/main.py
sonar.python.coverage.reportPaths=coverage.xml
sonar.python.version=3.8
sonar.javascript.lcov.reportPaths=webapp/coverage/lcov.info
-sonar.projectVersion=2.13.2
+sonar.projectVersion=2.14.0
sonar.coverage.exclusions=antarest/gui.py,antarest/main.py,antarest/singleton_services.py,antarest/worker/archive_worker_service.py,webapp/**/*
\ No newline at end of file
diff --git a/tests/core/test_version_info.py b/tests/core/test_version_info.py
new file mode 100644
index 0000000000..195280631d
--- /dev/null
+++ b/tests/core/test_version_info.py
@@ -0,0 +1,35 @@
+import re
+from unittest.mock import patch
+
+import pytest
+from antarest.core.version_info import get_commit_id, get_dependencies
+
+
+class TestVersionInfo:
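+    """Check the version-info helpers: `get_dependencies` and `get_commit_id`."""
+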
+ @pytest.mark.unit_test
+ def test_get_dependencies(self) -> None:
+ dependencies = get_dependencies()
+ assert isinstance(dependencies, dict)
+ # AntaREST is not a dependency of AntaREST
+ assert "AntaREST" not in dependencies
+        # lazy check: we only verify that FastAPI exists ;-)
+ assert "fastapi" in dependencies
+ assert all(
+            # versions are dotted digits, e.g. "306" (from "pywin32 == 306") or "2.14.0"
+ re.fullmatch(r"\d+(?:\.\d+)*", ver)
+ for ver in dependencies.values()
+ )
+
+ @pytest.mark.unit_test
+ def test_get_commit_id__commit_id__exist(self, tmp_path) -> None:
+ path_commit_id = tmp_path.joinpath("commit_id")
+ path_commit_id.write_text("fake_commit")
+ assert get_commit_id(tmp_path) == "fake_commit"
+
+ @pytest.mark.unit_test
+ def test_get_commit_id__commit_id__missing(self, tmp_path) -> None:
+ with patch(
+ "antarest.core.version_info.get_last_commit_from_git",
+ return_value="mock commit",
+ ):
+ assert get_commit_id(tmp_path) == "mock commit"
diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index a38448973d..bf555afa74 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -24,7 +24,7 @@ def sta_mini_zip_path(project_path: Path) -> Path:
@pytest.fixture
def app(tmp_path: str, sta_mini_zip_path: Path, project_path: Path):
- engine = create_engine("sqlite:///:memory:", echo=True)
+ engine = create_engine("sqlite:///:memory:", echo=False)
Base.metadata.create_all(engine)
DBSessionMiddleware(
Mock(),
diff --git a/tests/integration/study_data_blueprint/__init__.py b/tests/integration/study_data_blueprint/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/integration/study_data_blueprint/conftest.py b/tests/integration/study_data_blueprint/conftest.py
new file mode 100644
index 0000000000..c0dbe9ffac
--- /dev/null
+++ b/tests/integration/study_data_blueprint/conftest.py
@@ -0,0 +1,57 @@
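+"""
+Fixtures for the `study_data_blueprint` integration tests.
+
+The fixtures chain together: `client` (HTTP test client) -> `admin_access_token`
+-> `user_access_token` -> `study_id` (the study under test).
+"""
+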
+import pytest
+from fastapi import FastAPI
+from starlette.testclient import TestClient
+
+
+@pytest.fixture(name="client")
+def fixture_client(app: FastAPI) -> TestClient:
+ """Get the webservice client used for unit testing"""
+ return TestClient(app, raise_server_exceptions=False)
+
+
+@pytest.fixture(name="admin_access_token")
+def fixture_admin_access_token(client: TestClient) -> str:
+ """Get the admin user access token used for authentication"""
+ res = client.post(
+ "/v1/login",
+ json={"username": "admin", "password": "admin"},
+ )
+ assert res.status_code == 200
+ credentials = res.json()
+ return credentials["access_token"]
+
+
+@pytest.fixture(name="user_access_token")
+def fixture_user_access_token(
+ client: TestClient,
+ admin_access_token: str,
+) -> str:
+ """Get a classic user access token used for authentication"""
+ res = client.post(
+ "/v1/users",
+ headers={"Authorization": f"Bearer {admin_access_token}"},
+ json={"name": "George", "password": "mypass"},
+ )
+ assert res.status_code == 200
+ res = client.post(
+ "/v1/login",
+ json={"username": "George", "password": "mypass"},
+ )
+ assert res.status_code == 200
+ credentials = res.json()
+ return credentials["access_token"]
+
+
+@pytest.fixture(name="study_id")
+def fixture_study_id(
+ client: TestClient,
+ user_access_token: str,
+) -> str:
+ """Get the ID of the study to upgrade"""
+ res = client.get(
+ "/v1/studies",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == 200
+ study_ids = res.json()
+ return next(iter(study_ids))
diff --git a/tests/integration/study_data_blueprint/test_config_general.py b/tests/integration/study_data_blueprint/test_config_general.py
new file mode 100644
index 0000000000..e64c50aa83
--- /dev/null
+++ b/tests/integration/study_data_blueprint/test_config_general.py
@@ -0,0 +1,63 @@
+from http import HTTPStatus
+
+import pytest
+from starlette.testclient import TestClient
+
+
+@pytest.mark.unit_test
+class TestConfigGeneralForm:
+ """
+    Test the end points related to the general configuration form.
+
+ Those tests use the "examples/studies/STA-mini.zip" Study,
+ which contains the following areas: ["de", "es", "fr", "it"].
+ """
+
+ def test_get_general_form_values(
+ self,
+ client: TestClient,
+ user_access_token: str,
+ study_id: str,
+ ):
+ """Check `set_general_form_values` end point"""
+ res = client.get(
+ f"/v1/studies/{study_id}/config/general/form",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+ actual = res.json()
+ expected = {
+ "buildingMode": "Custom",
+ "filtering": True,
+ "firstDay": 1,
+ "firstJanuary": "Monday",
+ "firstMonth": "january",
+ "firstWeekDay": "Monday",
+ "horizon": "2030",
+ "lastDay": 7,
+ "leapYear": False,
+ "mcScenario": True,
+ "mode": "Adequacy",
+ "nbYears": 1,
+ "selectionMode": True,
+ "simulationSynthesis": True,
+ "yearByYear": False,
+ }
+ assert actual == expected
+
+ def test_set_general_form_values(
+ self,
+ client: TestClient,
+ user_access_token: str,
+ study_id: str,
+ ):
+ """Check `set_general_form_values` end point"""
+ obj = {"horizon": 2020}
+ res = client.put(
+ f"/v1/studies/{study_id}/config/general/form",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json=obj,
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+ actual = res.json()
+ assert actual is None
diff --git a/tests/integration/study_data_blueprint/test_hydro_allocation.py b/tests/integration/study_data_blueprint/test_hydro_allocation.py
new file mode 100644
index 0000000000..7664f2eb62
--- /dev/null
+++ b/tests/integration/study_data_blueprint/test_hydro_allocation.py
@@ -0,0 +1,252 @@
+from http import HTTPStatus
+from typing import Any, Dict
+
+import pytest
+from starlette.testclient import TestClient
+
+from antarest.study.business.area_management import AreaInfoDTO, AreaType
+from tests.integration.utils import wait_for
+
+
+@pytest.mark.unit_test
+class TestHydroAllocation:
+ """
+ Test the end points related to hydraulic allocation.
+
+ Those tests use the "examples/studies/STA-mini.zip" Study,
+ which contains the following areas: ["de", "es", "fr", "it"].
+ """
+
+ def test_get_allocation_form_values(
+ self,
+ client: TestClient,
+ user_access_token: str,
+ study_id: str,
+ ):
+ """Check `get_allocation_form_values` end point"""
+ area_id = "de"
+ res = client.get(
+ f"/v1/studies/{study_id}/areas/{area_id}/hydro/allocation/form",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+ actual = res.json()
+ expected = {"allocation": [{"areaId": "de", "coefficient": 1.0}]}
+ assert actual == expected
+
+ def test_get_allocation_form_values__variant(
+ self,
+ client: TestClient,
+ user_access_token: str,
+ study_id: str,
+ ):
+ """
+ The purpose of this test is to check that we can get the form parameters from a study variant.
+        To prepare this test, we start from a RAW study, copy it into the managed study workspace,
+        and then create a variant of this managed study.
+ """
+ # Execute the job to copy the study to the workspace
+ res = client.post(
+ f"/v1/studies/{study_id}/copy?dest=Clone&with_outputs=false",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ res.raise_for_status()
+ task_id = res.json()
+
+ # wait for the job to finish
+ def copy_task_done() -> bool:
+ r = client.get(
+ f"/v1/tasks/{task_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
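+            # a status of 3 means the task is done (assumed: TaskStatus.COMPLETED)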
+ return r.json()["status"] == 3
+
+ wait_for(copy_task_done, sleep_time=0.2)
+
+ # Get the job result to retrieve the study ID
+ res = client.get(
+ f"/v1/tasks/{task_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ res.raise_for_status()
+ managed_id = res.json()["result"]["return_value"]
+
+ # create a variant study from the managed study
+ res = client.post(
+ f"/v1/studies/{managed_id}/variants?name=foo",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ res.raise_for_status()
+ variant_id = res.json()
+
+ # get allocation form
+ area_id = "de"
+ res = client.get(
+ f"/v1/studies/{variant_id}/areas/{area_id}/hydro/allocation/form",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ res.raise_for_status()
+ actual = res.json()
+ expected = {"allocation": [{"areaId": "de", "coefficient": 1.0}]}
+ assert actual == expected
+
+ @pytest.mark.parametrize(
+ "area_id, expected",
+ [
+ pytest.param(
+ "*",
+ {
+ "columns": ["de", "es", "fr", "it"],
+ "data": [
+ [1.0, 0.0, 0.0, 0.0],
+ [0.0, 1.0, 0.0, 0.0],
+ [0.0, 0.0, 1.0, 0.0],
+ [0.0, 0.0, 0.0, 1.0],
+ ],
+ "index": ["de", "es", "fr", "it"],
+ },
+ id="all-areas",
+ ),
+ ],
+ )
+ def test_get_allocation_matrix(
+ self,
+ client: TestClient,
+ user_access_token: str,
+ study_id: str,
+ area_id: str,
+        expected: Dict[str, Any],
+ ):
+ """Check `get_allocation_matrix` end point"""
+ res = client.get(
+ f"/v1/studies/{study_id}/areas/hydro/allocation/matrix",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+ actual = res.json()
+ assert actual == expected
+
+ def test_set_allocation_form_values(
+ self,
+ client: TestClient,
+ user_access_token: str,
+ study_id: str,
+ ):
+ """Check `set_allocation_form_values` end point"""
+ area_id = "de"
+ expected = {
+ "allocation": [
+ {"areaId": "de", "coefficient": 3},
+ {"areaId": "es", "coefficient": 1.0},
+ ]
+ }
+ res = client.put(
+ f"/v1/studies/{study_id}/areas/{area_id}/hydro/allocation/form",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json=expected,
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+ actual = res.json()
+ assert actual == expected
+
+ # check that the values are updated
+ res = client.get(
+ f"/v1/studies/{study_id}/raw?path=input/hydro/allocation&depth=3",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+ actual = res.json()
+ expected = {
+ "de": {"[allocation]": {"de": 3.0, "es": 1.0}},
+ "es": {"[allocation]": {"es": 1}},
+ "fr": {"[allocation]": {"fr": 1}},
+ "it": {"[allocation]": {"it": 1}},
+ }
+ assert actual == expected
+
+ def test_create_area(
+ self, client: TestClient, user_access_token: str, study_id: str
+ ):
+ """
+ Given a study, when an area is created, the hydraulic allocation
+ column for this area must be updated with the following values:
+ - the coefficient == 1 for this area,
+ - the coefficient == 0 for the other areas.
+ Other columns must not be changed.
+ """
+ area_info = AreaInfoDTO(id="north", name="NORTH", type=AreaType.AREA)
+ res = client.post(
+ f"/v1/studies/{study_id}/areas",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ data=area_info.json(),
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+
+ res = client.get(
+ f"/v1/studies/{study_id}/areas/hydro/allocation/matrix",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == HTTPStatus.OK
+ actual = res.json()
+ expected = {
+ "columns": ["de", "es", "fr", "it", "north"],
+ "data": [
+ [1.0, 0.0, 0.0, 0.0, 0.0],
+ [0.0, 1.0, 0.0, 0.0, 0.0],
+ [0.0, 0.0, 1.0, 0.0, 0.0],
+ [0.0, 0.0, 0.0, 1.0, 0.0],
+ [0.0, 0.0, 0.0, 0.0, 1.0],
+ ],
+ "index": ["de", "es", "fr", "it", "north"],
+ }
+ assert actual == expected
+
+ def test_delete_area(
+ self, client: TestClient, user_access_token: str, study_id: str
+ ):
+ """
+ Given a study, when an area is deleted, the hydraulic allocation
+ column for this area must be removed.
+ Other columns must be updated to reflect the area deletion.
+ """
+ # First change the coefficients to avoid zero values (which are defaults).
+ obj = {
+ "de": {"[allocation]": {"de": 10, "es": 20, "fr": 30, "it": 40}},
+ "es": {"[allocation]": {"de": 11, "es": 21, "fr": 31, "it": 41}},
+ "fr": {"[allocation]": {"de": 12, "es": 22, "fr": 32, "it": 42}},
+ "it": {"[allocation]": {"de": 13, "es": 23, "fr": 33, "it": 43}},
+ }
+ for prod_area, allocation_cfg in obj.items():
+ res = client.post(
+ f"/v1/studies/{study_id}/raw?path=input/hydro/allocation/{prod_area}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json=allocation_cfg,
+ )
+ assert res.status_code == HTTPStatus.NO_CONTENT, res.json()
+
+ # Then we remove the "fr" zone.
+ # The deletion should update the allocation matrix of all other zones.
+ res = client.delete(
+ f"/v1/studies/{study_id}/areas/fr",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+
+ # Check that the "fr" column is removed from the hydraulic allocation matrix.
+ # The row corresponding to "fr" must also be deleted.
+ res = client.get(
+ f"/v1/studies/{study_id}/areas/hydro/allocation/matrix",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+ actual = res.json()
+ expected = {
+ "columns": ["de", "es", "it"],
+ "data": [
+ [10.0, 11.0, 13.0],
+ [20.0, 21.0, 23.0],
+ [40.0, 41.0, 43.0],
+ ],
+ "index": ["de", "es", "it"],
+ }
+ assert actual == expected
diff --git a/tests/integration/study_data_blueprint/test_hydro_correlation.py b/tests/integration/study_data_blueprint/test_hydro_correlation.py
new file mode 100644
index 0000000000..3337f0193a
--- /dev/null
+++ b/tests/integration/study_data_blueprint/test_hydro_correlation.py
@@ -0,0 +1,307 @@
+from http import HTTPStatus
+from typing import Any, Dict
+
+import pytest
+from antarest.study.business.area_management import AreaInfoDTO
+from starlette.testclient import TestClient
+
+
+@pytest.mark.unit_test
+class TestHydroCorrelation:
+ """
+ Test the end points related to hydraulic correlation.
+
+ Those tests use the "examples/studies/STA-mini.zip" Study,
+ which contains the following areas: ["de", "es", "fr", "it"].
+ """
+
+ def test_get_correlation_form_values(
+ self,
+ client: TestClient,
+ user_access_token: str,
+ study_id: str,
+ ):
+ """Check `get_correlation_form_values` end point"""
+ area_id = "fr"
+ res = client.get(
+ f"/v1/studies/{study_id}/areas/{area_id}/hydro/correlation/form",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+ actual = res.json()
+ expected = {
+ "correlation": [
+ {"areaId": "fr", "coefficient": 100.0},
+ {"areaId": "de", "coefficient": 25.0},
+ {"areaId": "es", "coefficient": 75.0},
+ {"areaId": "it", "coefficient": 75.0},
+ ]
+ }
+ assert actual == expected
+
+ def test_set_correlation_form_values(
+ self,
+ client: TestClient,
+ user_access_token: str,
+ study_id: str,
+ ):
+ """Check `set_correlation_form_values` end point"""
+ area_id = "fr"
+ obj = {
+ "correlation": [
+ {"areaId": "de", "coefficient": 20},
+ {"areaId": "es", "coefficient": -82.8},
+ {"areaId": "it", "coefficient": 0},
+ {"areaId": "fr", "coefficient": 100.0},
+ ]
+ }
+ res = client.put(
+ f"/v1/studies/{study_id}/areas/{area_id}/hydro/correlation/form",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json=obj,
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+ actual = res.json()
+ expected = {
+ "correlation": [
+ {"areaId": "de", "coefficient": 20.0},
+ {"areaId": "es", "coefficient": -82.8},
+ {"areaId": "fr", "coefficient": 100.0},
+ ]
+ }
+ assert actual == expected
+
+ # check that the form is updated correctly
+ res = client.get(
+ f"/v1/studies/{study_id}/areas/{area_id}/hydro/correlation/form",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+ actual = res.json()
+ expected = {
+ "correlation": [
+ {"areaId": "fr", "coefficient": 100.0},
+ {"areaId": "de", "coefficient": 20.0},
+ {"areaId": "es", "coefficient": -82.8},
+ ]
+ }
+ assert actual == expected
+
+ # check that the matrix is symmetric
+ res = client.get(
+ f"/v1/studies/{study_id}/areas/hydro/correlation/matrix",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+ actual = res.json()
+ expected = {
+ "columns": ["de", "es", "fr", "it"],
+ "data": [
+ [1.0, 0.0, 0.2, 0.0],
+ [0.0, 1.0, -0.828, 0.12],
+ [0.2, -0.828, 1.0, 0.0],
+ [0.0, 0.12, 0.0, 1.0],
+ ],
+ "index": ["de", "es", "fr", "it"],
+ }
+ assert actual == expected
+
+ @pytest.mark.parametrize(
+ "columns, expected",
+ [
+ pytest.param(
+ "",
+ {
+ "columns": ["de", "es", "fr", "it"],
+ "data": [
+ [1.0, 0.0, 0.25, 0.0],
+ [0.0, 1.0, 0.75, 0.12],
+ [0.25, 0.75, 1.0, 0.75],
+ [0.0, 0.12, 0.75, 1.0],
+ ],
+ "index": ["de", "es", "fr", "it"],
+ },
+ id="all-areas",
+ ),
+ pytest.param(
+ "fr,de",
+ {
+ "columns": ["de", "fr"],
+ "data": [
+ [1.0, 0.25],
+ [0.0, 0.75],
+ [0.25, 1.0],
+ [0.0, 0.75],
+ ],
+ "index": ["de", "es", "fr", "it"],
+ },
+ id="some-areas",
+ ),
+ pytest.param(
+ "fr",
+ {
+ "columns": ["fr"],
+ "data": [
+ [0.25],
+ [0.75],
+ [1.0],
+ [0.75],
+ ],
+ "index": ["de", "es", "fr", "it"],
+ },
+ id="one-area",
+ ),
+ ],
+ )
+ def test_get_correlation_matrix(
+ self,
+ client: TestClient,
+ user_access_token: str,
+ study_id: str,
+ columns: str,
+        expected: Dict[str, Any],
+ ):
+ """Check `get_correlation_matrix` end point"""
+ query = f"columns={columns}" if columns else ""
+ res = client.get(
+ f"/v1/studies/{study_id}/areas/hydro/correlation/matrix?{query}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+ actual = res.json()
+ assert actual == expected
+
+ def test_set_correlation_matrix(
+ self,
+ client: TestClient,
+ user_access_token: str,
+ study_id: str,
+ ):
+ """Check `set_correlation_matrix` end point"""
+ obj = {
+ "columns": ["fr", "it"],
+ "data": [
+ [-0.79332875, -0.96830414],
+ [-0.23220568, -0.158783],
+ [1.0, 0.82],
+ [0.82, 1.0],
+ ],
+ "index": ["de", "es", "fr", "it"],
+ }
+ res = client.put(
+ f"/v1/studies/{study_id}/areas/hydro/correlation/matrix",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json=obj,
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+ actual = res.json()
+ expected = obj
+ assert actual == expected
+
+ res = client.get(
+ f"/v1/studies/{study_id}/areas/hydro/correlation/matrix",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+ actual = res.json()
+ expected = {
+ "columns": ["de", "es", "fr", "it"],
+ "data": [
+ [1.0, 0.0, -0.79332875, -0.96830414],
+ [0.0, 1.0, -0.23220568, -0.158783],
+ [-0.79332875, -0.23220568, 1.0, 0.82],
+ [-0.96830414, -0.158783, 0.82, 1.0],
+ ],
+ "index": ["de", "es", "fr", "it"],
+ }
+ assert actual == expected
+
+ def test_create_area(
+ self, client: TestClient, user_access_token: str, study_id: str
+ ):
+ """
+ Given a study, when an area is created, the hydraulic correlation
+ column for this area must be updated with the following values:
+ - the coefficient == 1 for this area,
+ - the coefficient == 0 for the other areas.
+ Other columns must not be changed.
+ """
+ area_info = AreaInfoDTO(id="north", name="NORTH", type="AREA")
+ res = client.post(
+ f"/v1/studies/{study_id}/areas",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ data=area_info.json(),
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+
+ res = client.get(
+ f"/v1/studies/{study_id}/areas/hydro/correlation/matrix",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == HTTPStatus.OK
+ actual = res.json()
+ expected = {
+ "columns": ["de", "es", "fr", "it", "north"],
+ "data": [
+ [1.0, 0.0, 0.25, 0.0, 0.0],
+ [0.0, 1.0, 0.75, 0.12, 0.0],
+ [0.25, 0.75, 1.0, 0.75, 0.0],
+ [0.0, 0.12, 0.75, 1.0, 0.0],
+ [0.0, 0.0, 0.0, 0.0, 1.0],
+ ],
+ "index": ["de", "es", "fr", "it", "north"],
+ }
+ assert actual == expected
+
+ def test_delete_area(
+ self, client: TestClient, user_access_token: str, study_id: str
+ ):
+ """
+ Given a study, when an area is deleted, the hydraulic correlation
+ column for this area must be removed.
+ Other columns must be updated to reflect the area deletion.
+ """
+ # First change the coefficients to avoid zero values (which are defaults).
+ correlation_cfg = {
+ "annual": {
+ "de%es": 0.12,
+ "de%fr": 0.13,
+ "de%it": 0.14,
+ "es%fr": 0.22,
+ "es%it": 0.23,
+ "fr%it": 0.32,
+ }
+ }
+ res = client.post(
+ f"/v1/studies/{study_id}/raw?path=input/hydro/prepro/correlation",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json=correlation_cfg,
+ )
+ assert res.status_code == HTTPStatus.NO_CONTENT, res.json()
+
+ # Then we remove the "fr" zone.
+ # The deletion should update the correlation matrix of all other zones.
+ res = client.delete(
+ f"/v1/studies/{study_id}/areas/fr",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+
+ # Check that the "fr" column is removed from the hydraulic correlation matrix.
+ # The row corresponding to "fr" must also be deleted.
+ res = client.get(
+ f"/v1/studies/{study_id}/areas/hydro/correlation/matrix",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == HTTPStatus.OK, res.json()
+ actual = res.json()
+ expected = {
+ "columns": ["de", "es", "it"],
+ "data": [
+ [1.0, 0.12, 0.14],
+ [0.12, 1.0, 0.23],
+ [0.14, 0.23, 1.0],
+ ],
+ "index": ["de", "es", "it"],
+ }
+ assert actual == expected
diff --git a/tests/integration/test_core_blueprint.py b/tests/integration/test_core_blueprint.py
index aaa6cb83e9..60949ea8c9 100644
--- a/tests/integration/test_core_blueprint.py
+++ b/tests/integration/test_core_blueprint.py
@@ -1,20 +1,37 @@
+import re
from unittest import mock
from fastapi import FastAPI
-from http import HTTPStatus
from starlette.testclient import TestClient
+class RegEx:
+ """A helper object that compares equal to a regex."""
+
+ def __init__(self, regex):
+ self.regex = regex
+ self.match = re.compile(self.regex).fullmatch
+
+ def __eq__(self, other):
+ return isinstance(other, str) and self.match(other)
+
+ def __ne__(self, other):
+ return not isinstance(other, str) or not self.match(other)
+
+ def __repr__(self):
+ return f""
+
+
class TestVersionInfo:
def test_version_info(self, app: FastAPI):
client = TestClient(app, raise_server_exceptions=False)
res = client.get("/version")
- assert res.status_code == HTTPStatus.OK
+ res.raise_for_status()
actual = res.json()
expected = {
"name": "AntaREST",
- "version": mock.ANY,
- "gitcommit": mock.ANY,
- "dependencies": {"Antares_Launcher": mock.ANY},
+ "version": RegEx(r"\d+(?:\.\d+)+"),
+ "gitcommit": RegEx(r"^[0-9a-fA-F]{40}$"),
+ "dependencies": mock.ANY,
}
assert actual == expected
diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py
index f0fc467202..f0bc853921 100644
--- a/tests/integration/test_integration.py
+++ b/tests/integration/test_integration.py
@@ -1,40 +1,36 @@
-import contextlib
import time
from pathlib import Path
-from typing import Callable
from unittest.mock import ANY
+from fastapi import FastAPI
+from starlette.testclient import TestClient
+
from antarest.core.tasks.model import TaskDTO, TaskStatus
+from antarest.study.business.adequacy_patch_management import PriceTakingOrder
from antarest.study.business.area_management import AreaType, LayerInfoDTO
from antarest.study.business.general_management import Mode
+from antarest.study.business.optimization_management import (
+ SimplexOptimizationRange,
+ TransmissionCapacities,
+ UnfeasibleProblemBehavior,
+)
from antarest.study.business.table_mode_management import (
FIELDS_INFO_BY_TYPE,
AdequacyPatchMode,
AssetType,
+ BindingConstraintOperator,
+ BindingConstraintType,
LawOption,
TableTemplateType,
TimeSeriesGenerationOption,
- TransmissionCapacity,
TimeSeriesInterpretation,
- BindingConstraintType,
- BindingConstraintOperator,
+ TransmissionCapacity,
)
from antarest.study.model import MatrixIndex, StudyDownloadLevelDTO
from antarest.study.storage.variantstudy.model.command.common import (
CommandName,
)
-from fastapi import FastAPI
-from starlette.testclient import TestClient
-
-
-def wait_for(predicate: Callable[[], bool], timeout=10):
- end = time.time() + timeout
- while time.time() < end:
- with contextlib.suppress(Exception):
- if predicate():
- return
- time.sleep(1)
- raise TimeoutError()
+from tests.integration.utils import wait_for
def init_test(app: FastAPI):
@@ -992,6 +988,8 @@ def test_area_management(app: FastAPI):
)
assert res.status_code == 200
+ # Optimization form
+
res_optimization_config = client.get(
f"/v1/studies/{study_id}/config/optimization/form",
headers={
@@ -1002,8 +1000,7 @@ def test_area_management(app: FastAPI):
assert res_optimization_config_json == {
"bindingConstraints": True,
"hurdleCosts": True,
- "transmissionCapacities": "local-values",
- "linkType": "local",
+ "transmissionCapacities": TransmissionCapacities.LOCAL_VALUES.value,
"thermalClustersMinStablePower": True,
"thermalClustersMinUdTime": True,
"dayAheadReserve": True,
@@ -1011,18 +1008,8 @@ def test_area_management(app: FastAPI):
"strategicReserve": True,
"spinningReserve": True,
"exportMps": False,
- "unfeasibleProblemBehavior": "error-verbose",
- "simplexOptimizationRange": "week",
- "splitExportedMps": False,
- "enableAdequacyPatch": False,
- "ntcFromPhysicalAreasOutToPhysicalAreasInAdequacyPatch": True,
- "ntcBetweenPhysicalAreasOutAdequacyPatch": True,
- "checkCsrCostFunction": False,
- "includeHurdleCostCsr": False,
- "priceTakingOrder": "DENS",
- "thresholdInitiateCurtailmentSharingRule": 0.0,
- "thresholdDisplayLocalMatchingRuleViolations": 0.0,
- "thresholdCsrVariableBoundsRelaxation": 3,
+ "unfeasibleProblemBehavior": UnfeasibleProblemBehavior.ERROR_VERBOSE.value,
+ "simplexOptimizationRange": SimplexOptimizationRange.WEEK.value,
}
client.put(
@@ -1032,8 +1019,8 @@ def test_area_management(app: FastAPI):
},
json={
"strategicReserve": False,
- "unfeasibleProblemBehavior": "warning-verbose",
- "ntcBetweenPhysicalAreasOutAdequacyPatch": False,
+ "unfeasibleProblemBehavior": UnfeasibleProblemBehavior.WARNING_VERBOSE.value,
+ "simplexOptimizationRange": SimplexOptimizationRange.DAY.value,
},
)
res_optimization_config = client.get(
@@ -1046,8 +1033,7 @@ def test_area_management(app: FastAPI):
assert res_optimization_config_json == {
"bindingConstraints": True,
"hurdleCosts": True,
- "transmissionCapacities": "local-values",
- "linkType": "local",
+ "transmissionCapacities": TransmissionCapacities.LOCAL_VALUES.value,
"thermalClustersMinStablePower": True,
"thermalClustersMinUdTime": True,
"dayAheadReserve": True,
@@ -1055,20 +1041,63 @@ def test_area_management(app: FastAPI):
"strategicReserve": False,
"spinningReserve": True,
"exportMps": False,
- "unfeasibleProblemBehavior": "warning-verbose",
- "simplexOptimizationRange": "week",
- "splitExportedMps": False,
+ "unfeasibleProblemBehavior": UnfeasibleProblemBehavior.WARNING_VERBOSE.value,
+ "simplexOptimizationRange": SimplexOptimizationRange.DAY.value,
+ }
+
+ # Adequacy patch form
+
+ res_adequacy_patch_config = client.get(
+ f"/v1/studies/{study_id}/config/adequacypatch/form",
+ headers={
+ "Authorization": f'Bearer {admin_credentials["access_token"]}'
+ },
+ )
+ res_adequacy_patch_config_json = res_adequacy_patch_config.json()
+ assert res_adequacy_patch_config_json == {
"enableAdequacyPatch": False,
"ntcFromPhysicalAreasOutToPhysicalAreasInAdequacyPatch": True,
- "ntcBetweenPhysicalAreasOutAdequacyPatch": False,
+ "ntcBetweenPhysicalAreasOutAdequacyPatch": True,
"checkCsrCostFunction": False,
"includeHurdleCostCsr": False,
- "priceTakingOrder": "DENS",
+ "priceTakingOrder": PriceTakingOrder.DENS.value,
"thresholdInitiateCurtailmentSharingRule": 0.0,
"thresholdDisplayLocalMatchingRuleViolations": 0.0,
"thresholdCsrVariableBoundsRelaxation": 3,
}
+ client.put(
+ f"/v1/studies/{study_id}/config/adequacypatch/form",
+ headers={
+ "Authorization": f'Bearer {admin_credentials["access_token"]}'
+ },
+ json={
+ "ntcBetweenPhysicalAreasOutAdequacyPatch": False,
+ "priceTakingOrder": PriceTakingOrder.LOAD.value,
+ "thresholdDisplayLocalMatchingRuleViolations": 1.1,
+ },
+ )
+ res_adequacy_patch_config = client.get(
+ f"/v1/studies/{study_id}/config/adequacypatch/form",
+ headers={
+ "Authorization": f'Bearer {admin_credentials["access_token"]}'
+ },
+ )
+ res_adequacy_patch_config_json = res_adequacy_patch_config.json()
+ assert res_adequacy_patch_config_json == {
+ "enableAdequacyPatch": False,
+ "ntcFromPhysicalAreasOutToPhysicalAreasInAdequacyPatch": True,
+ "ntcBetweenPhysicalAreasOutAdequacyPatch": False,
+ "checkCsrCostFunction": False,
+ "includeHurdleCostCsr": False,
+ "priceTakingOrder": PriceTakingOrder.LOAD.value,
+ "thresholdInitiateCurtailmentSharingRule": 0.0,
+ "thresholdDisplayLocalMatchingRuleViolations": 1.1,
+ "thresholdCsrVariableBoundsRelaxation": 3,
+ }
+
+ # General form
+
res_general_config = client.get(
f"/v1/studies/{study_id}/config/general/form",
headers={
@@ -1102,6 +1131,7 @@ def test_area_management(app: FastAPI):
},
json={
"mode": Mode.ADEQUACY.value,
+ "firstDay": 2,
"lastDay": 299,
"leapYear": True,
},
@@ -1115,7 +1145,7 @@ def test_area_management(app: FastAPI):
res_general_config_json = res_general_config.json()
assert res_general_config_json == {
"mode": Mode.ADEQUACY.value,
- "firstDay": 1,
+ "firstDay": 2,
"lastDay": 299,
"horizon": "",
"firstMonth": "january",
@@ -1132,6 +1162,8 @@ def test_area_management(app: FastAPI):
"thematicTrimming": False,
}
+ # Thematic trimming form
+
res_thematic_trimming_config = client.get(
f"/v1/studies/{study_id}/config/thematictrimming/form",
headers={
diff --git a/tests/integration/test_studies_upgrade.py b/tests/integration/test_studies_upgrade.py
index 644efbc6e2..1d57dda4ca 100644
--- a/tests/integration/test_studies_upgrade.py
+++ b/tests/integration/test_studies_upgrade.py
@@ -1,3 +1,4 @@
+import os
import time
import pytest
@@ -5,6 +6,8 @@
from fastapi import FastAPI
from starlette.testclient import TestClient
+RUN_ON_WINDOWS = os.name == "nt"
+
def wait_task_completion(
client: TestClient,
@@ -81,6 +84,9 @@ def fixture_study_id(
study_ids = res.json()
return next(iter(study_ids))
+ @pytest.mark.skipif(
+        RUN_ON_WINDOWS, reason="This test fails randomly on Windows"
+ )
def test_upgrade_study__next_version(
self, client: TestClient, user_access_token: str, study_id: str
):
@@ -97,6 +103,9 @@ def test_upgrade_study__next_version(
"710" in task.result.message
), f"Version not in {task.result.message=}"
+ @pytest.mark.skipif(
+        RUN_ON_WINDOWS, reason="This test fails randomly on Windows"
+ )
def test_upgrade_study__target_version(
self, client: TestClient, user_access_token: str, study_id: str
):
@@ -115,6 +124,9 @@ def test_upgrade_study__target_version(
target_version in task.result.message
), f"Version not in {task.result.message=}"
+ @pytest.mark.skipif(
+        RUN_ON_WINDOWS, reason="This test fails randomly on Windows"
+ )
def test_upgrade_study__bad_target_version(
self, client: TestClient, user_access_token: str, study_id: str
):
diff --git a/tests/integration/utils.py b/tests/integration/utils.py
new file mode 100644
index 0000000000..86d7759285
--- /dev/null
+++ b/tests/integration/utils.py
@@ -0,0 +1,15 @@
+import contextlib
+import time
+from typing import Callable
+
+
+def wait_for(
+ predicate: Callable[[], bool], timeout: float = 10, sleep_time: float = 1
+) -> None:
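+    """Call `predicate` every `sleep_time` seconds (swallowing exceptions)
+    until it returns True; raise `TimeoutError` after `timeout` seconds."""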
+ end = time.time() + timeout
+ while time.time() < end:
+ with contextlib.suppress(Exception):
+ if predicate():
+ return
+ time.sleep(sleep_time)
+ raise TimeoutError(f"task is still in progress after {timeout} seconds")
diff --git a/tests/storage/business/test_arealink_manager.py b/tests/storage/business/test_arealink_manager.py
index 23fcf9be04..cae6323b49 100644
--- a/tests/storage/business/test_arealink_manager.py
+++ b/tests/storage/business/test_arealink_manager.py
@@ -1,12 +1,10 @@
import json
-import os
import uuid
from pathlib import Path
from unittest.mock import Mock
from zipfile import ZipFile
import pytest
-
from antarest.core.jwt import DEFAULT_ADMIN_USER
from antarest.core.requests import RequestParameters
from antarest.matrixstore.service import (
@@ -14,36 +12,34 @@
SimpleMatrixService,
)
from antarest.study.business.area_management import (
+ AreaCreationDTO,
AreaManager,
AreaType,
- AreaCreationDTO,
AreaUI,
)
-from antarest.study.business.link_management import LinkManager, LinkInfoDTO
+from antarest.study.business.link_management import LinkInfoDTO, LinkManager
from antarest.study.model import (
- RawStudy,
Patch,
PatchArea,
PatchCluster,
+ RawStudy,
StudyAdditionalData,
)
from antarest.study.repository import StudyMetadataRepository
from antarest.study.storage.patch_service import PatchService
from antarest.study.storage.rawstudy.model.filesystem.config.files import build
from antarest.study.storage.rawstudy.model.filesystem.config.model import (
- FileStudyTreeConfig,
Area,
+ Cluster,
DistrictSet,
+ FileStudyTreeConfig,
Link,
- Cluster,
)
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
from antarest.study.storage.rawstudy.model.filesystem.root.filestudytree import (
FileStudyTree,
)
-from antarest.study.storage.rawstudy.raw_study_service import (
- RawStudyService,
-)
+from antarest.study.storage.rawstudy.raw_study_service import RawStudyService
from antarest.study.storage.storage_service import StudyStorageService
from antarest.study.storage.variantstudy.business.matrix_constants_generator import (
GeneratorMatrixConstants,
@@ -57,14 +53,13 @@
from antarest.study.storage.variantstudy.variant_study_service import (
VariantStudyService,
)
-from tests.conftest import with_db_context
@pytest.fixture
-def empty_study(tmpdir: Path) -> FileStudy:
+def empty_study(tmp_path: Path) -> FileStudy:
cur_dir: Path = Path(__file__).parent
- study_path = Path(tmpdir / str(uuid.uuid4()))
- os.mkdir(study_path)
+ study_path = tmp_path.joinpath(str(uuid.uuid4()))
+ study_path.mkdir()
with ZipFile(cur_dir / "assets" / "empty_study_810.zip") as zip_output:
zip_output.extractall(path=study_path)
config = build(study_path, "1")
@@ -72,9 +67,9 @@ def empty_study(tmpdir: Path) -> FileStudy:
@pytest.fixture
-def matrix_service(tmpdir: Path) -> ISimpleMatrixService:
- matrix_path = Path(tmpdir / "matrix_store")
- os.mkdir(matrix_path)
+def matrix_service(tmp_path: Path) -> ISimpleMatrixService:
+ matrix_path = tmp_path.joinpath("matrix_store")
+ matrix_path.mkdir()
return SimpleMatrixService(matrix_path)
@@ -94,6 +89,7 @@ def test_area_crud(
raw_study_service, variant_study_service
)
)
+ # noinspection PyArgumentList
study = RawStudy(
id="1",
path=empty_study.config.study_path,
@@ -143,6 +139,7 @@ def test_area_crud(
area_manager.delete_area(study, "test2")
assert len(empty_study.config.areas.keys()) == 0
+ # noinspection PyArgumentList
study = VariantStudy(
id="2",
path=empty_study.config.study_path,
@@ -421,8 +418,6 @@ def test_get_all_area():
{"area1": "a2", "area2": "a3", "ui": None},
] == [link.dict() for link in links]
- pass
-
def test_update_area():
raw_study_service = Mock(spec=RawStudyService)
@@ -523,4 +518,4 @@ def test_update_clusters():
)
assert len(new_area_info.thermals) == 1
assert new_area_info.thermals[0].type == "a"
- assert new_area_info.thermals[0].code_oi == None
+ assert new_area_info.thermals[0].code_oi is None
diff --git a/tests/storage/repository/filesystem/matrix/output_series_matrix_test.py b/tests/storage/repository/filesystem/matrix/output_series_matrix_test.py
index cf4c521458..ccf7c2bb82 100644
--- a/tests/storage/repository/filesystem/matrix/output_series_matrix_test.py
+++ b/tests/storage/repository/filesystem/matrix/output_series_matrix_test.py
@@ -9,10 +9,25 @@
from antarest.study.storage.rawstudy.model.filesystem.matrix.head_writer import (
AreaHeadWriter,
)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import (
+ MatrixFrequency,
+)
from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import (
OutputSeriesMatrix,
)
+MATRIX_DAILY_DATA = """\
+DE area va hourly
+\tVARIABLES\tBEGIN\tEND
+\t2\t1\t2
+
+DE\thourly\t\t\t\t01_solar\t02_wind_on
+\t\t\t\t\tMWh\tMWh
+\tindex\tday\tmonth\thourly\tEXP\tEXP
+\t1\t1\tJAN\t00:00\t27000\t600
+\t2\t1\tJAN\t01:00\t48000\t34400
+"""
+
def test_get(tmp_path: Path):
file = tmp_path / "matrix-daily.txt"
@@ -45,9 +60,9 @@ def test_get(tmp_path: Path):
node = OutputSeriesMatrix(
context=Mock(),
config=config,
+ freq=MatrixFrequency.DAILY,
date_serializer=serializer,
head_writer=AreaHeadWriter(area="", data_type="", freq=""),
- freq="",
)
assert node.load() == matrix.to_dict(orient="split")
@@ -72,9 +87,9 @@ def test_save(tmp_path: Path):
node = OutputSeriesMatrix(
context=Mock(),
config=config,
+ freq=MatrixFrequency.DAILY,
date_serializer=serializer,
head_writer=AreaHeadWriter(area="de", data_type="va", freq="hourly"),
- freq="",
)
matrix = pd.DataFrame(
@@ -86,17 +101,5 @@ def test_save(tmp_path: Path):
)
node.dump(matrix.to_dict(orient="split"))
- print(file.read_text())
- assert (
- file.read_text()
- == """DE area va hourly
- VARIABLES BEGIN END
- 2 1 2
-
-DE hourly 01_solar 02_wind_on
- MWh MWh
- index day month hourly EXP EXP
- 1 1 JAN 00:00 27000 600
- 2 1 JAN 01:00 48000 34400
-"""
- )
+ actual = file.read_text()
+ assert actual == MATRIX_DAILY_DATA
diff --git a/tests/storage/repository/filesystem/matrix/test_matrix_node.py b/tests/storage/repository/filesystem/matrix/test_matrix_node.py
index 2849a114bd..35aa1d3dc8 100644
--- a/tests/storage/repository/filesystem/matrix/test_matrix_node.py
+++ b/tests/storage/repository/filesystem/matrix/test_matrix_node.py
@@ -1,9 +1,10 @@
-import json
from pathlib import Path
from tempfile import TemporaryDirectory
-from typing import Optional, List
+from typing import List, Optional
from unittest.mock import Mock
+import pandas as pd # type: ignore
+
from antarest.core.model import JSON
from antarest.study.storage.rawstudy.model.filesystem.config.model import (
FileStudyTreeConfig,
@@ -12,6 +13,7 @@
ContextServer,
)
from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import (
+ MatrixFrequency,
MatrixNode,
)
@@ -21,7 +23,6 @@
"data": [[1, 2], [3, 4]],
}
-
MOCK_MATRIX_DTO = [[1, 2], [3, 4]]
@@ -29,7 +30,11 @@ class MockMatrixNode(MatrixNode):
def __init__(
self, context: ContextServer, config: FileStudyTreeConfig
) -> None:
- super().__init__(config=config, context=context, freq="annual")
+ super().__init__(
+ config=config,
+ context=context,
+ freq=MatrixFrequency.ANNUAL,
+ )
def parse(
self,
@@ -39,8 +44,14 @@ def parse(
) -> JSON:
return MOCK_MATRIX_JSON
- def _dump_json(self, data: JSON) -> None:
- json.dump(data, self.config.path.open("w"))
+ # def dump(
+ # self, data: Union[bytes, JSON], url: Optional[List[str]] = None
+ # ) -> None:
+ # """Dump the matrix data in JSON format to simplify the tests"""
+ # self.config.path.parent.mkdir(exist_ok=True, parents=True)
+ # self.config.path.write_text(
+ # json.dumps(data, indent=2), encoding="utf-8"
+ # )
def check_errors(
self, data: str, url: Optional[List[str]] = None, raising: bool = False
@@ -48,46 +59,51 @@ def check_errors(
pass # not used
-def test_normalize(tmp_path: Path):
- file = tmp_path / "matrix.txt"
- file.touch()
+class TestMatrixNode:
+ def test_normalize(self, tmp_path: Path):
+ file = tmp_path / "matrix.json"
+ file.touch()
+
+ matrix_service = Mock()
+ matrix_service.create.return_value = "my-id"
- matrix_service = Mock()
- matrix_service.create.return_value = "my-id"
+ resolver = Mock()
+ resolver.build_matrix_uri.return_value = "matrix://my-id"
- resolver = Mock()
- resolver.build_matrix_uri.return_value = "matrix://my-id"
+ node = MockMatrixNode(
+ context=ContextServer(matrix=matrix_service, resolver=resolver),
+ config=FileStudyTreeConfig(
+ study_path=file, path=file, study_id="mi-id", version=-1
+ ),
+ )
- node = MockMatrixNode(
- context=ContextServer(matrix=matrix_service, resolver=resolver),
- config=FileStudyTreeConfig(
- study_path=file, path=file, study_id="mi-id", version=-1
- ),
- )
+ node.normalize()
- node.normalize()
- assert node.get_link_path().read_text() == "matrix://my-id"
- assert not file.exists()
- matrix_service.create.assert_called_once_with(MOCK_MATRIX_DTO)
- resolver.build_matrix_uri.assert_called_once_with("my-id")
+ # check the result
+ assert node.get_link_path().read_text() == "matrix://my-id"
+ assert not file.exists()
+ matrix_service.create.assert_called_once_with(MOCK_MATRIX_DTO)
+ resolver.build_matrix_uri.assert_called_once_with("my-id")
+ def test_denormalize(self, tmp_path: Path):
+ file = tmp_path / "matrix.json"
-def test_denormalize(tmp_path: Path):
- file = tmp_path / "matrix.txt"
+ link = file.parent / f"{file.name}.link"
+ link.write_text("my-id")
- link = file.parent / f"{file.name}.link"
- link.write_text("my-id")
+ resolver = Mock()
+ resolver.resolve.return_value = MOCK_MATRIX_JSON
- resolver = Mock()
- resolver.resolve.return_value = MOCK_MATRIX_JSON
+ node = MockMatrixNode(
+ context=ContextServer(matrix=Mock(), resolver=resolver),
+ config=FileStudyTreeConfig(
+ study_path=file, path=file, study_id="mi-id", version=-1
+ ),
+ )
- node = MockMatrixNode(
- context=ContextServer(matrix=Mock(), resolver=resolver),
- config=FileStudyTreeConfig(
- study_path=file, path=file, study_id="mi-id", version=-1
- ),
- )
+ node.denormalize()
- node.denormalize()
- assert not link.exists()
- assert json.loads(file.read_text()) == MOCK_MATRIX_JSON
+ # check the result
+ assert not link.exists()
+ actual = pd.read_csv(file, sep="\t", header=None)
+ assert actual.values.tolist() == MOCK_MATRIX_JSON["data"]
diff --git a/tests/storage/repository/filesystem/root/__init__.py b/tests/storage/repository/filesystem/root/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/storage/repository/filesystem/root/input/__init__.py b/tests/storage/repository/filesystem/root/input/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/storage/repository/filesystem/root/input/hydro/__init__.py b/tests/storage/repository/filesystem/root/input/hydro/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/storage/repository/filesystem/root/input/hydro/common/__init__.py b/tests/storage/repository/filesystem/root/input/hydro/common/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/storage/repository/filesystem/root/input/hydro/common/capacity/__init__.py b/tests/storage/repository/filesystem/root/input/hydro/common/capacity/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/storage/repository/filesystem/root/input/hydro/common/capacity/test_capacity.py b/tests/storage/repository/filesystem/root/input/hydro/common/capacity/test_capacity.py
new file mode 100644
index 0000000000..ed55f0c005
--- /dev/null
+++ b/tests/storage/repository/filesystem/root/input/hydro/common/capacity/test_capacity.py
@@ -0,0 +1,106 @@
+import uuid
+from pathlib import Path
+from unittest.mock import Mock
+
+import pytest
+
+from antarest.matrixstore.service import ISimpleMatrixService
+from antarest.matrixstore.uri_resolver_service import UriResolverService
+from antarest.study.storage.rawstudy.model.filesystem.config.model import (
+ Area,
+ FileStudyTreeConfig,
+)
+from antarest.study.storage.rawstudy.model.filesystem.context import (
+ ContextServer,
+)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import (
+ InputSeriesMatrix,
+)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import (
+ MatrixFrequency,
+)
+from antarest.study.storage.rawstudy.model.filesystem.root.input.hydro.common.capacity import (
+ capacity,
+)
+
+
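+# Expected matrix nodes before/after study version 6.5 (650): credit modulations,
+# inflow patterns and water values only exist from 6.5 onwards.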
+# noinspection SpellCheckingInspection
+BEFORE_650 = {
+ # fmt: off
+ "maxpower_en": {"default_empty": None, "freq": MatrixFrequency.HOURLY, "nb_columns": None},
+ "maxpower_fr": {"default_empty": None, "freq": MatrixFrequency.HOURLY, "nb_columns": None},
+ "reservoir_en": {"default_empty": None, "freq": MatrixFrequency.DAILY, "nb_columns": None},
+ "reservoir_fr": {"default_empty": None, "freq": MatrixFrequency.DAILY, "nb_columns": None},
+ # fmt: on
+}
+
+# noinspection SpellCheckingInspection
+AFTER_650 = {
+ # fmt: off
+ "creditmodulations_en": {"default_empty": None, "freq": MatrixFrequency.HOURLY, "nb_columns": None},
+ "creditmodulations_fr": {"default_empty": None, "freq": MatrixFrequency.HOURLY, "nb_columns": None},
+ "inflowPattern_en": {"default_empty": None, "freq": MatrixFrequency.HOURLY, "nb_columns": None},
+ "inflowPattern_fr": {"default_empty": None, "freq": MatrixFrequency.HOURLY, "nb_columns": None},
+ "maxpower_en": {"default_empty": None, "freq": MatrixFrequency.HOURLY, "nb_columns": None},
+ "maxpower_fr": {"default_empty": None, "freq": MatrixFrequency.HOURLY, "nb_columns": None},
+ "reservoir_en": {"default_empty": None, "freq": MatrixFrequency.DAILY, "nb_columns": None},
+ "reservoir_fr": {"default_empty": None, "freq": MatrixFrequency.DAILY, "nb_columns": None},
+ "waterValues_en": {"default_empty": None, "freq": MatrixFrequency.DAILY, "nb_columns": None},
+ "waterValues_fr": {"default_empty": None, "freq": MatrixFrequency.DAILY, "nb_columns": None},
+ # fmt: on
+}
+
+
+class TestInputHydroCommonCapacity:
+ @pytest.mark.parametrize(
+ "version, expected",
+ [
+ pytest.param("000", BEFORE_650, id="before-650"),
+ pytest.param("650", AFTER_650, id="after-650"),
+ ],
+ )
+ def test_build_input_hydro_common_capacity(
+ self,
+ version: str,
+ expected: dict,
+ ):
+ matrix = Mock(spec=ISimpleMatrixService)
+ resolver = Mock(spec=UriResolverService)
+ context = ContextServer(matrix=matrix, resolver=resolver)
+ study_id = str(uuid.uuid4())
+ config = FileStudyTreeConfig(
+ study_path=Path("path/to/study"),
+ path=Path("path/to/study"),
+ study_id=study_id,
+ version=int(version), # will become a `str` in the future
+ areas={
+ name: Area(
+ name=name.upper(),
+ links={},
+ thermals=[],
+ renewables=[],
+ filters_synthesis=[],
+ filters_year=[],
+ )
+ for name in ["fr", "en"]
+ },
+ )
+
+ node = capacity.InputHydroCommonCapacity(
+ context=context,
+ config=config,
+ children_glob_exceptions=None,
+ )
+ actual = node.build()
+
+ # check the result
+ value: InputSeriesMatrix
+ actual_obj = {
+ key: {
+ "default_empty": value.default_empty,
+ "freq": value.freq,
+ "nb_columns": value.nb_columns,
+ }
+ for key, value in actual.items()
+ }
+ assert actual_obj == expected
diff --git a/tests/storage/repository/filesystem/root/input/hydro/series/__init__.py b/tests/storage/repository/filesystem/root/input/hydro/series/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/storage/repository/filesystem/root/input/hydro/series/area/__init__.py b/tests/storage/repository/filesystem/root/input/hydro/series/area/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/storage/repository/filesystem/root/input/hydro/series/area/test_area.py b/tests/storage/repository/filesystem/root/input/hydro/series/area/test_area.py
new file mode 100644
index 0000000000..a774dae29f
--- /dev/null
+++ b/tests/storage/repository/filesystem/root/input/hydro/series/area/test_area.py
@@ -0,0 +1,100 @@
+import uuid
+from pathlib import Path
+from unittest.mock import Mock
+
+import pytest
+
+from antarest.matrixstore.service import ISimpleMatrixService
+from antarest.matrixstore.uri_resolver_service import UriResolverService
+from antarest.study.storage.rawstudy.model.filesystem.config.model import (
+ FileStudyTreeConfig,
+)
+from antarest.study.storage.rawstudy.model.filesystem.context import (
+ ContextServer,
+)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.constants import (
+ default_scenario_monthly,
+ default_scenario_hourly,
+ default_scenario_daily,
+)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import (
+ InputSeriesMatrix,
+)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import (
+ MatrixFrequency,
+)
+from antarest.study.storage.rawstudy.model.filesystem.root.input.hydro.series.area import (
+ area,
+)
+
+
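+# Expected matrix nodes before/after study version 6.5 (650): the "mod" matrix
+# switches from monthly to daily, with matching default matrices.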
+BEFORE_650 = {
+ "mod": {
+ "default_empty": default_scenario_monthly,
+ "freq": MatrixFrequency.MONTHLY,
+ "nb_columns": None,
+ },
+ "ror": {
+ "default_empty": default_scenario_hourly,
+ "freq": MatrixFrequency.HOURLY,
+ "nb_columns": None,
+ },
+}
+
+AFTER_650 = {
+ "mod": {
+ "default_empty": default_scenario_daily,
+ "freq": MatrixFrequency.DAILY,
+ "nb_columns": None,
+ },
+ "ror": {
+ "default_empty": default_scenario_hourly,
+ "freq": MatrixFrequency.HOURLY,
+ "nb_columns": None,
+ },
+}
+
+
+class TestInputHydroSeriesArea:
+ @pytest.mark.parametrize(
+ "version, expected",
+ [
+ pytest.param("000", BEFORE_650, id="before-650"),
+ pytest.param("650", AFTER_650, id="after-650"),
+ ],
+ )
+ def test_build_input_hydro_series_area(
+ self,
+ version: str,
+ expected: dict,
+ ):
+ matrix = Mock(spec=ISimpleMatrixService)
+ resolver = Mock(spec=UriResolverService)
+ context = ContextServer(matrix=matrix, resolver=resolver)
+ study_id = str(uuid.uuid4())
+ config = FileStudyTreeConfig(
+ study_path=Path("path/to/study"),
+ path=Path("path/to/study"),
+ study_id=study_id,
+ version=int(version), # will become a `str` in the future
+ areas={},
+ )
+
+ node = area.InputHydroSeriesArea(
+ context=context,
+ config=config,
+ children_glob_exceptions=None,
+ )
+ actual = node.build()
+
+ # check the result
+ value: InputSeriesMatrix
+ actual_obj = {
+ key: {
+ "default_empty": value.default_empty,
+ "freq": value.freq,
+ "nb_columns": value.nb_columns,
+ }
+ for key, value in actual.items()
+ }
+ assert actual_obj == expected
diff --git a/tests/storage/repository/filesystem/root/output/__init__.py b/tests/storage/repository/filesystem/root/output/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/storage/repository/filesystem/root/output/simulation/__init__.py b/tests/storage/repository/filesystem/root/output/simulation/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/storage/repository/filesystem/root/output/simulation/mode/__init__.py b/tests/storage/repository/filesystem/root/output/simulation/mode/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/storage/repository/filesystem/root/output/simulation/mode/common/__init__.py b/tests/storage/repository/filesystem/root/output/simulation/mode/common/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_area.py b/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_area.py
new file mode 100644
index 0000000000..e2e9783e40
--- /dev/null
+++ b/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_area.py
@@ -0,0 +1,107 @@
+import uuid
+from pathlib import Path
+from unittest.mock import Mock
+
+import pytest
+
+from antarest.matrixstore.service import ISimpleMatrixService
+from antarest.matrixstore.uri_resolver_service import UriResolverService
+from antarest.study.storage.rawstudy.model.filesystem.config.model import (
+ FileStudyTreeConfig,
+)
+from antarest.study.storage.rawstudy.model.filesystem.context import (
+ ContextServer,
+)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import (
+ MatrixFrequency,
+)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import (
+ AreaOutputSeriesMatrix,
+)
+from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.common import (
+ area,
+)
+
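+# When `mc_all` is True, the "id-*" matrices are expected in addition to the
+# "details-*" and "values-*" ones.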
+# noinspection SpellCheckingInspection
+MC_ALL_TRUE = {
+ "details-annual": {"freq": MatrixFrequency.ANNUAL},
+ "details-daily": {"freq": MatrixFrequency.DAILY},
+ "details-hourly": {"freq": MatrixFrequency.HOURLY},
+ "details-monthly": {"freq": MatrixFrequency.MONTHLY},
+ "details-res-annual": {"freq": MatrixFrequency.ANNUAL},
+ "details-res-daily": {"freq": MatrixFrequency.DAILY},
+ "details-res-hourly": {"freq": MatrixFrequency.HOURLY},
+ "details-res-monthly": {"freq": MatrixFrequency.MONTHLY},
+ "details-res-weekly": {"freq": MatrixFrequency.WEEKLY},
+ "details-weekly": {"freq": MatrixFrequency.WEEKLY},
+ "id-annual": {"freq": MatrixFrequency.ANNUAL},
+ "id-daily": {"freq": MatrixFrequency.DAILY},
+ "id-hourly": {"freq": MatrixFrequency.HOURLY},
+ "id-monthly": {"freq": MatrixFrequency.MONTHLY},
+ "id-weekly": {"freq": MatrixFrequency.WEEKLY},
+ "values-annual": {"freq": MatrixFrequency.ANNUAL},
+ "values-daily": {"freq": MatrixFrequency.DAILY},
+ "values-hourly": {"freq": MatrixFrequency.HOURLY},
+ "values-monthly": {"freq": MatrixFrequency.MONTHLY},
+ "values-weekly": {"freq": MatrixFrequency.WEEKLY},
+}
+
+# noinspection SpellCheckingInspection
+MC_ALL_FALSE = {
+ "details-annual": {"freq": MatrixFrequency.ANNUAL},
+ "details-daily": {"freq": MatrixFrequency.DAILY},
+ "details-hourly": {"freq": MatrixFrequency.HOURLY},
+ "details-monthly": {"freq": MatrixFrequency.MONTHLY},
+ "details-res-annual": {"freq": MatrixFrequency.ANNUAL},
+ "details-res-daily": {"freq": MatrixFrequency.DAILY},
+ "details-res-hourly": {"freq": MatrixFrequency.HOURLY},
+ "details-res-monthly": {"freq": MatrixFrequency.MONTHLY},
+ "details-res-weekly": {"freq": MatrixFrequency.WEEKLY},
+ "details-weekly": {"freq": MatrixFrequency.WEEKLY},
+ "values-annual": {"freq": MatrixFrequency.ANNUAL},
+ "values-daily": {"freq": MatrixFrequency.DAILY},
+ "values-hourly": {"freq": MatrixFrequency.HOURLY},
+ "values-monthly": {"freq": MatrixFrequency.MONTHLY},
+ "values-weekly": {"freq": MatrixFrequency.WEEKLY},
+}
+
+
+class TestOutputSimulationAreaItem:
+ @pytest.mark.parametrize(
+ "mc_all, expected",
+ [
+ pytest.param(True, MC_ALL_TRUE, id="mc-all-True"),
+ pytest.param(False, MC_ALL_FALSE, id="mc-all-False"),
+ ],
+ )
+ def test_build_output_simulation_area_item(
+ self,
+ mc_all: bool,
+ expected: dict,
+ ):
+ matrix = Mock(spec=ISimpleMatrixService)
+ resolver = Mock(spec=UriResolverService)
+ context = ContextServer(matrix=matrix, resolver=resolver)
+ study_id = str(uuid.uuid4())
+ config = FileStudyTreeConfig(
+ study_path=Path("path/to/study"),
+ path=Path("path/to/study"),
+ study_id=study_id,
+ version=850, # will become a `str` in the future
+ areas={},
+ )
+
+ node = area.OutputSimulationAreaItem(
+ context=context,
+ config=config,
+ area="fr",
+ mc_all=mc_all,
+ )
+ actual = node.build()
+
+ # check the result
+ value: AreaOutputSeriesMatrix
+ actual_obj = {
+ key: {"freq": value.freq} for key, value in actual.items()
+ }
+ assert actual_obj == expected
diff --git a/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_binding_const.py b/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_binding_const.py
new file mode 100644
index 0000000000..17ec74d716
--- /dev/null
+++ b/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_binding_const.py
@@ -0,0 +1,70 @@
+import uuid
+from pathlib import Path
+from unittest.mock import Mock
+
+import pytest
+
+from antarest.matrixstore.service import ISimpleMatrixService
+from antarest.matrixstore.uri_resolver_service import UriResolverService
+from antarest.study.storage.rawstudy.model.filesystem.config.model import (
+ FileStudyTreeConfig,
+)
+from antarest.study.storage.rawstudy.model.filesystem.context import (
+ ContextServer,
+)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import (
+ MatrixFrequency,
+)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import (
+ BindingConstraintOutputSeriesMatrix,
+)
+from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.common import (
+ binding_const,
+)
+
+# noinspection SpellCheckingInspection
+NOMINAL_CASE = {
+ "binding-constraints-annual": {"freq": MatrixFrequency.ANNUAL},
+ "binding-constraints-daily": {"freq": MatrixFrequency.DAILY},
+ "binding-constraints-hourly": {"freq": MatrixFrequency.HOURLY},
+ "binding-constraints-monthly": {"freq": MatrixFrequency.MONTHLY},
+ "binding-constraints-weekly": {"freq": MatrixFrequency.WEEKLY},
+}
+
+
+class TestOutputSimulationBindingConstraintItem:
+ @pytest.mark.parametrize(
+ "expected",
+ [
+ pytest.param(NOMINAL_CASE, id="nominal-case-True"),
+ ],
+ )
+ def test_build_output_simulation_binding_constraint_item(
+ self,
+ expected: dict,
+ ):
+ matrix = Mock(spec=ISimpleMatrixService)
+ resolver = Mock(spec=UriResolverService)
+ context = ContextServer(matrix=matrix, resolver=resolver)
+ study_id = str(uuid.uuid4())
+ config = FileStudyTreeConfig(
+ study_path=Path("path/to/study"),
+ path=Path("path/to/study"),
+ study_id=study_id,
+ version=850, # will become a `str` in the future
+ areas={},
+ )
+
+ node = binding_const.OutputSimulationBindingConstraintItem(
+ context=context,
+ config=config,
+ children_glob_exceptions=None,
+ )
+ actual = node.build()
+
+ # check the result
+ value: BindingConstraintOutputSeriesMatrix
+ actual_obj = {
+ key: {"freq": value.freq} for key, value in actual.items()
+ }
+ assert actual_obj == expected
diff --git a/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_link.py b/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_link.py
new file mode 100644
index 0000000000..6063d23d11
--- /dev/null
+++ b/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_link.py
@@ -0,0 +1,88 @@
+import uuid
+from pathlib import Path
+from unittest.mock import Mock
+
+import pytest
+
+from antarest.matrixstore.service import ISimpleMatrixService
+from antarest.matrixstore.uri_resolver_service import UriResolverService
+from antarest.study.storage.rawstudy.model.filesystem.config.model import (
+ FileStudyTreeConfig,
+)
+from antarest.study.storage.rawstudy.model.filesystem.context import (
+ ContextServer,
+)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import (
+ MatrixFrequency,
+)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import (
+ LinkOutputSeriesMatrix,
+)
+from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.common import (
+ link,
+)
+
+# noinspection SpellCheckingInspection
+MC_ALL_TRUE = {
+ "id-annual": {"freq": MatrixFrequency.ANNUAL},
+ "id-daily": {"freq": MatrixFrequency.DAILY},
+ "id-hourly": {"freq": MatrixFrequency.HOURLY},
+ "id-monthly": {"freq": MatrixFrequency.MONTHLY},
+ "id-weekly": {"freq": MatrixFrequency.WEEKLY},
+ "values-annual": {"freq": MatrixFrequency.ANNUAL},
+ "values-daily": {"freq": MatrixFrequency.DAILY},
+ "values-hourly": {"freq": MatrixFrequency.HOURLY},
+ "values-monthly": {"freq": MatrixFrequency.MONTHLY},
+ "values-weekly": {"freq": MatrixFrequency.WEEKLY},
+}
+
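+# Without `mc_all`, only the "values-*" matrices are expected (no "id-*").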
+# noinspection SpellCheckingInspection
+MC_ALL_FALSE = {
+ "values-annual": {"freq": MatrixFrequency.ANNUAL},
+ "values-daily": {"freq": MatrixFrequency.DAILY},
+ "values-hourly": {"freq": MatrixFrequency.HOURLY},
+ "values-monthly": {"freq": MatrixFrequency.MONTHLY},
+ "values-weekly": {"freq": MatrixFrequency.WEEKLY},
+}
+
+
+class TestOutputSimulationLinkItem:
+ @pytest.mark.parametrize(
+ "mc_all, expected",
+ [
+ pytest.param(True, MC_ALL_TRUE, id="mc-all-True"),
+ pytest.param(False, MC_ALL_FALSE, id="mc-all-False"),
+ ],
+ )
+ def test_build_output_simulation_link_item(
+ self,
+ mc_all: bool,
+ expected: dict,
+ ):
+ matrix = Mock(spec=ISimpleMatrixService)
+ resolver = Mock(spec=UriResolverService)
+ context = ContextServer(matrix=matrix, resolver=resolver)
+ study_id = str(uuid.uuid4())
+ config = FileStudyTreeConfig(
+ study_path=Path("path/to/study"),
+ path=Path("path/to/study"),
+ study_id=study_id,
+ version=850, # will become a `str` in the future
+ areas={},
+ )
+
+ node = link.OutputSimulationLinkItem(
+ context=context,
+ config=config,
+ area="fr",
+ link="fr -> de",
+ mc_all=mc_all,
+ )
+ actual = node.build()
+
+ # check the result
+ value: LinkOutputSeriesMatrix
+ actual_obj = {
+ key: {"freq": value.freq} for key, value in actual.items()
+ }
+ assert actual_obj == expected
diff --git a/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_set.py b/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_set.py
new file mode 100644
index 0000000000..03adcbe76d
--- /dev/null
+++ b/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_set.py
@@ -0,0 +1,87 @@
+import uuid
+from pathlib import Path
+from unittest.mock import Mock
+
+import pytest
+
+from antarest.matrixstore.service import ISimpleMatrixService
+from antarest.matrixstore.uri_resolver_service import UriResolverService
+from antarest.study.storage.rawstudy.model.filesystem.config.model import (
+ FileStudyTreeConfig,
+)
+from antarest.study.storage.rawstudy.model.filesystem.context import (
+ ContextServer,
+)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import (
+ MatrixFrequency,
+)
+from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import (
+ AreaOutputSeriesMatrix,
+)
+from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.common import (
+ set,
+)
+
+# noinspection SpellCheckingInspection
+MC_ALL_TRUE = {
+ "id-annual": {"freq": MatrixFrequency.ANNUAL},
+ "id-daily": {"freq": MatrixFrequency.DAILY},
+ "id-hourly": {"freq": MatrixFrequency.HOURLY},
+ "id-monthly": {"freq": MatrixFrequency.MONTHLY},
+ "id-weekly": {"freq": MatrixFrequency.WEEKLY},
+ "values-annual": {"freq": MatrixFrequency.ANNUAL},
+ "values-daily": {"freq": MatrixFrequency.DAILY},
+ "values-hourly": {"freq": MatrixFrequency.HOURLY},
+ "values-monthly": {"freq": MatrixFrequency.MONTHLY},
+ "values-weekly": {"freq": MatrixFrequency.WEEKLY},
+}
+
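+# As for links, only the "values-*" matrices remain when `mc_all` is False.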
+# noinspection SpellCheckingInspection
+MC_ALL_FALSE = {
+ "values-annual": {"freq": MatrixFrequency.ANNUAL},
+ "values-daily": {"freq": MatrixFrequency.DAILY},
+ "values-hourly": {"freq": MatrixFrequency.HOURLY},
+ "values-monthly": {"freq": MatrixFrequency.MONTHLY},
+ "values-weekly": {"freq": MatrixFrequency.WEEKLY},
+}
+
+
+class TestOutputSimulationSet:
+ @pytest.mark.parametrize(
+ "mc_all, expected",
+ [
+ pytest.param(True, MC_ALL_TRUE, id="mc-all-True"),
+ pytest.param(False, MC_ALL_FALSE, id="mc-all-False"),
+ ],
+ )
+ def test_output_simulation_set(
+ self,
+ mc_all: bool,
+ expected: dict,
+ ):
+ matrix = Mock(spec=ISimpleMatrixService)
+ resolver = Mock(spec=UriResolverService)
+ context = ContextServer(matrix=matrix, resolver=resolver)
+ study_id = str(uuid.uuid4())
+ config = FileStudyTreeConfig(
+ study_path=Path("path/to/study"),
+ path=Path("path/to/study"),
+ study_id=study_id,
+ version=850, # will become a `str` in the future
+ areas={},
+ )
+
+ node = set.OutputSimulationSet(
+ context=context,
+ config=config,
+ set="foo",
+ mc_all=mc_all,
+ )
+ actual = node.build()
+
+ # check the result
+ value: AreaOutputSeriesMatrix
+ actual_obj = {
+ key: {"freq": value.freq} for key, value in actual.items()
+ }
+ assert actual_obj == expected
diff --git a/tests/study/business/test_allocation_manager.py b/tests/study/business/test_allocation_manager.py
new file mode 100644
index 0000000000..87326b9cea
--- /dev/null
+++ b/tests/study/business/test_allocation_manager.py
@@ -0,0 +1,499 @@
+import contextlib
+import datetime
+import re
+import uuid
+from unittest.mock import Mock, patch
+
+import pytest
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+
+from antarest.core.exceptions import AllocationDataNotFound, AreaNotFound
+from antarest.core.model import PublicMode
+from antarest.dbmodel import Base
+from antarest.login.model import User, Group
+from antarest.study.business.allocation_management import (
+ AllocationField,
+ AllocationFormFields,
+ AllocationMatrix,
+ AllocationManager,
+)
+from antarest.study.business.area_management import AreaInfoDTO, AreaType
+from antarest.study.model import Study, StudyContentStatus, RawStudy
+from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
+from antarest.study.storage.rawstudy.model.filesystem.root.filestudytree import (
+ FileStudyTree,
+)
+from antarest.study.storage.rawstudy.raw_study_service import RawStudyService
+from antarest.study.storage.storage_service import StudyStorageService
+from antarest.study.storage.variantstudy.command_factory import CommandFactory
+from antarest.study.storage.variantstudy.model.command.common import (
+ CommandName,
+)
+from antarest.study.storage.variantstudy.model.command.update_config import (
+ UpdateConfig,
+)
+from antarest.study.storage.variantstudy.model.command_context import (
+ CommandContext,
+)
+from antarest.study.storage.variantstudy.variant_study_service import (
+ VariantStudyService,
+)
+
+
+class TestAllocationField:
+ def test_base(self):
+ field = AllocationField(areaId="NORTH", coefficient=1)
+ assert field.area_id == "NORTH"
+ assert field.coefficient == 1
+
+ def test_camel_case(self):
+ field = AllocationField(areaId="NORTH", coefficient=1)
+ assert field.dict(by_alias=True) == {
+ "areaId": "NORTH",
+ "coefficient": 1,
+ }
+
+
+class TestAllocationFormFields:
+ def test_base_case(self):
+ fields = AllocationFormFields(
+ allocation=[
+ {"areaId": "NORTH", "coefficient": 0.75},
+ {"areaId": "SOUTH", "coefficient": 0.25},
+ ]
+ )
+ assert fields.allocation == [
+ AllocationField(areaId="NORTH", coefficient=0.75),
+ AllocationField(areaId="SOUTH", coefficient=0.25),
+ ]
+
+ def test_fields_not_empty(self):
+ """Check that the coefficients column is not empty"""
+ with pytest.raises(ValueError, match="empty"):
+ AllocationFormFields(
+ allocation=[],
+ )
+
+ def test_validation_fields_no_duplicate_area_id(self):
+ """Check that the coefficients column does not contain duplicate area IDs"""
+ with pytest.raises(ValueError, match="duplicate"):
+ AllocationFormFields(
+ allocation=[
+ {"areaId": "NORTH", "coefficient": 0.75},
+ {"areaId": "NORTH", "coefficient": 0.25},
+ ],
+ )
+
+ def test_validation_fields_no_negative_coefficient(self):
+ """Check that the coefficients column does not contain negative coefficients"""
+ with pytest.raises(ValueError, match="negative"):
+ AllocationFormFields(
+ allocation=[
+ {"areaId": "NORTH", "coefficient": 0.75},
+ {"areaId": "SOUTH", "coefficient": -0.25},
+ ],
+ )
+
+ def test_validation_fields_no_negative_sum_coefficient(self):
+ """Check that the coefficients values does not sum to negative"""
+ with pytest.raises(ValueError, match="negative"):
+ AllocationFormFields(
+ allocation=[
+ {"areaId": "NORTH", "coefficient": -0.75},
+ {"areaId": "SOUTH", "coefficient": -0.25},
+ ],
+ )
+
+ def test_validation_fields_no_nan_coefficient(self):
+ """Check that the coefficients values does not contain NaN coefficients"""
+ with pytest.raises(ValueError, match="NaN"):
+ AllocationFormFields(
+ allocation=[
+ {"areaId": "NORTH", "coefficient": 0.75},
+ {"areaId": "SOUTH", "coefficient": float("nan")},
+ ],
+ )
+
+
+class TestAllocationMatrix:
+ def test_base_case(self):
+ field = AllocationMatrix(
+ index=["NORTH", "SOUTH"],
+ columns=["NORTH", "SOUTH"],
+ data=[[0.75, 0.25], [0.25, 0.75]],
+ )
+ assert field.index == ["NORTH", "SOUTH"]
+ assert field.columns == ["NORTH", "SOUTH"]
+ assert field.data == [[0.75, 0.25], [0.25, 0.75]]
+
+ def test_validation_coefficients_not_empty(self):
+ """Check that the coefficients matrix is not empty"""
+ with pytest.raises(ValueError, match="empty"):
+ AllocationMatrix(
+ index=[],
+ columns=[],
+ data=[],
+ )
+
+ def test_validation_matrix_shape(self):
+ """Check that the coefficients matrix is square"""
+ with pytest.raises(ValueError, match="square"):
+ AllocationMatrix(
+ index=["NORTH", "SOUTH"],
+ columns=["NORTH"],
+ data=[[0.75, 0.25], [0.25, 0.75]],
+ )
+
+ def test_validation_matrix_sum_positive(self):
+ """Check that the coefficients matrix sum to positive"""
+ with pytest.raises(ValueError, match="negative"):
+ AllocationMatrix(
+ index=["NORTH", "SOUTH"],
+ columns=["NORTH", "SOUTH"],
+ data=[[0.75, -0.25], [-0.25, 0.75]],
+ )
+
+ def test_validation_matrix_no_nan(self):
+ """Check that the coefficients matrix does not contain NaN values"""
+ with pytest.raises(ValueError, match="NaN"):
+ AllocationMatrix(
+ index=["NORTH", "SOUTH"],
+ columns=["NORTH", "SOUTH"],
+ data=[[0.75, 0.25], [0.25, float("nan")]],
+ )
+
+ def test_validation_matrix_no_non_null_values(self):
+ """Check that the coefficients matrix does not contain only null values"""
+ with pytest.raises(ValueError, match="(?:all|zero)"):
+ AllocationMatrix(
+ index=["NORTH", "SOUTH"],
+ columns=["NORTH", "SOUTH"],
+ data=[[0, 0], [0, 0]],
+ )
+
+
+@pytest.fixture(scope="function", name="db_engine")
+def db_engine_fixture():
+ engine = create_engine("sqlite:///:memory:")
+ Base.metadata.create_all(engine)
+ yield engine
+ engine.dispose()
+
+
+@pytest.fixture(scope="function", name="db_session")
+def db_session_fixture(db_engine):
+ make_session = sessionmaker(bind=db_engine)
+ with contextlib.closing(make_session()) as session:
+ yield session
+
+
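+# Dotted path of `execute_or_add_commands`, patched in the tests below.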
+# noinspection SpellCheckingInspection
+EXECUTE_OR_ADD_COMMANDS = (
+ "antarest.study.business.allocation_management.execute_or_add_commands"
+)
+
+
+class TestAllocationManager:
+ @pytest.fixture(name="study_storage_service")
+ def study_storage_service(self) -> StudyStorageService:
+ """Return a mocked StudyStorageService."""
+ return Mock(
+ spec=StudyStorageService,
+ variant_study_service=Mock(
+ spec=VariantStudyService,
+ command_factory=Mock(
+ spec=CommandFactory,
+ command_context=Mock(spec=CommandContext),
+ ),
+ ),
+ get_storage=Mock(
+ return_value=Mock(
+ spec=RawStudyService, get_raw=Mock(spec=FileStudy)
+ )
+ ),
+ )
+
+ # noinspection PyArgumentList
+ @pytest.fixture(name="study_uuid")
+ def study_uuid_fixture(self, db_session) -> str:
+ user = User(id=0, name="admin")
+ group = Group(id="my-group", name="group")
+ raw_study = RawStudy(
+ id=str(uuid.uuid4()),
+ name="Dummy",
+ version="850",
+ author="John Smith",
+ created_at=datetime.datetime.now(datetime.timezone.utc),
+ updated_at=datetime.datetime.now(datetime.timezone.utc),
+ public_mode=PublicMode.FULL,
+ owner=user,
+ groups=[group],
+ workspace="default",
+ path="/path/to/study",
+ content_status=StudyContentStatus.WARNING,
+ )
+ db_session.add(raw_study)
+ db_session.commit()
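+        # Return only the ID: each test re-fetches the study from `db_session`.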
+ return raw_study.id
+
+ def test_get_allocation_matrix__nominal_case(
+ self, db_session, study_storage_service, study_uuid
+ ):
+ # The study must be fetched from the database
+ study: RawStudy = db_session.query(Study).get(study_uuid)
+
+ # Prepare the mocks
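+        # Allocation data as stored in the study tree under
+        # "input/hydro/allocation/<area>/[allocation]"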
+ allocation_cfg = {
+ "n": {"[allocation]": {"n": 1}},
+ "e": {"[allocation]": {"e": 3, "s": 1}},
+ "s": {"[allocation]": {"s": 0.1, "n": 0.2, "w": 0.6}},
+ "w": {"[allocation]": {"w": 1}},
+ }
+ storage = study_storage_service.get_storage(study)
+ file_study = storage.get_raw(study)
+ file_study.tree = Mock(
+ spec=FileStudyTree,
+ get=Mock(return_value=allocation_cfg),
+ )
+
+ # Given the following arguments
+ all_areas = [
+ AreaInfoDTO(id="n", name="North", type=AreaType.AREA),
+ AreaInfoDTO(id="e", name="East", type=AreaType.AREA),
+ AreaInfoDTO(id="s", name="South", type=AreaType.AREA),
+ AreaInfoDTO(id="w", name="West", type=AreaType.AREA),
+ ]
+ area_id = "*" # all areas
+ manager = AllocationManager(study_storage_service)
+
+ # run
+ matrix = manager.get_allocation_matrix(study, all_areas)
+
+ # Check
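+        # Each column holds the coefficients declared for the matching area:
+        # e.g. "s" -> {"s": 0.1, "n": 0.2, "w": 0.6} fills the "s" column.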
+ assert matrix == AllocationMatrix(
+ index=["n", "e", "s", "w"],
+ columns=["n", "e", "s", "w"],
+ data=[
+ [1.0, 0.0, 0.2, 0.0],
+ [0.0, 3.0, 0.0, 0.0],
+ [0.0, 1.0, 0.1, 0.0],
+ [0.0, 0.0, 0.6, 1.0],
+ ],
+ )
+
+ def test_get_allocation_matrix__no_allocation(
+ self, db_session, study_storage_service, study_uuid
+ ):
+ # The study must be fetched from the database
+ study: RawStudy = db_session.query(Study).get(study_uuid)
+
+ # Prepare the mocks
+ allocation_cfg = {}
+ storage = study_storage_service.get_storage(study)
+ file_study = storage.get_raw(study)
+ file_study.tree = Mock(
+ spec=FileStudyTree,
+ get=Mock(return_value=allocation_cfg),
+ )
+
+ # Given the following arguments
+ all_areas = [
+ AreaInfoDTO(id="n", name="North", type=AreaType.AREA),
+ AreaInfoDTO(id="e", name="East", type=AreaType.AREA),
+ AreaInfoDTO(id="s", name="South", type=AreaType.AREA),
+ AreaInfoDTO(id="w", name="West", type=AreaType.AREA),
+ ]
+ area_id = "*"
+ manager = AllocationManager(study_storage_service)
+
+ with pytest.raises(AllocationDataNotFound) as ctx:
+ manager.get_allocation_matrix(study, all_areas)
+ assert re.fullmatch(r"Allocation data.*is not found", ctx.value.detail)
+
+ def test_get_allocation_form_fields__nominal_case(
+ self, db_session, study_storage_service, study_uuid
+ ):
+ study: RawStudy = db_session.query(Study).get(study_uuid)
+ allocation_cfg = {
+ "n": {"[allocation]": {"n": 1}},
+ "e": {"[allocation]": {"e": 3, "s": 1}},
+ "s": {"[allocation]": {"s": 0.1, "n": 0.2, "w": 0.6}},
+ "w": {"[allocation]": {"w": 1}},
+ }
+ storage = study_storage_service.get_storage(study)
+ file_study = storage.get_raw(study)
+ file_study.tree = Mock(
+ spec=FileStudyTree,
+ get=Mock(return_value=allocation_cfg["n"]),
+ )
+
+ all_areas = [
+ AreaInfoDTO(id="n", name="North", type=AreaType.AREA),
+ AreaInfoDTO(id="e", name="East", type=AreaType.AREA),
+ AreaInfoDTO(id="s", name="South", type=AreaType.AREA),
+ AreaInfoDTO(id="w", name="West", type=AreaType.AREA),
+ ]
+
+ area_id = "n"
+ manager = AllocationManager(study_storage_service)
+
+ fields = manager.get_allocation_form_fields(
+ all_areas=all_areas, study=study, area_id=area_id
+ )
+
+ expected_allocation = [
+ AllocationField.construct(area_id=area, coefficient=value)
+ for area, value in allocation_cfg[area_id]["[allocation]"].items()
+ ]
+ assert fields.allocation == expected_allocation
+
+ def test_get_allocation_form_fields__no_allocation_data(
+ self, db_session, study_storage_service, study_uuid
+ ):
+ study: RawStudy = db_session.query(Study).get(study_uuid)
+ allocation_cfg = {"n": {}}
+ storage = study_storage_service.get_storage(study)
+ file_study = storage.get_raw(study)
+ file_study.tree = Mock(
+ spec=FileStudyTree,
+ get=Mock(return_value=allocation_cfg["n"]),
+ )
+
+ all_areas = [
+ AreaInfoDTO(id="n", name="North", type=AreaType.AREA),
+ ]
+
+ area_id = "n"
+ manager = AllocationManager(study_storage_service)
+
+ with pytest.raises(AllocationDataNotFound) as ctx:
+ manager.get_allocation_form_fields(
+ all_areas=all_areas, study=study, area_id=area_id
+ )
+ assert "n" in ctx.value.detail
+
+ def test_set_allocation_form_fields__nominal_case(
+ self, db_session, study_storage_service, study_uuid
+ ):
+ study: RawStudy = db_session.query(Study).get(study_uuid)
+ all_areas = [
+ AreaInfoDTO(id="n", name="North", type=AreaType.AREA),
+ AreaInfoDTO(id="e", name="East", type=AreaType.AREA),
+ AreaInfoDTO(id="s", name="South", type=AreaType.AREA),
+ AreaInfoDTO(id="w", name="West", type=AreaType.AREA),
+ ]
+ area_id = "n"
+ manager = AllocationManager(study_storage_service)
+ with patch(EXECUTE_OR_ADD_COMMANDS) as exe:
+ with patch(
+ "antarest.study.business.allocation_management.AllocationManager.get_allocation_data",
+ return_value={"e": 0.5, "s": 0.25, "w": 0.25},
+ ):
+ manager.set_allocation_form_fields(
+ all_areas=all_areas,
+ study=study,
+ area_id=area_id,
+ data=AllocationFormFields.construct(
+ allocation=[
+ AllocationField.construct(
+ area_id="e", coefficient=0.5
+ ),
+ AllocationField.construct(
+ area_id="s", coefficient=0.25
+ ),
+ AllocationField.construct(
+ area_id="w", coefficient=0.25
+ ),
+ ],
+ ),
+ )
+
+ assert exe.call_count == 1
+ mock_call = exe.mock_calls[0]
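+        # signature: execute_or_add_commands(study, file_study, commands, storage_service)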
+ actual_study, _, actual_commands, _ = mock_call.args
+ assert actual_study == study
+ assert len(actual_commands) == 1
+ cmd: UpdateConfig = actual_commands[0]
+ assert cmd.command_name == CommandName.UPDATE_CONFIG
+ assert cmd.target == f"input/hydro/allocation/{area_id}/[allocation]"
+ assert cmd.data == {"e": 0.5, "s": 0.25, "w": 0.25}
+
+ def test_set_allocation_form_fields__no_allocation_data(
+ self, db_session, study_storage_service, study_uuid
+ ):
+ study: RawStudy = db_session.query(Study).get(study_uuid)
+
+ all_areas = [
+ AreaInfoDTO(id="n", name="North", type=AreaType.AREA),
+ AreaInfoDTO(id="e", name="East", type=AreaType.AREA),
+ AreaInfoDTO(id="s", name="South", type=AreaType.AREA),
+ AreaInfoDTO(id="w", name="West", type=AreaType.AREA),
+ ]
+
+ area_id = "n"
+ manager = AllocationManager(study_storage_service)
+
+ with patch(EXECUTE_OR_ADD_COMMANDS) as exe:
+ with patch(
+ "antarest.study.business.allocation_management.AllocationManager.get_allocation_data",
+ side_effect=AllocationDataNotFound(area_id),
+ ):
+ with pytest.raises(AllocationDataNotFound) as ctx:
+ manager.set_allocation_form_fields(
+ all_areas=all_areas,
+ study=study,
+ area_id=area_id,
+ data=AllocationFormFields.construct(
+ allocation=[
+ AllocationField.construct(
+ area_id="e", coefficient=0.5
+ ),
+ AllocationField.construct(
+ area_id="s", coefficient=0.25
+ ),
+ AllocationField.construct(
+ area_id="w", coefficient=0.25
+ ),
+ ],
+ ),
+ )
+ assert "n" in ctx.value.detail
+
+ def test_set_allocation_form_fields__invalid_area_ids(
+ self, db_session, study_storage_service, study_uuid
+ ):
+ study: RawStudy = db_session.query(Study).get(study_uuid)
+
+ all_areas = [
+ AreaInfoDTO(id="n", name="North", type=AreaType.AREA),
+ AreaInfoDTO(id="e", name="East", type=AreaType.AREA),
+ AreaInfoDTO(id="s", name="South", type=AreaType.AREA),
+ AreaInfoDTO(id="w", name="West", type=AreaType.AREA),
+ ]
+
+ area_id = "n"
+ manager = AllocationManager(study_storage_service)
+
+ data = AllocationFormFields.construct(
+ allocation=[
+ AllocationField.construct(area_id="e", coefficient=0.5),
+ AllocationField.construct(area_id="s", coefficient=0.25),
+ AllocationField.construct(
+ area_id="invalid_area", coefficient=0.25
+ ),
+ ]
+ )
+
+ with pytest.raises(AreaNotFound) as ctx:
+ manager.set_allocation_form_fields(
+ all_areas=all_areas, study=study, area_id=area_id, data=data
+ )
+
+ assert "invalid_area" in ctx.value.detail
diff --git a/tests/study/business/test_correlation_manager.py b/tests/study/business/test_correlation_manager.py
new file mode 100644
index 0000000000..57050249f3
--- /dev/null
+++ b/tests/study/business/test_correlation_manager.py
@@ -0,0 +1,397 @@
+import contextlib
+import datetime
+import uuid
+from unittest.mock import Mock, patch
+
+import numpy as np
+import pytest
+from antarest.core.exceptions import AreaNotFound
+from antarest.core.model import PublicMode
+from antarest.dbmodel import Base
+from antarest.login.model import Group, User
+from antarest.study.business.area_management import AreaInfoDTO, AreaType
+from antarest.study.business.correlation_management import (
+ AreaCoefficientItem,
+ CorrelationFormFields,
+ CorrelationManager,
+ CorrelationMatrix,
+)
+from antarest.study.model import RawStudy, Study, StudyContentStatus
+from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
+from antarest.study.storage.rawstudy.model.filesystem.root.filestudytree import (
+ FileStudyTree,
+)
+from antarest.study.storage.rawstudy.raw_study_service import RawStudyService
+from antarest.study.storage.storage_service import StudyStorageService
+from antarest.study.storage.variantstudy.command_factory import CommandFactory
+from antarest.study.storage.variantstudy.model.command.common import (
+ CommandName,
+)
+from antarest.study.storage.variantstudy.model.command.update_config import (
+ UpdateConfig,
+)
+from antarest.study.storage.variantstudy.model.command_context import (
+ CommandContext,
+)
+from antarest.study.storage.variantstudy.variant_study_service import (
+ VariantStudyService,
+)
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+
+
+class TestCorrelationField:
+ def test_init__nominal_case(self):
+ field = AreaCoefficientItem(area_id="NORTH", coefficient=100)
+ assert field.area_id == "NORTH"
+ assert field.coefficient == 100
+
+ def test_init__camel_case_args(self):
+ field = AreaCoefficientItem(areaId="NORTH", coefficient=100)
+ assert field.area_id == "NORTH"
+ assert field.coefficient == 100
+
+
+class TestCorrelationFormFields:
+ def test_init__nominal_case(self):
+ fields = CorrelationFormFields(
+ correlation=[
+ {"area_id": "NORTH", "coefficient": 75},
+ {"area_id": "SOUTH", "coefficient": 25},
+ ]
+ )
+ assert fields.correlation == [
+ AreaCoefficientItem(area_id="NORTH", coefficient=75),
+ AreaCoefficientItem(area_id="SOUTH", coefficient=25),
+ ]
+
+ def test_validation__coefficients_not_empty(self):
+ """correlation must not be empty"""
+ with pytest.raises(ValueError, match="must not be empty"):
+ CorrelationFormFields(correlation=[])
+
+ def test_validation__coefficients_no_duplicates(self):
+ """correlation must not contain duplicate area IDs:"""
+ with pytest.raises(ValueError, match="duplicate area IDs") as ctx:
+ CorrelationFormFields(
+ correlation=[
+ {"area_id": "NORTH", "coefficient": 50},
+ {"area_id": "NORTH", "coefficient": 25},
+ {"area_id": "SOUTH", "coefficient": 25},
+ ]
+ )
+ assert "NORTH" in str(ctx.value) # duplicates
+
+ @pytest.mark.parametrize("coefficient", [-101, 101, np.nan])
+ def test_validation__coefficients_invalid_values(self, coefficient):
+ """coefficients must be between -100 and 100"""
+ with pytest.raises(
+ ValueError, match="between -100 and 100|must not contain NaN"
+ ):
+ CorrelationFormFields(
+ correlation=[
+ {"area_id": "NORTH", "coefficient": coefficient},
+ ]
+ )
+
+
+class TestCorrelationMatrix:
+ def test_init__nominal_case(self):
+ field = CorrelationMatrix(
+ index=["fr", "de"],
+ columns=["fr"],
+ data=[
+ [1.0],
+ [0.2],
+ ],
+ )
+ assert field.index == ["fr", "de"]
+ assert field.columns == ["fr"]
+ assert field.data == [
+ [1.0],
+ [0.2],
+ ]
+
+ def test_validation__coefficients_non_empty_array(self):
+ """Check that the coefficients matrix is a non-empty array"""
+ # fmt: off
+ with pytest.raises(ValueError, match="must not be empty"):
+ CorrelationMatrix(
+ index=[],
+ columns=[],
+ data=[],
+ )
+        # fmt: on
+
+ def test_validation__coefficients_array_shape(self):
+ """Check that the coefficients matrix is an array of shape 2×1"""
+ with pytest.raises(ValueError, match=r"must have shape \(\d+×\d+\)"):
+ CorrelationMatrix(
+ index=["fr", "de"],
+ columns=["fr"],
+ data=[[1, 2], [3, 4]],
+ )
+
+ @pytest.mark.parametrize("coefficient", [-1.1, 1.1, np.nan])
+ def test_validation__coefficients_invalid_value(self, coefficient):
+ """Check that all coefficients matrix has positive or nul coefficients"""
+ # fmt: off
+ with pytest.raises(ValueError, match="between -1 and 1|must not contain NaN"):
+ CorrelationMatrix(
+ index=["fr", "de"],
+ columns=["fr", "de"],
+ data=[
+ [1.0, coefficient],
+ [0.2, 0],
+ ],
+ )
+ # fmt: on
+
+ def test_validation__matrix_not_symmetric(self):
+ """Check that the correlation matrix is not symmetric"""
+ with pytest.raises(ValueError, match=r"not symmetric"):
+ CorrelationMatrix(
+ index=["fr", "de"],
+ columns=["fr", "de"],
+ data=[[0.1, 0.2], [0.3, 0.4]],
+ )
+
+
+@pytest.fixture(scope="function", name="db_engine")
+def db_engine_fixture():
+ engine = create_engine("sqlite:///:memory:")
+ Base.metadata.create_all(engine)
+ yield engine
+ engine.dispose()
+
+
+@pytest.fixture(scope="function", name="db_session")
+def db_session_fixture(db_engine):
+ make_session = sessionmaker(bind=db_engine)
+ with contextlib.closing(make_session()) as session:
+ yield session
+
+
+# noinspection SpellCheckingInspection
+EXECUTE_OR_ADD_COMMANDS = (
+ "antarest.study.business.correlation_management.execute_or_add_commands"
+)
+
+
+class TestCorrelationManager:
+ @pytest.fixture(name="study_storage_service")
+ def study_storage_service(self) -> StudyStorageService:
+ """Return a mocked StudyStorageService."""
+ return Mock(
+ spec=StudyStorageService,
+ variant_study_service=Mock(
+ spec=VariantStudyService,
+ command_factory=Mock(
+ spec=CommandFactory,
+ command_context=Mock(spec=CommandContext),
+ ),
+ ),
+ get_storage=Mock(
+ return_value=Mock(
+ spec=RawStudyService, get_raw=Mock(spec=FileStudy)
+ )
+ ),
+ )
+
+ # noinspection PyArgumentList
+ @pytest.fixture(name="study_uuid")
+ def study_uuid_fixture(self, db_session) -> str:
+ user = User(id=0, name="admin")
+ group = Group(id="my-group", name="group")
+ raw_study = RawStudy(
+ id=str(uuid.uuid4()),
+ name="Dummy",
+ version="850",
+ author="John Smith",
+ created_at=datetime.datetime.now(datetime.timezone.utc),
+ updated_at=datetime.datetime.now(datetime.timezone.utc),
+ public_mode=PublicMode.FULL,
+ owner=user,
+ groups=[group],
+ workspace="default",
+ path="/path/to/study",
+ content_status=StudyContentStatus.WARNING,
+ )
+ db_session.add(raw_study)
+ db_session.commit()
+ return raw_study.id
+
+ def test_get_correlation_matrix__nominal_case(
+ self, db_session, study_storage_service, study_uuid
+ ):
+ # The study must be fetched from the database
+ study: RawStudy = db_session.query(Study).get(study_uuid)
+
+ # Prepare the mocks
+ correlation_cfg = {
+ "n%n": 0.1,
+ "e%e": 0.3,
+ "s%s": 0.1,
+ "s%n": 0.2,
+ "s%w": 0.6,
+ "w%w": 0.1,
+ }
+ storage = study_storage_service.get_storage(study)
+ file_study = storage.get_raw(study)
+ file_study.tree = Mock(
+ spec=FileStudyTree,
+ get=Mock(return_value=correlation_cfg),
+ )
+
+ # Given the following arguments
+ all_areas = [
+ AreaInfoDTO(id="n", name="North", type=AreaType.AREA),
+ AreaInfoDTO(id="e", name="East", type=AreaType.AREA),
+ AreaInfoDTO(id="s", name="South", type=AreaType.AREA),
+ AreaInfoDTO(id="w", name="West", type=AreaType.AREA),
+ ]
+ manager = CorrelationManager(study_storage_service)
+
+ # run
+ matrix = manager.get_correlation_matrix(
+ all_areas=all_areas, study=study, columns=[]
+ )
+
+ # Check
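+        # The diagonal is forced to 1.0 ("n%n", "e%e", ... are ignored)
+        # and each "a%b" entry is mirrored across the diagonal.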
+ assert matrix == CorrelationMatrix(
+ index=["n", "e", "s", "w"],
+ columns=["n", "e", "s", "w"],
+ data=[
+ [1.0, 0.0, 0.2, 0.0],
+ [0.0, 1.0, 0.0, 0.0],
+ [0.2, 0.0, 1.0, 0.6],
+ [0.0, 0.0, 0.6, 1.0],
+ ],
+ )
+
+ def test_get_field_values__nominal_case(
+ self, db_session, study_storage_service, study_uuid
+ ):
+ # The study must be fetched from the database
+ study: RawStudy = db_session.query(Study).get(study_uuid)
+
+ # Prepare the mocks
+ # NOTE: "s%s" value is ignored
+ correlation_cfg = {"s%s": 0.1, "n%s": 0.2, "w%n": 0.6}
+ storage = study_storage_service.get_storage(study)
+ file_study = storage.get_raw(study)
+ file_study.tree = Mock(
+ spec=FileStudyTree,
+ get=Mock(return_value=correlation_cfg),
+ )
+
+ # Given the following arguments
+ all_areas = [
+ AreaInfoDTO(id="n", name="North", type=AreaType.AREA),
+ AreaInfoDTO(id="e", name="East", type=AreaType.AREA),
+ AreaInfoDTO(id="s", name="South", type=AreaType.AREA),
+ AreaInfoDTO(id="w", name="West", type=AreaType.AREA),
+ ]
+ area_id = "s" # South
+ manager = CorrelationManager(study_storage_service)
+ fields = manager.get_correlation_form_fields(
+ all_areas=all_areas, study=study, area_id=area_id
+ )
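+        # Ratios from the study tree are exposed as percentages in the
+        # form fields: 0.2 -> 20.0 (the self-correlation of "s" is 100.0).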
+ assert fields == CorrelationFormFields(
+ correlation=[
+ AreaCoefficientItem(area_id="s", coefficient=100.0),
+ AreaCoefficientItem(area_id="n", coefficient=20.0),
+ ]
+ )
+
+ def test_set_field_values__nominal_case(
+ self, db_session, study_storage_service, study_uuid
+ ):
+ # The study must be fetched from the database
+ study: RawStudy = db_session.query(Study).get(study_uuid)
+
+        # Prepare the mocks: the correlation config is initially empty
+ correlation_cfg = {}
+ storage = study_storage_service.get_storage(study)
+ file_study = storage.get_raw(study)
+ file_study.tree = Mock(
+ spec=FileStudyTree,
+ get=Mock(return_value=correlation_cfg),
+ )
+
+ # Given the following arguments
+ all_areas = [
+ AreaInfoDTO(id="n", name="North", type=AreaType.AREA),
+ AreaInfoDTO(id="e", name="East", type=AreaType.AREA),
+ AreaInfoDTO(id="s", name="South", type=AreaType.AREA),
+ AreaInfoDTO(id="w", name="West", type=AreaType.AREA),
+ ]
+ area_id = "s" # South
+ manager = CorrelationManager(study_storage_service)
+ with patch(EXECUTE_OR_ADD_COMMANDS) as exe:
+ manager.set_correlation_form_fields(
+ all_areas=all_areas,
+ study=study,
+ area_id=area_id,
+ data=CorrelationFormFields(
+ correlation=[
+ AreaCoefficientItem(area_id="s", coefficient=100),
+ AreaCoefficientItem(area_id="e", coefficient=30),
+ AreaCoefficientItem(area_id="n", coefficient=40),
+ ]
+ ),
+ )
+
+ # check update
+ assert exe.call_count == 1
+ mock_call = exe.mock_calls[0]
+ # signature: execute_or_add_commands(study, file_study, commands, storage_service)
+ actual_study, _, actual_cmds, _ = mock_call.args
+ assert actual_study == study
+ assert len(actual_cmds) == 1
+ cmd: UpdateConfig = actual_cmds[0]
+ assert cmd.command_name == CommandName.UPDATE_CONFIG
+ assert cmd.target == "input/hydro/prepro/correlation/annual"
+ assert cmd.data == {"e%s": 0.3, "n%s": 0.4}
+
+ def test_set_field_values__area_not_found(
+ self, db_session, study_storage_service, study_uuid
+ ):
+ # The study must be fetched from the database
+ study: RawStudy = db_session.query(Study).get(study_uuid)
+
+        # Prepare the mocks: the correlation config is initially empty
+ correlation_cfg = {}
+ storage = study_storage_service.get_storage(study)
+ file_study = storage.get_raw(study)
+ file_study.tree = Mock(
+ spec=FileStudyTree,
+ get=Mock(return_value=correlation_cfg),
+ )
+
+ # Given the following arguments
+ all_areas = [
+ AreaInfoDTO(id="n", name="North", type=AreaType.AREA),
+ AreaInfoDTO(id="e", name="East", type=AreaType.AREA),
+ AreaInfoDTO(id="s", name="South", type=AreaType.AREA),
+ AreaInfoDTO(id="w", name="West", type=AreaType.AREA),
+ ]
+ area_id = "n" # South
+ manager = CorrelationManager(study_storage_service)
+
+ with patch(EXECUTE_OR_ADD_COMMANDS) as exe:
+ with pytest.raises(AreaNotFound) as ctx:
+ manager.set_correlation_form_fields(
+ all_areas=all_areas,
+ study=study,
+ area_id=area_id,
+ data=CorrelationFormFields(
+ correlation=[
+ AreaCoefficientItem(
+ area_id="UNKNOWN", coefficient=3.14
+ ),
+ ]
+ ),
+ )
+ assert "'UNKNOWN'" in ctx.value.detail
+ exe.assert_not_called()
diff --git a/tests/variantstudy/conftest.py b/tests/variantstudy/conftest.py
index 77fe89bdf4..e9ad73b7d7 100644
--- a/tests/variantstudy/conftest.py
+++ b/tests/variantstudy/conftest.py
@@ -51,6 +51,7 @@ def matrix_service() -> MatrixService:
@pytest.fixture
def command_context(matrix_service: MatrixService) -> CommandContext:
+ # sourcery skip: inline-immediately-returned-variable
command_context = CommandContext(
generator_matrix_constants=GeneratorMatrixConstants(
matrix_service=matrix_service
@@ -75,10 +76,10 @@ def command_factory(matrix_service: MatrixService) -> CommandFactory:
@pytest.fixture
-def empty_study(tmp_path: str, matrix_service: MatrixService) -> FileStudy:
+def empty_study(tmp_path: Path, matrix_service: MatrixService) -> FileStudy:
project_dir: Path = Path(__file__).parent.parent.parent
empty_study_path: Path = project_dir / "resources" / "empty_study_720.zip"
- empty_study_destination_path = Path(tmp_path) / "empty-study"
+ empty_study_destination_path = tmp_path.joinpath("empty-study")
with zipfile.ZipFile(empty_study_path, "r") as zip_empty_study:
zip_empty_study.extractall(empty_study_destination_path)
@@ -90,6 +91,7 @@ def empty_study(tmp_path: str, matrix_service: MatrixService) -> FileStudy:
areas={},
sets={},
)
+ # sourcery skip: inline-immediately-returned-variable
file_study = FileStudy(
config=config,
tree=FileStudyTree(
diff --git a/tests/variantstudy/model/command/test_remove_area.py b/tests/variantstudy/model/command/test_remove_area.py
index 6fd8e646fb..b1f61f0bd9 100644
--- a/tests/variantstudy/model/command/test_remove_area.py
+++ b/tests/variantstudy/model/command/test_remove_area.py
@@ -1,10 +1,10 @@
-from checksumdir import dirhash
+import pytest
-from antarest.study.storage.rawstudy.io.reader import IniReader
from antarest.study.storage.rawstudy.model.filesystem.config.model import (
transform_name_to_id,
)
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
+from antarest.study.storage.study_upgrader import upgrade_study
from antarest.study.storage.variantstudy.model.command.common import (
TimeStep,
BindingConstraintOperator,
@@ -41,25 +41,14 @@
class TestRemoveArea:
- def test_validation(self, empty_study: FileStudy):
- pass
-
+ @pytest.mark.parametrize("version", [810, 840])
def test_apply(
self,
empty_study: FileStudy,
command_context: CommandContext,
+ version: int,
):
- bd_config = IniReader().read(
- empty_study.config.study_path
- / "input"
- / "bindingconstraints"
- / "bindingconstraints.ini"
- )
-
- area_name = "Area"
- area_id = transform_name_to_id(area_name)
- area_name2 = "Area2"
- area_id2 = transform_name_to_id(area_name2)
+ # noinspection SpellCheckingInspection
empty_study.tree.save(
{
"input": {
@@ -84,6 +73,8 @@ def test_apply(
}
)
+ area_name = "Area"
+ area_id = transform_name_to_id(area_name)
create_area_command: ICommand = CreateArea.parse_obj(
{
"area_name": area_name,
@@ -93,14 +84,6 @@ def test_apply(
output = create_area_command.apply(study_data=empty_study)
assert output.status
- parameters = {
- "group": "Other",
- "unitcount": "1",
- "nominalcapacity": "1000000",
- "marginal-cost": "30",
- "market-bid-cost": "30",
- }
-
create_district_command = CreateDistrict(
name="foo",
base_filter=DistrictBaseFilter.add_all,
@@ -112,85 +95,99 @@ def test_apply(
########################################################################################
- empty_study_hash = dirhash(empty_study.config.study_path, "md5")
+ upgrade_study(empty_study.config.study_path, str(version))
- for version in [810, 840]:
- empty_study.config.version = version
- create_area_command: ICommand = CreateArea.parse_obj(
- {
- "area_name": area_name2,
- "command_context": command_context,
- }
- )
- output = create_area_command.apply(study_data=empty_study)
- assert output.status
-
- create_link_command: ICommand = CreateLink(
- area1=area_id,
- area2=area_id2,
- parameters={},
- command_context=command_context,
- series=[[0]],
- )
- output = create_link_command.apply(study_data=empty_study)
- assert output.status
-
- create_cluster_command = CreateCluster.parse_obj(
- {
- "area_id": area_id2,
- "cluster_name": "cluster",
- "parameters": parameters,
- "prepro": [[0]],
- "modulation": [[0]],
- "command_context": command_context,
- }
- )
- output = create_cluster_command.apply(study_data=empty_study)
- assert output.status
-
- bind1_cmd = CreateBindingConstraint(
- name="BD 2",
- time_step=TimeStep.HOURLY,
- operator=BindingConstraintOperator.LESS,
- coeffs={
- f"{area_id}%{area_id2}": [400, 30],
- f"{area_id2}.cluster": [400, 30],
+ empty_study_cfg = empty_study.tree.get(depth=999)
+ if version >= 830:
+ empty_study_cfg["input"]["areas"][area_id]["adequacy_patch"] = {
+ "adequacy-patch": {"adequacy-patch-mode": "outside"}
+ }
+ empty_study_cfg["input"]["links"][area_id]["capacities"] = {}
+
+ area_name2 = "Area2"
+ area_id2 = transform_name_to_id(area_name2)
+
+ empty_study.config.version = version
+ create_area_command: ICommand = CreateArea.parse_obj(
+ {
+ "area_name": area_name2,
+ "command_context": command_context,
+ }
+ )
+ output = create_area_command.apply(study_data=empty_study)
+ assert output.status
+
+ create_link_command: ICommand = CreateLink(
+ area1=area_id,
+ area2=area_id2,
+ parameters={},
+ command_context=command_context,
+ series=[[0]],
+ )
+ output = create_link_command.apply(study_data=empty_study)
+ assert output.status
+
+ # noinspection SpellCheckingInspection
+ create_cluster_command = CreateCluster.parse_obj(
+ {
+ "area_id": area_id2,
+ "cluster_name": "cluster",
+ "parameters": {
+ "group": "Other",
+ "unitcount": "1",
+ "nominalcapacity": "1000000",
+ "marginal-cost": "30",
+ "market-bid-cost": "30",
},
- comments="Hello",
- command_context=command_context,
- )
- output = bind1_cmd.apply(study_data=empty_study)
- assert output.status
-
- remove_district_command = RemoveDistrict(
- id="foo",
- command_context=command_context,
- )
- output = remove_district_command.apply(study_data=empty_study)
- assert output.status
-
- create_district_command = CreateDistrict(
- name="foo",
- base_filter=DistrictBaseFilter.add_all,
- filter_items=[area_id, area_id2],
- command_context=command_context,
- )
- output = create_district_command.apply(study_data=empty_study)
- assert output.status
-
- remove_area_command: ICommand = RemoveArea.parse_obj(
- {
- "id": transform_name_to_id(area_name2),
- "command_context": command_context,
- }
- )
- output = remove_area_command.apply(study_data=empty_study)
- assert output.status
-
- assert (
- dirhash(empty_study.config.study_path, "md5")
- == empty_study_hash
- )
+ "prepro": [[0]],
+ "modulation": [[0]],
+ "command_context": command_context,
+ }
+ )
+ output = create_cluster_command.apply(study_data=empty_study)
+ assert output.status
+
+ bind1_cmd = CreateBindingConstraint(
+ name="BD 2",
+ time_step=TimeStep.HOURLY,
+ operator=BindingConstraintOperator.LESS,
+ coeffs={
+ f"{area_id}%{area_id2}": [400, 30],
+ f"{area_id2}.cluster": [400, 30],
+ },
+ comments="Hello",
+ command_context=command_context,
+ )
+ output = bind1_cmd.apply(study_data=empty_study)
+ assert output.status
+
+ remove_district_command = RemoveDistrict(
+ id="foo",
+ command_context=command_context,
+ )
+ output = remove_district_command.apply(study_data=empty_study)
+ assert output.status
+
+ create_district_command = CreateDistrict(
+ name="foo",
+ base_filter=DistrictBaseFilter.add_all,
+ filter_items=[area_id, area_id2],
+ command_context=command_context,
+ )
+ output = create_district_command.apply(study_data=empty_study)
+ assert output.status
+
+ remove_area_command: ICommand = RemoveArea.parse_obj(
+ {
+ "id": transform_name_to_id(area_name2),
+ "command_context": command_context,
+ }
+ )
+ output = remove_area_command.apply(study_data=empty_study)
+ assert output.status
+
+ actual_cfg = empty_study.tree.get(depth=999)
+ assert actual_cfg == empty_study_cfg
def test_match(command_context: CommandContext):
diff --git a/tests/worker/test_archive_worker_service.py b/tests/worker/test_archive_worker_service.py
new file mode 100644
index 0000000000..b8fd4d2e33
--- /dev/null
+++ b/tests/worker/test_archive_worker_service.py
@@ -0,0 +1,86 @@
+from unittest.mock import Mock, patch
+
+import pytest
+import yaml
+
+from antarest import __version__
+from antarest.worker.archive_worker import ArchiveWorker
+from antarest.worker.archive_worker_service import run_archive_worker
+
+
+def test_run_archive_worker__version(capsys):
+ with pytest.raises(SystemExit) as ctx:
+ run_archive_worker(["--version"])
+ assert int(ctx.value.args[0]) == 0
+ out, err = capsys.readouterr()
+ assert __version__ in out
+
+
+def test_run_archive_worker__help(capsys):
+ with pytest.raises(SystemExit) as ctx:
+ run_archive_worker(["--help"])
+ assert int(ctx.value.args[0]) == 0
+ out, err = capsys.readouterr()
+ assert "CONFIG_FILE" in out
+ assert "WORKSPACE" in out
+ assert "LOCAL_ROOT" in out
+
+
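+# Minimal worker configuration; the logging test below overrides
+# `logging.logfile` with a per-test temporary path.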
+WORKER_YAML = """\
+storage:
+ tmp_dir: /antarest_tmp_dir
+ archive_dir: /studies/archives
+ matrixstore: /matrixstore
+ matrix_gc_dry_run: true
+ workspaces:
+ default:
+ path: /studies/internal
+ common_space:
+ path: /mounts/common_spaces
+
+logging:
+ logfile: /path/to/worker.log
+ json: false
+ level: INFO
+
+redis:
+ host: redis-server
+ port: 6379
+ password: '*****'
+"""
+
+
+def test_run_archive_worker__logging_setup(tmp_path):
+ """
+ The purpose of this unit test is to check that the logging is set up correctly.
+ """
+ # create a `worker.yaml` with the right log path
+ log_path = tmp_path.joinpath("worker.log")
+ obj = yaml.safe_load(WORKER_YAML)
+ obj["logging"]["logfile"] = str(log_path)
+ config_path = tmp_path.joinpath("worker.yaml")
+ with config_path.open(mode="w", encoding="utf-8") as fd:
+ yaml.dump(obj, fd)
+
+ # do not start the worker: use a Mock instead
+ create_archive_worker = Mock()
+ create_archive_worker.return_value = Mock(spec=ArchiveWorker)
+
+ # noinspection SpellCheckingInspection
+ with patch(
+ "antarest.worker.archive_worker_service.create_archive_worker",
+ new=create_archive_worker,
+ ):
+ run_archive_worker(
+ [
+ f"--config={config_path}",
+ "--workspace=foo",
+ "--local-root=/path/to/local/root",
+ ]
+ )
+
+ # check: log file is generated with 2 messages
+ assert log_path.is_file()
+ lines = log_path.read_text(encoding="utf-8").splitlines()
+ assert "Starting Archive Worker" in lines[0]
+ assert "Archive Worker task is done" in lines[1]
diff --git a/webapp/.eslintrc.json b/webapp/.eslintrc.json
index 542b985ee9..f55addeab3 100644
--- a/webapp/.eslintrc.json
+++ b/webapp/.eslintrc.json
@@ -59,6 +59,7 @@
}
],
"no-shadow": "off",
+ "no-throw-literal": "error",
"no-underscore-dangle": "off",
"no-unused-vars": "off",
"no-use-before-define": ["error", { "functions": false }],
diff --git a/webapp/package-lock.json b/webapp/package-lock.json
index 180c77d5f1..d011f3aed9 100644
--- a/webapp/package-lock.json
+++ b/webapp/package-lock.json
@@ -1,6 +1,6 @@
{
"name": "antares-web",
- "version": "2.13.0",
+ "version": "2.14.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@@ -2648,6 +2648,12 @@
"resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz",
"integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw=="
},
+ "@total-typescript/ts-reset": {
+ "version": "0.4.2",
+ "resolved": "https://registry.npmjs.org/@total-typescript/ts-reset/-/ts-reset-0.4.2.tgz",
+ "integrity": "sha512-vqd7ZUDSrXFVT1n8b2kc3LnklncDQFPvR58yUS1kEP23/nHPAO9l1lMjUfnPrXYYk4Hj54rrLKMW5ipwk7k09A==",
+ "dev": true
+ },
"@trysound/sax": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz",
@@ -13575,9 +13581,9 @@
"integrity": "sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg=="
},
"react-hook-form": {
- "version": "7.34.0",
- "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.34.0.tgz",
- "integrity": "sha512-s0/TJ09NVlEk2JPp3yit1WnMuPNBXFmUKEQPulgDi9pYBw/ZmmAFHe6AXWq73Y+kp8ye4OcMf0Jv+i/qLPektg=="
+ "version": "7.43.9",
+ "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.43.9.tgz",
+ "integrity": "sha512-AUDN3Pz2NSeoxQ7Hs6OhQhDr6gtF9YRuutGDwPQqhSUAHJSgGl2VeY3qN19MG0SucpjgDiuMJ4iC5T5uB+eaNQ=="
},
"react-i18next": {
"version": "12.1.5",
diff --git a/webapp/package.json b/webapp/package.json
index 595d07a2ba..27c305dfcc 100644
--- a/webapp/package.json
+++ b/webapp/package.json
@@ -1,6 +1,6 @@
{
"name": "antares-web",
- "version": "2.13.2",
+ "version": "2.14.0",
"private": true,
"dependencies": {
"@emotion/react": "11.10.6",
@@ -48,11 +48,11 @@
"ramda-adjunct": "3.4.0",
"react": "18.2.0",
"react-beautiful-dnd": "13.1.1",
- "react-color": "^2.19.3",
+ "react-color": "2.19.3",
"react-d3-graph": "2.6.0",
"react-dom": "18.2.0",
"react-dropzone": "14.2.3",
- "react-hook-form": "7.34.0",
+ "react-hook-form": "7.43.9",
"react-i18next": "12.1.5",
"react-json-view": "1.21.3",
"react-plotly.js": "2.6.0",
@@ -96,6 +96,7 @@
"proxy": "http://localhost:8080",
"homepage": "/",
"devDependencies": {
+ "@total-typescript/ts-reset": "0.4.2",
"@types/debug": "4.1.7",
"@types/js-cookie": "3.0.3",
"@types/lodash": "4.14.191",
diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json
index 6c7bdc460c..b22504175f 100644
--- a/webapp/public/locales/en/main.json
+++ b/webapp/public/locales/en/main.json
@@ -72,6 +72,7 @@
"global.error.failedtoretrievejobs": "Failed to retrieve job information",
"global.error.failedtoretrievelogs": "Failed to retrieve job logs",
"global.error.failedtoretrievedownloads": "Failed to retrieve downloads list",
+ "global.area.add": "Add an area",
"login.error": "Failed to authenticate",
"tasks.title": "Tasks",
"api.title": "API",
@@ -108,6 +109,7 @@
"form.field.required": "Field required",
"form.field.minLength": "{{0}} character(s) minimum",
"form.field.minValue": "The minimum value is {{0}}",
+ "form.field.maxValue": "The maximum value is {{0}}",
"form.field.notAllowedValue": "Not allowed value",
"matrix.graphSelector": "Columns",
"matrix.message.importHint": "Click or drag and drop a matrix here",
@@ -265,21 +267,24 @@
"study.modelization.tableMode.dialog.add.title": "Add table",
"study.modelization.tableMode.dialog.edit.title": "Edit table",
"study.modelization.tableMode.dialog.delete.text": "Are you sure you want to delete '{{0}}' table?",
- "study.configuration.general.simulation": "Simulation",
+ "study.configuration.general.legend.simulation": "Simulation",
+ "study.configuration.general.legend.calendar": "Calendar",
+ "study.configuration.general.legend.monteCarloScenarios": "Monte-Carlo Scenarios",
+ "study.configuration.general.legend.outputProfile": "Output profile",
"study.configuration.general.mode": "Mode",
"study.configuration.general.firstDay": "First day",
"study.configuration.general.lastDay": "Last day",
- "study.configuration.general.calendar": "Calendar",
+ "study.configuration.general.day.error.leapYearMax": "Maximum is 366 for a leap year",
+ "study.configuration.general.day.error.nonLeapYearMax": "Maximum is 365 for a non-leap year",
"study.configuration.general.horizon": "Horizon",
"study.configuration.general.year": "Year",
"study.configuration.general.week": "Week",
"study.configuration.general.firstDayOfYear": "1st January",
+ "study.configuration.general.nbYears": "Number",
+ "study.configuration.general.nbYears.error.derated": "Value must be 1 when building mode is derated",
"study.configuration.general.leapYear": "Leap year",
- "study.configuration.general.adequacyPatch": "Adequacy patch",
- "study.configuration.general.monteCarloScenarios": "Monte-Carlo Scenarios",
"study.configuration.general.buildingMode": "Building mode",
"study.configuration.general.selectionMode": "Selection mode",
- "study.configuration.general.outputProfile": "Output profile",
"study.configuration.general.simulationSynthesis": "Simulation synthesis",
"study.configuration.general.yearByYear": "Year-by-year",
"study.configuration.general.mcScenario": "MC Scenario",
@@ -310,25 +315,35 @@
"study.configuration.general.geographicTrimming": "Geographic trimming",
"study.configuration.general.thematicTrimming": "Thematic trimming",
"study.configuration.general.filtering": "Filtering",
- "study.configuration.optimization.optimization": "Optimization",
+ "study.configuration.optimization.legend.general": "General",
+ "study.configuration.optimization.legend.links": "Links",
+ "study.configuration.optimization.legend.thermalClusters": "Thermal Clusters",
+ "study.configuration.optimization.legend.reserve": "Reserve",
"study.configuration.optimization.bindingConstraints": "Binding constraints",
"study.configuration.optimization.hurdleCosts": "Hurdle costs",
"study.configuration.optimization.transmissionCapacities": "Transmission capacities",
- "study.configuration.optimization.linkType": "Link type",
"study.configuration.optimization.thermalClustersMinStablePower": "Thermal clusters min stable power",
"study.configuration.optimization.thermalClustersMinUdTime": "Thermal clusters min UD time",
"study.configuration.optimization.dayAheadReserve": "Day ahead reserve",
"study.configuration.optimization.primaryReserve": "Primary reserve",
"study.configuration.optimization.strategicReserve": "Strategic reserve",
"study.configuration.optimization.spinningReserve": "Spinning reserve",
- "study.configuration.optimization.exportMps": "Export mps",
- "study.configuration.optimization.splitExportedMps": "Split exported mps",
+ "study.configuration.optimization.exportMps": "Export MPS",
"study.configuration.optimization.unfeasibleProblemBehavior": "Unfeasible problem behavior",
"study.configuration.optimization.simplexOptimizationRange": "Simplex optimization range",
- "study.configuration.optimization.adequacyPatch": "Adequacy patch",
- "study.configuration.optimization.enableAdequacyPatch": "Enable adequacy patch",
- "study.configuration.optimization.ntcFromPhysicalAreasOutToPhysicalAreasInAdequacyPatch": "NTC from physical areas out to physical areas in adequacy patch",
- "study.configuration.optimization.ntcBetweenPhysicalAreasOutAdequacyPatch": "NTC between physical areas out adequacy patch",
+ "study.configuration.adequacyPatch.legend.general": "General",
+ "study.configuration.adequacyPatch.legend.localMatchingRule": "Local matching rule",
+ "study.configuration.adequacyPatch.legend.curtailmentSharing": "Curtailment sharing",
+ "study.configuration.adequacyPatch.legend.advanced": "Advanced",
+ "study.configuration.adequacyPatch.enableAdequacyPatch": "Enable adequacy patch",
+ "study.configuration.adequacyPatch.ntcFromPhysicalAreasOutToPhysicalAreasInAdequacyPatch": "NTC from physical areas out to physical areas in adequacy patch",
+ "study.configuration.adequacyPatch.ntcBetweenPhysicalAreasOutAdequacyPatch": "NTC between physical areas out adequacy patch",
+ "study.configuration.adequacyPatch.priceTakingOrder": "Price taking order",
+ "study.configuration.adequacyPatch.includeHurdleCostCsr": "Include hurdle cost CSR",
+ "study.configuration.adequacyPatch.thresholdInitiateCurtailmentSharingRule": "Threshold initiate curtailment sharing rule",
+ "study.configuration.adequacyPatch.thresholdDisplayLocalMatchingRuleViolations": "Threshold display local matching rule violations",
+ "study.configuration.adequacyPatch.thresholdCsrVariableBoundsRelaxation": "Threshold CSR variable bounds relaxation",
+ "study.configuration.adequacyPatch.checkCsrCostFunction": "Check CSR cost function",
"study.configuration.advancedParameters.seedsForRandomNumbers": "Seeds for random numbers",
"study.configuration.advancedParameters.spatialTimeSeriesCorrelation": "Spatial time-series correlation",
"study.configuration.advancedParameters.otherPreferences": "Other preferences",
@@ -384,6 +399,9 @@
"study.modelization.load": "Load",
"study.modelization.thermal": "Thermal Clus.",
"study.modelization.hydro": "Hydro",
+ "study.modelization.hydro.correlation.viewMatrix": "View all correlations",
+ "study.modelization.hydro.allocation.viewMatrix": "View all allocations",
+ "study.modelization.hydro.allocation.error.field.delete": "Error when deleting the allocation",
"study.modelization.wind": "Wind",
"study.modelization.solar": "Solar",
"study.modelization.renewables": "Renewables Clus.",
diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json
index e0d3293c73..690f0196a0 100644
--- a/webapp/public/locales/fr/main.json
+++ b/webapp/public/locales/fr/main.json
@@ -72,6 +72,7 @@
"global.error.failedtoretrievejobs": "Échec de la récupération des tâches",
"global.error.failedtoretrievelogs": "Échec de la récupération des logs",
"global.error.failedtoretrievedownloads": "Échec de la récupération des exports",
+ "global.area.add": "Ajouter une zone",
"login.error": "Échec de l'authentification",
"tasks.title": "Tâches",
"api.title": "API",
@@ -108,6 +109,7 @@
"form.field.required": "Champ requis",
"form.field.minLength": "{{0}} caractère(s) minimum",
"form.field.minValue": "La valeur minimum est {{0}}",
+ "form.field.maxValue": "La valeur maximum est {{0}}",
"form.field.notAllowedValue": "Valeur non autorisée",
"matrix.graphSelector": "Colonnes",
"matrix.message.importHint": "Cliquer ou glisser une matrice ici",
@@ -265,21 +267,24 @@
"study.modelization.tableMode.dialog.add.title": "Ajouter une table",
"study.modelization.tableMode.dialog.edit.title": "Modifier une table",
"study.modelization.tableMode.dialog.delete.text": "Êtes-vous sûr de vouloir supprimer la table '{{0}}' ?",
- "study.configuration.general.simulation": "Simulation",
+ "study.configuration.general.legend.simulation": "Simulation",
+ "study.configuration.general.legend.calendar": "Calendrier",
+ "study.configuration.general.legend.monteCarloScenarios": "Scénarios Monte-Carlo",
+ "study.configuration.general.legend.outputProfile": "Profil de sortie",
"study.configuration.general.mode": "Mode",
"study.configuration.general.firstDay": "Premier jour",
"study.configuration.general.lastDay": "Dernier jour",
- "study.configuration.general.calendar": "Calendrier",
+ "study.configuration.general.day.error.leapYearMax": "Le maximum est 366 pour une année bissextile",
+ "study.configuration.general.day.error.nonLeapYearMax": "Le maximum est 365 pour une année non bissextile",
"study.configuration.general.horizon": "Horizon",
"study.configuration.general.year": "Année",
"study.configuration.general.week": "Semaine",
"study.configuration.general.firstDayOfYear": "1er Janvier",
+ "study.configuration.general.nbYears": "Nombre",
+ "study.configuration.general.nbYears.error.derated": "La valeur doit être 1 lorsque building mode est 'derated'",
"study.configuration.general.leapYear": "Année bissextile",
- "study.configuration.general.adequacyPatch": "Adequacy patch",
- "study.configuration.general.monteCarloScenarios": "Monte-Carlo Scenarios",
"study.configuration.general.buildingMode": "Building mode",
"study.configuration.general.selectionMode": "Selection mode",
- "study.configuration.general.outputProfile": "Output profile",
"study.configuration.general.simulationSynthesis": "Simulation synthesis",
"study.configuration.general.yearByYear": "Year-by-year",
"study.configuration.general.mcScenario": "MC Scenario",
@@ -310,25 +315,35 @@
"study.configuration.general.geographicTrimming": "Geographic trimming",
"study.configuration.general.thematicTrimming": "Thematic trimming",
"study.configuration.general.filtering": "Filtering",
- "study.configuration.optimization.optimization": "Optimization",
+ "study.configuration.optimization.legend.general": "Générale",
+ "study.configuration.optimization.legend.links": "Liens",
+ "study.configuration.optimization.legend.thermalClusters": "Cluster thermiques",
+ "study.configuration.optimization.legend.reserve": "Réserve",
"study.configuration.optimization.bindingConstraints": "Binding constraints",
"study.configuration.optimization.hurdleCosts": "Hurdle costs",
"study.configuration.optimization.transmissionCapacities": "Transmission capacities",
- "study.configuration.optimization.linkType": "Link type",
"study.configuration.optimization.thermalClustersMinStablePower": "Thermal clusters min stable power",
"study.configuration.optimization.thermalClustersMinUdTime": "Thermal clusters min UD time",
"study.configuration.optimization.dayAheadReserve": "Day ahead reserve",
"study.configuration.optimization.primaryReserve": "Primary reserve",
"study.configuration.optimization.strategicReserve": "Strategic reserve",
"study.configuration.optimization.spinningReserve": "Spinning reserve",
- "study.configuration.optimization.exportMps": "Export mps",
- "study.configuration.optimization.splitExportedMps": "Split exported mps",
+ "study.configuration.optimization.exportMps": "Export MPS",
"study.configuration.optimization.unfeasibleProblemBehavior": "Unfeasible problem behavior",
"study.configuration.optimization.simplexOptimizationRange": "Simplex optimization range",
- "study.configuration.optimization.adequacyPatch": "Adequacy patch",
- "study.configuration.optimization.enableAdequacyPatch": "Activer l'adequacy patch",
- "study.configuration.optimization.ntcFromPhysicalAreasOutToPhysicalAreasInAdequacyPatch": "NTC from physical areas out to physical areas in adequacy patch",
- "study.configuration.optimization.ntcBetweenPhysicalAreasOutAdequacyPatch": "NTC between physical areas out adequacy patch",
+ "study.configuration.adequacyPatch.legend.general": "Générale",
+ "study.configuration.adequacyPatch.legend.localMatchingRule": "Règle de correspondance locale",
+ "study.configuration.adequacyPatch.legend.curtailmentSharing": "Partage de réduction",
+ "study.configuration.adequacyPatch.legend.advanced": "Avancée",
+ "study.configuration.adequacyPatch.enableAdequacyPatch": "Enable adequacy patch",
+ "study.configuration.adequacyPatch.ntcFromPhysicalAreasOutToPhysicalAreasInAdequacyPatch": "NTC from physical areas out to physical areas in adequacy patch",
+ "study.configuration.adequacyPatch.ntcBetweenPhysicalAreasOutAdequacyPatch": "NTC between physical areas out adequacy patch",
+ "study.configuration.adequacyPatch.priceTakingOrder": "Prix de la prise en charge",
+ "study.configuration.adequacyPatch.includeHurdleCostCsr": "Inclure le coût de la hausse de la CSR",
+ "study.configuration.adequacyPatch.thresholdInitiateCurtailmentSharingRule": "Seuil de déclenchement de la règle de partage des réductions",
+ "study.configuration.adequacyPatch.thresholdDisplayLocalMatchingRuleViolations": "Seuil d'affichage des violations de la règle d'appariement local",
+ "study.configuration.adequacyPatch.thresholdCsrVariableBoundsRelaxation": "Seuil d'assouplissement des limites variables de la CSR",
+ "study.configuration.adequacyPatch.checkCsrCostFunction": "Vérifier la fonction de coût CSR",
"study.configuration.advancedParameters.seedsForRandomNumbers": "Seeds for random numbers",
"study.configuration.advancedParameters.spatialTimeSeriesCorrelation": "Spatial time-series correlation",
"study.configuration.advancedParameters.otherPreferences": "Autres préférences",
@@ -372,9 +387,9 @@
"study.modelization.map.layers.add": "Ajouter un layer",
"study.modelization.map.layers.edit": "Modifier un layer",
"study.modelization.map.layers.delete.confirm": "Êtes-vous sûr de vouloir supprimer le layer '{{0}}' ?",
+ "study.modelization.map.districts": "Districts",
"study.modelization.map.districts.field.comments": "Commentaires",
"study.modelization.map.districts.field.outputs": "Sorties",
- "study.modelization.map.districts": "Districts",
"study.modelization.map.districts.add": "Ajouter un district",
"study.modelization.map.districts.edit": "Modifier un district",
"study.modelization.map.districts.delete.confirm": "Êtes-vous sûr de vouloir supprimer le district '{{0}}' ?",
@@ -384,6 +399,10 @@
"study.modelization.load": "Conso",
"study.modelization.thermal": "Clus. Thermiques",
"study.modelization.hydro": "Hydro",
+ "study.modelization.hydro.correlation.viewMatrix": "Voir les correlations",
+ "study.modelization.hydro.correlation.coefficient": "Coeff. (%)",
+ "study.modelization.hydro.allocation.viewMatrix": "Voir les allocations",
+ "study.modelization.hydro.allocation.error.field.delete": "Erreur lors de la suppression de l'allocation",
"study.modelization.wind": "Éolien",
"study.modelization.solar": "Solaire",
"study.modelization.renewables": "Clus. Renouvelables",
@@ -479,6 +498,8 @@
"study.error.modifiedStudy": "Erreur lors de la modification de l'étude {{studyname}}",
"study.error.launchLoad": "Échec lors de la récupération de la charge du cluster",
"study.error.upgrade": "Échec lors de la mise à jour de votre étude",
+ "study.error.createDistrict": "Failed to add district",
+ "study.error.createLayer": "Failed to add layer",
"study.success.commentsSaved": "Commentaires enregistrés avec succès",
"study.success.studyIdCopy": "Identifiant de l'étude copié !",
"study.success.jobIdCopy": "Identifiant de la tâche copié !",
diff --git a/webapp/src/components/App/Settings/Groups/dialog/GroupFormDialog/GroupForm.tsx b/webapp/src/components/App/Settings/Groups/dialog/GroupFormDialog/GroupForm.tsx
index 43501b71ac..69c3e849e9 100644
--- a/webapp/src/components/App/Settings/Groups/dialog/GroupFormDialog/GroupForm.tsx
+++ b/webapp/src/components/App/Settings/Groups/dialog/GroupFormDialog/GroupForm.tsx
@@ -41,8 +41,7 @@ function GroupForm(props: UseFormReturnPlus) {
control,
register,
getValues,
- formState: { errors },
- defaultValues,
+ formState: { errors, defaultValues },
} = props;
const userLabelId = useRef(uuidv4()).current;
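
Moving `defaultValues` into the `formState` destructure matches newer react-hook-form releases, which expose the resolved defaults on `formState` rather than as a separate return value. A minimal sketch of the pattern, with the form shape assumed for illustration:

```typescript
import { useForm } from "react-hook-form";

interface GroupFormValues {
  name: string;
}

function useGroupFormDefaults() {
  const { formState } = useForm<GroupFormValues>({
    defaultValues: { name: "" },
  });
  // Newer react-hook-form versions expose the resolved defaults on
  // formState, so no separate top-level destructure is needed.
  return formState.defaultValues;
}
```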
diff --git a/webapp/src/components/App/Settings/index.tsx b/webapp/src/components/App/Settings/index.tsx
index d460de40e5..462a3ad428 100644
--- a/webapp/src/components/App/Settings/index.tsx
+++ b/webapp/src/components/App/Settings/index.tsx
@@ -13,6 +13,7 @@ import {
isAuthUserAdmin,
isAuthUserInGroupAdmin,
} from "../../../redux/selectors";
+import { tuple } from "../../../utils/tsUtils";
/**
* Component
@@ -26,14 +27,12 @@ function Settings() {
const tabList = useMemo(() => {
return [
- isUserAdmin && [t("global.users"), () => ],
- (isUserAdmin || isUserInGroupAdmin) && [
- t("global.group"),
- () => ,
- ],
- [t("global.tokens"), () => ],
- isUserAdmin && [t("global.maintenance"), () => ],
- ].filter(Boolean) as Array<[string, () => JSX.Element]>;
+ isUserAdmin && tuple(t("global.users"), () => ),
+ (isUserAdmin || isUserInGroupAdmin) &&
+ tuple(t("global.group"), () => ),
+ tuple(t("global.tokens"), () => ),
+ isUserAdmin && tuple(t("global.maintenance"), () => ),
+ ].filter(Boolean);
}, [isUserAdmin, isUserInGroupAdmin, t]);
////////////////////////////////////////////////////////////////
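
The `tuple` helper imported from `tsUtils` is not shown in this diff. A plausible minimal implementation, assuming it is a variadic identity function that makes TypeScript infer an exact tuple type instead of a widened array, which is what lets the `as Array<[string, () => JSX.Element]>` cast be dropped:

```typescript
// Hypothetical sketch of `tuple` in webapp/src/utils/tsUtils.ts:
// returning the arguments through a variadic generic preserves the
// exact tuple type, e.g. [string, () => JSX.Element].
export function tuple<T extends unknown[]>(...items: T): T {
  return items;
}

// Example: `pair` is inferred as [string, number], not (string | number)[].
const pair = tuple("tokens", 3);
```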
diff --git a/webapp/src/components/App/Singlestudy/NavHeader.tsx b/webapp/src/components/App/Singlestudy/NavHeader.tsx
index e48aa5188c..eba5b34b97 100644
--- a/webapp/src/components/App/Singlestudy/NavHeader.tsx
+++ b/webapp/src/components/App/Singlestudy/NavHeader.tsx
@@ -52,7 +52,10 @@ import {
displayVersionName,
} from "../../../services/utils";
import useEnqueueErrorSnackbar from "../../../hooks/useEnqueueErrorSnackbar";
-import { isCurrentStudyFavorite } from "../../../redux/selectors";
+import {
+ getLatestStudyVersion,
+ isCurrentStudyFavorite,
+} from "../../../redux/selectors";
import ExportDialog from "../Studies/ExportModal";
import StarToggle from "../../common/StarToggle";
import ConfirmationDialog from "../../common/dialogs/ConfirmationDialog";
@@ -102,6 +105,9 @@ function NavHeader(props: Props) {
} = props;
const [t, i18n] = useTranslation();
const navigate = useNavigate();
+ const dispatch = useAppDispatch();
+ const isStudyFavorite = useAppSelector(isCurrentStudyFavorite);
+ const latestVersion = useAppSelector(getLatestStudyVersion);
const [anchorEl, setAnchorEl] = useState(null);
const [openMenu, setOpenMenu] = useState("");
const [openLauncherDialog, setOpenLauncherDialog] = useState(false);
@@ -112,9 +118,7 @@ function NavHeader(props: Props) {
const [openExportDialog, setOpenExportDialog] = useState(false);
const { enqueueSnackbar } = useSnackbar();
const enqueueErrorSnackbar = useEnqueueErrorSnackbar();
- const isStudyFavorite = useAppSelector(isCurrentStudyFavorite);
- const dispatch = useAppDispatch();
-
+ const isLatestVersion = study?.version === latestVersion;
const publicModeLabel =
PUBLIC_MODE_LIST.find((mode) => mode.id === study?.publicMode)?.name || "";
@@ -410,23 +414,25 @@ function NavHeader(props: Props) {
{t("study.properties")}
-
+ {!isLatestVersion && (
+
+ )}
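
The new render gate compares the study's version against the latest version read from the store via `getLatestStudyVersion`. A hypothetical helper illustrating the same check in isolation (names and shape assumed, not part of the diff):

```typescript
// Hypothetical: the upgrade entry is only relevant when the study
// lags the latest version reported by the server.
interface StudySummary {
  version: string;
}

export function needsUpgrade(
  study: StudySummary | undefined,
  latestVersion: string,
): boolean {
  return study !== undefined && study.version !== latestVersion;
}

// needsUpgrade({ version: "860" }, "870") === true
// needsUpgrade({ version: "870" }, "870") === false
```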