diff --git a/alembic/versions/782a481f3414_fix_task_job_cascade_delete.py b/alembic/versions/782a481f3414_fix_task_job_cascade_delete.py
new file mode 100644
index 0000000000..77ef6c9402
--- /dev/null
+++ b/alembic/versions/782a481f3414_fix_task_job_cascade_delete.py
@@ -0,0 +1,137 @@
+"""fix task_job cascade delete
+
+Revision ID: 782a481f3414
+Revises: d495746853cc
+Create Date: 2023-12-16 14:26:30.035324
+
+"""
+import sqlalchemy as sa  # type: ignore
+from alembic import context, op
+
+# revision identifiers, used by Alembic.
+revision = "782a481f3414"
+down_revision = "d495746853cc"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    connection = context.get_bind()
+
+    # Delete logs of tasks older than one week
+    if "postgresql" in connection.dialect.name:
+        # PostgreSQL-specific code
+        op.execute(
+            """
+            DELETE FROM taskjoblog
+            WHERE task_id IN (SELECT id FROM taskjob WHERE NOW() - creation_date > INTERVAL '1 week');
+            """
+        )
+
+        op.execute(
+            """
+            DELETE FROM taskjob WHERE NOW() - creation_date > INTERVAL '1 week';
+            """
+        )
+    elif "sqlite" in connection.dialect.name:
+        # SQLite-specific code
+        op.execute(
+            """
+            DELETE FROM taskjoblog
+            WHERE task_id IN (SELECT id FROM taskjob WHERE creation_date < date('now', '-7 days'));
+            """
+        )
+
+        op.execute(
+            """
+            DELETE FROM taskjob WHERE creation_date < date('now', '-7 days');
+            """
+        )
+    else:
+        # Other databases
+        pass
+
+    # Set the name "Unknown task" to tasks that have no name
+    op.execute(""" UPDATE taskjob SET name = 'Unknown task' WHERE name IS NULL OR name = ''; """)
+
+    # Attach the user "admin" to tasks whose owner no longer exists
+    op.execute(""" UPDATE taskjob SET owner_id = 1 WHERE owner_id NOT IN (SELECT id FROM identities); """)
+
+    # Delete logs of tasks that reference a study that has been deleted
+    op.execute(
+        """
+        DELETE FROM taskjoblog
+        WHERE
+            task_id IN (
+                SELECT
+                    t.id
+                FROM
+                    taskjob t
+                WHERE
+                    t.ref_id IS NOT NULL
+                    AND t.ref_id NOT IN (SELECT s.id FROM study s)
+            );
+        """
+    )
+
+    # Delete tasks that reference a study that has been deleted (potentially slow query)
+    op.execute(
+        """
+        DELETE FROM taskjob
+        WHERE
+            ref_id IS NOT NULL
+            AND ref_id NOT IN (SELECT id FROM study);
+        """
+    )
+
+    # Delete task logs whose task_id is NULL (orphan logs)
+    op.execute(""" DELETE FROM taskjoblog WHERE task_id IS NULL; """)
+
+    # Set the status "CANCELLED" (6) to tasks whose status is not in the list of possible values
+    op.execute(""" UPDATE taskjob SET status = 6 WHERE status NOT IN (1, 2, 3, 4, 5, 6); """)
+
+    # Set the type "VARIANT_GENERATION" to untyped tasks whose name suggests a variant generation
+    op.execute(""" UPDATE taskjob SET type = 'VARIANT_GENERATION' WHERE type IS NULL AND name LIKE '%Generation%'; """)
+
+    # Set the type "EXPORT" to untyped tasks whose name suggests an export
+    op.execute(""" UPDATE taskjob SET type = 'EXPORT' WHERE type IS NULL AND name LIKE '%export%'; """)
+
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("taskjoblog", schema=None) as batch_op:
+        batch_op.alter_column("task_id", existing_type=sa.VARCHAR(), nullable=False)
+        batch_op.drop_constraint("fk_log_taskjob_id", type_="foreignkey")
+        batch_op.create_foreign_key("fk_log_taskjob_id", "taskjob", ["task_id"], ["id"], ondelete="CASCADE")
+
+    with op.batch_alter_table("taskjob", schema=None) as batch_op:
+        batch_op.alter_column("name", existing_type=sa.VARCHAR(), nullable=False)
+        batch_op.create_index(batch_op.f("ix_taskjob_creation_date"), ["creation_date"], unique=False)
+        batch_op.create_index(batch_op.f("ix_taskjob_name"), ["name"], unique=False)
+        batch_op.create_index(batch_op.f("ix_taskjob_owner_id"), ["owner_id"], unique=False)
+        batch_op.create_index(batch_op.f("ix_taskjob_ref_id"), ["ref_id"], unique=False)
+        batch_op.create_index(batch_op.f("ix_taskjob_status"), ["status"], unique=False)
+        batch_op.create_index(batch_op.f("ix_taskjob_type"), ["type"], unique=False)
+        batch_op.create_foreign_key("fk_taskjob_identity_id", "identities", ["owner_id"], ["id"], ondelete="SET NULL")
+        batch_op.create_foreign_key("fk_taskjob_study_id", "study", ["ref_id"], ["id"], ondelete="CASCADE")
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("taskjob", schema=None) as batch_op:
+        batch_op.drop_constraint("fk_taskjob_study_id", type_="foreignkey")
+        batch_op.drop_constraint("fk_taskjob_identity_id", type_="foreignkey")
+        batch_op.drop_index(batch_op.f("ix_taskjob_type"))
+        batch_op.drop_index(batch_op.f("ix_taskjob_status"))
+        batch_op.drop_index(batch_op.f("ix_taskjob_ref_id"))
+        batch_op.drop_index(batch_op.f("ix_taskjob_owner_id"))
+        batch_op.drop_index(batch_op.f("ix_taskjob_name"))
+        batch_op.drop_index(batch_op.f("ix_taskjob_creation_date"))
+        batch_op.alter_column("name", existing_type=sa.VARCHAR(), nullable=True)
+
+    with op.batch_alter_table("taskjoblog", schema=None) as batch_op:
+        batch_op.drop_constraint("fk_log_taskjob_id", type_="foreignkey")
+        batch_op.create_foreign_key("fk_log_taskjob_id", "taskjob", ["task_id"], ["id"])
+        batch_op.alter_column("task_id", existing_type=sa.VARCHAR(), nullable=True)
+
+    # ### end Alembic commands ###
diff --git a/antarest/__init__.py b/antarest/__init__.py
index ea7c2d6185..6a92596e63 100644
--- a/antarest/__init__.py
+++ b/antarest/__init__.py
@@ -7,9 +7,9 @@
 # Standard project metadata
 
-__version__ = "2.16.1"
+__version__ = "2.16.2"
 __author__ = "RTE, Antares Web Team"
-__date__ = "2023-12-14"
+__date__ = "2024-01-10"
 
 # noinspection SpellCheckingInspection
 __credits__ = "(c) Réseau de Transport de l’Électricité (RTE)"
diff --git a/antarest/core/config.py b/antarest/core/config.py
index b48be8ded0..849209d826 100644
--- a/antarest/core/config.py
+++ b/antarest/core/config.py
@@ -9,6 +9,8 @@
 from antarest.core.model import JSON
 from antarest.core.roles import RoleType
 
+DEFAULT_WORKSPACE_NAME = "default"
+
 
 @dataclass(frozen=True)
 class ExternalAuthConfig:
@@ -547,3 +549,18 @@ def from_yaml_file(cls, file: Path, res: Optional[Path] = None) -> "Config":
         if res is not None:
             data["resources_path"] = res
         return cls.from_dict(data)
+
+    def get_workspace_path(self, *, workspace: str = DEFAULT_WORKSPACE_NAME) -> Path:
+        """
+        Get the workspace path from the configuration.
+
+        Args:
+            workspace: Workspace name.
+
+        Returns:
+            Absolute (or relative) path to the workspace directory.
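+
+        Raises:
+            ValueError: If the workspace is not found in the configuration.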
+        """
+        try:
+            return self.storage.workspaces[workspace].path
+        except KeyError:
+            raise ValueError(f"Workspace '{workspace}' not found in config") from None
diff --git a/antarest/core/exceptions.py b/antarest/core/exceptions.py
index ab39c3a566..3414d6477b 100644
--- a/antarest/core/exceptions.py
+++ b/antarest/core/exceptions.py
@@ -194,6 +194,11 @@ def __init__(self, message: str) -> None:
         super().__init__(HTTPStatus.CONFLICT, message)
 
 
+class InvalidConstraintName(HTTPException):
+    def __init__(self, message: str) -> None:
+        super().__init__(HTTPStatus.BAD_REQUEST, message)
+
+
 class MissingDataError(HTTPException):
     def __init__(self, message: str) -> None:
         super().__init__(HTTPStatus.NOT_FOUND, message)
diff --git a/antarest/core/tasks/model.py b/antarest/core/tasks/model.py
index 1d7a9e1566..d1b3d51b5d 100644
--- a/antarest/core/tasks/model.py
+++ b/antarest/core/tasks/model.py
@@ -1,15 +1,20 @@
+import typing as t
 import uuid
 from datetime import datetime
 from enum import Enum
-from typing import Any, List, Mapping, Optional
 
 from pydantic import BaseModel, Extra
 from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, Sequence, String  # type: ignore
 from sqlalchemy.engine.base import Engine  # type: ignore
-from sqlalchemy.orm import Session, relationship, sessionmaker  # type: ignore
+from sqlalchemy.orm import relationship, sessionmaker  # type: ignore
 
 from antarest.core.persistence import Base
 
+if t.TYPE_CHECKING:
+    # avoid circular import
+    from antarest.login.model import Identity
+    from antarest.study.model import Study
+
 
 class TaskType(str, Enum):
     EXPORT = "EXPORT"
@@ -43,7 +48,7 @@ class TaskResult(BaseModel, extra=Extra.forbid):
     success: bool
     message: str
     # Can be used to store json serialized result
-    return_value: Optional[str]
+    return_value: t.Optional[str]
 
 
 class TaskLogDTO(BaseModel, extra=Extra.forbid):
@@ -65,25 +70,25 @@ class TaskEventPayload(BaseModel, extra=Extra.forbid):
 class TaskDTO(BaseModel, extra=Extra.forbid):
     id: str
     name: str
-    owner: Optional[int]
+    owner: t.Optional[int]
     status: TaskStatus
     creation_date_utc: str
-    completion_date_utc: Optional[str]
-    result: Optional[TaskResult]
-    logs: Optional[List[TaskLogDTO]]
-    type: Optional[str] = None
-    ref_id: Optional[str] = None
+    completion_date_utc: t.Optional[str]
+    result: t.Optional[TaskResult]
+    logs: t.Optional[t.List[TaskLogDTO]]
+    type: t.Optional[str] = None
+    ref_id: t.Optional[str] = None
 
 
 class TaskListFilter(BaseModel, extra=Extra.forbid):
-    status: List[TaskStatus] = []
-    name: Optional[str] = None
-    type: List[TaskType] = []
-    ref_id: Optional[str] = None
-    from_creation_date_utc: Optional[float] = None
-    to_creation_date_utc: Optional[float] = None
-    from_completion_date_utc: Optional[float] = None
-    to_completion_date_utc: Optional[float] = None
+    status: t.List[TaskStatus] = []
+    name: t.Optional[str] = None
+    type: t.List[TaskType] = []
+    ref_id: t.Optional[str] = None
+    from_creation_date_utc: t.Optional[float] = None
+    to_creation_date_utc: t.Optional[float] = None
+    from_completion_date_utc: t.Optional[float] = None
+    to_completion_date_utc: t.Optional[float] = None
 
 
 class TaskJobLog(Base):  # type: ignore
@@ -93,10 +98,15 @@ class TaskJobLog(Base):  # type: ignore
     message = Column(String, nullable=False)
     task_id = Column(
         String(),
-        ForeignKey("taskjob.id", name="fk_log_taskjob_id"),
+        ForeignKey("taskjob.id", name="fk_log_taskjob_id", ondelete="CASCADE"),
+        nullable=False,
     )
 
-    def __eq__(self, other: Any) -> bool:
+    # Define a many-to-one relationship between `TaskJobLog` and `TaskJob`.
+    # If the TaskJob is deleted, all attached logs must also be deleted in cascade.
+    job: "TaskJob" = relationship("TaskJob", back_populates="logs", uselist=False)
+
+    def __eq__(self, other: t.Any) -> bool:
         if not isinstance(other, TaskJobLog):
             return False
         return bool(other.id == self.id and other.message == self.message and other.task_id == self.task_id)
@@ -111,19 +121,41 @@ def to_dto(self) -> TaskLogDTO:
 
 class TaskJob(Base):  # type: ignore
     __tablename__ = "taskjob"
 
-    id = Column(String(), default=lambda: str(uuid.uuid4()), primary_key=True)
-    name = Column(String())
-    status = Column(Integer(), default=lambda: TaskStatus.PENDING.value)
-    creation_date = Column(DateTime, default=datetime.utcnow)
-    completion_date = Column(DateTime, nullable=True)
-    result_msg = Column(String(), nullable=True)
-    result = Column(String(), nullable=True)
-    result_status = Column(Boolean(), nullable=True)
-    logs = relationship(TaskJobLog, uselist=True, cascade="all, delete, delete-orphan")
-    # this is not a foreign key to prevent the need to delete the job history if the user is deleted
-    owner_id = Column(Integer(), nullable=True)
-    type = Column(String(), nullable=True)
-    ref_id = Column(String(), nullable=True)
+    id: str = Column(String(), default=lambda: str(uuid.uuid4()), primary_key=True)
+    name: str = Column(String(), nullable=False, index=True)
+    status: int = Column(Integer(), default=lambda: TaskStatus.PENDING.value, index=True)
+    creation_date: datetime = Column(DateTime, default=datetime.utcnow, index=True)
+    completion_date: t.Optional[datetime] = Column(DateTime, nullable=True, default=None)
+    result_msg: t.Optional[str] = Column(String(), nullable=True, default=None)
+    result: t.Optional[str] = Column(String(), nullable=True, default=None)
+    result_status: t.Optional[bool] = Column(Boolean(), nullable=True, default=None)
+    type: t.Optional[str] = Column(String(), nullable=True, default=None, index=True)
+    owner_id: t.Optional[int] = Column(
+        Integer(),
+        ForeignKey("identities.id", name="fk_taskjob_identity_id", ondelete="SET NULL"),
+        nullable=True,
+        default=None,
+        index=True,
+    )
+    ref_id: t.Optional[str] = Column(
+        String(),
+        ForeignKey("study.id", name="fk_taskjob_study_id", ondelete="CASCADE"),
+        nullable=True,
+        default=None,
+        index=True,
+    )
+
+    # Define a one-to-many relationship between `TaskJob` and `TaskJobLog`.
+    # If the TaskJob is deleted, all attached logs must also be deleted in cascade.
+    logs: t.List["TaskJobLog"] = relationship("TaskJobLog", back_populates="job", cascade="all, delete, delete-orphan")
+
+    # Define a many-to-one relationship between `TaskJob` and `Identity`.
+    # If the Identity is deleted, all attached TaskJob must be preserved.
+    owner: "Identity" = relationship("Identity", back_populates="owned_jobs", uselist=False)
+
+    # Define a many-to-one relationship between `TaskJob` and `Study`.
+    # If the Study is deleted, all attached TaskJob must be deleted in cascade.
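+    # Note: the cascade is enforced both at the ORM level (this relationship) and at
+    # the database level, via the `ondelete="CASCADE"` clause on the `ref_id` foreign key above.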
+    study: "Study" = relationship("Study", back_populates="jobs", uselist=False)
 
     def to_dto(self, with_logs: bool = False) -> TaskDTO:
         return TaskDTO(
@@ -140,12 +172,12 @@ def to_dto(self, with_logs: bool = False) -> TaskDTO:
             )
             if self.completion_date
             else None,
-            logs=sorted([log.to_dto() for log in self.logs], key=lambda l: l.id) if with_logs else None,
+            logs=sorted([log.to_dto() for log in self.logs], key=lambda log: log.id) if with_logs else None,
             type=self.type,
             ref_id=self.ref_id,
         )
 
-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other: t.Any) -> bool:
         if not isinstance(other, TaskJob):
             return False
         return bool(
@@ -174,7 +206,7 @@ def __repr__(self) -> str:
         )
 
 
-def cancel_orphan_tasks(engine: Engine, session_args: Mapping[str, bool]) -> None:
+def cancel_orphan_tasks(engine: Engine, session_args: t.Mapping[str, bool]) -> None:
     """
     Cancel all tasks that are currently running or pending.
 
@@ -193,8 +225,9 @@ def cancel_orphan_tasks(engine: Engine, session_args: Mapping[str, bool]) -> Non
         TaskJob.result_msg: "Task was interrupted due to server restart",
         TaskJob.completion_date: datetime.utcnow(),
     }
-    with sessionmaker(bind=engine, **session_args)() as session:
-        session.query(TaskJob).filter(TaskJob.status.in_([TaskStatus.RUNNING.value, TaskStatus.PENDING.value])).update(
-            updated_values, synchronize_session=False
-        )
+    orphan_status = [TaskStatus.RUNNING.value, TaskStatus.PENDING.value]
+    make_session = sessionmaker(bind=engine, **session_args)
+    with make_session() as session:
+        q = session.query(TaskJob).filter(TaskJob.status.in_(orphan_status))  # type: ignore
+        q.update(updated_values, synchronize_session=False)
         session.commit()
diff --git a/antarest/core/tasks/repository.py b/antarest/core/tasks/repository.py
index 294f63255b..9a579ba241 100644
--- a/antarest/core/tasks/repository.py
+++ b/antarest/core/tasks/repository.py
@@ -6,9 +6,8 @@
 from fastapi import HTTPException
 from sqlalchemy.orm import Session  # type: ignore
 
-from antarest.core.tasks.model import TaskJob, TaskListFilter, TaskStatus
+from antarest.core.tasks.model import TaskJob, TaskListFilter
 from antarest.core.utils.fastapi_sqlalchemy import db
-from antarest.core.utils.utils import assert_this
 
 
 class TaskJobRepository:
@@ -59,52 +58,35 @@ def get_or_raise(self, id: str) -> TaskJob:
             raise HTTPException(HTTPStatus.NOT_FOUND, f"Task {id} not found")
         return task
 
-    @staticmethod
-    def _combine_clauses(where_clauses: t.List[t.Any]) -> t.Any:
-        assert_this(len(where_clauses) > 0)
-        if len(where_clauses) > 1:
-            return and_(
-                where_clauses[0],
-                TaskJobRepository._combine_clauses(where_clauses[1:]),
-            )
-        else:
-            return where_clauses[0]
-
     def list(self, filter: TaskListFilter, user: t.Optional[int] = None) -> t.List[TaskJob]:
-        query = self.session.query(TaskJob)
-        where_clauses: t.List[t.Any] = []
+        q = self.session.query(TaskJob)
         if user:
-            where_clauses.append(TaskJob.owner_id == user)
+            q = q.filter(TaskJob.owner_id == user)
         if len(filter.status) > 0:
-            where_clauses.append(TaskJob.status.in_([status.value for status in filter.status]))
+            _values = [status.value for status in filter.status]
+            q = q.filter(TaskJob.status.in_(_values))  # type: ignore
         if filter.name:
-            where_clauses.append(TaskJob.name.ilike(f"%{filter.name}%"))
+            q = q.filter(TaskJob.name.ilike(f"%{filter.name}%"))  # type: ignore
         if filter.to_creation_date_utc:
-            where_clauses.append(
-                TaskJob.creation_date.__le__(datetime.datetime.fromtimestamp(filter.to_creation_date_utc))
-            )
+            _date = datetime.datetime.fromtimestamp(filter.to_creation_date_utc)
+            q = q.filter(TaskJob.creation_date <= _date)
         if filter.from_creation_date_utc:
-            where_clauses.append(
-                TaskJob.creation_date.__ge__(datetime.datetime.fromtimestamp(filter.from_creation_date_utc))
-            )
+            _date = datetime.datetime.fromtimestamp(filter.from_creation_date_utc)
+            q = q.filter(TaskJob.creation_date >= _date)
         if filter.to_completion_date_utc:
-            where_clauses.append(
-                TaskJob.completion_date.__le__(datetime.datetime.fromtimestamp(filter.to_completion_date_utc))
-            )
+            _date = datetime.datetime.fromtimestamp(filter.to_completion_date_utc)
+            _clause = and_(TaskJob.completion_date.isnot(None), TaskJob.completion_date <= _date)  # type: ignore
+            q = q.filter(_clause)
         if filter.from_completion_date_utc:
-            where_clauses.append(
-                TaskJob.completion_date.__ge__(datetime.datetime.fromtimestamp(filter.from_completion_date_utc))
-            )
+            _date = datetime.datetime.fromtimestamp(filter.from_completion_date_utc)
+            _clause = and_(TaskJob.completion_date.isnot(None), TaskJob.completion_date >= _date)  # type: ignore
+            q = q.filter(_clause)
         if filter.ref_id is not None:
-            where_clauses.append(TaskJob.ref_id.__eq__(filter.ref_id))
-        if len(filter.type) > 0:
-            where_clauses.append(TaskJob.type.in_([task_type.value for task_type in filter.type]))
-        if len(where_clauses) > 1:
-            query = query.where(TaskJobRepository._combine_clauses(where_clauses))
-        elif len(where_clauses) == 1:
-            query = query.where(*where_clauses)
-
-        tasks: t.List[TaskJob] = query.all()
+            q = q.filter(TaskJob.ref_id == filter.ref_id)
+        if filter.type:
+            _types = [task_type.value for task_type in filter.type]
+            q = q.filter(TaskJob.type.in_(_types))  # type: ignore
+        tasks: t.List[TaskJob] = q.all()
         return tasks
 
     def delete(self, tid: str) -> None:
@@ -113,12 +95,3 @@ def delete(self, tid: str) -> None:
         if task:
             session.delete(task)
             session.commit()
-
-    def update_timeout(self, task_id: str, timeout: int) -> None:
-        """Update task status to TIMEOUT."""
-        session = self.session
-        task: TaskJob = session.get(TaskJob, task_id)
-        task.status = TaskStatus.TIMEOUT
-        task.result_msg = f"Task '{task_id}' timeout after {timeout} seconds"
-        task.result_status = False
-        session.commit()
diff --git a/antarest/launcher/adapters/slurm_launcher/slurm_launcher.py b/antarest/launcher/adapters/slurm_launcher/slurm_launcher.py
index 00283b9ce8..e4412a344e 100644
--- a/antarest/launcher/adapters/slurm_launcher/slurm_launcher.py
+++ b/antarest/launcher/adapters/slurm_launcher/slurm_launcher.py
@@ -26,8 +26,8 @@
 from antarest.launcher.adapters.abstractlauncher import AbstractLauncher, LauncherCallbacks, LauncherInitException
 from antarest.launcher.adapters.log_manager import LogTailManager
 from antarest.launcher.model import JobStatus, LauncherParametersDTO, LogType, XpansionParametersDTO
-from antarest.study.storage.rawstudy.io.reader import IniReader
-from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter
+from antarest.study.storage.rawstudy.ini_reader import IniReader
+from antarest.study.storage.rawstudy.ini_writer import IniWriter
 
 logger = logging.getLogger(__name__)
 logging.getLogger("paramiko").setLevel("WARN")
diff --git a/antarest/launcher/service.py b/antarest/launcher/service.py
index 2aa2d73802..25a777ef5a 100644
--- a/antarest/launcher/service.py
+++ b/antarest/launcher/service.py
@@ -304,9 +304,10 @@ def _filter_from_user_permission(self, job_results: List[JobResult], user: Optio
         orphan_visibility_threshold = datetime.utcnow() - timedelta(days=ORPHAN_JOBS_VISIBILITY_THRESHOLD)
         allowed_job_results = []
-        studies = {
-            study.id: study for study in self.study_service.repository.get_list([job.study_id for job in job_results])
-        }
+
+        studies_ids = [job_result.study_id for job_result in job_results]
+        studies = {study.id: study for study in self.study_service.repository.get_all(studies_ids=studies_ids)}
+
         for job_result in job_results:
             if job_result.study_id in studies:
                 if assert_permission(
diff --git a/antarest/login/model.py b/antarest/login/model.py
index 5012a4995c..097de6d75e 100644
--- a/antarest/login/model.py
+++ b/antarest/login/model.py
@@ -15,6 +15,7 @@
 
 if t.TYPE_CHECKING:
     # avoid circular import
+    from antarest.core.tasks.model import TaskJob
     from antarest.launcher.model import JobResult
 
 
@@ -140,6 +141,10 @@ class Identity(Base):  # type: ignore
     # If an identity is deleted, all the associated job results are detached from the identity.
     job_results: t.List["JobResult"] = relationship("JobResult", back_populates="owner", cascade="save-update, merge")
 
+    # Define a one-to-many relationship with `TaskJob`.
+    # If an identity is deleted, all the associated task jobs are detached from the identity.
+    owned_jobs: t.List["TaskJob"] = relationship("TaskJob", back_populates="owner", cascade="save-update, merge")
+
     def to_dto(self) -> UserInfo:
         return UserInfo(id=self.id, name=self.name)
 
diff --git a/antarest/singleton_services.py b/antarest/singleton_services.py
index f106099523..3f3f94a116 100644
--- a/antarest/singleton_services.py
+++ b/antarest/singleton_services.py
@@ -1,3 +1,4 @@
+import time
 from pathlib import Path
 from typing import Dict, List, cast
 
@@ -71,6 +72,20 @@ def _init(config_file: Path, services_list: List[Module]) -> Dict[Module, IServi
 
 
 def start_all_services(config_file: Path, services_list: List[Module]) -> None:
+    """
+    Start all services in a worker.
+
+    Each worker is started in a different Docker image.
+
+    Args:
+        config_file: Path to the configuration file (`application.yaml`).
+        services_list: List of services to start.
+    """
    services = _init(config_file, services_list)
     for service in services:
         services[service].start(threaded=True)
+    # Once started, the worker must wait indefinitely (daemon service).
+    # This loop may be interrupted using Ctrl+C.
+    while True:
+        time.sleep(2)
diff --git a/antarest/study/business/areas/renewable_management.py b/antarest/study/business/areas/renewable_management.py
index 88870772e5..56a4b44a8d 100644
--- a/antarest/study/business/areas/renewable_management.py
+++ b/antarest/study/business/areas/renewable_management.py
@@ -24,6 +24,7 @@
     "RenewableClusterCreation",
     "RenewableClusterOutput",
     "RenewableManager",
+    "TimeSeriesInterpretation",
 )
 
 _CLUSTER_PATH = "input/renewables/clusters/{area_id}/list/{cluster_id}"
diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py
index 7caeabd9ab..c9eb01d9fa 100644
--- a/antarest/study/business/binding_constraint_management.py
+++ b/antarest/study/business/binding_constraint_management.py
@@ -6,6 +6,7 @@
     ConstraintAlreadyExistError,
     ConstraintIdNotFoundError,
     DuplicateConstraintName,
+    InvalidConstraintName,
     MissingDataError,
     NoBindingConstraintError,
     NoConstraintError,
@@ -16,6 +17,10 @@
 from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency
 from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id
 from antarest.study.storage.storage_service import StudyStorageService
+from antarest.study.storage.variantstudy.business.matrix_constants.binding_constraint.series import (
+    default_bc_hourly,
+    default_bc_weekly_daily,
+)
 from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator
 from antarest.study.storage.variantstudy.model.command.create_binding_constraint import (
     BindingConstraintProperties,
@@ -168,15 +173,20 @@ def create_binding_constraint(
         study: Study,
         data: BindingConstraintPropertiesWithName,
     ) -> None:
-        binding_constraints = self.get_binding_constraint(study, None)
-        existing_ids = [bd.id for bd in binding_constraints]  # type: ignore
-        bd_id = transform_name_to_id(data.name)
-        if bd_id in existing_ids:
-            raise DuplicateConstraintName(f"A binding constraint with the same name already exists: {bd_id}.")
+        bc_id = transform_name_to_id(data.name)
+
+        if not bc_id:
+            raise InvalidConstraintName(f"Invalid binding constraint name: {data.name}.")
 
         file_study = self.storage_service.get_storage(study).get_raw(study)
+        binding_constraints = self.get_binding_constraint(study, None)
+        existing_ids = {bc.id for bc in binding_constraints}  # type: ignore
+
+        if bc_id in existing_ids:
+            raise DuplicateConstraintName(f"A binding constraint with the same name already exists: {bc_id}.")
+
         command = CreateBindingConstraint(
-            name=bd_id,
+            name=data.name,
             enabled=data.enabled,
             time_step=data.time_step,
             operator=data.operator,
@@ -200,13 +210,24 @@ def update_binding_constraint(
         if not isinstance(constraint, BindingConstraintDTO):
             raise NoBindingConstraintError(study.id)
 
+        if data.key == "time_step" and data.value != constraint.time_step:
+            # The user changed the time step, so we must update the matrix accordingly.
+            matrix = {
+                BindingConstraintFrequency.HOURLY.value: default_bc_hourly,
+                BindingConstraintFrequency.DAILY.value: default_bc_weekly_daily,
+                BindingConstraintFrequency.WEEKLY.value: default_bc_weekly_daily,
+            }[data.value].tolist()
+        else:
+            # The user changed another property, so we keep the matrix as it is.
+            matrix = constraint.values
+
         command = UpdateBindingConstraint(
             id=constraint.id,
             enabled=data.value if data.key == "enabled" else constraint.enabled,
             time_step=data.value if data.key == "time_step" else constraint.time_step,
             operator=data.value if data.key == "operator" else constraint.operator,
             coeffs=BindingConstraintManager.constraints_to_coeffs(constraint),
-            values=constraint.values,
+            values=matrix,
             filter_year_by_year=data.value if data.key == "filterByYear" else constraint.filter_year_by_year,
             filter_synthesis=data.value if data.key == "filterSynthesis" else constraint.filter_synthesis,
             comments=data.value if data.key == "comments" else constraint.comments,
diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py
index c124adf27d..e8c32a0c13 100644
--- a/antarest/study/business/table_mode_management.py
+++ b/antarest/study/business/table_mode_management.py
@@ -10,6 +10,7 @@
 from antarest.study.business.utils import FormFieldsBaseModel, execute_or_add_commands
 from antarest.study.common.default_values import FilteringOptions, LinkProperties, NodalOptimization
 from antarest.study.model import RawStudy
+from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency
 from antarest.study.storage.rawstudy.model.filesystem.config.thermal import LawOption, TimeSeriesGenerationOption
 from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
 from antarest.study.storage.storage_service import StudyStorageService
@@ -40,12 +41,6 @@ class TransmissionCapacity(EnumIgnoreCase):
     ENABLED = "enabled"
 
 
-class BindingConstraintType(EnumIgnoreCase):
-    HOURLY = "hourly"
-    DAILY = "daily"
-    WEEKLY = "weekly"
-
-
 class BindingConstraintOperator(EnumIgnoreCase):
     LESS = "less"
     GREATER = "greater"
@@ -114,7 +109,7 @@ class RenewableColumns(FormFieldsBaseModel):
 
 
 class BindingConstraintColumns(FormFieldsBaseModel):
-    type: Optional[BindingConstraintType]
+    type: Optional[BindingConstraintFrequency]
     operator: Optional[BindingConstraintOperator]
     enabled: Optional[StrictBool]
 
@@ -337,7 +332,7 @@ class PathVars(TypedDict, total=False):
     TableTemplateType.BINDING_CONSTRAINT: {
         "type": {
             "path": f"{BINDING_CONSTRAINT_PATH}/type",
-            "default_value": BindingConstraintType.HOURLY.value,
+            "default_value": BindingConstraintFrequency.HOURLY.value,
         },
         "operator": {
             "path": f"{BINDING_CONSTRAINT_PATH}/operator",
diff --git a/antarest/study/business/xpansion_management.py b/antarest/study/business/xpansion_management.py
index 91554bfe94..dc595d5428 100644
--- a/antarest/study/business/xpansion_management.py
+++ b/antarest/study/business/xpansion_management.py
@@ -7,11 +7,12 @@
 import zipfile
 
 from fastapi import HTTPException, UploadFile
-from pydantic import BaseModel, Field, validator
+from pydantic import BaseModel, Extra, Field, ValidationError, root_validator, validator
 
 from antarest.core.exceptions import BadZipBinary
 from antarest.core.model import JSON
 from antarest.study.business.enum_ignore_case import EnumIgnoreCase
+from antarest.study.business.utils import AllOptionalMetaclass
 from antarest.study.model import Study
 from antarest.study.storage.rawstudy.model.filesystem.bucket_node import BucketNode
 from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
@@ -39,34 +40,34 @@ class Master(EnumIgnoreCase):
     RELAXED = "relaxed"
 
 
-class CutType(EnumIgnoreCase):
-    AVERAGE = "average"
-    YEARLY = "yearly"
-    WEEKLY = "weekly"
-
-
 class Solver(EnumIgnoreCase):
     CBC = "Cbc"
     COIN = "Coin"
     XPRESS = "Xpress"
 
 
-class MaxIteration(EnumIgnoreCase):
-    INF = "+Inf"
+class XpansionSensitivitySettings(BaseModel):
+    """
+    A DTO representing the sensitivity analysis settings used for Xpansion.
+
+    The sensitivity analysis is optional.
 
-class XpansionSensitivitySettingsDTO(BaseModel):
-    epsilon: float = 10000.0
-    projection: t.List[str] = Field(default_factory=list)
-    capex: bool = False
+    Attributes:
+        epsilon: Max deviation from optimum (€).
+        projection: List of candidate names to project (the candidate names should be in "candidates.ini" file).
+        capex: Whether to include CAPEX in the sensitivity analysis.
+    """
+
+    epsilon: float = Field(default=0, ge=0, description="Max deviation from optimum (€)")
+    projection: t.List[str] = Field(default_factory=list, description="List of candidate names to project")
+    capex: bool = Field(default=False, description="Whether to include capex in the sensitivity analysis")
 
     @validator("projection", pre=True)
     def projection_validation(cls, v: t.Optional[t.Sequence[str]]) -> t.Sequence[str]:
         return [] if v is None else v
 
 
-# noinspection SpellCheckingInspection
-class XpansionSettingsDTO(BaseModel):
+class XpansionSettings(BaseModel, extra=Extra.ignore, validate_assignment=True, allow_population_by_field_name=True):
     """
     A data transfer object representing the general settings used for Xpansion.
 
@@ -78,54 +79,152 @@
         yearly_weights: Path of the Monte-Carlo weights file for the solution.
         additional_constraints: Path of the additional constraints file for the solution.
         relaxed_optimality_gap: Threshold to switch from relaxed to integer master.
-        cut_type: The type of cut used in the Benders decomposition.
-        ampl_solver: The solver used by AMPL.
-        ampl_presolve: The pre-solve setting used by AMPL.
-        ampl_solve_bounds_frequency: The frequency with which to solve bounds using AMPL.
         relative_gap: Tolerance on relative gap for the solution.
         batch_size: Amount of batches in the Benders decomposition.
         separation_parameter: The separation parameter used in the Benders decomposition.
         solver: The solver used to solve the master and the sub-problems in the Benders decomposition.
         timelimit: The timelimit (in seconds) of the Benders step.
-        log_level: The severity of the solver's log.
-        sensitivity_config: The sensitivity configuration for Xpansion.
+        log_level: The severity of the solver's logs in range [0, 3].
+        sensitivity_config: The sensitivity analysis configuration for Xpansion, if any.
 
     Raises:
         ValueError: If the `relaxed_optimality_gap` attribute is not a float
            or a string ending with "%" and a valid float.
+        ValueError: If the `max_iteration` attribute is not a valid integer.
+    """
+
+    # https://antares-xpansion.readthedocs.io/en/stable/user-guide/get-started/settings-definition/#master
+    master: Master = Field(default=Master.INTEGER, description="Master problem resolution mode")
+
+    # https://antares-xpansion.readthedocs.io/en/stable/user-guide/get-started/settings-definition/#uc_type
+    uc_type: UcType = Field(default=UcType.EXPANSION_FAST, description="Unit commitment type")
+
+    # https://antares-xpansion.readthedocs.io/en/stable/user-guide/get-started/settings-definition/#optimality_gap
+    optimality_gap: float = Field(default=1, ge=0, description="Absolute optimality gap (€)")
+
+    # https://antares-xpansion.readthedocs.io/en/stable/user-guide/get-started/settings-definition/#relative_gap
+    relative_gap: float = Field(default=1e-6, ge=0, description="Relative optimality gap")
+
+    # https://antares-xpansion.readthedocs.io/en/stable/user-guide/get-started/settings-definition/#relaxed_optimality_gap
+    relaxed_optimality_gap: float = Field(default=1e-5, ge=0, description="Relative optimality gap for relaxation")
+
+    # https://antares-xpansion.readthedocs.io/en/stable/user-guide/get-started/settings-definition/#max_iteration
+    max_iteration: int = Field(default=1000, gt=0, description="Maximum number of iterations")
+
+    # https://antares-xpansion.readthedocs.io/en/stable/user-guide/get-started/settings-definition/#solver
+    solver: Solver = Field(default=Solver.XPRESS, description="Solver")
+
+    # https://antares-xpansion.readthedocs.io/en/stable/user-guide/get-started/settings-definition/#log_level
+    log_level: int = Field(default=0, ge=0, le=3, description="Log level in range [0, 3]")
+
+    # https://antares-xpansion.readthedocs.io/en/stable/user-guide/get-started/settings-definition/#separation_parameter
+    separation_parameter: float = Field(default=0.5, gt=0, le=1, description="Separation parameter in range ]0, 1]")
+
+    # https://antares-xpansion.readthedocs.io/en/stable/user-guide/get-started/settings-definition/#batch_size
+    batch_size: int = Field(default=96, ge=0, description="Number of batches")
+
+    yearly_weights: str = Field(
+        "",
+        alias="yearly-weights",
+        description="Yearly weights file",
+    )
+    additional_constraints: str = Field(
+        "",
+        alias="additional-constraints",
+        description="Additional constraints file",
+    )
+
+    # (deprecated field)
+    timelimit: int = int(1e12)
+
+    # The sensitivity analysis is optional
+    sensitivity_config: t.Optional[XpansionSensitivitySettings] = None
+
+    @root_validator(pre=True)
+    def normalize_values(cls, values: t.MutableMapping[str, t.Any]) -> t.MutableMapping[str, t.Any]:
+        if "relaxed-optimality-gap" in values:
+            values["relaxed_optimality_gap"] = values.pop("relaxed-optimality-gap")
+
+        relaxed_optimality_gap = values.get("relaxed_optimality_gap")
+        if relaxed_optimality_gap and isinstance(relaxed_optimality_gap, str):
+            relaxed_optimality_gap = relaxed_optimality_gap.strip()
+            if relaxed_optimality_gap.endswith("%"):
+                # Don't divide by 100, because the value is already a percentage.
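+                # For example, "5%" is stored as 5.0 here, whereas "50%" for the
+                # `separation_parameter` below is converted to 0.5.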
+                values["relaxed_optimality_gap"] = float(relaxed_optimality_gap[:-1])
+            else:
+                values["relaxed_optimality_gap"] = float(relaxed_optimality_gap)
+
+        separation_parameter = values.get("separation_parameter")
+        if separation_parameter and isinstance(separation_parameter, str):
+            separation_parameter = separation_parameter.strip()
+            if separation_parameter.endswith("%"):
+                values["separation_parameter"] = float(separation_parameter[:-1]) / 100
+            else:
+                values["separation_parameter"] = float(separation_parameter)
+
+        if "max_iteration" in values:
+            max_iteration = float(values["max_iteration"])
+            if max_iteration == float("inf"):
+                values["max_iteration"] = 1000
+
+        return values
+
+
+class GetXpansionSettings(XpansionSettings):
+    """
+    DTO object used to get the Xpansion settings.
     """
-    optimality_gap: t.Optional[float] = Field(default=1, ge=0)
-
-    max_iteration: t.Optional[t.Union[int, MaxIteration]] = Field(default=MaxIteration.INF, ge=0)
-
-    uc_type: UcType = UcType.EXPANSION_FAST
-    master: Master = Master.INTEGER
-    yearly_weights: t.Optional[str] = Field(None, alias="yearly-weights")
-    additional_constraints: t.Optional[str] = Field(None, alias="additional-constraints")
-    relaxed_optimality_gap: t.Optional[t.Union[float, str]] = Field(None, alias="relaxed-optimality-gap")
-    cut_type: t.Optional[CutType] = Field(None, alias="cut-type")
-    ampl_solver: t.Optional[str] = Field(None, alias="ampl.solver")
-    ampl_presolve: t.Optional[int] = Field(None, alias="ampl.presolve")
-    ampl_solve_bounds_frequency: t.Optional[int] = Field(None, alias="ampl.solve_bounds_frequency")
-    relative_gap: t.Optional[float] = Field(default=None, ge=0)
-    batch_size: t.Optional[int] = Field(default=0, ge=0)
-    separation_parameter: t.Optional[float] = Field(default=0.5, ge=0, le=1)
-    solver: t.Optional[Solver] = None
-    timelimit: t.Optional[int] = 1000000000000  # 1e12
-    log_level: t.Optional[int] = 0
-    sensitivity_config: t.Optional[XpansionSensitivitySettingsDTO] = None
-
-    @validator("relaxed_optimality_gap")
-    def relaxed_optimality_gap_validation(cls, v: t.Optional[t.Union[float, str]]) -> t.Optional[t.Union[float, str]]:
-        if isinstance(v, float):
-            return v
-        if isinstance(v, str):
-            stripped_v = v.strip()
-            if stripped_v.endswith("%") and float(stripped_v[:-1]):
-                return v
-            raise ValueError("season_correlation is not allowed for 'thermal' type")
-        return v
+    @classmethod
+    def from_config(cls, config_obj: JSON) -> "GetXpansionSettings":
+        """
+        Create a GetXpansionSettings object from a JSON object.
+
+        First, make an attempt to validate the JSON object.
+        If it fails, try to read the settings without validation,
+        so that the user can fix the issue in the form.
+
+        Args:
+            config_obj: The JSON object to read.
+
+        Returns:
+            The object, which may contain extra attributes or invalid values.
+        """
+        try:
+            return cls(**config_obj)
+        except ValidationError:
+            return cls.construct(**config_obj)
+
+
+class UpdateXpansionSettings(XpansionSettings, metaclass=AllOptionalMetaclass):
+    """
+    DTO object used to update the Xpansion settings.
+
+    Fields with a value of `None` are ignored; this allows a partial update of the settings.
+    For that reason, the fields "yearly-weights" and "additional-constraints" must
+    be set to "" instead of `None` if you want to remove the file.
+    """
+
+    # note: for some reason, the alias is not taken into account when using the metaclass,
+    # so we have to redefine the fields with the alias.
+
+    # On the other hand, we make these fields mandatory, because there is an anomaly on the frontend side:
+    # when the user does not select any file, the frontend sends a request without the "yearly-weights"
+    # or "additional-constraints" field, instead of sending the field with an empty value.
+    # This is not a problem as long as the frontend sends a request with all the fields (PUT case),
+    # but it is a problem for partial requests (PATCH case).
+
+    yearly_weights: str = Field(
+        "",
+        alias="yearly-weights",
+        description="Yearly weights file",
+    )
+
+    additional_constraints: str = Field(
+        "",
+        alias="additional-constraints",
+        description="Additional constraints file",
+    )
 
 
 class XpansionCandidateDTO(BaseModel):
@@ -235,34 +334,18 @@ def create_xpansion_configuration(self, study: Study, zipped_config: t.Optional[
             )
             raise BadZipBinary("Only zip file are allowed.")
 
-        study_version = file_study.config.version
-
-        xpansion_settings = {
-            "optimality_gap": 1,
-            "max_iteration": "+Inf",
-            "uc_type": "expansion_fast",
-            "master": "integer",
-            "yearly-weights": None,
-            "additional-constraints": None,
-        }
-
-        if study_version < 800:
-            xpansion_settings["relaxed-optimality-gap"] = 1e6
-            xpansion_settings["cut-type"] = "yearly"
-            xpansion_settings["ampl.solver"] = "cbc"
-            xpansion_settings["ampl.presolve"] = 0
-            xpansion_settings["ampl.solve_bounds_frequency"] = 1000000
+        xpansion_settings = XpansionSettings()
+        settings_obj = xpansion_settings.dict(by_alias=True, exclude_none=True, exclude={"sensitivity_config"})
+        if xpansion_settings.sensitivity_config:
+            sensitivity_obj = xpansion_settings.sensitivity_config.dict(by_alias=True, exclude_none=True)
         else:
-            xpansion_settings["relative_gap"] = 1e-12
-            xpansion_settings["solver"] = Solver.CBC.value
-            xpansion_settings["batch_size"] = 0
-            xpansion_settings["separation_parameter"] = 0.5
+            sensitivity_obj = {}
 
         xpansion_configuration_data = {
             "user": {
                 "expansion": {
-                    "settings": xpansion_settings,
-                    "sensitivity": {"sensitivity_in": {}},
+                    "settings": settings_obj,
+                    "sensitivity": {"sensitivity_in": sensitivity_obj},
                     "candidates": {},
                     "capa": {},
                     "weights": {},
@@ -278,61 +361,50 @@ def delete_xpansion_configuration(self, study: Study) -> None:
         file_study = self.study_storage_service.get_storage(study).get_raw(study)
         file_study.tree.delete(["user", "expansion"])
 
-    def get_xpansion_settings(self, study: Study) -> XpansionSettingsDTO:
+    def get_xpansion_settings(self, study: Study) -> GetXpansionSettings:
         logger.info(f"Getting xpansion settings for study '{study.id}'")
         file_study = self.study_storage_service.get_storage(study).get_raw(study)
-        settings_obj = file_study.tree.get(["user", "expansion", "settings"])
+        config_obj = file_study.tree.get(["user", "expansion", "settings"])
         with contextlib.suppress(KeyError):
-            settings_obj["sensitivity_config"] = file_study.tree.get(
+            config_obj["sensitivity_config"] = file_study.tree.get(
                 ["user", "expansion", "sensitivity", "sensitivity_in"]
             )
-        return XpansionSettingsDTO(**settings_obj)
-
-    @staticmethod
-    def _assert_xpansion_settings_additional_constraints_is_valid(
-        file_study: FileStudy,
-        additional_constraints: str,
-    ) -> None:
-        if additional_constraints:
-            try:
-                file_study.tree.get(
-                    [
-                        "user",
-                        "expansion",
-                        "constraints",
-                        additional_constraints,
-                    ]
-                )
-            except ChildNotFoundError:
-                raise XpansionFileNotFoundError(
-                    f"The 'additional-constraints' file '{additional_constraints}' does not exist"
-                )
+        return GetXpansionSettings.from_config(config_obj)
 
     def update_xpansion_settings(
-        self, study: Study, new_xpansion_settings_dto: XpansionSettingsDTO
-    ) -> XpansionSettingsDTO:
+        self, study: Study, new_xpansion_settings: UpdateXpansionSettings
+    ) -> GetXpansionSettings:
         logger.info(f"Updating xpansion settings for study '{study.id}'")
+
+        actual_settings = self.get_xpansion_settings(study)
+        settings_fields = new_xpansion_settings.dict(by_alias=False, exclude_none=True, exclude={"sensitivity_config"})
+        updated_settings = actual_settings.copy(deep=True, update=settings_fields)
+
         file_study = self.study_storage_service.get_storage(study).get_raw(study)
-        if new_xpansion_settings_dto.additional_constraints:
-            self._assert_xpansion_settings_additional_constraints_is_valid(
-                file_study, new_xpansion_settings_dto.additional_constraints
-            )
-        file_study.tree.save(
-            new_xpansion_settings_dto.dict(by_alias=True, exclude={"sensitivity_config"}),
-            ["user", "expansion", "settings"],
-        )
-        if new_xpansion_settings_dto.sensitivity_config:
-            file_study.tree.save(
-                new_xpansion_settings_dto.sensitivity_config.dict(),
-                [
-                    "user",
-                    "expansion",
-                    "sensitivity",
-                    "sensitivity_in",
-                ],
-            )
-        return new_xpansion_settings_dto
+
+        # Specific handling of the additional constraints file:
+        # - If the file name is `None`, the field is excluded from the update and the current value is kept.
+        # - If the file name is empty, it means that the user wants to deselect the additional constraints file,
+        #   but they do not want to delete it from the expansion configuration folder.
+        # - If the file name is not empty, it means that the user wants to select an additional constraints file.
+        #   It is therefore necessary to check that the file exists.
+        constraints_file = new_xpansion_settings.additional_constraints
+        if constraints_file:
+            try:
+                constraints_url = ["user", "expansion", "constraints", constraints_file]
+                file_study.tree.get(constraints_url)
+            except ChildNotFoundError:
+                msg = f"Additional constraints file '{constraints_file}' does not exist"
+                raise XpansionFileNotFoundError(msg) from None
+
+        config_obj = updated_settings.dict(by_alias=True, exclude={"sensitivity_config"})
+        file_study.tree.save(config_obj, ["user", "expansion", "settings"])
+
+        if new_xpansion_settings.sensitivity_config:
+            sensitivity_obj = new_xpansion_settings.sensitivity_config.dict(by_alias=True)
+            file_study.tree.save(sensitivity_obj, ["user", "expansion", "sensitivity", "sensitivity_in"])
+
+        return self.get_xpansion_settings(study)
 
     @staticmethod
     def _assert_link_profile_are_files(
@@ -431,7 +503,8 @@ def _assert_investment_candidate_is_valid(
             or (bool_max_investment and not bool_max_units and not bool_unit_size)
         ):
             raise BadCandidateFormatError(
-                "The candidate is not well formatted.\nIt should either contain max-investment or (max-units and unit-size)."
+                "The candidate is not well formatted."
+                "\nIt should either contain max-investment or (max-units and unit-size)."
             )
 
     def _assert_candidate_is_correct(
@@ -454,24 +527,25 @@ def _assert_candidate_is_correct(
         )
         assert xpansion_candidate_dto.annual_cost_per_mw
 
-    def add_candidate(self, study: Study, xpansion_candidate_dto: XpansionCandidateDTO) -> None:
+    def add_candidate(self, study: Study, xpansion_candidate: XpansionCandidateDTO) -> XpansionCandidateDTO:
         file_study = self.study_storage_service.get_storage(study).get_raw(study)
 
-        candidates = file_study.tree.get(["user", "expansion", "candidates"])
+        candidates_obj = file_study.tree.get(["user", "expansion", "candidates"])
 
-        self._assert_candidate_is_correct(candidates, file_study, xpansion_candidate_dto)
+        self._assert_candidate_is_correct(candidates_obj, file_study, xpansion_candidate)
 
         # Find next candidate id
-        max_id = 2 if not candidates else int(sorted(candidates.keys()).pop()) + 2
+        max_id = 2 if not candidates_obj else int(sorted(candidates_obj.keys()).pop()) + 2
         next_id = next(
-            str(i) for i in range(1, max_id) if str(i) not in candidates
+            str(i) for i in range(1, max_id) if str(i) not in candidates_obj
         )  # The primary key is actually the name, the id does not matter and is never checked.
 
-        logger.info(f"Adding candidate '{xpansion_candidate_dto.name}' to study '{study.id}'")
-        candidates[next_id] = xpansion_candidate_dto.dict(by_alias=True, exclude_none=True)
-        candidates_data = {"user": {"expansion": {"candidates": candidates}}}
+        logger.info(f"Adding candidate '{xpansion_candidate.name}' to study '{study.id}'")
+        candidates_obj[next_id] = xpansion_candidate.dict(by_alias=True, exclude_none=True)
+        candidates_data = {"user": {"expansion": {"candidates": candidates_obj}}}
         file_study.tree.save(candidates_data)
         # Should we add a field in the study config containing the xpansion candidates like the links or the areas ?
+        return self.get_candidate(study, xpansion_candidate.name)
 
     def get_candidate(self, study: Study, candidate_name: str) -> XpansionCandidateDTO:
         logger.info(f"Getting candidate '{candidate_name}' of study '{study.id}'")
@@ -524,11 +598,12 @@ def delete_candidate(self, study: Study, candidate_name: str) -> None:
         logger.info(f"Deleting candidate '{candidate_name}' from study '{study.id}'")
         file_study.tree.delete(["user", "expansion", "candidates", candidate_id])
 
-    def update_xpansion_constraints_settings(self, study: Study, constraints_file_name: t.Optional[str]) -> None:
-        self.update_xpansion_settings(
-            study,
-            XpansionSettingsDTO.parse_obj({"additional-constraints": constraints_file_name}),
-        )
+    def update_xpansion_constraints_settings(self, study: Study, constraints_file_name: str) -> GetXpansionSettings:
+        # Make sure the filename is not `None`, because `None` values are ignored by the update.
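+        # An empty string deselects the constraints file without deleting it from the
+        # expansion configuration folder (see `update_xpansion_settings` above).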
+        constraints_file_name = constraints_file_name or ""
+        # noinspection PyArgumentList
+        xpansion_settings = UpdateXpansionSettings(additional_constraints=constraints_file_name)
+        return self.update_xpansion_settings(study, xpansion_settings)
 
     def _raw_file_dir(self, raw_file_type: XpansionResourceFileType) -> t.List[str]:
         if raw_file_type == XpansionResourceFileType.CONSTRAINTS:
@@ -566,8 +641,8 @@ def _add_raw_files(
         for file in files:
             content = file.file.read()
-            if type(content) != bytes:
-                content = content.encode()
+            if isinstance(content, str):
+                content = content.encode(encoding="utf-8")
             buffer[file.filename] = content
 
         file_study.tree.save(data)
@@ -633,7 +708,7 @@ def list_root_files(self, study: Study) -> t.List[str]:
         root_files = [
             key
             for key, node in t.cast(FolderNode, file_study.tree.get_node(["user", "expansion"])).build().items()
-            if key not in registered_filenames and type(node) != BucketNode
+            if key not in registered_filenames and not isinstance(node, BucketNode)
         ]
         return root_files
 
diff --git a/antarest/study/model.py b/antarest/study/model.py
index a1e5093c4e..de71a1add3 100644
--- a/antarest/study/model.py
+++ b/antarest/study/model.py
@@ -1,9 +1,9 @@
+import dataclasses
 import enum
+import typing as t
 import uuid
-from dataclasses import dataclass
 from datetime import datetime, timedelta
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Tuple
 
 from pydantic import BaseModel
 from sqlalchemy import Boolean, Column, DateTime, Enum, ForeignKey, Integer, String, Table  # type: ignore
@@ -14,6 +14,10 @@
 from antarest.core.persistence import Base
 from antarest.login.model import Group, GroupDTO, Identity
 
+if t.TYPE_CHECKING:
+    # avoid circular import
+    from antarest.core.tasks.model import TaskJob
+
 DEFAULT_WORKSPACE_NAME = "default"
 
 groups_metadata = Table(
@@ -23,7 +27,7 @@
     Column("study_id", String(36), ForeignKey("study.id")),
 )
 
-STUDY_REFERENCE_TEMPLATES: Dict[str, str] = {
+STUDY_REFERENCE_TEMPLATES: t.Dict[str, str] = {
     "600": "empty_study_613.zip",
     "610": "empty_study_613.zip",
     "640": "empty_study_613.zip",
@@ -52,7 +56,6 @@ class CommentsDto(BaseModel):
     comments: str
 
 
-@dataclass
 class StudyAdditionalData(Base):  # type:ignore
     """
     Study additional data
@@ -69,7 +72,7 @@ class StudyAdditionalData(Base):  # type:ignore
     horizon = Column(String)
     patch = Column(String(), nullable=True)
 
-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other: t.Any) -> bool:
         if not super().__eq__(other):
             return False
         if not isinstance(other, StudyAdditionalData):
@@ -77,7 +80,6 @@ def __eq__(self, other: Any) -> bool:
         return bool(other.author == self.author and other.horizon == self.horizon and other.patch == self.patch)
 
 
-@dataclass
 class Study(Base):  # type: ignore
     """
     Standard Study entity
@@ -111,6 +113,11 @@ class Study(Base):  # type: ignore
         uselist=False,
         cascade="all, delete, delete-orphan",
     )
+
+    # Define a one-to-many relationship between `Study` and `TaskJob`.
+    # If the Study is deleted, all attached TaskJob must be deleted in cascade.
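+    # Note: this pairs with the `ondelete="CASCADE"` clause of the `fk_taskjob_study_id`
+    # foreign key on `taskjob.ref_id` (see the migration above).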
+ jobs: t.List["TaskJob"] = relationship("TaskJob", back_populates="study", cascade="all, delete, delete-orphan") + __mapper_args__ = {"polymorphic_identity": "study", "polymorphic_on": type} def __str__(self) -> str: @@ -126,7 +133,7 @@ def __str__(self) -> str: f" groups={[str(u) + ',' for u in self.groups]}" ) - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: t.Any) -> bool: if not isinstance(other, Study): return False return bool( @@ -143,11 +150,10 @@ def __eq__(self, other: Any) -> bool: and other.archived == self.archived ) - def to_json_summary(self) -> Any: + def to_json_summary(self) -> t.Any: return {"id": self.id, "name": self.name} -@dataclass class RawStudy(Study): """ Study filesystem based entity implementation. @@ -168,7 +174,7 @@ class RawStudy(Study): "polymorphic_identity": "rawstudy", } - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: t.Any) -> bool: if not super().__eq__(other): return False if not isinstance(other, RawStudy): @@ -181,7 +187,7 @@ def __eq__(self, other: Any) -> bool: ) -@dataclass +@dataclasses.dataclass class StudyFolder: """ DTO used by watcher to keep synchronized studies and workspace organization and database @@ -189,25 +195,25 @@ class StudyFolder: path: Path workspace: str - groups: List[Group] + groups: t.List[Group] class PatchStudy(BaseModel): - scenario: Optional[str] = None - doc: Optional[str] = None - status: Optional[str] = None - comments: Optional[str] = None - tags: List[str] = [] + scenario: t.Optional[str] = None + doc: t.Optional[str] = None + status: t.Optional[str] = None + comments: t.Optional[str] = None + tags: t.List[str] = [] class PatchArea(BaseModel): - country: Optional[str] = None - tags: List[str] = [] + country: t.Optional[str] = None + tags: t.List[str] = [] class PatchCluster(BaseModel): - type: Optional[str] = None - code_oi: Optional[str] = None + type: t.Optional[str] = None + code_oi: t.Optional[str] = None class Config: @classmethod @@ -216,18 +222,18 @@ def alias_generator(cls, string: str) -> str: class PatchOutputs(BaseModel): - reference: Optional[str] = None + reference: t.Optional[str] = None class Patch(BaseModel): - study: Optional[PatchStudy] = None - areas: Optional[Dict[str, PatchArea]] = None - thermal_clusters: Optional[Dict[str, PatchCluster]] = None - outputs: Optional[PatchOutputs] = None + study: t.Optional[PatchStudy] = None + areas: t.Optional[t.Dict[str, PatchArea]] = None + thermal_clusters: t.Optional[t.Dict[str, PatchCluster]] = None + outputs: t.Optional[PatchOutputs] = None class OwnerInfo(BaseModel): - id: Optional[int] = None + id: t.Optional[int] = None name: str @@ -239,38 +245,38 @@ class StudyMetadataDTO(BaseModel): updated: str type: str owner: OwnerInfo - groups: List[GroupDTO] + groups: t.List[GroupDTO] public_mode: PublicMode workspace: str managed: bool archived: bool - horizon: Optional[str] - scenario: Optional[str] - status: Optional[str] - doc: Optional[str] - folder: Optional[str] = None - tags: List[str] = [] + horizon: t.Optional[str] + scenario: t.Optional[str] + status: t.Optional[str] + doc: t.Optional[str] + folder: t.Optional[str] = None + tags: t.List[str] = [] class StudyMetadataPatchDTO(BaseModel): - name: Optional[str] = None - author: Optional[str] = None - horizon: Optional[str] = None - scenario: Optional[str] = None - status: Optional[str] = None - doc: Optional[str] = None - tags: List[str] = [] + name: t.Optional[str] = None + author: t.Optional[str] = None + horizon: t.Optional[str] = None + scenario: t.Optional[str] = 
None + status: t.Optional[str] = None + doc: t.Optional[str] = None + tags: t.List[str] = [] class StudySimSettingsDTO(BaseModel): - general: Dict[str, Any] - input: Dict[str, Any] - output: Dict[str, Any] - optimization: Dict[str, Any] - otherPreferences: Dict[str, Any] - advancedParameters: Dict[str, Any] - seedsMersenneTwister: Dict[str, Any] - playlist: Optional[List[int]] = None + general: t.Dict[str, t.Any] + input: t.Dict[str, t.Any] + output: t.Dict[str, t.Any] + optimization: t.Dict[str, t.Any] + otherPreferences: t.Dict[str, t.Any] + advancedParameters: t.Dict[str, t.Any] + seedsMersenneTwister: t.Dict[str, t.Any] + playlist: t.Optional[t.List[int]] = None class StudySimResultDTO(BaseModel): @@ -335,12 +341,12 @@ class StudyDownloadDTO(BaseModel): """ type: StudyDownloadType - years: Optional[List[int]] + years: t.Optional[t.List[int]] level: StudyDownloadLevelDTO - filterIn: Optional[str] - filterOut: Optional[str] - filter: Optional[List[str]] - columns: Optional[List[str]] + filterIn: t.Optional[str] + filterOut: t.Optional[str] + filter: t.Optional[t.List[str]] + columns: t.Optional[t.List[str]] synthesis: bool = False includeClusters: bool = False @@ -355,25 +361,25 @@ class MatrixIndex(BaseModel): class TimeSerie(BaseModel): name: str unit: str - data: List[Optional[float]] = [] + data: t.List[t.Optional[float]] = [] class TimeSeriesData(BaseModel): type: StudyDownloadType name: str - data: Dict[str, List[TimeSerie]] = {} + data: t.Dict[str, t.List[TimeSerie]] = {} class MatrixAggregationResultDTO(BaseModel): index: MatrixIndex - data: List[TimeSeriesData] - warnings: List[str] + data: t.List[TimeSeriesData] + warnings: t.List[str] class MatrixAggregationResult(BaseModel): index: MatrixIndex - data: Dict[Tuple[StudyDownloadType, str], Dict[str, List[TimeSerie]]] - warnings: List[str] + data: t.Dict[t.Tuple[StudyDownloadType, str], t.Dict[str, t.List[TimeSerie]]] + warnings: t.List[str] def to_dto(self) -> MatrixAggregationResultDTO: return MatrixAggregationResultDTO.construct( diff --git a/antarest/study/repository.py b/antarest/study/repository.py index 1a830c7428..ac7f730fca 100644 --- a/antarest/study/repository.py +++ b/antarest/study/repository.py @@ -2,12 +2,13 @@ import logging import typing as t +from sqlalchemy import and_, or_ # type: ignore from sqlalchemy.orm import Session, joinedload, with_polymorphic # type: ignore from antarest.core.interfaces.cache import CacheConstants, ICache from antarest.core.utils.fastapi_sqlalchemy import db from antarest.study.common.utils import get_study_information -from antarest.study.model import RawStudy, Study, StudyAdditionalData +from antarest.study.model import DEFAULT_WORKSPACE_NAME, RawStudy, Study, StudyAdditionalData logger = logging.getLogger(__name__) @@ -81,7 +82,7 @@ def get(self, id: str) -> t.Optional[Study]: ) return study - def one(self, id: str) -> Study: + def one(self, study_id: str) -> Study: """Get the study by ID or raise `sqlalchemy.exc.NoResultFound` if not found in database.""" # When we fetch a study, we also need to fetch the associated owner and groups # to check the permissions of the current user efficiently. 
@@ -89,30 +90,42 @@ def one(self, id: str) -> Study: self.session.query(Study) .options(joinedload(Study.owner)) .options(joinedload(Study.groups)) - .filter_by(id=id) + .filter_by(id=study_id) .one() ) return study - def get_list(self, study_id: t.List[str]) -> t.List[Study]: - # When we fetch a study, we also need to fetch the associated owner and groups - # to check the permissions of the current user efficiently. - studies: t.List[Study] = ( - self.session.query(Study) - .options(joinedload(Study.owner)) - .options(joinedload(Study.groups)) - .where(Study.id.in_(study_id)) - .all() - ) - return studies - def get_additional_data(self, study_id: str) -> t.Optional[StudyAdditionalData]: study: StudyAdditionalData = self.session.query(StudyAdditionalData).get(study_id) return study - def get_all(self) -> t.List[Study]: + def get_all( + self, + managed: t.Optional[bool] = None, + studies_ids: t.Optional[t.List[str]] = None, + exists: bool = True, + ) -> t.List[Study]: + # When we fetch a study, we also need to fetch the associated owner and groups + # to check the permissions of the current user efficiently. + # We also need to fetch the additional data to display the study information + # efficiently (see: `utils.get_study_information`) entity = with_polymorphic(Study, "*") - studies: t.List[Study] = self.session.query(entity).filter(RawStudy.missing.is_(None)).all() + + q = self.session.query(entity) + if exists: + q = q.filter(RawStudy.missing.is_(None)) + q = q.options(joinedload(entity.owner)) + q = q.options(joinedload(entity.groups)) + q = q.options(joinedload(entity.additional_data)) + if managed is not None: + if managed: + q = q.filter(or_(entity.type == "variantstudy", RawStudy.workspace == DEFAULT_WORKSPACE_NAME)) + else: + q = q.filter(entity.type == "rawstudy") + q = q.filter(RawStudy.workspace != DEFAULT_WORKSPACE_NAME) + if studies_ids is not None: + q = q.filter(entity.id.in_(studies_ids)) + studies: t.List[Study] = q.all() return studies def get_all_raw(self, show_missing: bool = True) -> t.List[RawStudy]: diff --git a/antarest/study/service.py b/antarest/study/service.py index a8da7fc60c..7a9ee71507 100644 --- a/antarest/study/service.py +++ b/antarest/study/service.py @@ -65,7 +65,12 @@ from antarest.study.business.thematic_trimming_management import ThematicTrimmingManager from antarest.study.business.timeseries_config_management import TimeSeriesConfigManager from antarest.study.business.utils import execute_or_add_commands -from antarest.study.business.xpansion_management import XpansionCandidateDTO, XpansionManager, XpansionSettingsDTO +from antarest.study.business.xpansion_management import ( + GetXpansionSettings, + UpdateXpansionSettings, + XpansionCandidateDTO, + XpansionManager, +) from antarest.study.model import ( DEFAULT_WORKSPACE_NAME, NEW_DEFAULT_STUDY_VERSION, @@ -104,14 +109,7 @@ should_study_be_denormalized, upgrade_study, ) -from antarest.study.storage.utils import ( - assert_permission, - get_default_workspace_path, - get_start_date, - is_managed, - remove_from_cache, - study_matcher, -) +from antarest.study.storage.utils import assert_permission, get_start_date, is_managed, remove_from_cache, study_matcher from antarest.study.storage.variantstudy.model.command.icommand import ICommand from antarest.study.storage.variantstudy.model.command.replace_matrix import ReplaceMatrix from antarest.study.storage.variantstudy.model.command.update_comments import UpdateComments @@ -128,6 +126,20 @@ MAX_MISSING_STUDY_TIMEOUT = 2 # days +def get_disk_usage(path: 
Union[str, Path]) -> int:
+    path = Path(path)
+    if path.suffix.lower() in {".zip", ".7z"}:
+        return os.path.getsize(path)
+    total_size = 0
+    with os.scandir(path) as it:
+        for entry in it:
+            if entry.is_file():
+                total_size += entry.stat().st_size
+            elif entry.is_dir():
+                total_size += get_disk_usage(path=str(entry.path))
+    return total_size
+
+
 class StudyUpgraderTask:
     """
     Task to perform a study upgrade.
@@ -452,14 +464,17 @@ def get_studies_information(
             for k in cached_studies:
                 studies[k] = StudyMetadataDTO.parse_obj(cached_studies[k])
         else:
-            logger.info("Retrieving all studies")
-            all_studies = self.repository.get_all()
+            if managed:
+                logger.info("Retrieving all managed studies")
+                all_studies = self.repository.get_all(managed=True)
+            else:
+                logger.info("Retrieving all studies")
+                all_studies = self.repository.get_all()
             logger.info("Studies retrieved")
             for study in all_studies:
-                if not managed or is_managed(study):
-                    study_metadata = self._try_get_studies_information(study)
-                    if study_metadata is not None:
-                        studies[study_metadata.id] = study_metadata
+                study_metadata = self._try_get_studies_information(study)
+                if study_metadata is not None:
+                    studies[study_metadata.id] = study_metadata
             self.cache_service.put(cache_key, studies)
         return {
             s.id: s
@@ -616,7 +631,7 @@ def create_study(
             str: The ID of the newly created study.
         """
         sid = str(uuid4())
-        study_path = str(get_default_workspace_path(self.config) / sid)
+        study_path = self.config.get_workspace_path() / sid
 
         author = self.get_user_name(params)
@@ -1297,7 +1312,7 @@ def import_study(
             BadArchiveContent: If the archive is corrupted or in an unknown format.
         """
         sid = str(uuid4())
-        path = str(get_default_workspace_path(self.config) / sid)
+        path = str(self.config.get_workspace_path() / sid)
         study = RawStudy(
             id=sid,
             workspace=DEFAULT_WORKSPACE_NAME,
@@ -2035,7 +2050,7 @@ def delete_xpansion_configuration(self, uuid: str, params: RequestParameters) ->
         self._assert_study_unarchived(study)
         self.xpansion_manager.delete_xpansion_configuration(study)
 
-    def get_xpansion_settings(self, uuid: str, params: RequestParameters) -> XpansionSettingsDTO:
+    def get_xpansion_settings(self, uuid: str, params: RequestParameters) -> GetXpansionSettings:
         study = self.get_study(uuid)
         assert_permission(params.user, study, StudyPermissionType.READ)
         return self.xpansion_manager.get_xpansion_settings(study)
@@ -2043,9 +2058,9 @@ def update_xpansion_settings(
     def update_xpansion_settings(
         self,
         uuid: str,
-        xpansion_settings_dto: XpansionSettingsDTO,
+        xpansion_settings_dto: UpdateXpansionSettings,
         params: RequestParameters,
-    ) -> XpansionSettingsDTO:
+    ) -> GetXpansionSettings:
         study = self.get_study(uuid)
         assert_permission(params.user, study, StudyPermissionType.READ)
         self._assert_study_unarchived(study)
@@ -2056,7 +2071,7 @@ def add_candidate(
         uuid: str,
         xpansion_candidate_dto: XpansionCandidateDTO,
         params: RequestParameters,
-    ) -> None:
+    ) -> XpansionCandidateDTO:
         study = self.get_study(uuid)
         assert_permission(params.user, study, StudyPermissionType.WRITE)
         self._assert_study_unarchived(study)
@@ -2093,9 +2108,9 @@ def delete_xpansion_candidate(self, uuid: str, candidate_name: str, params: Requ
     def update_xpansion_constraints_settings(
         self,
         uuid: str,
-        constraints_file_name: Optional[str],
+        constraints_file_name: str,
         params: RequestParameters,
-    ) -> None:
+    ) -> GetXpansionSettings:
         study = self.get_study(uuid)
         assert_permission(params.user, study, StudyPermissionType.WRITE)
        self._assert_study_unarchived(study)
@@ -2135,11 +2150,10 @@ def check_and_update_all_study_versions_in_database(self, params: RequestParamet
         if params.user and not params.user.is_site_admin():
             logger.error(f"User {params.user.id} is not site admin")
             raise UserHasNotPermissionError()
-        studies = self.repository.get_all()
+        studies = self.repository.get_all(managed=False)
         for study in studies:
-            if isinstance(study, RawStudy) and not is_managed(study):
-                storage = self.storage_service.raw_study_service
-                storage.check_and_update_study_version_in_database(study)
+            storage = self.storage_service.raw_study_service
+            storage.check_and_update_study_version_in_database(study)
 
     def archive_outputs(self, study_id: str, params: RequestParameters) -> None:
         logger.info(f"Archiving all outputs for study {study_id}")
@@ -2351,3 +2365,21 @@ def upgrade_study(
             custom_event_messages=None,
             request_params=params,
         )
+
+    def get_disk_usage(self, uuid: str, params: RequestParameters) -> int:
+        """
+        Compute the disk space used to store the study whose id is `uuid`,
+        provided the study exists and the user has the required permissions;
+        otherwise an error is raised.
+
+        Args:
+            uuid: the study id
+            params: user request parameters
+
+        Returns:
+            Disk usage of the study with id = `uuid`, in bytes.
+        """
+        study = self.get_study(uuid=uuid)
+        assert_permission(params.user, study, StudyPermissionType.READ)
+        path = str(self.storage_service.get_storage(study).get_study_path(study))
+        return get_disk_usage(path=path)
diff --git a/antarest/study/storage/auto_archive_service.py b/antarest/study/storage/auto_archive_service.py
index bd7fe31f7d..8a15cb0f49 100644
--- a/antarest/study/storage/auto_archive_service.py
+++ b/antarest/study/storage/auto_archive_service.py
@@ -26,17 +26,14 @@ def __init__(self, study_service: StudyService, config: Config):
         self.max_parallel = self.config.storage.auto_archive_max_parallel
 
     def _try_archive_studies(self) -> None:
-        now = datetime.datetime.utcnow()
-        study_ids_to_archive: List[Tuple[str, bool]] = []
+        old_date = datetime.datetime.utcnow() - datetime.timedelta(days=self.config.storage.auto_archive_threshold_days)
         with db():
-            studies: List[Study] = self.study_service.repository.get_all()
+            studies: List[Study] = self.study_service.repository.get_all(managed=True, exists=False)
             # list of study id and boolean indicating if it's a raw study (True) or a variant (False)
             study_ids_to_archive = [
                 (study.id, isinstance(study, RawStudy))
                 for study in studies
-                if is_managed(study)
-                and (study.last_access or study.updated_at)
-                < now - datetime.timedelta(days=self.config.storage.auto_archive_threshold_days)
+                if (study.last_access or study.updated_at) < old_date
                 and (isinstance(study, VariantStudy) or not study.archived)
             ]
         for study_id, is_raw_study in study_ids_to_archive[0 : self.max_parallel]:
diff --git a/antarest/study/storage/rawstudy/ini_reader.py b/antarest/study/storage/rawstudy/ini_reader.py
new file mode 100644
index 0000000000..f145b948a9
--- /dev/null
+++ b/antarest/study/storage/rawstudy/ini_reader.py
@@ -0,0 +1,200 @@
+import typing as t
+from abc import ABC, abstractmethod
+from pathlib import Path
+
+from antarest.core.model import JSON, SUB_JSON
+
+
+def convert_value(value: str) -> t.Union[str, int, float, bool]:
+    """Convert value to the appropriate type for JSON."""
+
+    try:
+        # Infinity values are not supported by JSON, so we use a string instead.
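+        # e.g. convert_value("TRUE") -> True, convert_value("-inf") -> "-Inf";
+        # numeric strings fall through to the int()/float() attempts below.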
+        mapping = {"true": True, "false": False, "+inf": "+Inf", "-inf": "-Inf", "inf": "+Inf"}
+        return t.cast(t.Union[str, int, float, bool], mapping[value.lower()])
+    except KeyError:
+        try:
+            return int(value)
+        except ValueError:
+            try:
+                return float(value)
+            except ValueError:
+                return value
+
+
+def convert_obj(item: t.Any) -> SUB_JSON:
+    """Convert object to the appropriate type for JSON (scalar, dictionary or list)."""
+
+    if isinstance(item, dict):
+        return {key: convert_obj(value) for key, value in item.items()}
+    elif isinstance(item, list):
+        return [convert_obj(value) for value in item]
+    else:
+        return convert_value(item)
+
+
+class IReader(ABC):
+    """
+    Init file Reader interface
+    """
+
+    @abstractmethod
+    def read(self, path: t.Any) -> JSON:
+        """
+        Parse `.ini` file to json object.
+
+        Args:
+            path: Path to `.ini` file or file-like object.
+
+        Returns:
+            Dictionary of parsed `.ini` file which can be converted to JSON.
+        """
+        raise NotImplementedError()
+
+
+class IniReader(IReader):
+    """
+    Custom `.ini` reader for `.ini` files that have duplicate keys in a section.
+
+    This class is required to parse `settings/generaldata.ini` files, which
+    have duplicate keys like "playlist_year_weight", "playlist_year +", "playlist_year -",
+    "select_var -", "select_var +", in the `[playlist]` section.
+
+    For instance::
+
+        [playlist]
+        playlist_reset = false
+        playlist_year + = 6
+        playlist_year + = 8
+        playlist_year + = 13
+
+    It is also required to parse `input/areas/sets.ini` files, which have keys like "+" or "-".
+
+    For instance::
+
+        [all areas]
+        caption = All areas
+        comments = Spatial aggregates on all areas
+        + = east
+        + = west
+
+    This class is not compatible with standard `.ini` readers.
+    """
+
+    def __init__(self, special_keys: t.Sequence[str] = (), section_name: str = "settings") -> None:
+        super().__init__()
+
+        # Set of keys whose values should be parsed as lists.
+        self._special_keys = set(special_keys)
+
+        # Default section name to use if the `.ini` file has no section.
+        self._section_name = section_name
+
+    def __repr__(self) -> str:  # pragma: no cover
+        """Return a string representation of the object."""
+        cls = self.__class__.__name__
+        # use getattr() to make sure that the attributes are defined
+        special_keys = tuple(getattr(self, "_special_keys", ()))
+        section_name = getattr(self, "_section_name", "settings")
+        return f"{cls}(special_keys={special_keys!r}, section_name={section_name!r})"
+
+    def read(self, path: t.Any) -> JSON:
+        if isinstance(path, (Path, str)):
+            try:
+                with open(path, mode="r", encoding="utf-8") as f:
+                    sections = self._parse_ini_file(f)
+            except UnicodeDecodeError:
+                # On Windows, `.ini` files may use "cp1252" encoding
+                with open(path, mode="r", encoding="cp1252") as f:
+                    sections = self._parse_ini_file(f)
+            except FileNotFoundError:
+                # If the file is missing, an empty dictionary is returned.
+                # This is required to mimic the behavior of `configparser.ConfigParser`.
+                return {}
+
+        elif hasattr(path, "read"):
+            with path:
+                sections = self._parse_ini_file(path)
+
+        else:  # pragma: no cover
+            raise TypeError(repr(type(path)))
+
+        return t.cast(JSON, convert_obj(sections))
+
+    def _parse_ini_file(self, ini_file: t.TextIO) -> JSON:
+        """
+        Parse `.ini` file to JSON object.
+
+        The following parsing rules are applied:
+
+        - If the file has no section, then the default section name is used.
+          This case is required to parse Xpansion `user/expansion/settings.ini` files
+          (using `SimpleKeyValueReader` subclass).
+
+        - If the file has duplicate sections, then the values are merged.
+          This case is required when the end-user produced an ill-formed `.ini` file.
+          This ensures the parsing is robust even if some values may be lost.
+
+        - If a section has duplicate keys, then the values are merged.
+          This case is required, for instance, to parse `settings/generaldata.ini` files,
+          which have duplicate keys like "playlist_year_weight", "playlist_year +",
+          "playlist_year -", "select_var -", "select_var +", in the `[playlist]` section.
+          In this case, the duplicate keys must be declared in the `special_keys` argument,
+          so that they are parsed as lists.
+
+        - If a section has no key, then an empty dictionary is returned.
+          This case is required to parse `input/hydro/prepro/correlation.ini` files.
+
+        - If a section name has square brackets, then they are preserved.
+          This case is required to parse `input/hydro/allocation/{area-id}.ini` files.
+
+        Args:
+            ini_file: file or file-like object.
+
+        Returns:
+            Dictionary of parsed `.ini` file which can be converted to JSON.
+        """
+        # NOTE: This algorithm is 1.93x faster than configparser.ConfigParser
+        sections: t.Dict[str, t.Dict[str, t.Any]] = {}
+        section_name = self._section_name
+
+        for line in ini_file:
+            line = line.strip()
+            if not line or line.startswith(";") or line.startswith("#"):
+                continue
+            elif line.startswith("["):
+                section_name = line[1:-1]
+                sections.setdefault(section_name, {})
+            elif "=" in line:
+                key, value = map(str.strip, line.split("=", 1))
+                section = sections.setdefault(section_name, {})
+                if key in self._special_keys:
+                    section.setdefault(key, []).append(value)
+                else:
+                    section[key] = value
+            else:
+                raise ValueError(f"☠☠☠ Invalid line: {line!r}")
+
+        return sections
+
+
+class SimpleKeyValueReader(IniReader):
+    """
+    Simple INI reader for the "settings.ini" file, which has no section.
+    """
+
+    def read(self, path: t.Any) -> JSON:
+        """
+        Parse a section-less `.ini` file into a JSON object.
+
+        This class is required to parse Xpansion `user/expansion/settings.ini` files.
+
+        Args:
+            path: Path to `.ini` file or file-like object.
+
+        Returns:
+            Dictionary of parsed key/value pairs.
+ """ + sections = super().read(path) + obj = t.cast(t.Mapping[str, JSON], sections) + return obj[self._section_name] diff --git a/antarest/study/storage/rawstudy/io/writer/ini_writer.py b/antarest/study/storage/rawstudy/ini_writer.py similarity index 79% rename from antarest/study/storage/rawstudy/io/writer/ini_writer.py rename to antarest/study/storage/rawstudy/ini_writer.py index 3f7a581137..9b348f1b27 100644 --- a/antarest/study/storage/rawstudy/io/writer/ini_writer.py +++ b/antarest/study/storage/rawstudy/ini_writer.py @@ -1,14 +1,13 @@ import ast -from configparser import RawConfigParser +import configparser +import typing as t from pathlib import Path -from typing import Any, List, Optional from antarest.core.model import JSON -from antarest.study.storage.rawstudy.io.reader import IniReader -class IniConfigParser(RawConfigParser): - def __init__(self, special_keys: Optional[List[str]] = None) -> None: +class IniConfigParser(configparser.RawConfigParser): + def __init__(self, special_keys: t.Optional[t.List[str]] = None) -> None: super().__init__() self.special_keys = special_keys @@ -16,15 +15,6 @@ def __init__(self, special_keys: Optional[List[str]] = None) -> None: def optionxform(self, optionstr: str) -> str: return optionstr - @staticmethod - def format_value(value: Any) -> Any: - parsed_value = IniReader.parse_value(value) - if isinstance(parsed_value, bool): - return str(parsed_value).lower() - elif isinstance(parsed_value, float): - return "%.6f" % parsed_value - return value - def _write_line( # type:ignore self, delimiter, @@ -66,7 +56,7 @@ class IniWriter: Standard INI writer. """ - def __init__(self, special_keys: Optional[List[str]] = None): + def __init__(self, special_keys: t.Optional[t.List[str]] = None): self.special_keys = special_keys def write(self, data: JSON, path: Path) -> None: diff --git a/antarest/study/storage/rawstudy/io/reader/__init__.py b/antarest/study/storage/rawstudy/io/reader/__init__.py deleted file mode 100644 index b3b4346414..0000000000 --- a/antarest/study/storage/rawstudy/io/reader/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from antarest.study.storage.rawstudy.io.reader.ini_reader import IniReader, MultipleSameKeysIniReader - -__all__ = ["IniReader", "MultipleSameKeysIniReader"] diff --git a/antarest/study/storage/rawstudy/io/reader/ini_reader.py b/antarest/study/storage/rawstudy/io/reader/ini_reader.py deleted file mode 100644 index 0830746469..0000000000 --- a/antarest/study/storage/rawstudy/io/reader/ini_reader.py +++ /dev/null @@ -1,166 +0,0 @@ -import configparser -import contextlib -import re -from abc import ABC, abstractmethod -from pathlib import Path -from typing import Any, List, Optional, Union - -from antarest.core.model import JSON, SUB_JSON - - -class IReader(ABC): - """ - Init file Reader interface - """ - - @abstractmethod - def read(self, path: Any) -> JSON: - """ - Parse .ini file to json - Args: - path: .ini file - - Returns: json content - - """ - raise NotImplementedError() - - -class IniReader(IReader): - """ - Standard .ini file reader. Use for general purpose. 
- """ - - @staticmethod - def _parse_bool(value: str) -> Optional[bool]: - return {"true": True, "false": False}.get(value.lower()) - - @staticmethod - def _parse_int(value: str) -> Optional[int]: - try: - return int(value) - except ValueError: - return None - - @staticmethod - def _parse_float(value: str) -> Optional[float]: - try: - return float(value) - except ValueError: - return None - - @staticmethod - def parse_value(value: str) -> Union[bool, int, float, str]: - def strict_bool(v: str) -> bool: - return {"true": True, "false": False}[v.lower()] - - for parser in [strict_bool, int, float]: - with contextlib.suppress(KeyError, ValueError): - return parser(value) # type: ignore - return value - - @staticmethod - def _parse_json(json: configparser.SectionProxy) -> JSON: - return {key: IniReader.parse_value(value) for key, value in json.items()} - - def read(self, path: Any) -> JSON: - config = IniConfigParser() - if isinstance(path, Path): - config.read(path) - else: - config.read_file(path) - return {key: IniReader._parse_json(config[key]) for key in config if key != "DEFAULT"} - - -class SimpleKeyValueReader(IReader): - """ - Standard .ini file reader. Use for general purpose. - """ - - @staticmethod - def _parse_inf(value: str) -> Optional[str]: - try: - return "+Inf" if float(value) == float("inf") else None - except ValueError: - return None - - # noinspection PyProtectedMember - @staticmethod - def parse_value(value: str) -> SUB_JSON: - parsed: Union[str, int, float, bool, None] = SimpleKeyValueReader._parse_inf(value) - parsed = parsed if parsed is not None else IniReader._parse_bool(value) - parsed = parsed if parsed is not None else IniReader._parse_int(value) - parsed = parsed if parsed is not None else IniReader._parse_float(value) - return parsed if parsed is not None else value - - @staticmethod - def _parse_json(json: JSON) -> JSON: - return {key: SimpleKeyValueReader.parse_value(value) for key, value in json.items()} - - def read(self, path: Any) -> JSON: - json = {} - ini_file = path.open(mode="r", encoding="utf-8") if isinstance(path, Path) else path - with ini_file: - for line in ini_file: - line = line.strip() - if line and not line.startswith("#"): - key, value = line.split("=") - json[key.strip()] = value.strip() - - return self._parse_json(json) - - -# noinspection SpellCheckingInspection -class IniConfigParser(configparser.RawConfigParser): - # The regex to match a section is modified so that it can parse - # a section name in square brackets, such as "[[allocation]]" - _SECT_TMPL = r"\[ (?P
<header>.+) \]"
-    SECTCRE = re.compile(_SECT_TMPL, re.VERBOSE)
-
-    # noinspection SpellCheckingInspection
-    def optionxform(self, optionstr: str) -> str:
-        return optionstr
-
-
-class MultipleSameKeysIniReader(IReader):
-    """
-    Custom .ini reader for inputs/sets.ini file.
-    This file has format :
-    ```python
-    [chap]
-    + = areaA
-    + = areaB
-    ```
-
-    multikey is not compatible with standard .ini readers
-    """
-
-    def __init__(self, special_keys: Optional[List[str]] = None) -> None:
-        self.special_keys = special_keys or []
-        super().__init__()
-
-    def read(self, path: Any) -> JSON:
-        data: JSON = {}
-        section = ""
-        ini_file = path.open(mode="r", encoding="utf-8") if isinstance(path, Path) else path
-        with ini_file:
-            for line in ini_file:
-                line = line.strip()
-                if match := re.fullmatch(r"\[(.*)]", line):
-                    section = match[1]
-                    data[section] = {}
-                elif "=" in line:
-                    key, arg = map(str.strip, line.split("=", 1))
-                    value = IniReader.parse_value(arg)
-                    group = data[section]
-                    if key in group:
-                        if isinstance(group[key], list):
-                            group[key].append(value)
-                        else:
-                            group[key] = [group[key], value]
-                    elif key in self.special_keys:
-                        group[key] = [value]
-                    else:
-                        group[key] = value
-
-        return data
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py b/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py
index 3648051773..a396ea950d 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py
@@ -1,10 +1,30 @@
+import typing as t
 from enum import Enum
-from typing import Set
 
 from pydantic import BaseModel
 
 
 class BindingConstraintFrequency(str, Enum):
+    """
+    Frequency of binding constraint
+
+    - HOURLY: hourly time series with 8784 lines
+    - DAILY: daily time series with 366 lines
+    - WEEKLY: weekly time series with 366 lines (same as daily)
+
+    Usage example:
+
+    >>> bcf = BindingConstraintFrequency.HOURLY
+    >>> bcf == "hourly"
+    True
+    >>> bcf = BindingConstraintFrequency.DAILY
+    >>> "daily" == bcf
+    True
+    >>> bcf = BindingConstraintFrequency.WEEKLY
+    >>> bcf != "daily"
+    True
+    """
+
     HOURLY = "hourly"
     DAILY = "daily"
     WEEKLY = "weekly"
@@ -12,6 +32,6 @@ class BindingConstraintDTO(BaseModel):
     id: str
-    areas: Set[str]
-    clusters: Set[str]
+    areas: t.Set[str]
+    clusters: t.Set[str]
     time_step: BindingConstraintFrequency
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/files.py b/antarest/study/storage/rawstudy/model/filesystem/config/files.py
index 6f07b03adf..3727f320ec 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/config/files.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/files.py
@@ -1,15 +1,15 @@
+import io
 import json
 import logging
 import re
 import tempfile
+import typing as t
 import zipfile
 from enum import Enum
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Tuple, cast
 
 from antarest.core.model import JSON
-from antarest.core.utils.utils import extract_file_to_tmp_dir
-from antarest.study.storage.rawstudy.io.reader import IniReader, MultipleSameKeysIniReader
+from antarest.study.storage.rawstudy.ini_reader import IniReader
 from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
     BindingConstraintDTO,
     BindingConstraintFrequency,
@@ -46,7 +46,7 @@ class FileType(Enum):
     MULTI_INI = "multi_ini"
 
 
-def build(study_path: Path, study_id: str, output_path: Optional[Path] = None) 
-> "FileStudyTreeConfig": +def build(study_path: Path, study_id: str, output_path: t.Optional[Path] = None) -> "FileStudyTreeConfig": """ Extracts data from the filesystem to build a study config. @@ -87,34 +87,65 @@ def _extract_data_from_file( root: Path, inside_root_path: Path, file_type: FileType, - multi_ini_keys: Optional[List[str]] = None, -) -> Any: + multi_ini_keys: t.Sequence[str] = (), +) -> t.Any: """ Extract and process data from various types of files. + + Args: + root: Directory or ZIP file containing the study. + inside_root_path: Relative path to the file to extract. + file_type: Type of the file to extract: text, simple INI or multi INI. + multi_ini_keys: List of keys to use for multi INI files. + + Returns: + The content of the file, processed according to its type: + - TXT: list of lines + - SIMPLE_INI or MULTI_INI: dictionary of keys/values """ - tmp_dir = None - try: - if root.suffix.lower() == ".zip": - output_data_path, tmp_dir = extract_file_to_tmp_dir(root, inside_root_path) + is_zip_file: bool = root.suffix.lower() == ".zip" + posix_path: str = inside_root_path.as_posix() + + if file_type == FileType.TXT: + # Parse the file as a list of lines, return an empty list if missing. + if is_zip_file: + with zipfile.ZipFile(root) as zf: + try: + with zf.open(posix_path) as f: + text = f.read().decode("utf-8") + return text.splitlines(keepends=False) + except KeyError: + # File not found in the ZIP archive + return [] else: output_data_path = root / inside_root_path + try: + return output_data_path.read_text(encoding="utf-8").splitlines(keepends=False) + except FileNotFoundError: + return [] + + elif file_type in {FileType.MULTI_INI, FileType.SIMPLE_INI}: + # Parse the file as a dictionary of keys/values, return an empty dictionary if missing. 
+ reader = IniReader(multi_ini_keys) + if is_zip_file: + with zipfile.ZipFile(root) as zf: + try: + with zf.open(posix_path) as f: + buffer = io.StringIO(f.read().decode("utf-8")) + return reader.read(buffer) + except KeyError: + # File not found in the ZIP archive + return {} + else: + output_data_path = root / inside_root_path + try: + return reader.read(output_data_path) + except FileNotFoundError: + return {} - if file_type == FileType.TXT: - text = output_data_path.read_text(encoding="utf-8") - return text.splitlines(keepends=False) - elif file_type == FileType.MULTI_INI: - multi_reader = MultipleSameKeysIniReader(multi_ini_keys) - return multi_reader.read(output_data_path) - elif file_type == FileType.SIMPLE_INI: - ini_reader = IniReader() - return ini_reader.read(output_data_path) - else: # pragma: no cover - raise NotImplementedError(file_type) - - finally: - if tmp_dir: - tmp_dir.cleanup() + else: # pragma: no cover + raise NotImplementedError(file_type) def _parse_version(path: Path) -> int: @@ -127,7 +158,7 @@ def _parse_version(path: Path) -> int: return version -def _parse_parameters(path: Path) -> Tuple[bool, List[str], str]: +def _parse_parameters(path: Path) -> t.Tuple[bool, t.List[str], str]: general = _extract_data_from_file( root=path, inside_root_path=Path("settings/generaldata.ini"), @@ -135,14 +166,14 @@ def _parse_parameters(path: Path) -> Tuple[bool, List[str], str]: ) store_new_set: bool = general.get("output", {}).get("storenewset", False) - archive_input_series: List[str] = [ + archive_input_series: t.List[str] = [ e.strip() for e in general.get("output", {}).get("archives", "").strip().split(",") if e.strip() ] enr_modelling: str = general.get("other preferences", {}).get("renewable-generation-modelling", "aggregated") return store_new_set, archive_input_series, enr_modelling -def _parse_bindings(root: Path) -> List[BindingConstraintDTO]: +def _parse_bindings(root: Path) -> t.List[BindingConstraintDTO]: bindings = _extract_data_from_file( root=root, inside_root_path=Path("input/bindingconstraints/bindingconstraints.ini"), @@ -173,7 +204,7 @@ def _parse_bindings(root: Path) -> List[BindingConstraintDTO]: return output_list -def _parse_sets(root: Path) -> Dict[str, DistrictSet]: +def _parse_sets(root: Path) -> t.Dict[str, DistrictSet]: obj = _extract_data_from_file( root=root, inside_root_path=Path("input/areas/sets.ini"), @@ -191,7 +222,7 @@ def _parse_sets(root: Path) -> Dict[str, DistrictSet]: } -def _parse_areas(root: Path) -> Dict[str, Area]: +def _parse_areas(root: Path) -> t.Dict[str, Area]: areas = _extract_data_from_file( root=root, inside_root_path=Path("input/areas/list.txt"), @@ -201,7 +232,7 @@ def _parse_areas(root: Path) -> Dict[str, Area]: return {transform_name_to_id(a): parse_area(root, a) for a in areas} -def _parse_outputs(output_path: Path) -> Dict[str, Simulation]: +def _parse_outputs(output_path: Path) -> t.Dict[str, Simulation]: if not output_path.is_dir(): return {} sims = {} @@ -284,7 +315,7 @@ def parse_simulation(path: Path, canonical_name: str) -> Simulation: xpansion = "" ini_path = path / "about-the-study" / "parameters.ini" - reader = MultipleSameKeysIniReader(DUPLICATE_KEYS) + reader = IniReader(DUPLICATE_KEYS) try: obj: JSON = reader.read(ini_path) except FileNotFoundError: @@ -308,10 +339,10 @@ def parse_simulation(path: Path, canonical_name: str) -> Simulation: ) -def get_playlist(config: JSON) -> Optional[Dict[int, float]]: +def get_playlist(config: JSON) -> t.Optional[t.Dict[int, float]]: general_config = config.get("general", 
{}) - nb_years = cast(int, general_config.get("nbyears")) - playlist_activated = cast(bool, general_config.get("user-playlist", False)) + nb_years = t.cast(int, general_config.get("nbyears")) + playlist_activated = t.cast(bool, general_config.get("user-playlist", False)) if not playlist_activated: return None playlist_config = config.get("playlist", {}) @@ -340,13 +371,13 @@ def parse_area(root: Path, area: str) -> "Area": ) -def _parse_thermal(root: Path, area: str) -> List[ThermalConfigType]: +def _parse_thermal(root: Path, area: str) -> t.List[ThermalConfigType]: """ Parse the thermal INI file, return an empty list if missing. """ version = _parse_version(root) relpath = Path(f"input/thermal/clusters/{area}/list.ini") - config_dict: Dict[str, Any] = _extract_data_from_file( + config_dict: t.Dict[str, t.Any] = _extract_data_from_file( root=root, inside_root_path=relpath, file_type=FileType.SIMPLE_INI ) config_list = [] @@ -359,13 +390,20 @@ def _parse_thermal(root: Path, area: str) -> List[ThermalConfigType]: return config_list -def _parse_renewables(root: Path, area: str) -> List[RenewableConfigType]: +def _parse_renewables(root: Path, area: str) -> t.List[RenewableConfigType]: """ Parse the renewables INI file, return an empty list if missing. """ + + # Before version 8.1, we only have "Load", "Wind" and "Solar" objects. + # We can't use renewable clusters. version = _parse_version(root) + if version < 810: + return [] + + # Since version 8.1 of the solver, we can use "renewable clusters" objects. relpath = Path(f"input/renewables/clusters/{area}/list.ini") - config_dict: Dict[str, Any] = _extract_data_from_file( + config_dict: t.Dict[str, t.Any] = _extract_data_from_file( root=root, inside_root_path=relpath, file_type=FileType.SIMPLE_INI, @@ -380,7 +418,7 @@ def _parse_renewables(root: Path, area: str) -> List[RenewableConfigType]: return config_list -def _parse_st_storage(root: Path, area: str) -> List[STStorageConfigType]: +def _parse_st_storage(root: Path, area: str) -> t.List[STStorageConfigType]: """ Parse the short-term storage INI file, return an empty list if missing. 
""" @@ -391,7 +429,7 @@ def _parse_st_storage(root: Path, area: str) -> List[STStorageConfigType]: return [] relpath = Path(f"input/st-storage/clusters/{area}/list.ini") - config_dict: Dict[str, Any] = _extract_data_from_file( + config_dict: t.Dict[str, t.Any] = _extract_data_from_file( root=root, inside_root_path=relpath, file_type=FileType.SIMPLE_INI, @@ -406,7 +444,7 @@ def _parse_st_storage(root: Path, area: str) -> List[STStorageConfigType]: return config_list -def _parse_links(root: Path, area: str) -> Dict[str, Link]: +def _parse_links(root: Path, area: str) -> t.Dict[str, Link]: properties_ini = _extract_data_from_file( root=root, inside_root_path=Path(f"input/links/{area}/properties.ini"), @@ -415,7 +453,7 @@ def _parse_links(root: Path, area: str) -> Dict[str, Link]: return {link: Link.from_json(properties_ini[link]) for link in list(properties_ini.keys())} -def _parse_filters_synthesis(root: Path, area: str) -> List[str]: +def _parse_filters_synthesis(root: Path, area: str) -> t.List[str]: optimization = _extract_data_from_file( root=root, inside_root_path=Path(f"input/areas/{area}/optimization.ini"), @@ -425,7 +463,7 @@ def _parse_filters_synthesis(root: Path, area: str) -> List[str]: return Link.split(filters) -def _parse_filters_year(root: Path, area: str) -> List[str]: +def _parse_filters_year(root: Path, area: str) -> t.List[str]: optimization = _extract_data_from_file( root=root, inside_root_path=Path(f"input/areas/{area}/optimization.ini"), diff --git a/antarest/study/storage/rawstudy/model/filesystem/folder_node.py b/antarest/study/storage/rawstudy/model/filesystem/folder_node.py index 53c7db0cbc..7d174cfbc6 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/folder_node.py +++ b/antarest/study/storage/rawstudy/model/filesystem/folder_node.py @@ -142,7 +142,7 @@ def save( (name,), sub_url = self.extract_child(children, url) return children[name].save(data, sub_url) else: - assert isinstance(data, Dict) + assert isinstance(data, dict) for key in data: children[key].save(data[key]) diff --git a/antarest/study/storage/rawstudy/model/filesystem/ini_file_node.py b/antarest/study/storage/rawstudy/model/filesystem/ini_file_node.py index 2a7612a0fa..d44c89f4f0 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/ini_file_node.py +++ b/antarest/study/storage/rawstudy/model/filesystem/ini_file_node.py @@ -13,9 +13,8 @@ from filelock import FileLock from antarest.core.model import JSON, SUB_JSON -from antarest.study.storage.rawstudy.io.reader import IniReader -from antarest.study.storage.rawstudy.io.reader.ini_reader import IReader -from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter +from antarest.study.storage.rawstudy.ini_reader import IniReader, IReader +from antarest.study.storage.rawstudy.ini_writer import IniWriter from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer from antarest.study.storage.rawstudy.model.filesystem.inode import INode diff --git a/antarest/study/storage/rawstudy/model/filesystem/json_file_node.py b/antarest/study/storage/rawstudy/model/filesystem/json_file_node.py index d0b759761a..ee14da91ea 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/json_file_node.py +++ b/antarest/study/storage/rawstudy/model/filesystem/json_file_node.py @@ -3,8 +3,8 @@ from typing import Any, Dict, Optional, cast from antarest.core.model import JSON -from 
antarest.study.storage.rawstudy.io.reader.ini_reader import IReader -from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter +from antarest.study.storage.rawstudy.ini_reader import IReader +from antarest.study.storage.rawstudy.ini_writer import IniWriter from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/areas/sets.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/areas/sets.py index ddde3fd66a..1748e49def 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/areas/sets.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/areas/sets.py @@ -1,5 +1,5 @@ -from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader -from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter +from antarest.study.storage.rawstudy.ini_reader import IniReader +from antarest.study.storage.rawstudy.ini_writer import IniWriter from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode @@ -22,6 +22,6 @@ def __init__(self, context: ContextServer, config: FileStudyTreeConfig): context, config, types={}, - reader=MultipleSameKeysIniReader(["+", "-"]), + reader=IniReader(["+", "-"]), writer=IniWriter(special_keys=["+", "-"]), ) diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/allocation/area.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/allocation/area.py index 6629de639c..12e6d2875a 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/allocation/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/allocation/area.py @@ -1,4 +1,4 @@ -from antarest.study.storage.rawstudy.io.reader import IniReader +from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/prepro/area/thermal/thermal.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/prepro/area/thermal/thermal.py index bab6d2145a..f2ae671336 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/prepro/area/thermal/thermal.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/prepro/area/thermal/thermal.py @@ -1,12 +1,22 @@ from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode from antarest.study.storage.rawstudy.model.filesystem.inode import TREE from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix +from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import MatrixFrequency class InputThermalPreproAreaThermal(FolderNode): + """ + Folder containing thermal cluster data: `input/thermal/prepro/{area_id}/{cluster_id}`. 
+ + This folder contains the following files: + + - `data.txt` (matrix): TS Generator matrix (daily) + - `modulation.txt` (matrix): Modulation matrix (hourly) + """ + def build(self) -> TREE: children: TREE = { - "data": InputSeriesMatrix(self.context, self.config.next_file("data.txt")), + "data": InputSeriesMatrix(self.context, self.config.next_file("data.txt"), freq=MatrixFrequency.DAILY), "modulation": InputSeriesMatrix(self.context, self.config.next_file("modulation.txt")), } return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/settings/generaldata.py b/antarest/study/storage/rawstudy/model/filesystem/root/settings/generaldata.py index 5557902feb..7bce83396b 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/settings/generaldata.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/settings/generaldata.py @@ -1,8 +1,8 @@ from copy import deepcopy from typing import Any, Dict -from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader -from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter +from antarest.study.storage.rawstudy.ini_reader import IniReader +from antarest.study.storage.rawstudy.ini_writer import IniWriter from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode @@ -147,6 +147,6 @@ def __init__(self, context: ContextServer, config: FileStudyTreeConfig): context, config, types=types, - reader=MultipleSameKeysIniReader(DUPLICATE_KEYS), + reader=IniReader(DUPLICATE_KEYS), writer=IniWriter(special_keys=DUPLICATE_KEYS), ) diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/user/expansion/settings.py b/antarest/study/storage/rawstudy/model/filesystem/root/user/expansion/settings.py index 5eb2558fd4..58dda7b433 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/user/expansion/settings.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/user/expansion/settings.py @@ -1,63 +1,43 @@ -from antarest.study.storage.rawstudy.io.reader.ini_reader import SimpleKeyValueReader -from antarest.study.storage.rawstudy.io.writer.ini_writer import SimpleKeyValueWriter +from antarest.study.storage.rawstudy.ini_reader import SimpleKeyValueReader +from antarest.study.storage.rawstudy.ini_writer import SimpleKeyValueWriter from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode -# noinspection SpellCheckingInspection class ExpansionSettings(IniFileNode): - # /!\ The name of all the parameters is correct. - # Especially the differences of "_" and "-" in parameter names. """ - Common: - - optimality_gap: float = 1 - - max_iteration: int = +Inf - - uc_type: str = "expansion_fast" or "expansion_accurate". default="expansion_fast" - - master: str = "integer" or "relaxed". default="integer" - - yearly-weights: str = filename. default = None - - additional-constraints: str = filename. default = None + Since version >= 800: - version < 800 only: - - relaxed-optimality-gap: float = 0.001 # relaxed-optimality-gap > 0 - - cut-type: str = "average", "yearly" or "weekly". 
default="yearly" - - ampl.solver: str = "cbc" - - ampl.presolve: int = 0 - - ampl.solve_bounds_frequency: int = 1000000 - - version >= 800 only: - - relative_gap: float = 1e-12 - - solver: str = "Cbc", "Coin" or "Xpress". default="Cbc" - - batch_size: int = 0 - - separation_parameter: float = 0.5 # 0 <= separation_parameter <= 1 + - master: str = "integer" or "relaxed". default="integer" + - uc_type: str = "expansion_fast" or "expansion_accurate". default="expansion_fast" + - optimality_gap: float = 1 + - relative_gap: float = 1e-6 + - relaxed_optimality_gap: float = 1e-5 + - max_iteration: int = 1000 + - solver: str = "Cbc", "Coin" or "Xpress". default="Cbc" + - log_level: int = 0, 1, 2, 3. default=0 + - separation_parameter: float = 0.5 # 0 < separation_parameter <= 1 + - batch_size: int = 0 + - yearly-weights: str = filename. default = "" + - additional-constraints: str = filename. default = "" """ def __init__(self, context: ContextServer, config: FileStudyTreeConfig): - common_types = { + types = { + "master": str, + "uc_type": str, "optimality_gap": float, + "relative_gap": float, + "relaxed_optimality_gap": float, "max_iteration": int, - "uc_type": str, - "master": str, + "solver": str, + "log_level": int, + "separation_parameter": float, + "batch_size": int, "yearly-weights": str, - "additional_constraints": str, + "additional-constraints": str, } - if config.version < 800: - types = { - "relaxed-optimality-gap": float, - "cut-type": str, - "ampl.solver": str, - "ampl.presolve": int, - "ampl.solve_bounds_frequency": int, - **common_types, - } - else: - types = { - "relative-gap": float, - "solver": str, - "batch_size": int, - "separation_parameter": float, - **common_types, - } super().__init__( context, config, diff --git a/antarest/study/storage/rawstudy/raw_study_service.py b/antarest/study/storage/rawstudy/raw_study_service.py index fcde0db88e..f5f98c97e5 100644 --- a/antarest/study/storage/rawstudy/raw_study_service.py +++ b/antarest/study/storage/rawstudy/raw_study_service.py @@ -24,7 +24,6 @@ create_new_empty_study, export_study_flat, fix_study_root, - get_default_workspace_path, is_managed, remove_from_cache, update_antares_info, @@ -237,7 +236,7 @@ def copy( id=dest_id, name=dest_name, workspace=DEFAULT_WORKSPACE_NAME, - path=str(get_default_workspace_path(self.config) / dest_id), + path=str(self.config.get_workspace_path() / dest_id), created_at=datetime.utcnow(), updated_at=datetime.utcnow(), version=src_meta.version, diff --git a/antarest/study/storage/study_upgrader/upgrader_710.py b/antarest/study/storage/study_upgrader/upgrader_710.py index 8edc2fde0a..b1679b3342 100644 --- a/antarest/study/storage/study_upgrader/upgrader_710.py +++ b/antarest/study/storage/study_upgrader/upgrader_710.py @@ -1,7 +1,7 @@ from pathlib import Path -from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader -from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter +from antarest.study.storage.rawstudy.ini_reader import IniReader +from antarest.study.storage.rawstudy.ini_writer import IniWriter from antarest.study.storage.rawstudy.model.filesystem.root.settings.generaldata import DUPLICATE_KEYS GENERAL_DATA_PATH = "settings/generaldata.ini" @@ -18,7 +18,7 @@ def upgrade_710(study_path: Path) -> None: study_path: path to the study directory. 
""" - reader = MultipleSameKeysIniReader(DUPLICATE_KEYS) + reader = IniReader(DUPLICATE_KEYS) data = reader.read(study_path / GENERAL_DATA_PATH) data["general"]["geographic-trimming"] = data["general"]["filtering"] data["general"]["thematic-trimming"] = False diff --git a/antarest/study/storage/study_upgrader/upgrader_800.py b/antarest/study/storage/study_upgrader/upgrader_800.py index a25b61334b..b53a792985 100644 --- a/antarest/study/storage/study_upgrader/upgrader_800.py +++ b/antarest/study/storage/study_upgrader/upgrader_800.py @@ -1,7 +1,7 @@ from pathlib import Path -from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader -from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter +from antarest.study.storage.rawstudy.ini_reader import IniReader +from antarest.study.storage.rawstudy.ini_writer import IniWriter from antarest.study.storage.rawstudy.model.filesystem.root.settings.generaldata import DUPLICATE_KEYS GENERAL_DATA_PATH = "settings/generaldata.ini" @@ -18,7 +18,7 @@ def upgrade_800(study_path: Path) -> None: study_path: path to the study directory. """ - reader = MultipleSameKeysIniReader(DUPLICATE_KEYS) + reader = IniReader(DUPLICATE_KEYS) data = reader.read(study_path / GENERAL_DATA_PATH) data["other preferences"]["hydro-heuristic-policy"] = "accommodate rule curves" data["optimization"]["include-exportstructure"] = False diff --git a/antarest/study/storage/study_upgrader/upgrader_810.py b/antarest/study/storage/study_upgrader/upgrader_810.py index 275bafb1e4..e28ef4d4b6 100644 --- a/antarest/study/storage/study_upgrader/upgrader_810.py +++ b/antarest/study/storage/study_upgrader/upgrader_810.py @@ -1,7 +1,7 @@ from pathlib import Path -from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader -from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter +from antarest.study.storage.rawstudy.ini_reader import IniReader +from antarest.study.storage.rawstudy.ini_writer import IniWriter from antarest.study.storage.rawstudy.model.filesystem.root.settings.generaldata import DUPLICATE_KEYS GENERAL_DATA_PATH = "settings/generaldata.ini" @@ -18,7 +18,7 @@ def upgrade_810(study_path: Path) -> None: study_path: path to the study directory. """ - reader = MultipleSameKeysIniReader(DUPLICATE_KEYS) + reader = IniReader(DUPLICATE_KEYS) data = reader.read(study_path / GENERAL_DATA_PATH) data["other preferences"]["renewable-generation-modelling"] = "aggregated" writer = IniWriter(special_keys=DUPLICATE_KEYS) diff --git a/antarest/study/storage/study_upgrader/upgrader_830.py b/antarest/study/storage/study_upgrader/upgrader_830.py index d2067ef137..414db9b8e2 100644 --- a/antarest/study/storage/study_upgrader/upgrader_830.py +++ b/antarest/study/storage/study_upgrader/upgrader_830.py @@ -1,8 +1,8 @@ import glob from pathlib import Path -from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader -from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter +from antarest.study.storage.rawstudy.ini_reader import IniReader +from antarest.study.storage.rawstudy.ini_writer import IniWriter from antarest.study.storage.rawstudy.model.filesystem.root.settings.generaldata import DUPLICATE_KEYS GENERAL_DATA_PATH = "settings/generaldata.ini" @@ -19,7 +19,7 @@ def upgrade_830(study_path: Path) -> None: study_path: path to the study directory. 
""" - reader = MultipleSameKeysIniReader(DUPLICATE_KEYS) + reader = IniReader(DUPLICATE_KEYS) data = reader.read(study_path / GENERAL_DATA_PATH) data["adequacy patch"] = { "include-adq-patch": False, diff --git a/antarest/study/storage/study_upgrader/upgrader_840.py b/antarest/study/storage/study_upgrader/upgrader_840.py index 40e1909788..a96aa0072a 100644 --- a/antarest/study/storage/study_upgrader/upgrader_840.py +++ b/antarest/study/storage/study_upgrader/upgrader_840.py @@ -1,7 +1,7 @@ from pathlib import Path -from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader -from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter +from antarest.study.storage.rawstudy.ini_reader import IniReader +from antarest.study.storage.rawstudy.ini_writer import IniWriter from antarest.study.storage.rawstudy.model.filesystem.root.settings.generaldata import DUPLICATE_KEYS GENERAL_DATA_PATH = "settings/generaldata.ini" @@ -23,7 +23,7 @@ def upgrade_840(study_path: Path) -> None: study_path: path to the study directory. """ - reader = MultipleSameKeysIniReader(DUPLICATE_KEYS) + reader = IniReader(DUPLICATE_KEYS) data = reader.read(study_path / GENERAL_DATA_PATH) data["optimization"]["transmission-capacities"] = MAPPING_TRANSMISSION_CAPACITIES[ data["optimization"]["transmission-capacities"] diff --git a/antarest/study/storage/study_upgrader/upgrader_850.py b/antarest/study/storage/study_upgrader/upgrader_850.py index 303bf85e5c..08695f1e3d 100644 --- a/antarest/study/storage/study_upgrader/upgrader_850.py +++ b/antarest/study/storage/study_upgrader/upgrader_850.py @@ -1,7 +1,7 @@ from pathlib import Path -from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader -from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter +from antarest.study.storage.rawstudy.ini_reader import IniReader +from antarest.study.storage.rawstudy.ini_writer import IniWriter from antarest.study.storage.rawstudy.model.filesystem.root.settings.generaldata import DUPLICATE_KEYS # noinspection SpellCheckingInspection @@ -19,7 +19,7 @@ def upgrade_850(study_path: Path) -> None: study_path: path to the study directory. 
""" - reader = MultipleSameKeysIniReader(DUPLICATE_KEYS) + reader = IniReader(DUPLICATE_KEYS) data = reader.read(study_path / GENERAL_DATA_PATH) data["adequacy patch"]["price-taking-order"] = "DENS" diff --git a/antarest/study/storage/study_upgrader/upgrader_860.py b/antarest/study/storage/study_upgrader/upgrader_860.py index c74ee8d2c9..23ea05f178 100644 --- a/antarest/study/storage/study_upgrader/upgrader_860.py +++ b/antarest/study/storage/study_upgrader/upgrader_860.py @@ -1,7 +1,7 @@ from pathlib import Path -from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader -from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter +from antarest.study.storage.rawstudy.ini_reader import IniReader +from antarest.study.storage.rawstudy.ini_writer import IniWriter from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.root.settings.generaldata import DUPLICATE_KEYS @@ -10,7 +10,7 @@ def upgrade_860(study_path: Path) -> None: - reader = MultipleSameKeysIniReader(DUPLICATE_KEYS) + reader = IniReader(DUPLICATE_KEYS) data = reader.read(study_path / GENERAL_DATA_PATH) data["adequacy patch"]["enable-first-step "] = True writer = IniWriter(special_keys=DUPLICATE_KEYS) diff --git a/antarest/study/storage/utils.py b/antarest/study/storage/utils.py index 8ec6499e50..b23d10333a 100644 --- a/antarest/study/storage/utils.py +++ b/antarest/study/storage/utils.py @@ -11,7 +11,6 @@ from uuid import uuid4 from zipfile import ZipFile -from antarest.core.config import Config from antarest.core.exceptions import StudyValidationError, UnsupportedStudyVersion from antarest.core.interfaces.cache import CacheConstants, ICache from antarest.core.jwt import JWTUser @@ -27,8 +26,8 @@ StudyDownloadLevelDTO, StudyMetadataDTO, ) -from antarest.study.storage.rawstudy.io.reader import IniReader -from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter +from antarest.study.storage.rawstudy.ini_reader import IniReader +from antarest.study.storage.rawstudy.ini_writer import IniWriter from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy, StudyFactory from antarest.study.storage.rawstudy.model.filesystem.root.filestudytree import FileStudyTree from antarest.study.storage.rawstudy.model.helpers import FileStudyHelpers @@ -36,28 +35,7 @@ logger = logging.getLogger(__name__) -def get_workspace_path(config: Config, workspace: str) -> Path: - """ - Retrieve workspace path from config - - Args: - workspace: workspace name - config: antarest config - Returns: path - - """ - return config.storage.workspaces[workspace].path - - -def get_default_workspace_path(config: Config) -> Path: - """ - Get path of default workspace - Returns: path - - """ - return get_workspace_path(config, DEFAULT_WORKSPACE_NAME) - - +# noinspection SpellCheckingInspection def update_antares_info(metadata: Study, study_tree: FileStudyTree, *, update_author: bool) -> None: """ Update the "antares" information directly in the study tree. @@ -81,16 +59,17 @@ def update_antares_info(metadata: Study, study_tree: FileStudyTree, *, update_au def fix_study_root(study_path: Path) -> None: """ - Fix possibly the wrong study root in zipped archive (when the study root is nested) + Fix possibly the wrong study root in zipped archive (when the study root is nested). - @param study_path the study initial root path + Args: + study_path: the study initial root path """ # TODO: what if it is a zipped output ? 
    if study_path.suffix == ".zip":
         return None
 
     if not study_path.is_dir():
-        raise StudyValidationError("Not a directory")
+        raise StudyValidationError(f"Not a directory: '{study_path}'")
 
     root_path = study_path
     contents = os.listdir(root_path)
@@ -105,7 +84,7 @@ def fix_study_root(study_path: Path) -> None:
             logger.debug(f"Searching study root in {new_root}")
             root_path = new_root
             if not new_root.is_dir():
-                raise StudyValidationError("Not a directory")
+                raise StudyValidationError(f"Not a directory: '{new_root}'")
             contents = os.listdir(new_root)
 
     if sub_root_path is not None:
@@ -364,9 +343,11 @@ def export_study_flat(
     output_src_path = output_src_path or study_dir / "output"
     output_dest_path = dest / "output"
 
-    ignore_patterns = lambda directory, contents: ["output"] if str(directory) == str(study_dir) else []
-    shutil.copytree(src=study_dir, dst=dest, ignore=ignore_patterns)
+    def ignore_outputs(directory: str, _: t.Sequence[str]) -> t.Sequence[str]:
+        return ["output"] if str(directory) == str(study_dir) else []
+
+    shutil.copytree(src=study_dir, dst=dest, ignore=ignore_outputs)
 
     if outputs and output_src_path.exists():
         if output_list_filter is None:
diff --git a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py
index 901294a73d..79783e1bc9 100644
--- a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py
+++ b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py
@@ -22,7 +22,12 @@ from antarest.study.storage.variantstudy.model.command.icommand import MATCH_SIGNATURE_SEPARATOR, ICommand
 from antarest.study.storage.variantstudy.model.model import CommandDTO
 
-__all__ = ("AbstractBindingConstraintCommand", "CreateBindingConstraint", "check_matrix_values")
+__all__ = (
+    "AbstractBindingConstraintCommand",
+    "CreateBindingConstraint",
+    "check_matrix_values",
+    "BindingConstraintProperties",
+)
 
 MatrixType = List[List[MatrixData]]
diff --git a/antarest/study/storage/variantstudy/model/dbmodel.py b/antarest/study/storage/variantstudy/model/dbmodel.py
index 1a88a76853..bbe264f89f 100644
--- a/antarest/study/storage/variantstudy/model/dbmodel.py
+++ b/antarest/study/storage/variantstudy/model/dbmodel.py
@@ -2,10 +2,9 @@
 import json
 import typing as t
 import uuid
-from dataclasses import dataclass
 from pathlib import Path
 
-from sqlalchemy import Column, DateTime, ForeignKey, Integer, String, Table  # type: ignore
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, String  # type: ignore
 from sqlalchemy.orm import relationship  # type: ignore
 
 from antarest.core.persistence import Base
@@ -13,7 +12,6 @@
 from antarest.study.storage.variantstudy.model.model import CommandDTO
 
 
-@dataclass
 class VariantStudySnapshot(Base):  # type: ignore
     """
     Variant Study Snapshot based entity implementation.
@@ -37,7 +35,6 @@ def __str__(self) -> str:
         return f"[Snapshot] id={self.id}, created_at={self.created_at}"
 
 
-@dataclass
 class CommandBlock(Base):  # type: ignore
     """
     Command Block based entity implementation.
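A side note on the `export_study_flat` change above: `shutil.copytree` passes each visited directory and its entry names to the `ignore` callback, which returns the names to skip. A self-contained sketch of the same pattern, with illustrative paths:

```python
import shutil
import typing as t
from pathlib import Path

def copy_without_top_level_output(study_dir: Path, dest: Path) -> None:
    def ignore_outputs(directory: str, _names: t.Sequence[str]) -> t.Sequence[str]:
        # Skip only the study's own top-level "output" folder; nested "output" names are kept.
        return ["output"] if Path(directory) == study_dir else []

    shutil.copytree(src=study_dir, dst=dest, ignore=ignore_outputs)

# Example (paths illustrative):
# copy_without_top_level_output(Path("studies/my_study"), Path("/tmp/my_study_export"))
```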
@@ -60,8 +57,17 @@ class CommandBlock(Base): # type: ignore def to_dto(self) -> CommandDTO: return CommandDTO(id=self.id, action=self.command, args=json.loads(self.args)) + def __str__(self) -> str: + return ( + f"CommandBlock(id={self.id!r}," + f" study_id={self.study_id!r}," + f" index={self.index!r}," + f" command={self.command!r}," + f" version={self.version!r}," + f" args={self.args!r})" + ) + -@dataclass class VariantStudy(Study): """ Study filesystem based entity implementation. diff --git a/antarest/study/storage/variantstudy/variant_study_service.py b/antarest/study/storage/variantstudy/variant_study_service.py index f9d3eea0aa..f8e9fb95a1 100644 --- a/antarest/study/storage/variantstudy/variant_study_service.py +++ b/antarest/study/storage/variantstudy/variant_study_service.py @@ -41,13 +41,7 @@ from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig, FileStudyTreeConfigDTO from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy, StudyFactory from antarest.study.storage.rawstudy.raw_study_service import RawStudyService -from antarest.study.storage.utils import ( - assert_permission, - export_study_flat, - get_default_workspace_path, - is_managed, - remove_from_cache, -) +from antarest.study.storage.utils import assert_permission, export_study_flat, is_managed, remove_from_cache from antarest.study.storage.variantstudy.business.utils import transform_command_to_dto from antarest.study.storage.variantstudy.command_factory import CommandFactory from antarest.study.storage.variantstudy.model.command.icommand import ICommand @@ -536,7 +530,7 @@ def create_variant_study(self, uuid: str, name: str, params: RequestParameters) assert_permission(params.user, study, StudyPermissionType.READ) new_id = str(uuid4()) - study_path = str(get_default_workspace_path(self.config) / new_id) + study_path = str(self.config.get_workspace_path() / new_id) if study.additional_data: # noinspection PyArgumentList additional_data = StudyAdditionalData( @@ -832,7 +826,7 @@ def copy( The newly created study. """ new_id = str(uuid4()) - study_path = str(get_default_workspace_path(self.config) / new_id) + study_path = str(self.config.get_workspace_path() / new_id) if src_meta.additional_data: # noinspection PyArgumentList additional_data = StudyAdditionalData( diff --git a/antarest/study/web/studies_blueprint.py b/antarest/study/web/studies_blueprint.py index 76b5e1a706..1b1dc72312 100644 --- a/antarest/study/web/studies_blueprint.py +++ b/antarest/study/web/studies_blueprint.py @@ -726,4 +726,26 @@ def invalidate_study_listing_cache( params = RequestParameters(user=current_user) return study_service.invalidate_cache_listing(params) + @bp.get( + "/studies/{uuid}/disk-usage", + summary="Compute study disk usage", + tags=[APITag.study_management], + ) + def study_disk_usage( + uuid: str, + current_user: JWTUser = Depends(auth.get_current_user), + ) -> int: + """ + Compute the disk usage of the given study. + + Args: + - `uuid`: the UUID of the study whose disk usage is to be retrieved. + + Returns: + - The disk usage of the study in bytes.
+ """ + logger.info("Retrieving study disk usage", extra={"user": current_user.id}) + params = RequestParameters(user=current_user) + return study_service.get_disk_usage(uuid=uuid, params=params) + return bp diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index 440539a4ab..a1d74379ba 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -3,13 +3,14 @@ from typing import Any, Dict, List, Optional, Sequence, Union, cast from fastapi import APIRouter, Body, Depends -from fastapi.params import Body, Query +from fastapi.params import Query from starlette.responses import RedirectResponse from antarest.core.config import Config from antarest.core.jwt import JWTUser from antarest.core.model import StudyPermissionType from antarest.core.requests import RequestParameters +from antarest.core.utils.utils import sanitize_uuid from antarest.core.utils.web import APITag from antarest.login.auth import Auth from antarest.matrixstore.matrix_editor import MatrixEditInstruction @@ -24,8 +25,8 @@ RenewableClusterInput, RenewableClusterOutput, ) -from antarest.study.business.areas.st_storage_management import * -from antarest.study.business.areas.thermal_management import * +from antarest.study.business.areas.st_storage_management import * # noqa +from antarest.study.business.areas.thermal_management import * # noqa from antarest.study.business.binding_constraint_management import ( BindingConstraintPropertiesWithName, ConstraintTermDTO, @@ -42,6 +43,7 @@ from antarest.study.business.timeseries_config_management import TSFormFields from antarest.study.model import PatchArea, PatchCluster from antarest.study.service import StudyService +from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id logger = logging.getLogger(__name__) @@ -199,6 +201,8 @@ def delete_area( extra={"user": current_user.id}, ) params = RequestParameters(user=current_user) + uuid = sanitize_uuid(uuid) + area_id = transform_name_to_id(area_id) study_service.delete_area(uuid, area_id, params) return area_id @@ -219,6 +223,8 @@ def delete_link( extra={"user": current_user.id}, ) params = RequestParameters(user=current_user) + area_from = transform_name_to_id(area_from) + area_to = transform_name_to_id(area_to) study_service.delete_link(uuid, area_from, area_to, params) return f"{area_from}%{area_to}" @@ -751,14 +757,14 @@ def set_timeseries_form_values( study_service.ts_config_manager.set_field_values(study, field_values) @bp.get( - path="/studies/{uuid}/tablemode/form", + path="/studies/{uuid}/tablemode", tags=[APITag.study_data], summary="Get table data for table form", # `Any` because `Union[AreaColumns, LinkColumns]` not working response_model=Dict[str, Dict[str, Any]], response_model_exclude_none=True, ) - def get_table_data( + def get_table_mode( uuid: str, table_type: TableTemplateType, columns: str, @@ -774,11 +780,11 @@ def get_table_data( return study_service.table_mode_manager.get_table_data(study, table_type, columns.split(",")) @bp.put( - path="/studies/{uuid}/tablemode/form", + path="/studies/{uuid}/tablemode", tags=[APITag.study_data], summary="Set table data with values from table form", ) - def set_table_data( + def set_table_mode( uuid: str, table_type: TableTemplateType, data: Dict[str, ColumnsModelTypes], diff --git a/antarest/study/web/xpansion_studies_blueprint.py b/antarest/study/web/xpansion_studies_blueprint.py index af9496ba4c..1b46af1a84 100644 --- 
a/antarest/study/web/xpansion_studies_blueprint.py +++ b/antarest/study/web/xpansion_studies_blueprint.py @@ -1,6 +1,6 @@ import json import logging -from typing import Any, List, Optional, Union +import typing as t from fastapi import APIRouter, Depends, File, UploadFile from starlette.responses import Response @@ -12,9 +12,10 @@ from antarest.core.utils.web import APITag from antarest.login.auth import Auth from antarest.study.business.xpansion_management import ( + GetXpansionSettings, + UpdateXpansionSettings, XpansionCandidateDTO, XpansionResourceFileType, - XpansionSettingsDTO, ) from antarest.study.service import StudyService @@ -24,13 +25,10 @@ def create_xpansion_routes(study_service: StudyService, config: Config) -> APIRouter: """ Endpoint implementation for xpansion studies management + Args: study_service: study service facade to handle request - ftm: file transfer manager config: main server configuration - - Returns: - """ bp = APIRouter(prefix="/v1") auth = Auth(config) @@ -42,9 +40,9 @@ def create_xpansion_routes(study_service: StudyService, config: Config) -> APIRo ) def create_xpansion_configuration( uuid: str, - file: Optional[UploadFile] = File(None), + file: t.Optional[UploadFile] = File(None), current_user: JWTUser = Depends(auth.get_current_user), - ) -> Any: + ) -> t.Any: logger.info( f"Creating Xpansion Configuration for study {uuid}", extra={"user": current_user.id}, @@ -60,7 +58,7 @@ def create_xpansion_configuration( def delete_xpansion_configuration( uuid: str, current_user: JWTUser = Depends(auth.get_current_user), - ) -> Any: + ) -> t.Any: logger.info( f"Deleting Xpansion Configuration for study {uuid}", extra={"user": current_user.id}, @@ -72,12 +70,11 @@ def delete_xpansion_configuration( "/studies/{uuid}/extensions/xpansion/settings", tags=[APITag.xpansion_study_management], summary="Get Xpansion Settings", - response_model=XpansionSettingsDTO, ) def get_settings( uuid: str, current_user: JWTUser = Depends(auth.get_current_user), - ) -> Any: + ) -> GetXpansionSettings: logger.info( f"Fetching Xpansion Settings of the study {uuid}", extra={"user": current_user.id}, @@ -89,19 +86,18 @@ def get_settings( "/studies/{uuid}/extensions/xpansion/settings", tags=[APITag.xpansion_study_management], summary="Update Xpansion Settings", - response_model=XpansionSettingsDTO, ) def update_settings( uuid: str, - xpansion_settings_dto: XpansionSettingsDTO, + xpansion_settings: UpdateXpansionSettings, current_user: JWTUser = Depends(auth.get_current_user), - ) -> Any: + ) -> GetXpansionSettings: logger.info( f"Updating Xpansion Settings Of Study {uuid}", extra={"user": current_user.id}, ) params = RequestParameters(user=current_user) - return study_service.update_xpansion_settings(uuid, xpansion_settings_dto, params) + return study_service.update_xpansion_settings(uuid, xpansion_settings, params) @bp.put( "/studies/{uuid}/extensions/xpansion/settings/additional-constraints", @@ -112,13 +108,13 @@ def update_additional_constraints_settings( uuid: str, filename: str = "", current_user: JWTUser = Depends(auth.get_current_user), - ) -> Any: + ) -> GetXpansionSettings: logger.info( f"Updating Xpansion Settings Of Study {uuid} with additional constraints {filename}", extra={"user": current_user.id}, ) params = RequestParameters(user=current_user) - study_service.update_xpansion_constraints_settings(uuid, filename, params) + return study_service.update_xpansion_constraints_settings(uuid, filename, params) @bp.post( "/studies/{uuid}/extensions/xpansion/candidates", @@ -129,7 +125,7 
@@ def add_candidate( uuid: str, xpansion_candidate_dto: XpansionCandidateDTO, current_user: JWTUser = Depends(auth.get_current_user), - ) -> Any: + ) -> XpansionCandidateDTO: logger.info( f"Adding new candidate {xpansion_candidate_dto.dict(by_alias=True)} to study {uuid}", extra={"user": current_user.id}, @@ -141,13 +137,12 @@ def add_candidate( "/studies/{uuid}/extensions/xpansion/candidates/{candidate_name}", tags=[APITag.xpansion_study_management], summary="Get Xpansion Candidate", - response_model=XpansionCandidateDTO, ) def get_candidate( uuid: str, candidate_name: str, current_user: JWTUser = Depends(auth.get_current_user), - ) -> Any: + ) -> XpansionCandidateDTO: logger.info("Fetching study list", extra={"user": current_user.id}) params = RequestParameters(user=current_user) return study_service.get_candidate(uuid, candidate_name, params) @@ -156,12 +151,11 @@ def get_candidate( "/studies/{uuid}/extensions/xpansion/candidates", tags=[APITag.xpansion_study_management], summary="Get Xpansion Candidates", - response_model=List[XpansionCandidateDTO], ) def get_candidates( uuid: str, current_user: JWTUser = Depends(auth.get_current_user), - ) -> Any: + ) -> t.Sequence[XpansionCandidateDTO]: logger.info("Fetching study list", extra={"user": current_user.id}) params = RequestParameters(user=current_user) return study_service.get_candidates(uuid, params) @@ -176,7 +170,7 @@ def update_candidate( candidate_name: str, xpansion_candidate_dto: XpansionCandidateDTO, current_user: JWTUser = Depends(auth.get_current_user), - ) -> Any: + ) -> t.Any: logger.info( f"Updating xpansion candidate {xpansion_candidate_dto.name} of the study {uuid}", extra={"user": current_user.id}, @@ -193,7 +187,7 @@ def delete_candidate( uuid: str, candidate_name: str, current_user: JWTUser = Depends(auth.get_current_user), - ) -> Any: + ) -> t.Any: logger.info( f"Deleting candidate {candidate_name} of the study {uuid}", extra={"user": current_user.id}, @@ -211,7 +205,7 @@ def add_resource( resource_type: XpansionResourceFileType, file: UploadFile = File(...), current_user: JWTUser = Depends(auth.get_current_user), - ) -> Any: + ) -> t.Any: logger.info( f"Add xpansion {resource_type} files in the study {uuid}", extra={"user": current_user.id}, @@ -233,7 +227,7 @@ def delete_resource( resource_type: XpansionResourceFileType, filename: str, current_user: JWTUser = Depends(auth.get_current_user), - ) -> Any: + ) -> t.Any: logger.info( f"Deleting xpansion {resource_type} file from the study {uuid}", extra={"user": current_user.id}, @@ -255,7 +249,7 @@ def get_resource_content( resource_type: XpansionResourceFileType, filename: str, current_user: JWTUser = Depends(auth.get_current_user), - ) -> Any: + ) -> t.Any: logger.info( f"Getting xpansion {resource_type} file {filename} from the study {uuid}", extra={"user": current_user.id}, @@ -265,7 +259,7 @@ def get_resource_content( StudyPermissionType.READ, RequestParameters(user=current_user), ) - output: Union[JSON, bytes, str] = study_service.xpansion_manager.get_resource_content( + output: t.Union[JSON, bytes, str] = study_service.xpansion_manager.get_resource_content( study, resource_type, filename ) @@ -292,9 +286,9 @@ def get_resource_content( ) def list_resources( uuid: str, - resource_type: Optional[XpansionResourceFileType] = None, + resource_type: t.Optional[XpansionResourceFileType] = None, current_user: JWTUser = Depends(auth.get_current_user), - ) -> Any: + ) -> t.Any: logger.info( f"Getting xpansion {resource_type} resources files from the study {uuid}", 
extra={"user": current_user.id}, diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 1e587bf6e3..15fa0dedad 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -1,6 +1,52 @@ Antares Web Changelog ===================== +v2.16.2 (2024-01-10) +-------------------- + +### Features + +* **bc:** add input validation for binding constraint creation [`#1868`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1868) +* **study-size:** add new route to get a study disk usage (in bytes) [`#1878`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1878) +* **table-mode:** update Table Mode view [`#1883`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1883) +* **thermals, st-storages:** add a dialog to define a name when duplicating a cluster or a storage [`#1866`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1866) +* **debug-view:** introduce advanced `JSONEditor` and `Debug` view updates [`#1885`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1885) + + +### Performance + +* **db:** improve performance by using joins instead of N+1 queries [`#1848`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1848) +* **raw-study:** improve INI file reading performance for RAW studies [`#1879`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1879) + + +### Bug Fixes + +* **bc:** automatically change binding constraint matrix when changing frequency [`#1867`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1867) +* **ci:** avoid reflecting user-controlled data (SonarCloud issue) [`#1886`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1886) +* **db:** correct alembic migration script used to purge obsolete tasks [`#1881`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1881) +* **db:** add missing constraints and relationships in `TaskJob` table [`#1872`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1872) +* **services:** ensure all worker services run indefinitely [`#1870`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1870) +* **study-factory:** ignore non-existent files in archived studies during build [`#1871`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1871) +* **thermals:** correct TS Generator matrix index for thermal clusters [`#1882`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1882) +* **ui:** prevent synchro issues between tabs and react-router [`#1869`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1869) +* **xpansion:** update for improved parameter handling and code refactoring [`#1865`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1865) + + +### Documentation + +* **st-storage:** add ST-Storage documentation [`#1873`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1873) + + +### Tests + +* remove Locust dependency and unit tests (not really useful) [`34c97e0`](https://github.com/AntaresSimulatorTeam/AntaREST/commit/34c97e05fe8a623a799cd31519b7982dae579368) + + +### Refactoring + +* **bc:** remove duplicate class BindingConstraintType [`#1860`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1860) + + v2.16.1 (2023-12-14) -------------------- diff --git a/docs/assets/media/user-guide/study/01-map.tab.png b/docs/assets/media/user-guide/study/01-map.tab.png new file mode 100644 index 0000000000..7890e89799 Binary files /dev/null and b/docs/assets/media/user-guide/study/01-map.tab.png differ diff --git a/docs/assets/media/user-guide/study/02-areas.tab.png b/docs/assets/media/user-guide/study/02-areas.tab.png new file mode 100644 index 0000000000..94e2b29a15 Binary 
files /dev/null and b/docs/assets/media/user-guide/study/02-areas.tab.png differ diff --git a/docs/assets/media/user-guide/study/03-links.tab.png b/docs/assets/media/user-guide/study/03-links.tab.png new file mode 100644 index 0000000000..0f6771de6d Binary files /dev/null and b/docs/assets/media/user-guide/study/03-links.tab.png differ diff --git a/docs/assets/media/user-guide/study/04-binding-constraints.tab.png b/docs/assets/media/user-guide/study/04-binding-constraints.tab.png new file mode 100644 index 0000000000..4fdae52ef5 Binary files /dev/null and b/docs/assets/media/user-guide/study/04-binding-constraints.tab.png differ diff --git a/docs/assets/media/user-guide/study/05-debug.tab.png b/docs/assets/media/user-guide/study/05-debug.tab.png new file mode 100644 index 0000000000..59ae230307 Binary files /dev/null and b/docs/assets/media/user-guide/study/05-debug.tab.png differ diff --git a/docs/assets/media/user-guide/study/06-table-mode.tab.png b/docs/assets/media/user-guide/study/06-table-mode.tab.png new file mode 100644 index 0000000000..064e2b9b82 Binary files /dev/null and b/docs/assets/media/user-guide/study/06-table-mode.tab.png differ diff --git a/docs/assets/media/user-guide/study/areas/01-properties-form.png b/docs/assets/media/user-guide/study/areas/01-properties-form.png new file mode 100644 index 0000000000..55c3afe312 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/01-properties-form.png differ diff --git a/docs/assets/media/user-guide/study/areas/01-properties-tab.png b/docs/assets/media/user-guide/study/areas/01-properties-tab.png new file mode 100644 index 0000000000..c869f9bdd0 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/01-properties-tab.png differ diff --git a/docs/assets/media/user-guide/study/areas/02-load.series.png b/docs/assets/media/user-guide/study/areas/02-load.series.png new file mode 100644 index 0000000000..eb73e10214 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/02-load.series.png differ diff --git a/docs/assets/media/user-guide/study/areas/02-load.tab.png b/docs/assets/media/user-guide/study/areas/02-load.tab.png new file mode 100644 index 0000000000..e0aa20f90a Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/02-load.tab.png differ diff --git a/docs/assets/media/user-guide/study/areas/03-thermals.form.png b/docs/assets/media/user-guide/study/areas/03-thermals.form.png new file mode 100644 index 0000000000..ac431175c3 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/03-thermals.form.png differ diff --git a/docs/assets/media/user-guide/study/areas/03-thermals.list.png b/docs/assets/media/user-guide/study/areas/03-thermals.list.png new file mode 100644 index 0000000000..bf7633c6b2 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/03-thermals.list.png differ diff --git a/docs/assets/media/user-guide/study/areas/03-thermals.series.png b/docs/assets/media/user-guide/study/areas/03-thermals.series.png new file mode 100644 index 0000000000..90fbca354b Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/03-thermals.series.png differ diff --git a/docs/assets/media/user-guide/study/areas/03-thermals.tab.png b/docs/assets/media/user-guide/study/areas/03-thermals.tab.png new file mode 100644 index 0000000000..19da1306ae Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/03-thermals.tab.png differ diff --git a/docs/assets/media/user-guide/study/areas/04-renewables.form.png 
b/docs/assets/media/user-guide/study/areas/04-renewables.form.png new file mode 100644 index 0000000000..9fe5b50173 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/04-renewables.form.png differ diff --git a/docs/assets/media/user-guide/study/areas/04-renewables.list.png b/docs/assets/media/user-guide/study/areas/04-renewables.list.png new file mode 100644 index 0000000000..387a0fe316 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/04-renewables.list.png differ diff --git a/docs/assets/media/user-guide/study/areas/04-renewables.series.png b/docs/assets/media/user-guide/study/areas/04-renewables.series.png new file mode 100644 index 0000000000..6a15e97719 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/04-renewables.series.png differ diff --git a/docs/assets/media/user-guide/study/areas/04-renewables.tab.png b/docs/assets/media/user-guide/study/areas/04-renewables.tab.png new file mode 100644 index 0000000000..8fcc5b96e0 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/04-renewables.tab.png differ diff --git a/docs/assets/media/user-guide/study/areas/05-hydro.management-options.form.png b/docs/assets/media/user-guide/study/areas/05-hydro.management-options.form.png new file mode 100644 index 0000000000..0dbf56e089 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/05-hydro.management-options.form.png differ diff --git a/docs/assets/media/user-guide/study/areas/05-hydro.tab.png b/docs/assets/media/user-guide/study/areas/05-hydro.tab.png new file mode 100644 index 0000000000..398ae91971 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/05-hydro.tab.png differ diff --git a/docs/assets/media/user-guide/study/areas/06-wind.tab.png b/docs/assets/media/user-guide/study/areas/06-wind.tab.png new file mode 100644 index 0000000000..7fd98af046 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/06-wind.tab.png differ diff --git a/docs/assets/media/user-guide/study/areas/07-solar.tab.png b/docs/assets/media/user-guide/study/areas/07-solar.tab.png new file mode 100644 index 0000000000..039c63fd42 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/07-solar.tab.png differ diff --git a/docs/assets/media/user-guide/study/areas/08-st-storages.form.png b/docs/assets/media/user-guide/study/areas/08-st-storages.form.png new file mode 100644 index 0000000000..a7be6236c1 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/08-st-storages.form.png differ diff --git a/docs/assets/media/user-guide/study/areas/08-st-storages.list.png b/docs/assets/media/user-guide/study/areas/08-st-storages.list.png new file mode 100644 index 0000000000..213befc8e3 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/08-st-storages.list.png differ diff --git a/docs/assets/media/user-guide/study/areas/08-st-storages.series.png b/docs/assets/media/user-guide/study/areas/08-st-storages.series.png new file mode 100644 index 0000000000..c8cfba6194 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/08-st-storages.series.png differ diff --git a/docs/assets/media/user-guide/study/areas/08-st-storages.tab.png b/docs/assets/media/user-guide/study/areas/08-st-storages.tab.png new file mode 100644 index 0000000000..bc745bd177 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/08-st-storages.tab.png differ diff --git a/docs/assets/media/user-guide/study/areas/09-reserves.series.png 
b/docs/assets/media/user-guide/study/areas/09-reserves.series.png new file mode 100644 index 0000000000..b3fbbd9700 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/09-reserves.series.png differ diff --git a/docs/assets/media/user-guide/study/areas/09-reserves.tab.png b/docs/assets/media/user-guide/study/areas/09-reserves.tab.png new file mode 100644 index 0000000000..440f759ee3 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/09-reserves.tab.png differ diff --git a/docs/assets/media/user-guide/study/areas/10-misc-gen.series.png b/docs/assets/media/user-guide/study/areas/10-misc-gen.series.png new file mode 100644 index 0000000000..eff8687574 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/10-misc-gen.series.png differ diff --git a/docs/assets/media/user-guide/study/areas/10-misc-gen.tab.png b/docs/assets/media/user-guide/study/areas/10-misc-gen.tab.png new file mode 100644 index 0000000000..4b6804a2d2 Binary files /dev/null and b/docs/assets/media/user-guide/study/areas/10-misc-gen.tab.png differ diff --git a/docs/user-guide/0-introduction.md b/docs/user-guide/0-introduction.md index c136573c31..99fa5637ac 100644 --- a/docs/user-guide/0-introduction.md +++ b/docs/user-guide/0-introduction.md @@ -2,7 +2,8 @@ ![](../assets/antares.png) -This package works along with RTE's adequacy software [Antares Simulator](https://antares-simulator.org) that is also [hosted on github][antares-github] +This package works along with RTE's adequacy software [Antares Simulator](https://antares-simulator.org) +that is also [hosted on github][antares-github] `antares-web` is a server api interfacing Antares Simulator studies. It provides a web application to manage studies adding more features to simple edition. @@ -17,10 +18,9 @@ This brings: > > - **user accounts** : add user management and permission system - ## Variant manager `antares-web` brings an edition event store that provides a way to edit a study while keeping track of changes. It eases the creation of "variants" of a study and allow an explicit diff change between studies. -You can read more information in [using the variant manager here](./2-variant_manager.md) \ No newline at end of file +You can read more information in [using the variant manager here](./3-variant_manager.md) diff --git a/docs/user-guide/1-interface.md b/docs/user-guide/1-interface.md index 3d42c95f78..ca002c205d 100644 --- a/docs/user-guide/1-interface.md +++ b/docs/user-guide/1-interface.md @@ -116,7 +116,7 @@ Example of the detailed view of a matrix node (txt data files): ## Data management The data view display dataset which are list of matrices. -These matrices can then be used as argument in [variant manager commands](./2-variant_manager.md#base-commands). +These matrices can then be used as argument in [variant manager commands](./3-variant_manager.md#base-commands). ![](../assets/media/img/userguide_dataset_listing.png) diff --git a/docs/user-guide/2-study.md b/docs/user-guide/2-study.md new file mode 100644 index 0000000000..12713241ef --- /dev/null +++ b/docs/user-guide/2-study.md @@ -0,0 +1,16 @@ +# Study Configuration + +This page is dedicated to configuring the study in the Antares Web application. + +To access the configuration of the study: + +1. From the "Study" view, click on the "MODELIZATION" tab. 
+ +## Main Topics + +- [Map](study/01-map.md) +- [Areas](study/02-areas.md) +- [Links](study/03-links.md) +- [Binding Constraints](study/04-binding-constraints.md) +- [Debug](study/05-debug.md) +- [Table Mode](study/06-table-mode.md) diff --git a/docs/user-guide/2-variant_manager.md b/docs/user-guide/3-variant_manager.md similarity index 100% rename from docs/user-guide/2-variant_manager.md rename to docs/user-guide/3-variant_manager.md diff --git a/docs/user-guide/study/01-map.md b/docs/user-guide/study/01-map.md new file mode 100644 index 0000000000..8cbfc94685 --- /dev/null +++ b/docs/user-guide/study/01-map.md @@ -0,0 +1,13 @@ +# Study Map + +[⬅ Study Configuration](../2-study.md) + +This page allows you to sketch the production and consumption network of your study. +You can add, edit, and delete nodes and links. + +To access the map of the study: + +1. From the "Study" view, click on the "MODELIZATION" tab. +2. Click on the "MAP" tab to access the page dedicated to the map. + +![01-map.tab.png](../../assets/media/user-guide/study/01-map.tab.png) diff --git a/docs/user-guide/study/02-areas.md b/docs/user-guide/study/02-areas.md new file mode 100644 index 0000000000..10cb737302 --- /dev/null +++ b/docs/user-guide/study/02-areas.md @@ -0,0 +1,25 @@ +# Area Configuration + +[⬅ Study Configuration](../2-study.md) + +This page is dedicated to configuring areas in the Antares Web application. + +To access the configuration of areas: + +1. From the "Study" view, click on the "MODELIZATION" tab. +2. Click on the "AREAS" tab to access the page dedicated to areas. + +![02-areas.tab.png](../../assets/media/user-guide/study/02-areas.tab.png) + +## Main Topics + +- [General Properties](areas/01-properties.md) +- [Load](areas/02-load.md) +- [Thermal Clusters](areas/03-thermals.md) +- [Renewables Clusters](areas/04-renewables.md) +- [Hydraulic Generators](areas/05-hydro.md) +- [Wind Generators](areas/06-wind.md) +- [Solar Generators](areas/07-solar.md) +- [Short-Term Storages](areas/08-st-storages.md) +- [Reserves](areas/09-reserves.md) +- [Miscellaneous Generators](areas/10-misc-gen.md) diff --git a/docs/user-guide/study/03-links.md b/docs/user-guide/study/03-links.md new file mode 100644 index 0000000000..1c53563170 --- /dev/null +++ b/docs/user-guide/study/03-links.md @@ -0,0 +1,13 @@ +# Links Configuration + +[⬅ Study Configuration](../2-study.md) + +This page is dedicated to configuring links in the Antares Web application. + +To access the configuration of links: + +1. From the "Study" view, click on the "MODELIZATION" tab. +2. Click on the "LINKS" tab to access the page dedicated to links. + +![03-links.tab.png](../../assets/media/user-guide/study/03-links.tab.png) + diff --git a/docs/user-guide/study/04-binding-constraints.md b/docs/user-guide/study/04-binding-constraints.md new file mode 100644 index 0000000000..ff0b78c926 --- /dev/null +++ b/docs/user-guide/study/04-binding-constraints.md @@ -0,0 +1,12 @@ +# Binding Constraints Configuration + +[⬅ Study Configuration](../2-study.md) + +This page is dedicated to configuring binding constraints in the Antares Web application. + +To access the configuration of binding constraints: + +1. From the "Study" view, click on the "MODELIZATION" tab. +2. Click on the "BINDING CONSTRAINTS" tab to access the page dedicated to binding constraints.
+ +![04-binding-constraints.tab.png](../../assets/media/user-guide/study/04-binding-constraints.tab.png) diff --git a/docs/user-guide/study/05-debug.md b/docs/user-guide/study/05-debug.md new file mode 100644 index 0000000000..8c2caaaa44 --- /dev/null +++ b/docs/user-guide/study/05-debug.md @@ -0,0 +1,12 @@ +# Debug View + +[⬅ Study Configuration](../2-study.md) + +This page is dedicated to the debugging of the study in the Antares Web application. + +To access the debug view: + +1. From the "Study" view, click on the "MODELIZATION" tab. +2. Click on the "DEBUG" tab to access the page dedicated to debugging. + +![05-debug.tab.png](../../assets/media/user-guide/study/05-debug.tab.png) diff --git a/docs/user-guide/study/06-table-mode.md b/docs/user-guide/study/06-table-mode.md new file mode 100644 index 0000000000..53c97ecea2 --- /dev/null +++ b/docs/user-guide/study/06-table-mode.md @@ -0,0 +1,12 @@ +# Table Mode + +[⬅ Study Configuration](../2-study.md) + +This page is dedicated to the table mode, which allows you to edit the study properties in tables. + +To access the table mode: + +1. From the "Study" view, click on the "MODELIZATION" tab. +2. Click on the "TABLE MODE" tab to access the page dedicated to the table mode. + +![06-table-mode.tab.png](../../assets/media/user-guide/study/06-table-mode.tab.png) diff --git a/docs/user-guide/study/areas/01-properties.md b/docs/user-guide/study/areas/01-properties.md new file mode 100644 index 0000000000..f5605ba153 --- /dev/null +++ b/docs/user-guide/study/areas/01-properties.md @@ -0,0 +1,23 @@ +# Area General Properties + +[⬅ Area Configuration](../02-areas.md) + +## Introduction + +This documentation is dedicated to configuring the general properties of an area in the Antares Web application. + +To access the configuration of an area: + +1. From the "Study" view, click on the "MODELIZATION" tab. +2. Click on the "AREAS" tab, then choose an area from the sidebar. +3. Next, click on the "PROPERTIES" tab to access the page dedicated to the general properties. + +![01-properties-tab.png](../../../assets/media/user-guide/study/areas/01-properties-tab.png) + +## Area Properties Form + +![01-properties-form.png](../../../assets/media/user-guide/study/areas/01-properties-form.png) + +The area properties form allows you to configure the following elements: + +> TODO diff --git a/docs/user-guide/study/areas/02-load.md b/docs/user-guide/study/areas/02-load.md new file mode 100644 index 0000000000..c2bf484dde --- /dev/null +++ b/docs/user-guide/study/areas/02-load.md @@ -0,0 +1,35 @@ +# Load Configuration + +[⬅ Area Configuration](../02-areas.md) + +## Introduction + +This documentation is dedicated to configuring Load in the Antares Web application. + +To access the configuration of Load: + +1. From the "Study" view, click on the "MODELIZATION" tab. +2. Click on the "AREAS" tab, then choose an area from the sidebar. +3. Next, click on the "LOAD" tab to access the page dedicated to loads. + +![02-load.tab.png](../../../assets/media/user-guide/study/areas/02-load.tab.png) + +## Time Series Matrix + +![02-load.series.png](../../../assets/media/user-guide/study/areas/02-load.series.png) + +On the load page, you will find the time series matrices composed of 8760 rows (hourly for a simulation year) +and 1 column for each Monte-Carlo year. + +The available commands are: + +- **Assign a Matrix:** Search and assign a matrix from the matrix store to the load. +- **Import:** Drag and drop a TSV file to update the time series matrices.
+- **Export:** Download the current TSV file using the "Export" button. + +You can edit a cell and confirm with the "Enter" key. +You can also edit a group of cells or an entire column and confirm with the "Ctrl+Enter" key combination. + +The detailed configuration is available in the [Antares Simulator documentation](https://antares-simulator.readthedocs.io/en/latest/reference-guide/04-active_windows/#load). + +Explore these features to customize the Load settings according to the specific needs of your study. diff --git a/docs/user-guide/study/areas/03-thermals.md b/docs/user-guide/study/areas/03-thermals.md new file mode 100644 index 0000000000..7e1d3f29ad --- /dev/null +++ b/docs/user-guide/study/areas/03-thermals.md @@ -0,0 +1,68 @@ +# Thermal Clusters Configuration + +[⬅ Area Configuration](../02-areas.md) + +## Introduction + +This documentation is dedicated to configuring Thermal Clusters (Thermals) in the Antares Web application. + +To access the configuration of Thermals: + +1. From the "Study" view, click on the "MODELIZATION" tab. +2. Click on the "AREAS" tab, then choose an area from the sidebar. +3. Next, click on the "THERMALS" tab to access the page dedicated to Thermals. + +![03-thermals.tab.png](../../../assets/media/user-guide/study/areas/03-thermals.tab.png) + +## Thermals List + +![03-thermals.list.png](../../../assets/media/user-guide/study/areas/03-thermals.list.png) + +On the Thermals page, you will find the following elements: + +- **Command Bar:** Add, duplicate, or delete thermal clusters using the "Add," "Duplicate," and "Delete" buttons. +- **Toolbar:** Use the toolbar to filter and quickly search in the thermal clusters table. +- **Selection and Actions:** Click on a row to select a thermal cluster. You can then delete or duplicate it. + +The thermal clusters table displays the following columns: + +- **Group:** Name of the group to which the thermal cluster belongs. +- **Name:** Name of the thermal cluster (link to the properties form). +- **Enabled:** Indicates whether the thermal cluster is enabled. +- **Must run:** Indicates whether the thermal cluster must run. +- **Unit Count:** Number of units in the thermal cluster. +- **Nominal Capacity (MW):** Nominal capacity of the thermal cluster. +- **Enabled/Installed (MW):** Enabled/Installed capacity of the thermal cluster. +- **Market Bid (€/MWh):** Market bid of the thermal cluster. + +The **Total** row displays the sum of the values in the **Unit Count** and **Enabled/Installed** columns. + + +## Configuration Form + +Click on the name of a thermal cluster to open the properties form. + +![03-thermals.form.png](../../../assets/media/user-guide/study/areas/03-thermals.form.png) + +You will find the following elements: + +- Click on the "Return" link to go back to the list of thermal clusters. +- Modify the values and click "Save" to confirm the changes. +- Use the "↶" button to undo changes and the "↷" button to redo them, then confirm with "Save." + + +## Time Series Matrices + +In the tabs, you will find time series matrices composed of 8760 rows (hourly for a simulation year). + +![03-thermals.series.png](../../../assets/media/user-guide/study/areas/03-thermals.series.png) + +The available commands are: + +- **Assign a Matrix:** Search and assign a matrix from the matrix store to Thermal Clusters. +- **Import:** Drag and drop a TSV file to update the time series matrices. +- **Export:** Download the current TSV file using the "Export" button.
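As a side note on the TSV files these Import/Export commands exchange: a time series is plain tab-separated text with 8760 rows, one per hour of a simulation year. A minimal sketch of building such a file locally (the file name and values are illustrative, not part of the application):

```python
import csv

HOURS_PER_YEAR = 8760

# One column per Monte-Carlo year; here a single dummy year filled with zeros.
series = [[0.0] for _ in range(HOURS_PER_YEAR)]

with open("thermal-series.tsv", "w", newline="") as fh:  # hypothetical file name
    writer = csv.writer(fh, delimiter="\t")
    writer.writerows(series)
```

A file shaped this way can then be dragged and dropped onto the matrix view as described above.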
+ +You can edit a cell and confirm with the "Enter" key. You can also edit a group of cells or an entire column and confirm with the "Ctrl+Enter" key combination. + +The detailed configuration is available in the [Antares Simulator documentation](https://antares-simulator.readthedocs.io/en/latest/reference-guide/04-active_windows/#thermal). diff --git a/docs/user-guide/study/areas/04-renewables.md b/docs/user-guide/study/areas/04-renewables.md new file mode 100644 index 0000000000..f7298783c0 --- /dev/null +++ b/docs/user-guide/study/areas/04-renewables.md @@ -0,0 +1,67 @@ +# Renewable Clusters Configuration + +[⬅ Area Configuration](../02-areas.md) + +## Introduction + +This documentation is dedicated to configuring Renewable Clusters (Renewables) in the Antares Web application. + +To access the configuration of Renewables: + +1. From the "Study" view, click on the "MODELIZATION" tab. +2. Click on the "AREAS" tab, then choose an area from the sidebar. +3. Next, click on the "RENEWABLE" tab to access the page dedicated to Renewables. + +![04-renewables.tab.png](../../../assets/media/user-guide/study/areas/04-renewables.tab.png) + +## Renewable List + +![04-renewables.list.png](../../../assets/media/user-guide/study/areas/04-renewables.list.png) + +On the Renewables page, you will find the following elements: + +- **Command Bar:** Add, duplicate, or delete renewable clusters using the "Add," "Duplicate," and "Delete" buttons. +- **Toolbar:** Use the toolbar to filter and quickly search in the renewable clusters table. +- **Selection and Actions:** Click on a row to select a renewable cluster. You can then delete or duplicate it. + +The renewable clusters table displays the following columns: + +- **Group:** Name of the group to which the renewable cluster belongs. +- **Name:** Name of the renewable cluster (link to the properties form). +- **Enabled:** Indicates whether the renewable cluster is enabled. +- **TS Interpretation:** Indicates whether the time series are interpreted as production or consumption. +- **Unit Count:** Number of units in the renewable cluster. +- **Nominal Capacity (MW):** Nominal capacity of the renewable cluster. +- **Enabled/Installed (MW):** Enabled/Installed capacity of the renewable cluster. + +The **Total** row displays the sum of the values in the **Unit Count** and **Enabled/Installed** columns. + + +## Configuration Form + +Click on the name of a renewable cluster to open the properties form. + +![04-renewables.form.png](../../../assets/media/user-guide/study/areas/04-renewables.form.png) + +You will find the following elements: + +- Click on the "Return" link to go back to the list of renewable clusters. +- Modify the values and click "Save" to confirm the changes. +- Use the "↶" button to undo changes and the "↷" button to redo them, then confirm with "Save." + + +## Time Series Matrices + +In the tabs, you will find time series matrices composed of 8760 rows (hourly for a simulation year). + +![04-renewables.series.png](../../../assets/media/user-guide/study/areas/04-renewables.series.png) + +The available commands are: + +- **Assign a Matrix:** Search and assign a matrix from the matrix store to Renewable Clusters. +- **Import:** Drag and drop a TSV file to update the time series matrices. +- **Export:** Download the current TSV file using the "Export" button. + +You can edit a cell and confirm with the "Enter" key. You can also edit a group of cells or an entire column and confirm with the "Ctrl+Enter" key combination.
+ +The detailed configuration is available in the [Antares Simulator documentation](https://antares-simulator.readthedocs.io/en/latest/reference-guide/04-active_windows/#renewable). diff --git a/docs/user-guide/study/areas/05-hydro.md b/docs/user-guide/study/areas/05-hydro.md new file mode 100644 index 0000000000..40a02d4eca --- /dev/null +++ b/docs/user-guide/study/areas/05-hydro.md @@ -0,0 +1,73 @@ +# Hydraulic Generators Configuration + +[⬅ Area Configuration](../02-areas.md) + +## Introduction + +This documentation is dedicated to configuring hydraulic generators (Hydro) in the Antares Web application. + +To access the configuration of Hydro: + +1. From the "Study" view, click on the "MODELIZATION" tab. +2. Click on the "AREAS" tab, then choose an area from the sidebar. +3. Next, click on the "HYDRO" tab to access the page dedicated to hydraulic generators. + +![05-hydro.tab.png](../../../assets/media/user-guide/study/areas/05-hydro.tab.png) + +## Management options + +This tab allows you to configure the management options of the hydraulic generators. + +![05-hydro.management-options.form.png](../../../assets/media/user-guide/study/areas/05-hydro.management-options.form.png) + +## Inflow Structure + +This tab allows you to configure the inflow pattern time series and the overall monthly hydro parameters. + +> TODO + +## Allocation + +This tab allows you to configure the allocation coefficients for each area. + +> TODO + +## Correlation + +This tab allows you to configure the correlation coefficients between the current area and the other areas. + +> TODO + +## Daily Power + +This tab allows you to configure the daily generating power and the pumping power of the hydraulic generators. + +> TODO + +## Energy Credits + +This tab allows you to configure the energy credits time series of the hydraulic generators. + +> TODO + +## Reservoir Levels + +This tab allows you to configure the reservoir levels time series of the hydraulic generators. + +> TODO + +## Water Values + +This tab allows you to configure the water values time series of the hydraulic generators. + +> TODO + +## Hydro Storage + +This tab allows you to configure the hydro storage time series of the hydraulic generators. + +> TODO + +## Run of River + +This tab allows you to configure the run of river time series of the hydraulic generators. diff --git a/docs/user-guide/study/areas/06-wind.md b/docs/user-guide/study/areas/06-wind.md new file mode 100644 index 0000000000..176ea212de --- /dev/null +++ b/docs/user-guide/study/areas/06-wind.md @@ -0,0 +1,15 @@ +# Wind Generators Configuration + +[⬅ Area Configuration](../02-areas.md) + +## Introduction + +This documentation is dedicated to configuring wind generators (Wind) in the Antares Web application. + +To access the configuration of Wind: + +1. From the "Study" view, click on the "MODELIZATION" tab. +2. Click on the "AREAS" tab, then choose an area from the sidebar. +3. Next, click on the "WIND" tab to access the page dedicated to wind generators. + +![06-wind.tab.png](../../../assets/media/user-guide/study/areas/06-wind.tab.png) diff --git a/docs/user-guide/study/areas/07-solar.md b/docs/user-guide/study/areas/07-solar.md new file mode 100644 index 0000000000..5a55b3071d --- /dev/null +++ b/docs/user-guide/study/areas/07-solar.md @@ -0,0 +1,15 @@ +# Solar Generators Configuration + +[⬅ Area Configuration](../02-areas.md) + +## Introduction + +This documentation is dedicated to configuring solar generators (Solar) in the Antares Web application. 
+ +To access the configuration of Solar: + +1. From the "Study" view, click on the "MODELIZATION" tab. +2. Click on the "AREAS" tab, then choose an area from the sidebar. +3. Next, click on the "SOLAR" tab to access the page dedicated to solar generators. + +![07-solar.tab.png](../../../assets/media/user-guide/study/areas/07-solar.tab.png) diff --git a/docs/user-guide/study/areas/08-st-storages.md b/docs/user-guide/study/areas/08-st-storages.md new file mode 100644 index 0000000000..11f1135a74 --- /dev/null +++ b/docs/user-guide/study/areas/08-st-storages.md @@ -0,0 +1,70 @@ +# Short-Term Storage Configuration + +[⬅ Area Configuration](../02-areas.md) + +## Introduction + +This documentation is dedicated to configuring short-term storage (ST Storage) in the Antares Web application. +Please note that this feature is available for studies in version 8.6 and above. + +To access the configuration of ST storages: + +1. From the "Study" view, click on the "MODELIZATION" tab. +2. Click on the "AREAS" tab, then choose an area from the sidebar. +3. Next, click on the "STORAGES" tab to access the page dedicated to ST storages. + +![08-st-storages.tab.png](../../../assets/media/user-guide/study/areas/08-st-storages.tab.png) + +## ST Storage List + +![08-st-storages.list.png](../../../assets/media/user-guide/study/areas/08-st-storages.list.png) + +On the ST storages page, you will find the following elements: + +- **Command Bar:** Add, duplicate, or delete storages using the "Add," "Duplicate," and "Delete" buttons. +- **Toolbar:** Use the toolbar to filter and quickly search in the storages table. +- **Selection and Actions:** Click on a row to select a storage. You can then delete or duplicate it. + +The storages table displays the following information: + +- **Group:** Name of the group to which the storage belongs. +- **Name:** Name of the storage (link to the properties form). +- **Withdrawal (MW):** Withdrawal power of the storage. +- **Injection (MW):** Injection power of the storage. +- **Reservoir (MWh):** Reservoir capacity of the storage. +- **Efficiency (%):** Efficiency of the storage. +- **Initial Level (%):** Initial level of the storage. +- **Initial Level Optimized:** Indicates whether the initial level of the storage is optimized. + +The **Total** row displays the sum of the values in the **Withdrawal** and **Injection** columns. + + +## Configuration Form + +Click on the name of a storage to open the properties form. + +![08-st-storages.form.png](../../../assets/media/user-guide/study/areas/08-st-storages.form.png) + +You will find the following elements: + +- Click on the "Return" link to go back to the list of storages. +- Modify the values and click "Save" to confirm the changes. +- Use the "↶" button to undo changes and the "↷" button to redo them, then confirm with "Save." + +The detailed configuration is available in the [Antares Simulator documentation](https://antares-simulator.readthedocs.io/en/latest/reference-guide/13-file-format/#short-term-storage_1). + +## Time Series Matrices + +In the tabs, you will find time series matrices composed of 8760 rows (hourly for a simulation year). + +![08-st-storages.series.png](../../../assets/media/user-guide/study/areas/08-st-storages.series.png) + +The available commands are: + +- **Assign a Matrix:** Search and assign a matrix from the matrix store to short-term storage. + +- **Import:** Drag and drop a TSV file to update the time series matrices. +- **Export:** Download the current TSV file using the "Export" button.
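Regarding the **Total** row of the storages table above: it is a plain column sum over the Withdrawal and Injection values. An illustrative sketch (the field names are made up for the example and are not the application's schema):

```python
from dataclasses import dataclass
from typing import List, Tuple


@dataclass
class StorageRow:
    # Mirrors two columns of the storages table (names are illustrative).
    withdrawal_mw: float
    injection_mw: float


def totals(rows: List[StorageRow]) -> Tuple[float, float]:
    # The "Total" row sums the Withdrawal and Injection columns.
    return (
        sum(r.withdrawal_mw for r in rows),
        sum(r.injection_mw for r in rows),
    )
```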
+ +You can edit a cell and confirm with the "Enter" key. You can also edit a group of cells or an entire column and confirm with the "Ctrl+Enter" key combination. + +The detailed configuration is available in the [Antares Simulator documentation](https://antares-simulator.readthedocs.io/en/latest/reference-guide/13-file-format/#short-term-storage_1). diff --git a/docs/user-guide/study/areas/09-reserves.md b/docs/user-guide/study/areas/09-reserves.md new file mode 100644 index 0000000000..421e3a69e7 --- /dev/null +++ b/docs/user-guide/study/areas/09-reserves.md @@ -0,0 +1,34 @@ +# Reserves Configuration + +[⬅ Area Configuration](../02-areas.md) + +## Introduction + +This documentation is dedicated to configuring Reserves in the Antares Web application. + +To access the configuration of Reserves: + +1. From the "Study" view, click on the "MODELIZATION" tab. +2. Click on the "AREAS" tab, then choose an area from the sidebar. +3. Next, click on the "RESERVES" tab to access the page dedicated to reserves. + +![09-reserves.tab.png](../../../assets/media/user-guide/study/areas/09-reserves.tab.png) + +## Time Series Matrices + +In the tabs, you will find time series matrices composed of 8760 rows (hourly for a simulation year). + +![09-reserves.series.png](../../../assets/media/user-guide/study/areas/09-reserves.series.png) + +The available commands are: + +- **Assign a Matrix:** Search and assign a matrix from the matrix store to reserves. +- **Import:** Drag and drop a TSV file to update the time series matrices. +- **Export:** Download the current TSV file using the "Export" button. + +You can edit a cell and confirm with the "Enter" key. +You can also edit a group of cells or an entire column and confirm with the "Ctrl+Enter" key combination. + +The detailed configuration is available in the [Antares Simulator documentation](https://antares-simulator.readthedocs.io/en/latest/reference-guide/04-active_windows/#reserves-dsm). + +Explore these features to customize the Reserves settings according to the specific needs of your study. diff --git a/docs/user-guide/study/areas/10-misc-gen.md b/docs/user-guide/study/areas/10-misc-gen.md new file mode 100644 index 0000000000..f3689f1e18 --- /dev/null +++ b/docs/user-guide/study/areas/10-misc-gen.md @@ -0,0 +1,34 @@ +# Miscellaneous Generators Configuration + +[⬅ Area Configuration](../02-areas.md) + +## Introduction + +This documentation is dedicated to configuring miscellaneous generators (Misc. Gen.) in the Antares Web application. + +To access the configuration of Misc. Gen.: + +1. From the "Study" view, click on the "MODELIZATION" tab. +2. Click on the "AREAS" tab, then choose an area from the sidebar. +3. Next, click on the "MISC. GEN." tab to access the page dedicated to miscellaneous generators. + +![10-misc-gen.tab.png](../../../assets/media/user-guide/study/areas/10-misc-gen.tab.png) + +## Time Series Matrices + +In the tabs, you will find time series matrices composed of 8760 rows (hourly for a simulation year). + +![10-misc-gen.series.png](../../../assets/media/user-guide/study/areas/10-misc-gen.series.png) + +The available commands are: + +- **Assign a Matrix:** Search and assign a matrix from the matrix store to the miscellaneous generators. +- **Import:** Drag and drop a TSV file to update the time series matrices. +- **Export:** Download the current TSV file using the "Export" button. + +You can edit a cell and confirm with the "Enter" key.
+You can also edit a group of cells or an entire column and confirm with the "Ctrl+Enter" key combination. + +The detailed configuration is available in the [Antares Simulator documentation](https://antares-simulator.readthedocs.io/en/latest/reference-guide/04-active_windows/#misc-gen). + +Explore these features to customize the Miscellaneous Generators settings according to the specific needs of your study. diff --git a/examples/studies/STA-mini.zip b/examples/studies/STA-mini.zip index 0de2e32809..1df2db61f8 100644 Binary files a/examples/studies/STA-mini.zip and b/examples/studies/STA-mini.zip differ diff --git a/mkdocs.yml b/mkdocs.yml index 48558e8f0e..6a0c971cb6 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -28,11 +28,35 @@ theme: name: Switch to light mode nav: - - Home: index.md + - Home: 'index.md' - 'User guide': - 'Introduction': 'user-guide/0-introduction.md' - 'User interface': 'user-guide/1-interface.md' - - 'Variant manager': 'user-guide/2-variant_manager.md' + - 'Study Configuration': + - 'Main Topics': 'user-guide/2-study.md' + - 'Map': + - 'Main Topics': 'user-guide/study/01-map.md' + - 'Area Configuration': + - 'Main Topics': 'user-guide/study/02-areas.md' + - 'General Properties': 'user-guide/study/areas/01-properties.md' + - 'Load': 'user-guide/study/areas/02-load.md' + - 'Thermal Clusters': 'user-guide/study/areas/03-thermals.md' + - 'Renewables Clusters': 'user-guide/study/areas/04-renewables.md' + - 'Hydraulic Generators': 'user-guide/study/areas/05-hydro.md' + - 'Wind Generators': 'user-guide/study/areas/06-wind.md' + - 'Solar Generators': 'user-guide/study/areas/07-solar.md' + - 'Short-Term Storages': 'user-guide/study/areas/08-st-storages.md' + - 'Reserves': 'user-guide/study/areas/09-reserves.md' + - 'Miscellaneous Generators': 'user-guide/study/areas/10-misc-gen.md' + - 'Links': + - 'Main Topics': 'user-guide/study/03-links.md' + - 'Binding Constraints': + - 'Main Topics': 'user-guide/study/04-binding-constraints.md' + - 'Debug': + - 'Main Topics': 'user-guide/study/05-debug.md' + - 'Table Mode': + - 'Main Topics': 'user-guide/study/06-table-mode.md' + - 'Variant manager': 'user-guide/3-variant_manager.md' - 'How to': - 'Import a study': 'how-to/studies-import.md' - 'Upgrade a study': 'how-to/studies-upgrade.md' @@ -69,8 +93,8 @@ markdown_extensions: permalink: true toc_depth: 3 - pymdownx.emoji: - emoji_index: !!python/name:materialx.emoji.twemoji - emoji_generator: !!python/name:materialx.emoji.to_svg + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg - admonition - pymdownx.details - pymdownx.superfences diff --git a/pyproject.toml b/pyproject.toml index c426bb8f0f..a715fb2753 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,3 +21,77 @@ extend_skip_glob = [ "scripts/*", "webapp/*", ] + +[tool.ruff] +# Exclude a variety of commonly ignored directories. +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".mypy_cache", + ".nox", + ".pants.d", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "venv", +] + +# Same as Black. +line-length = 120 +indent-width = 4 + +# Assume Python 3.8 +target-version = "py38" + +[tool.ruff.lint] +# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default. +# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or +# McCabe complexity (`C901`) by default.
+select = ["E4", "E7", "E9", "F"] +ignore = [] + +# Allow fix for all enabled rules (when `--fix`) is provided. +fixable = ["ALL"] +unfixable = [] + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +[tool.ruff.format] +# Like Black, use double quotes for strings. +quote-style = "double" + +# Like Black, indent with spaces, rather than tabs. +indent-style = "space" + +# Like Black, respect magic trailing commas. +skip-magic-trailing-comma = false + +# Like Black, automatically detect the appropriate line ending. +line-ending = "auto" + +# Enable auto-formatting of code examples in docstrings. Markdown, +# reStructuredText code/literal blocks and doctests are all supported. +# +# This is currently disabled by default, but it is planned for this +# to be opt-out in the future. +docstring-code-format = true + +# Set the line length limit used when formatting code snippets in +# docstrings. +# +# This only has an effect when the `docstring-code-format` setting is +# enabled. +docstring-code-line-length = "dynamic" diff --git a/requirements-doc.txt b/requirements-doc.txt index 9a8a4ca477..f4214a10d6 100644 --- a/requirements-doc.txt +++ b/requirements-doc.txt @@ -1,2 +1,4 @@ mkdocs mkdocs-material +mkdocs-material-extensions +mike # deploy several mkdocs site versions to github pages diff --git a/requirements-test.txt b/requirements-test.txt index c752146feb..10e44592a1 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,5 +1,4 @@ -r requirements.txt checksumdir~=1.2.0 -locust~=2.7.0 pytest~=6.2.5 pytest-cov~=4.0.0 \ No newline at end of file diff --git a/scripts/rollback.sh b/scripts/rollback.sh index 8d353593fb..822bb7507c 100755 --- a/scripts/rollback.sh +++ b/scripts/rollback.sh @@ -12,5 +12,5 @@ CUR_DIR=$(cd "$(dirname "$0")" && pwd) BASE_DIR=$(dirname "$CUR_DIR") cd "$BASE_DIR" -alembic downgrade e65e0c04606b +alembic downgrade d495746853cc cd - diff --git a/setup.py b/setup.py index 1760ecac46..db663998a7 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name="AntaREST", - version="2.16.1", + version="2.16.2", description="Antares Server", long_description=Path("README.md").read_text(encoding="utf-8"), long_description_content_type="text/markdown", diff --git a/sonar-project.properties b/sonar-project.properties index 69dd022476..770a971f0c 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -6,5 +6,5 @@ sonar.exclusions=antarest/gui.py,antarest/main.py sonar.python.coverage.reportPaths=coverage.xml sonar.python.version=3.8 sonar.javascript.lcov.reportPaths=webapp/coverage/lcov.info -sonar.projectVersion=2.16.1 +sonar.projectVersion=2.16.2 sonar.coverage.exclusions=antarest/gui.py,antarest/main.py,antarest/singleton_services.py,antarest/worker/archive_worker_service.py,webapp/**/* \ No newline at end of file diff --git a/antarest/study/storage/rawstudy/io/__init__.py b/tests/core/tasks/__init__.py similarity index 100% rename from antarest/study/storage/rawstudy/io/__init__.py rename to tests/core/tasks/__init__.py diff --git a/tests/core/tasks/test_model.py b/tests/core/tasks/test_model.py new file mode 100644 index 0000000000..763aa36db8 --- /dev/null +++ b/tests/core/tasks/test_model.py @@ -0,0 +1,86 @@ +import uuid + +import pytest +from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import Session + +from antarest.core.tasks.model import TaskJob, TaskJobLog +from antarest.login.model import Password, User +from antarest.study.model import RawStudy + + +class 
+class TestTaskJob:
+    def test_database_constraints(self, db_session: Session) -> None:
+        # Data insertion example
+        with db_session:
+            task_job = TaskJob(id=str(uuid.uuid4()), name="TaskJob 1")
+            log1 = TaskJobLog(job=task_job, message="'message 1'")
+            log2 = TaskJobLog(job=task_job, message="'message 2'")
+            db_session.add_all([task_job, log1, log2])
+            db_session.commit()
+
+        # Check that a TaskJobLog cannot be inserted without a TaskJob
+        with db_session:
+            with pytest.raises(IntegrityError, match="NOT NULL constraint failed"):
+                db_session.add(TaskJobLog(message="'message 3'"))
+                db_session.commit()
+
+        # Delete a TaskJob object (the corresponding TaskJobLog will be deleted in cascade)
+        with db_session:
+            db_session.delete(task_job)
+            db_session.commit()
+
+        # Check that a TaskJob and its TaskJobLog are deleted in cascade
+        with db_session:
+            assert db_session.query(TaskJob).count() == 0
+            assert db_session.query(TaskJobLog).count() == 0
+
+    def test_owner_constraints(self, db_session: Session) -> None:
+        # Insert a user and attach several TaskJob objects to him
+        with db_session:
+            db_session.add(User(id=0o007, name="James Bond", password=Password("007")))
+            db_session.commit()
+
+        with db_session:
+            task_job_1 = TaskJob(id=str(uuid.uuid4()), name="TaskJob 1", owner_id=0o007)
+            task_job_2 = TaskJob(id=str(uuid.uuid4()), name="TaskJob 2", owner_id=0o007)
+            task_job_3 = TaskJob(id=str(uuid.uuid4()), name="TaskJob 3", owner_id=0o007)
+            db_session.add_all([task_job_1, task_job_2, task_job_3])
+            db_session.commit()
+
+        # Delete a User object (the corresponding TaskJob will not be deleted in cascade)
+        # Instead, the owner_id will be set to NULL
+        with db_session:
+            user = db_session.query(User).first()
+            db_session.delete(user)
+            db_session.commit()
+
+        # Check that the owner_id of the TaskJob objects has been set to NULL
+        with db_session:
+            assert db_session.query(TaskJob).filter(TaskJob.owner_id == 0o007).count() == 0
+            # noinspection PyUnresolvedReferences
+            assert db_session.query(TaskJob).filter(TaskJob.owner_id.is_(None)).count() == 3
+
+    def test_study_constraints(self, db_session: Session) -> None:
+        # Insert a Study object and attach several TaskJob objects to it
+        with db_session:
+            study_id = str(uuid.uuid4())
+            db_session.add(RawStudy(id=study_id, name="Study 1"))
+            db_session.commit()
+
+        with db_session:
+            task_job_1 = TaskJob(id=str(uuid.uuid4()), name="TaskJob 1", ref_id=study_id)
+            task_job_2 = TaskJob(id=str(uuid.uuid4()), name="TaskJob 2", ref_id=study_id)
+            task_job_3 = TaskJob(id=str(uuid.uuid4()), name="TaskJob 3", ref_id=study_id)
+            db_session.add_all([task_job_1, task_job_2, task_job_3])
+            db_session.commit()
+
+        # Delete a Study object (the corresponding TaskJob must be deleted in cascade)
+        with db_session:
+            study = db_session.query(RawStudy).first()
+            db_session.delete(study)
+            db_session.commit()
+
+        # Check that the TaskJob objects have been deleted in cascade
+        with db_session:
+            assert db_session.query(TaskJob).count() == 0
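For context, the cascade behaviour checked above comes from pairing an ORM-level cascade with the `ondelete="CASCADE"` foreign key created by the migration at the top of this changeset. A minimal sketch of such a mapping (illustrative field names, not the exact AntaREST model) could look like this:

import sqlalchemy as sa
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()


class TaskJob(Base):
    __tablename__ = "taskjob"

    id = sa.Column(sa.String(36), primary_key=True)
    name = sa.Column(sa.String(), nullable=False)
    # ORM-level cascade: deleting a TaskJob also removes its logs from the session.
    logs = relationship("TaskJobLog", back_populates="job", cascade="all, delete, delete-orphan")


class TaskJobLog(Base):
    __tablename__ = "taskjoblog"

    id = sa.Column(sa.Integer(), primary_key=True, autoincrement=True)
    message = sa.Column(sa.String(), nullable=False)
    # DB-level cascade: the foreign key mirrors the migration's ondelete="CASCADE".
    task_id = sa.Column(
        sa.String(36),
        sa.ForeignKey("taskjob.id", name="fk_log_taskjob_id", ondelete="CASCADE"),
        nullable=False,
    )
    job = relationship("TaskJob", back_populates="logs")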
diff --git a/tests/core/test_tasks.py b/tests/core/test_tasks.py
index cc730bf0ea..7edc25ec6f 100644
--- a/tests/core/test_tasks.py
+++ b/tests/core/test_tasks.py
@@ -232,7 +232,7 @@ def test_repository(db_session: Session) -> None:
     assert new_task.owner_id == user1_id
     assert new_task.creation_date >= now
 
-    second_task = TaskJob(owner_id=user2_id, ref_id=study_id)
+    second_task = TaskJob(name="bar", owner_id=user2_id, ref_id=study_id)
     second_task = task_job_repo.save(second_task)
 
     result = task_job_repo.list(TaskListFilter(type=[TaskType.COPY]))
@@ -296,8 +296,8 @@ def test_repository(db_session: Session) -> None:
 
 
 def test_cancel(core_config: Config, event_bus: IEventBus) -> None:
     # Create a TaskJobService and add tasks
     task_job_repo = TaskJobRepository()
-    task_job_repo.save(TaskJob(id="a"))
-    task_job_repo.save(TaskJob(id="b"))
+    task_job_repo.save(TaskJob(id="a", name="foo"))
+    task_job_repo.save(TaskJob(id="b", name="foo"))
 
     # Create a TaskJobService
     service = TaskJobService(config=core_config, repository=task_job_repo, event_bus=event_bus)
@@ -361,6 +361,7 @@ def test_cancel_orphan_tasks(
     completion_date: datetime.datetime = datetime.datetime.utcnow()
     task_job = TaskJob(
         id=test_id,
+        name="test",
         status=status,
         result_status=result_status,
         result_msg=result_msg,
diff --git a/tests/integration/assets/STA-mini.7z b/tests/integration/assets/STA-mini.7z
index 6f462f7700..10a4d3fdbf 100644
Binary files a/tests/integration/assets/STA-mini.7z and b/tests/integration/assets/STA-mini.7z differ
diff --git a/tests/integration/assets/STA-mini.zip b/tests/integration/assets/STA-mini.zip
index 0de2e32809..f9ce08baf6 100644
Binary files a/tests/integration/assets/STA-mini.zip and b/tests/integration/assets/STA-mini.zip differ
diff --git a/tests/integration/studies_blueprint/test_disk_usage.py b/tests/integration/studies_blueprint/test_disk_usage.py
new file mode 100644
index 0000000000..89d3b70e74
--- /dev/null
+++ b/tests/integration/studies_blueprint/test_disk_usage.py
@@ -0,0 +1,23 @@
+from starlette.testclient import TestClient
+
+
+class TestDiskUsage:
+    def test_disk_usage_endpoint(
+        self,
+        client: TestClient,
+        user_access_token: str,
+        study_id: str,
+    ) -> None:
+        """
+        Verify the functionality of the disk usage endpoint:
+
+        - Ensure a successful response is received.
+        - Confirm that the JSON response is an integer which represents a (big enough) directory size.
+        """
+        res = client.get(
+            f"/v1/studies/{study_id}/disk-usage",
+            headers={"Authorization": f"Bearer {user_access_token}"},
+        )
+        assert res.status_code == 200, res.json()
+        disk_usage = res.json()  # currently: 7.47 Mio on Ubuntu
+        assert 7 * 1024 * 1024 < disk_usage < 8 * 1024 * 1024
diff --git a/tests/integration/studies_blueprint/test_study_matrix_index.py b/tests/integration/studies_blueprint/test_study_matrix_index.py
new file mode 100644
index 0000000000..69880cb357
--- /dev/null
+++ b/tests/integration/studies_blueprint/test_study_matrix_index.py
@@ -0,0 +1,100 @@
+from starlette.testclient import TestClient
+
+
+class TestStudyMatrixIndex:
+    """
+    The goal of this test is to check that the API allows retrieving
+    information about the data matrices of a study.
+
+    The values are used by the frontend to display the time series
+    with the right time column.
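+
+    Note: the expected payloads below assume the STA-mini example study,
+    whose simulation starts on 2001-01-01 (hence the 8760 hourly steps
+    and 365 daily steps).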
+ """ + + def test_get_study_matrix_index( + self, + client: TestClient, + user_access_token: str, + study_id: str, + ) -> None: + user_access_token = {"Authorization": f"Bearer {user_access_token}"} + + # Check the matrix index for Thermal clusters + # =========================================== + + # Check the Common matrix index + res = client.get( + f"/v1/studies/{study_id}/matrixindex", + headers=user_access_token, + params={"path": "input/thermal/prepro/fr/01_solar/modulation"}, + ) + assert res.status_code == 200, res.json() + actual = res.json() + # We expect to have an "hourly" time series with 8760 hours + expected = { + "first_week_size": 7, + "level": "hourly", + "start_date": "2001-01-01 00:00:00", + "steps": 8760, + } + assert actual == expected + + # Check the TS Generator matrix index + res = client.get( + f"/v1/studies/{study_id}/matrixindex", + headers=user_access_token, + params={"path": "input/thermal/prepro/fr/01_solar/data"}, + ) + assert res.status_code == 200, res.json() + actual = res.json() + # We expect to have a "daily" time series with 365 days + expected = { + "first_week_size": 7, + "level": "daily", + "start_date": "2001-01-01 00:00:00", + "steps": 365, + } + assert actual == expected + + # Check the time series + res = client.get( + f"/v1/studies/{study_id}/matrixindex", + headers=user_access_token, + params={"path": "input/thermal/series/fr/01_solar/series"}, + ) + assert res.status_code == 200, res.json() + actual = res.json() + # We expect to have an "hourly" time series with 8760 hours + expected = { + "first_week_size": 7, + "level": "hourly", + "start_date": "2001-01-01 00:00:00", + "steps": 8760, + } + assert actual == expected + + # Check the default matrix index + # ============================== + + res = client.get(f"/v1/studies/{study_id}/matrixindex", headers=user_access_token) + assert res.status_code == 200 + actual = res.json() + expected = { + "first_week_size": 7, + "start_date": "2001-01-01 00:00:00", + "steps": 8760, + "level": "hourly", + } + assert actual == expected + + # Check the matrix index of a daily time series stored in the output folder + # ========================================================================= + + res = client.get( + f"/v1/studies/{study_id}/matrixindex", + headers=user_access_token, + params={"path": "output/20201014-1427eco/economy/mc-all/areas/es/details-daily"}, + ) + assert res.status_code == 200 + actual = res.json() + expected = {"first_week_size": 7, "start_date": "2001-01-01 00:00:00", "steps": 7, "level": "daily"} + assert actual == expected diff --git a/tests/integration/study_data_blueprint/test_binding_constraints.py b/tests/integration/study_data_blueprint/test_binding_constraints.py new file mode 100644 index 0000000000..f0f08049d3 --- /dev/null +++ b/tests/integration/study_data_blueprint/test_binding_constraints.py @@ -0,0 +1,310 @@ +import pytest +from starlette.testclient import TestClient + + +@pytest.mark.unit_test +class TestSTStorage: + """ + Test the end points related to binding constraints. 
+ """ + + def test_lifecycle__nominal(self, client: TestClient, user_access_token: str) -> None: + user_headers = {"Authorization": f"Bearer {user_access_token}"} + + res = client.post( + "/v1/studies", + headers=user_headers, + params={"name": "foo"}, + ) + assert res.status_code == 201, res.json() + study_id = res.json() + + area1_name = "area1" + area2_name = "area2" + res = client.post( + f"/v1/studies/{study_id}/areas", + headers=user_headers, + json={ + "name": area1_name, + "type": "AREA", + "metadata": {"country": "FR"}, + }, + ) + assert res.status_code == 200, res.json() + + res = client.post( + f"/v1/studies/{study_id}/areas", + headers=user_headers, + json={ + "name": area2_name, + "type": "AREA", + "metadata": {"country": "DE"}, + }, + ) + assert res.status_code == 200, res.json() + + res = client.post( + f"/v1/studies/{study_id}/links", + headers=user_headers, + json={ + "area1": area1_name, + "area2": area2_name, + }, + ) + assert res.status_code == 200, res.json() + + # Create Variant + res = client.post( + f"/v1/studies/{study_id}/variants", + headers=user_headers, + params={"name": "Variant 1"}, + ) + assert res.status_code == 200, res.json() + variant_id = res.json() + + # Create Binding constraints + res = client.post( + f"/v1/studies/{variant_id}/commands", + json=[ + { + "action": "create_binding_constraint", + "args": { + "name": "binding_constraint_1", + "enabled": True, + "time_step": "hourly", + "operator": "less", + "coeffs": {}, + "comments": "", + }, + } + ], + headers=user_headers, + ) + assert res.status_code == 200, res.json() + + res = client.post( + f"/v1/studies/{variant_id}/commands", + json=[ + { + "action": "create_binding_constraint", + "args": { + "name": "binding_constraint_2", + "enabled": True, + "time_step": "hourly", + "operator": "less", + "coeffs": {}, + "comments": "", + }, + } + ], + headers=user_headers, + ) + assert res.status_code == 200, res.json() + + # Get Binding Constraint list + res = client.get(f"/v1/studies/{variant_id}/bindingconstraints", headers=user_headers) + binding_constraints_list = res.json() + assert res.status_code == 200, res.json() + assert len(binding_constraints_list) == 2 + assert binding_constraints_list[0]["id"] == "binding_constraint_1" + assert binding_constraints_list[1]["id"] == "binding_constraint_2" + + bc_id = binding_constraints_list[0]["id"] + + # Update element of Binding constraint + new_comment = "We made it !" 
+ res = client.put( + f"v1/studies/{variant_id}/bindingconstraints/{bc_id}", + json={"key": "comments", "value": new_comment}, + headers=user_headers, + ) + assert res.status_code == 200, res.json() + + # Get Binding Constraint + res = client.get( + f"/v1/studies/{variant_id}/bindingconstraints/{bc_id}", + headers=user_headers, + ) + binding_constraint = res.json() + comments = binding_constraint["comments"] + assert res.status_code == 200, res.json() + assert comments == new_comment + + # Add Constraint term + area1_name = "area1" + area2_name = "area2" + + res = client.post( + f"/v1/studies/{variant_id}/bindingconstraints/{bc_id}/term", + json={ + "weight": 1, + "offset": 2, + "data": {"area1": area1_name, "area2": area2_name}, + }, + headers=user_headers, + ) + assert res.status_code == 200, res.json() + + # Get Binding Constraint + res = client.get( + f"/v1/studies/{variant_id}/bindingconstraints/{bc_id}", + headers=user_headers, + ) + binding_constraint = res.json() + constraints = binding_constraint["constraints"] + assert res.status_code == 200, res.json() + assert binding_constraint["id"] == bc_id + assert len(constraints) == 1 + assert constraints[0]["id"] == f"{area1_name}%{area2_name}" + assert constraints[0]["weight"] == 1 + assert constraints[0]["offset"] == 2 + assert constraints[0]["data"]["area1"] == area1_name + assert constraints[0]["data"]["area2"] == area2_name + + # Update Constraint term + res = client.put( + f"/v1/studies/{variant_id}/bindingconstraints/{bc_id}/term", + json={ + "id": f"{area1_name}%{area2_name}", + "weight": 3, + }, + headers=user_headers, + ) + assert res.status_code == 200, res.json() + + # Get Binding Constraint + res = client.get( + f"/v1/studies/{variant_id}/bindingconstraints/{bc_id}", + headers=user_headers, + ) + binding_constraint = res.json() + constraints = binding_constraint["constraints"] + assert res.status_code == 200, res.json() + assert binding_constraint["id"] == bc_id + assert len(constraints) == 1 + assert constraints[0]["id"] == f"{area1_name}%{area2_name}" + assert constraints[0]["weight"] == 3 + assert constraints[0]["offset"] is None + assert constraints[0]["data"]["area1"] == area1_name + assert constraints[0]["data"]["area2"] == area2_name + + # Remove Constraint term + res = client.delete( + f"/v1/studies/{variant_id}/bindingconstraints/{bc_id}/term/{area1_name}%{area2_name}", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + + # Get Binding Constraint + res = client.get( + f"/v1/studies/{variant_id}/bindingconstraints/{bc_id}", + headers=user_headers, + ) + binding_constraint = res.json() + constraints = binding_constraint["constraints"] + assert res.status_code == 200, res.json() + assert constraints is None + + # Creates a binding constraint with the new API + res = client.post( + f"/v1/studies/{variant_id}/bindingconstraints", + json={ + "name": "binding_constraint_3", + "enabled": True, + "time_step": "hourly", + "operator": "less", + "coeffs": {}, + "comments": "New API", + }, + headers=user_headers, + ) + assert res.status_code == 200, res.json() + + # Asserts that creating 2 binding constraints with the same name raises an Exception + res = client.post( + f"/v1/studies/{variant_id}/bindingconstraints", + json={ + "name": "binding_constraint_3", + "enabled": True, + "time_step": "hourly", + "operator": "less", + "coeffs": {}, + "comments": "New API", + }, + headers=user_headers, + ) + assert res.status_code == 409, res.json() + assert res.json() == { + "description": "A binding constraint 
with the same name already exists: binding_constraint_3.",
+            "exception": "DuplicateConstraintName",
+        }
+
+        # Assert empty name
+        res = client.post(
+            f"/v1/studies/{variant_id}/bindingconstraints",
+            json={
+                "name": " ",
+                "enabled": True,
+                "time_step": "hourly",
+                "operator": "less",
+                "coeffs": {},
+                "comments": "New API",
+            },
+            headers=user_headers,
+        )
+        assert res.status_code == 400, res.json()
+        assert res.json() == {
+            "description": "Invalid binding constraint name: .",
+            "exception": "InvalidConstraintName",
+        }
+
+        # Assert invalid special characters
+        res = client.post(
+            f"/v1/studies/{variant_id}/bindingconstraints",
+            json={
+                "name": "%%**",
+                "enabled": True,
+                "time_step": "hourly",
+                "operator": "less",
+                "coeffs": {},
+                "comments": "New API",
+            },
+            headers=user_headers,
+        )
+        assert res.status_code == 400, res.json()
+        assert res.json() == {
+            "description": "Invalid binding constraint name: %%**.",
+            "exception": "InvalidConstraintName",
+        }
+
+        # Assert that only 3 binding constraints have been created
+        res = client.get(f"/v1/studies/{variant_id}/bindingconstraints", headers=user_headers)
+        assert res.status_code == 200, res.json()
+        assert len(res.json()) == 3
+
+        # The user changes the time_step to daily instead of hourly.
+        # We must check that the matrix is a daily/weekly matrix.
+        res = client.put(
+            f"/v1/studies/{variant_id}/bindingconstraints/{bc_id}",
+            json={"key": "time_step", "value": "daily"},
+            headers=user_headers,
+        )
+        assert res.status_code == 200, res.json()
+
+        # Check that the last command is a time_step change
+        res = client.get(f"/v1/studies/{variant_id}/commands", headers=user_headers)
+        commands = res.json()
+        args = commands[-1]["args"]
+        assert args["time_step"] == "daily"
+        assert args["values"] is not None, "We should have a matrix ID (sha256)"
+
+        # Check that the matrix is a daily/weekly matrix
+        res = client.get(
+            f"/v1/studies/{variant_id}/raw",
+            params={"path": f"input/bindingconstraints/{bc_id}", "depth": 1, "formatted": True},
+            headers=user_headers,
+        )
+        assert res.status_code == 200, res.json()
+        dataframe = res.json()
+        assert len(dataframe["index"]) == 366
+        assert len(dataframe["columns"]) == 3  # less, equal, greater
diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py
index 1e9fd99caa..ed4f1ed8af 100644
--- a/tests/integration/test_integration.py
+++ b/tests/integration/test_integration.py
@@ -20,11 +20,10 @@
     FIELDS_INFO_BY_TYPE,
     AssetType,
     BindingConstraintOperator,
-    BindingConstraintType,
     TableTemplateType,
     TransmissionCapacity,
 )
-from antarest.study.model import MatrixIndex, StudyDownloadLevelDTO
+from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency
 from antarest.study.storage.rawstudy.model.filesystem.config.renewable import RenewableClusterGroup
 from antarest.study.storage.rawstudy.model.filesystem.config.thermal import LawOption, TimeSeriesGenerationOption
 from antarest.study.storage.variantstudy.model.command.common import CommandName
@@ -92,7 +91,6 @@ def test_main(client: TestClient, admin_access_token: str, study_id: str) -> Non
     )
     assert len(res.json()) == 1
     study_id = next(iter(res.json()))
-    comments = "Hello"
 
     res = client.get(
         f"/v1/studies/{study_id}/outputs",
@@ -172,37 +170,6 @@ def test_main(client: TestClient, admin_access_token: str, study_id: str) -> Non
     )
     assert res.status_code == 200
 
-    # study matrix index
-    res = client.get(
-        f"/v1/studies/{study_id}/matrixindex",
-        headers={"Authorization": f'Bearer 
{george_credentials["access_token"]}'}, - ) - assert res.status_code == 200 - assert ( - res.json() - == MatrixIndex( - first_week_size=7, - start_date="2001-01-01 00:00:00", - steps=8760, - level=StudyDownloadLevelDTO.HOURLY, - ).dict() - ) - - res = client.get( - f"/v1/studies/{study_id}/matrixindex?path=output/20201014-1427eco/economy/mc-all/areas/es/details-daily", - headers={"Authorization": f'Bearer {george_credentials["access_token"]}'}, - ) - assert res.status_code == 200 - assert ( - res.json() - == MatrixIndex( - first_week_size=7, - start_date="2001-01-01 00:00:00", - steps=7, - level=StudyDownloadLevelDTO.DAILY, - ).dict() - ) - res = client.delete( f"/v1/studies/{study_id}/outputs/20201014-1427eco", headers={"Authorization": f'Bearer {george_credentials["access_token"]}'}, @@ -543,7 +510,7 @@ def test_area_management(client: TestClient, admin_access_token: str, study_id: "args": { "name": "binding constraint 1", "enabled": True, - "time_step": BindingConstraintType.HOURLY.value, + "time_step": BindingConstraintFrequency.HOURLY.value, "operator": BindingConstraintOperator.LESS.value, "coeffs": {"area 1.cluster 1": [2.0, 4]}, }, @@ -561,7 +528,7 @@ def test_area_management(client: TestClient, admin_access_token: str, study_id: "args": { "name": "binding constraint 2", "enabled": True, - "time_step": BindingConstraintType.HOURLY.value, + "time_step": BindingConstraintFrequency.HOURLY.value, "operator": BindingConstraintOperator.LESS.value, "coeffs": {}, }, @@ -1304,7 +1271,7 @@ def test_area_management(client: TestClient, admin_access_token: str, study_id: # --- TableMode START --- - table_mode_url = f"/v1/studies/{study_id}/tablemode/form" + table_mode_url = f"/v1/studies/{study_id}/tablemode" # Table Mode - Area @@ -1684,12 +1651,12 @@ def test_area_management(client: TestClient, admin_access_token: str, study_id: assert res_table_data_json == { "binding constraint 1": { "enabled": True, - "type": BindingConstraintType.HOURLY.value, + "type": BindingConstraintFrequency.HOURLY.value, "operator": BindingConstraintOperator.LESS.value, }, "binding constraint 2": { "enabled": True, - "type": BindingConstraintType.HOURLY.value, + "type": BindingConstraintFrequency.HOURLY.value, "operator": BindingConstraintOperator.LESS.value, }, } @@ -1706,7 +1673,7 @@ def test_area_management(client: TestClient, admin_access_token: str, study_id: "operator": BindingConstraintOperator.BOTH.value, }, "binding constraint 2": { - "type": BindingConstraintType.WEEKLY.value, + "type": BindingConstraintFrequency.WEEKLY.value, "operator": BindingConstraintOperator.EQUAL.value, }, }, @@ -1723,12 +1690,12 @@ def test_area_management(client: TestClient, admin_access_token: str, study_id: assert res_table_data_json == { "binding constraint 1": { "enabled": False, - "type": BindingConstraintType.HOURLY.value, + "type": BindingConstraintFrequency.HOURLY.value, "operator": BindingConstraintOperator.BOTH.value, }, "binding constraint 2": { "enabled": True, - "type": BindingConstraintType.WEEKLY.value, + "type": BindingConstraintFrequency.WEEKLY.value, "operator": BindingConstraintOperator.EQUAL.value, }, } @@ -2008,230 +1975,6 @@ def set_maintenance(value: bool) -> None: assert res.json() == message -def test_binding_constraint_manager(client: TestClient, admin_access_token: str, study_id: str) -> None: - admin_headers = {"Authorization": f"Bearer {admin_access_token}"} - - created = client.post("/v1/studies?name=foo", headers=admin_headers) - study_id = created.json() - - area1_name = "area1" - area2_name = 
"area2" - res = client.post( - f"/v1/studies/{study_id}/areas", - headers=admin_headers, - json={ - "name": area1_name, - "type": AreaType.AREA.value, - "metadata": {"country": "FR"}, - }, - ) - assert res.status_code == 200 - - res = client.post( - f"/v1/studies/{study_id}/areas", - headers=admin_headers, - json={ - "name": area2_name, - "type": AreaType.AREA.value, - "metadata": {"country": "DE"}, - }, - ) - assert res.status_code == 200 - - res = client.post( - f"/v1/studies/{study_id}/links", - headers=admin_headers, - json={ - "area1": area1_name, - "area2": area2_name, - }, - ) - assert res.status_code == 200 - - # Create Variant - res = client.post(f"/v1/studies/{study_id}/variants?name=foo", headers=admin_headers) - variant_id = res.json() - - # Create Binding constraints - res = client.post( - f"/v1/studies/{variant_id}/commands", - json=[ - { - "action": "create_binding_constraint", - "args": { - "name": "binding_constraint_1", - "enabled": True, - "time_step": "hourly", - "operator": "less", - "coeffs": {}, - "comments": "", - }, - } - ], - headers=admin_headers, - ) - assert res.status_code == 200 - - res = client.post( - f"/v1/studies/{variant_id}/commands", - json=[ - { - "action": "create_binding_constraint", - "args": { - "name": "binding_constraint_2", - "enabled": True, - "time_step": "hourly", - "operator": "less", - "coeffs": {}, - "comments": "", - }, - } - ], - headers=admin_headers, - ) - assert res.status_code == 200 - - # Get Binding Constraint list - res = client.get(f"/v1/studies/{variant_id}/bindingconstraints", headers=admin_headers) - binding_constraints_list = res.json() - assert res.status_code == 200 - assert len(binding_constraints_list) == 2 - assert binding_constraints_list[0]["id"] == "binding_constraint_1" - assert binding_constraints_list[1]["id"] == "binding_constraint_2" - - binding_constraint_id = binding_constraints_list[0]["id"] - - # Update element of Binding constraint - new_comment = "We made it !" 
- res = client.put( - f"v1/studies/{variant_id}/bindingconstraints/{binding_constraint_id}", - json={"key": "comments", "value": new_comment}, - headers=admin_headers, - ) - assert res.status_code == 200 - - # Get Binding Constraint - res = client.get( - f"/v1/studies/{variant_id}/bindingconstraints/{binding_constraint_id}", - headers=admin_headers, - ) - binding_constraint = res.json() - comments = binding_constraint["comments"] - assert res.status_code == 200 - assert comments == new_comment - - # Add Constraint term - res = client.post( - f"/v1/studies/{variant_id}/bindingconstraints/{binding_constraint_id}/term", - json={ - "weight": 1, - "offset": 2, - "data": {"area1": area1_name, "area2": area2_name}, - }, - headers=admin_headers, - ) - assert res.status_code == 200 - - # Get Binding Constraint - res = client.get( - f"/v1/studies/{variant_id}/bindingconstraints/{binding_constraint_id}", - headers=admin_headers, - ) - binding_constraint = res.json() - constraints = binding_constraint["constraints"] - assert res.status_code == 200 - assert binding_constraint["id"] == binding_constraint_id - assert len(constraints) == 1 - assert constraints[0]["id"] == f"{area1_name}%{area2_name}" - assert constraints[0]["weight"] == 1 - assert constraints[0]["offset"] == 2 - assert constraints[0]["data"]["area1"] == area1_name - assert constraints[0]["data"]["area2"] == area2_name - - # Update Constraint term - res = client.put( - f"/v1/studies/{variant_id}/bindingconstraints/{binding_constraint_id}/term", - json={ - "id": f"{area1_name}%{area2_name}", - "weight": 3, - }, - headers=admin_headers, - ) - assert res.status_code == 200 - - # Get Binding Constraint - res = client.get( - f"/v1/studies/{variant_id}/bindingconstraints/{binding_constraint_id}", - headers=admin_headers, - ) - binding_constraint = res.json() - constraints = binding_constraint["constraints"] - assert res.status_code == 200 - assert binding_constraint["id"] == binding_constraint_id - assert len(constraints) == 1 - assert constraints[0]["id"] == f"{area1_name}%{area2_name}" - assert constraints[0]["weight"] == 3 - assert constraints[0]["offset"] is None - assert constraints[0]["data"]["area1"] == area1_name - assert constraints[0]["data"]["area2"] == area2_name - - # Remove Constraint term - res = client.delete( - f"/v1/studies/{variant_id}/bindingconstraints/{binding_constraint_id}/term/{area1_name}%{area2_name}", - headers=admin_headers, - ) - assert res.status_code == 200 - - # Get Binding Constraint - res = client.get( - f"/v1/studies/{variant_id}/bindingconstraints/{binding_constraint_id}", - headers=admin_headers, - ) - binding_constraint = res.json() - constraints = binding_constraint["constraints"] - assert res.status_code == 200 - assert constraints is None - - # Creates a binding constraint with the new API - res = client.post( - f"/v1/studies/{variant_id}/bindingconstraints", - json={ - "name": "binding_constraint_3", - "enabled": True, - "time_step": "hourly", - "operator": "less", - "coeffs": {}, - "comments": "New API", - }, - headers=admin_headers, - ) - assert res.status_code == 200 - - # Asserts that creating 2 binding constraints with the same name raises an Exception - res = client.post( - f"/v1/studies/{variant_id}/bindingconstraints", - json={ - "name": "binding_constraint_3", - "enabled": True, - "time_step": "hourly", - "operator": "less", - "coeffs": {}, - "comments": "New API", - }, - headers=admin_headers, - ) - assert res.status_code == 409 - assert res.json() == { - "description": "A binding constraint 
with the same name already exists: binding_constraint_3.", - "exception": "DuplicateConstraintName", - } - - # Asserts that only 3 binding constraint have been created - res = client.get(f"/v1/studies/{variant_id}/bindingconstraints", headers=admin_headers) - assert res.status_code == 200 - assert len(res.json()) == 3 - - def test_import(client: TestClient, admin_access_token: str, study_id: str) -> None: admin_headers = {"Authorization": f"Bearer {admin_access_token}"} diff --git a/tests/integration/test_integration_variantmanager_tool.py b/tests/integration/test_integration_variantmanager_tool.py index 0c53a03ffb..f381cab3c9 100644 --- a/tests/integration/test_integration_variantmanager_tool.py +++ b/tests/integration/test_integration_variantmanager_tool.py @@ -9,7 +9,7 @@ from fastapi import FastAPI from starlette.testclient import TestClient -from antarest.study.storage.rawstudy.io.reader import IniReader, MultipleSameKeysIniReader +from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.variantstudy.model.command.common import CommandName from antarest.study.storage.variantstudy.model.model import CommandDTO, GenerationResultInfoDTO from antarest.tools.lib import ( @@ -178,8 +178,8 @@ def test_parse_commands(tmp_path: str, app: FastAPI) -> None: elif item_relpath in fixed_8_cols_empty_items: assert (generated_study_path / item_relpath).read_text() == fixed_8_columns_empty_data elif file_path.suffix == ".ini": - actual = MultipleSameKeysIniReader().read(study_path / item_relpath) - expected = MultipleSameKeysIniReader().read(generated_study_path / item_relpath) + actual = IniReader().read(study_path / item_relpath) + expected = IniReader().read(generated_study_path / item_relpath) assert actual == expected, f"Invalid configuration: '{item_relpath}'" else: actual = (study_path / item_relpath).read_text() @@ -219,8 +219,8 @@ def test_diff_local(tmp_path: Path) -> None: continue item_relpath = file_path.relative_to(variant_study_path).as_posix() if file_path.suffix == ".ini": - actual = MultipleSameKeysIniReader().read(variant_study_path / item_relpath) - expected = MultipleSameKeysIniReader().read(output_study_path / item_relpath) + actual = IniReader().read(variant_study_path / item_relpath) + expected = IniReader().read(output_study_path / item_relpath) assert actual == expected, f"Invalid configuration: '{item_relpath}'" else: actual = (variant_study_path / item_relpath).read_text() diff --git a/tests/integration/test_integration_xpansion.py b/tests/integration/test_integration_xpansion.py deleted file mode 100644 index fa4de2e0af..0000000000 --- a/tests/integration/test_integration_xpansion.py +++ /dev/null @@ -1,405 +0,0 @@ -import io -from pathlib import Path - -from starlette.testclient import TestClient - -from antarest.study.business.area_management import AreaType -from antarest.study.business.xpansion_management import XpansionCandidateDTO - - -def test_integration_xpansion(client: TestClient, tmp_path: Path, admin_access_token: str): - headers = {"Authorization": f"Bearer {admin_access_token}"} - - created = client.post( - "/v1/studies?name=foo", - headers=headers, - ) - study_id = created.json() - - xpansion_base_url = f"/v1/studies/{study_id}/extensions/xpansion" - - filename_constraints1 = "filename_constraints1.txt" - filename_constraints2 = "filename_constraints2.txt" - filename_constraints3 = "filename_constraints3.txt" - content_constraints1 = "content_constraints1\n" - content_constraints2 = "content_constraints2\n" - 
content_constraints3 = "content_constraints3\n" - area1_name = "area1" - area2_name = "area2" - area3_name = "area3" - - client.post( - f"/v1/studies/{study_id}/areas", - headers=headers, - json={ - "name": area1_name, - "type": AreaType.AREA.value, - "metadata": {"country": "FR"}, - }, - ) - client.post( - f"/v1/studies/{study_id}/areas", - headers=headers, - json={ - "name": area2_name, - "type": AreaType.AREA.value, - "metadata": {"country": "DE"}, - }, - ) - client.post( - f"/v1/studies/{study_id}/areas", - headers=headers, - json={ - "name": area3_name, - "type": AreaType.AREA.value, - "metadata": {"country": "DE"}, - }, - ) - - client.post( - f"/v1/studies/{study_id}/links", - headers=headers, - json={ - "area1": area1_name, - "area2": area2_name, - }, - ) - - # Xpansion - res = client.post( - xpansion_base_url, - headers=headers, - ) - assert res.status_code == 200 - - expansion_path = tmp_path / "internal_workspace" / study_id / "user" / "expansion" - assert expansion_path.exists() - - res = client.get( - f"{xpansion_base_url}/settings", - headers=headers, - ) - assert res.status_code == 200 - assert res.json() == { - "additional-constraints": None, - "ampl.presolve": None, - "ampl.solve_bounds_frequency": None, - "ampl.solver": None, - "batch_size": 0, - "cut-type": None, - "log_level": 0, - "master": "integer", - "max_iteration": "+Inf", - "optimality_gap": 1.0, - "relative_gap": 1e-12, - "relaxed-optimality-gap": None, - "sensitivity_config": {"capex": False, "epsilon": 10000.0, "projection": []}, - "separation_parameter": 0.5, - "solver": "Cbc", - "timelimit": 1e12, - "uc_type": "expansion_fast", - "yearly-weights": None, - } - - res = client.put( - f"{xpansion_base_url}/settings", - headers=headers, - json={"optimality_gap": 42}, - ) - assert res.status_code == 200 - assert res.json() == { - "additional-constraints": None, - "ampl.presolve": None, - "ampl.solve_bounds_frequency": None, - "ampl.solver": None, - "batch_size": 0, - "cut-type": None, - "log_level": 0, - "master": "integer", - "max_iteration": "+Inf", - "optimality_gap": 42.0, - "relative_gap": None, - "relaxed-optimality-gap": None, - "sensitivity_config": None, - "separation_parameter": 0.5, - "solver": None, - "timelimit": 1000000000000, - "uc_type": "expansion_fast", - "yearly-weights": None, - } - res = client.put( - f"{xpansion_base_url}/settings", - headers=headers, - json={"additional-constraints": 42}, - ) - assert res.status_code == 404 - - res = client.put( - f"{xpansion_base_url}/settings/additional-constraints?filename=42", - headers=headers, - ) - assert res.status_code == 404 - - files = { - "file": ( - filename_constraints1, - io.BytesIO(content_constraints1.encode("utf-8")), - "image/jpeg", - ) - } - res = client.post( - f"{xpansion_base_url}/resources/constraints", - headers=headers, - files=files, - ) - assert res.status_code == 200 - actual_path = expansion_path / "constraints" / filename_constraints1 - assert actual_path.read_text() == content_constraints1 - - files = { - "file": ( - filename_constraints1, - io.BytesIO(content_constraints1.encode("utf-8")), - "image/jpeg", - ), - } - - res = client.post( - f"{xpansion_base_url}/resources/constraints", - headers=headers, - files=files, - ) - assert res.status_code == 409 - - files = { - "file": ( - filename_constraints2, - io.BytesIO(content_constraints2.encode("utf-8")), - "image/jpeg", - ), - } - res = client.post( - f"{xpansion_base_url}/resources/constraints", - headers=headers, - files=files, - ) - res.raise_for_status() - - files = { - 
"file": ( - filename_constraints3, - io.BytesIO(content_constraints3.encode("utf-8")), - "image/jpeg", - ), - } - res = client.post( - f"{xpansion_base_url}/resources/constraints", - headers=headers, - files=files, - ) - res.raise_for_status() - - res = client.get( - f"{xpansion_base_url}/resources/constraints/{filename_constraints1}", - headers=headers, - ) - assert res.status_code == 200 - assert res.json() == content_constraints1 - - res = client.get( - f"{xpansion_base_url}/resources/constraints/", - headers=headers, - ) - assert res.status_code == 200 - assert res.json() == [ - filename_constraints1, - filename_constraints2, - filename_constraints3, - ] - - res = client.put( - f"{xpansion_base_url}/settings/additional-constraints?filename={filename_constraints1}", - headers=headers, - ) - assert res.status_code == 200 - - res = client.delete( - f"{xpansion_base_url}/resources/constraints/{filename_constraints1}", - headers=headers, - ) - assert res.status_code == 409 - - res = client.put( - f"{xpansion_base_url}/settings/additional-constraints", - headers=headers, - ) - assert res.status_code == 200 - - res = client.delete( - f"{xpansion_base_url}/resources/constraints/{filename_constraints1}", - headers=headers, - ) - assert res.status_code == 200 - - candidate1 = { - "name": "candidate1", - "link": f"{area1_name} - {area2_name}", - "annual-cost-per-mw": 1, - "max-investment": 1.0, - } - res = client.post(f"{xpansion_base_url}/candidates", headers=headers, json=candidate1) - assert res.status_code == 200 - - candidate2 = { - "name": "candidate2", - "link": f"{area1_name} - {area3_name}", - "annual-cost-per-mw": 1, - "max-investment": 1.0, - } - res = client.post(f"{xpansion_base_url}/candidates", headers=headers, json=candidate2) - assert res.status_code == 404 - - candidate3 = { - "name": "candidate3", - "link": f"non_existent_area - {area3_name}", - "annual-cost-per-mw": 1, - "max-investment": 1.0, - } - res = client.post(f"{xpansion_base_url}/candidates", headers=headers, json=candidate3) - assert res.status_code == 404 - - filename_capa1 = "filename_capa1.txt" - filename_capa2 = "filename_capa2.txt" - filename_capa3 = "filename_capa3.txt" - content_capa1 = "0" - content_capa2 = "1" - content_capa3 = "2" - files = { - "file": ( - filename_capa1, - io.BytesIO(content_capa1.encode("utf-8")), - "txt/csv", - ) - } - res = client.post( - f"{xpansion_base_url}/resources/capacities", - headers=headers, - files=files, - ) - assert res.status_code == 200 - actual_path = expansion_path / "capa" / filename_capa1 - assert actual_path.read_text() == content_capa1 - - res = client.post( - f"{xpansion_base_url}/resources/capacities", - headers=headers, - files=files, - ) - assert res.status_code == 409 - - files = { - "file": ( - filename_capa2, - io.BytesIO(content_capa2.encode("utf-8")), - "txt/csv", - ) - } - res = client.post( - f"{xpansion_base_url}/resources/capacities", - headers=headers, - files=files, - ) - assert res.status_code == 200 - - files = { - "file": ( - filename_capa3, - io.BytesIO(content_capa3.encode("utf-8")), - "txt/csv", - ) - } - res = client.post( - f"{xpansion_base_url}/resources/capacities", - headers=headers, - files=files, - ) - assert res.status_code == 200 - - # get single capa - res = client.get( - f"{xpansion_base_url}/resources/capacities/{filename_capa1}", - headers=headers, - ) - assert res.status_code == 200 - assert res.json() == { - "columns": [0], - "data": [[0.0]], - "index": [0], - } - - res = client.get( - f"{xpansion_base_url}/resources/capacities", 
- headers=headers, - ) - assert res.status_code == 200 - assert res.json() == [filename_capa1, filename_capa2, filename_capa3] - - candidate4 = { - "name": "candidate4", - "link": f"{area1_name} - {area2_name}", - "annual-cost-per-mw": 1, - "max-investment": 1.0, - "link-profile": filename_capa1, - } - res = client.post(f"{xpansion_base_url}/candidates", headers=headers, json=candidate4) - assert res.status_code == 200 - - res = client.get( - f"{xpansion_base_url}/candidates/{candidate1['name']}", - headers=headers, - ) - assert res.status_code == 200 - assert res.json() == XpansionCandidateDTO.parse_obj(candidate1).dict(by_alias=True) - - res = client.get( - f"{xpansion_base_url}/candidates", - headers=headers, - ) - assert res.status_code == 200 - assert res.json() == [ - XpansionCandidateDTO.parse_obj(candidate1).dict(by_alias=True), - XpansionCandidateDTO.parse_obj(candidate4).dict(by_alias=True), - ] - - res = client.delete( - f"{xpansion_base_url}/resources/capacities/{filename_capa1}", - headers=headers, - ) - assert res.status_code == 409 - - candidate5 = { - "name": "candidate4", - "link": f"{area1_name} - {area2_name}", - "annual-cost-per-mw": 1, - "max-investment": 1.0, - } - res = client.put( - f"{xpansion_base_url}/candidates/{candidate4['name']}", - headers=headers, - json=candidate5, - ) - assert res.status_code == 200 - - res = client.delete( - f"{xpansion_base_url}/resources/capacities/{filename_capa1}", - headers=headers, - ) - assert res.status_code == 200 - - res = client.delete( - f"/v1/studies/{study_id}/extensions/xpansion", - headers=headers, - ) - assert res.status_code == 200 - - assert not expansion_path.exists() diff --git a/tests/integration/variant_blueprint/test_renewable_cluster.py b/tests/integration/variant_blueprint/test_renewable_cluster.py index bdefbb402f..3dab4a946b 100644 --- a/tests/integration/variant_blueprint/test_renewable_cluster.py +++ b/tests/integration/variant_blueprint/test_renewable_cluster.py @@ -4,7 +4,9 @@ import pytest from starlette.testclient import TestClient +from antarest.core.tasks.model import TaskStatus from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id +from tests.integration.utils import wait_task_completion # noinspection SpellCheckingInspection @@ -23,6 +25,22 @@ def test_lifecycle( ) -> None: # sourcery skip: extract-duplicate-method + # ======================= + # Study version upgrade + # ======================= + + # We have an "old" study that we need to upgrade to version 810 + min_study_version = 810 + res = client.put( + f"/v1/studies/{study_id}/upgrade", + headers={"Authorization": f"Bearer {user_access_token}"}, + params={"target_version": min_study_version}, + ) + res.raise_for_status() + task_id = res.json() + task = wait_task_completion(client, user_access_token, task_id) + assert task.status == TaskStatus.COMPLETED, task + # ===================== # General Data Update # ===================== diff --git a/antarest/study/storage/rawstudy/io/writer/__init__.py b/tests/integration/xpansion_studies_blueprint/__init__.py similarity index 100% rename from antarest/study/storage/rawstudy/io/writer/__init__.py rename to tests/integration/xpansion_studies_blueprint/__init__.py diff --git a/tests/integration/xpansion_studies_blueprint/test_integration_xpansion.py b/tests/integration/xpansion_studies_blueprint/test_integration_xpansion.py new file mode 100644 index 0000000000..b3a563a71e --- /dev/null +++ 
b/tests/integration/xpansion_studies_blueprint/test_integration_xpansion.py @@ -0,0 +1,355 @@ +import io +import re +import typing as t +from pathlib import Path +from urllib.parse import urljoin + +from starlette.testclient import TestClient + +from antarest.study.business.xpansion_management import XpansionCandidateDTO + + +def _create_area( + client: TestClient, + headers: t.Mapping[str, str], + study_id: str, + area_name: str, + *, + country: str, +) -> str: + res = client.post( + f"/v1/studies/{study_id}/areas", + headers=headers, + json={"name": area_name, "type": "AREA", "metadata": {"country": country}}, + ) + assert res.status_code in {200, 201}, res.json() + return t.cast(str, res.json()["id"]) + + +def _create_link( + client: TestClient, + headers: t.Mapping[str, str], + study_id: str, + src_area_id: str, + dst_area_id: str, +) -> None: + res = client.post( + f"/v1/studies/{study_id}/links", + headers=headers, + json={"area1": src_area_id, "area2": dst_area_id}, + ) + assert res.status_code in {200, 201}, res.json() + + +def test_integration_xpansion(client: TestClient, tmp_path: Path, admin_access_token: str) -> None: + headers = {"Authorization": f"Bearer {admin_access_token}"} + + res = client.post("/v1/studies", headers=headers, params={"name": "foo", "version": "860"}) + assert res.status_code == 201, res.json() + study_id = res.json() + + area1_id = _create_area(client, headers, study_id, "area1", country="FR") + area2_id = _create_area(client, headers, study_id, "area2", country="DE") + area3_id = _create_area(client, headers, study_id, "area3", country="DE") + _create_link(client, headers, study_id, area1_id, area2_id) + + res = client.post(f"/v1/studies/{study_id}/extensions/xpansion", headers=headers) + assert res.status_code in {200, 201}, res.json() + + expansion_path = tmp_path / "internal_workspace" / study_id / "user" / "expansion" + assert expansion_path.exists() + + # Create a client for Xpansion with the xpansion URL + xpansion_base_url = f"/v1/studies/{study_id}/extensions/xpansion/" + xp_client = TestClient(client.app, base_url=urljoin(client.base_url, xpansion_base_url)) + + res = xp_client.get("settings", headers=headers) + assert res.status_code == 200 + assert res.json() == { + "master": "integer", + "uc_type": "expansion_fast", + "optimality_gap": 1.0, + "relative_gap": 1e-06, + "relaxed_optimality_gap": 1e-05, + "max_iteration": 1000, + "solver": "Xpress", + "log_level": 0, + "separation_parameter": 0.5, + "batch_size": 96, + "yearly-weights": "", + "additional-constraints": "", + "timelimit": 1000000000000, + "sensitivity_config": {"epsilon": 0.0, "projection": [], "capex": False}, + } + + res = xp_client.put("settings", headers=headers, json={"optimality_gap": 42}) + assert res.status_code == 200 + assert res.json() == { + "master": "integer", + "uc_type": "expansion_fast", + "optimality_gap": 42, + "relative_gap": 1e-06, + "relaxed_optimality_gap": 1e-05, + "max_iteration": 1000, + "solver": "Xpress", + "log_level": 0, + "separation_parameter": 0.5, + "batch_size": 96, + "yearly-weights": "", + "additional-constraints": "", + "timelimit": 1000000000000, + "sensitivity_config": {"epsilon": 0.0, "projection": [], "capex": False}, + } + + res = xp_client.put("settings", headers=headers, json={"additional-constraints": "missing.txt"}) + assert res.status_code == 404 + err_obj = res.json() + assert re.search(r"file 'missing.txt' does not exist", err_obj["description"]) + assert err_obj["exception"] == "XpansionFileNotFoundError" + + res = 
xp_client.put("settings/additional-constraints", headers=headers, params={"filename": "missing.txt"}) + assert res.status_code == 404 + err_obj = res.json() + assert re.search(r"file 'missing.txt' does not exist", err_obj["description"]) + assert err_obj["exception"] == "XpansionFileNotFoundError" + + filename_constraints1 = "filename_constraints1.txt" + filename_constraints2 = "filename_constraints2.txt" + filename_constraints3 = "filename_constraints3.txt" + content_constraints1 = "content_constraints1\n" + content_constraints2 = "content_constraints2\n" + content_constraints3 = "content_constraints3\n" + + files = { + "file": ( + filename_constraints1, + io.BytesIO(content_constraints1.encode("utf-8")), + "image/jpeg", + ) + } + res = xp_client.post("resources/constraints", headers=headers, files=files) + assert res.status_code in {200, 201} + actual_path = expansion_path / "constraints" / filename_constraints1 + assert actual_path.read_text() == content_constraints1 + + files = { + "file": ( + filename_constraints1, + io.BytesIO(content_constraints1.encode("utf-8")), + "image/jpeg", + ), + } + + res = xp_client.post("resources/constraints", headers=headers, files=files) + assert res.status_code == 409 + err_obj = res.json() + assert re.search( + rf"File '{filename_constraints1}' already exists", + err_obj["description"], + flags=re.IGNORECASE, + ) + assert err_obj["exception"] == "FileAlreadyExistsError" + + files = { + "file": ( + filename_constraints2, + io.BytesIO(content_constraints2.encode("utf-8")), + "image/jpeg", + ), + } + res = xp_client.post("resources/constraints", headers=headers, files=files) + assert res.status_code in {200, 201} + + files = { + "file": ( + filename_constraints3, + io.BytesIO(content_constraints3.encode("utf-8")), + "image/jpeg", + ), + } + res = xp_client.post("resources/constraints", headers=headers, files=files) + assert res.status_code in {200, 201} + + res = xp_client.get(f"resources/constraints/{filename_constraints1}", headers=headers) + assert res.status_code == 200 + assert res.json() == content_constraints1 + + res = xp_client.get("resources/constraints/", headers=headers) + assert res.status_code == 200 + assert res.json() == [ + filename_constraints1, + filename_constraints2, + filename_constraints3, + ] + + res = xp_client.put( + "settings/additional-constraints", + headers=headers, + params={"filename": filename_constraints1}, + ) + assert res.status_code == 200 + + res = xp_client.delete(f"resources/constraints/{filename_constraints1}", headers=headers) + assert res.status_code == 409 + err_obj = res.json() + assert re.search( + rf"File '{filename_constraints1}' is still used", + err_obj["description"], + flags=re.IGNORECASE, + ) + assert err_obj["exception"] == "FileCurrentlyUsedInSettings" + + res = xp_client.put("settings/additional-constraints", headers=headers) + assert res.status_code == 200 + + res = xp_client.delete(f"resources/constraints/{filename_constraints1}", headers=headers) + assert res.status_code == 200 + + candidate1 = { + "name": "candidate1", + "link": f"{area1_id} - {area2_id}", + "annual-cost-per-mw": 1, + "max-investment": 1.0, + } + res = xp_client.post("candidates", headers=headers, json=candidate1) + assert res.status_code in {200, 201} + + candidate2 = { + "name": "candidate2", + "link": f"{area1_id} - {area3_id}", + "annual-cost-per-mw": 1, + "max-investment": 1.0, + } + res = xp_client.post("candidates", headers=headers, json=candidate2) + assert res.status_code == 404 + err_obj = res.json() + assert 
re.search( + rf"link from '{area1_id}' to '{area3_id}' not found", + err_obj["description"], + flags=re.IGNORECASE, + ) + assert err_obj["exception"] == "LinkNotFound" + + candidate3 = { + "name": "candidate3", + "link": f"non_existent_area - {area3_id}", + "annual-cost-per-mw": 1, + "max-investment": 1.0, + } + res = xp_client.post("candidates", headers=headers, json=candidate3) + assert res.status_code == 404 + err_obj = res.json() + assert re.search( + rf"link from '{area3_id}' to 'non_existent_area' not found", + err_obj["description"], + flags=re.IGNORECASE, + ) + assert err_obj["exception"] == "LinkNotFound" + + filename_capa1 = "filename_capa1.txt" + filename_capa2 = "filename_capa2.txt" + filename_capa3 = "filename_capa3.txt" + content_capa1 = "0" + content_capa2 = "1" + content_capa3 = "2" + files = { + "file": ( + filename_capa1, + io.BytesIO(content_capa1.encode("utf-8")), + "txt/csv", + ) + } + res = xp_client.post("resources/capacities", headers=headers, files=files) + assert res.status_code in {200, 201} + actual_path = expansion_path / "capa" / filename_capa1 + assert actual_path.read_text() == content_capa1 + + res = xp_client.post("resources/capacities", headers=headers, files=files) + assert res.status_code == 409 + err_obj = res.json() + assert re.search( + rf"File '{filename_capa1}' already exists", + err_obj["description"], + flags=re.IGNORECASE, + ) + assert err_obj["exception"] == "FileAlreadyExistsError" + + files = { + "file": ( + filename_capa2, + io.BytesIO(content_capa2.encode("utf-8")), + "txt/csv", + ) + } + res = xp_client.post("resources/capacities", headers=headers, files=files) + assert res.status_code in {200, 201} + + files = { + "file": ( + filename_capa3, + io.BytesIO(content_capa3.encode("utf-8")), + "txt/csv", + ) + } + res = xp_client.post("resources/capacities", headers=headers, files=files) + assert res.status_code in {200, 201} + + # get single capa + res = xp_client.get(f"resources/capacities/{filename_capa1}", headers=headers) + assert res.status_code == 200 + assert res.json() == { + "columns": [0], + "data": [[0.0]], + "index": [0], + } + + res = xp_client.get("resources/capacities", headers=headers) + assert res.status_code == 200 + assert res.json() == [filename_capa1, filename_capa2, filename_capa3] + + candidate4 = { + "name": "candidate4", + "link": f"{area1_id} - {area2_id}", + "annual-cost-per-mw": 1, + "max-investment": 1.0, + "link-profile": filename_capa1, + } + res = xp_client.post("candidates", headers=headers, json=candidate4) + assert res.status_code in {200, 201} + + res = xp_client.get(f"candidates/{candidate1['name']}", headers=headers) + assert res.status_code == 200 + assert res.json() == XpansionCandidateDTO.parse_obj(candidate1).dict(by_alias=True) + + res = xp_client.get("candidates", headers=headers) + assert res.status_code == 200 + assert res.json() == [ + XpansionCandidateDTO.parse_obj(candidate1).dict(by_alias=True), + XpansionCandidateDTO.parse_obj(candidate4).dict(by_alias=True), + ] + + res = xp_client.delete(f"resources/capacities/{filename_capa1}", headers=headers) + assert res.status_code == 409 + err_obj = res.json() + assert re.search( + rf"capacities file '{filename_capa1}' is still used", + err_obj["description"], + flags=re.IGNORECASE, + ) + + candidate5 = { + "name": "candidate4", + "link": f"{area1_id} - {area2_id}", + "annual-cost-per-mw": 1, + "max-investment": 1.0, + } + res = xp_client.put(f"candidates/{candidate4['name']}", headers=headers, json=candidate5) + assert res.status_code == 200 + + res = 
xp_client.delete(f"resources/capacities/{filename_capa1}", headers=headers) + assert res.status_code == 200 + + res = client.delete(f"/v1/studies/{study_id}/extensions/xpansion", headers=headers) + assert res.status_code == 200 + + assert not expansion_path.exists() diff --git a/tests/launcher/test_service.py b/tests/launcher/test_service.py index 5f0f1bd9b7..a8e20283c8 100644 --- a/tests/launcher/test_service.py +++ b/tests/launcher/test_service.py @@ -23,6 +23,7 @@ ) from antarest.core.exceptions import StudyNotFoundError from antarest.core.filetransfer.model import FileDownload, FileDownloadDTO, FileDownloadTaskDTO +from antarest.core.interfaces.cache import ICache from antarest.core.interfaces.eventbus import Event, EventType from antarest.core.jwt import DEFAULT_ADMIN_USER, JWTUser from antarest.core.model import PermissionInfo @@ -38,8 +39,10 @@ LauncherService, ) from antarest.login.auth import Auth -from antarest.login.model import User +from antarest.login.model import Identity from antarest.study.model import OwnerInfo, PublicMode, Study, StudyMetadataDTO +from antarest.study.repository import StudyMetadataRepository +from antarest.study.service import StudyService class TestLauncherService: @@ -204,9 +207,10 @@ def test_service_get_result_from_database(self) -> None: ) @pytest.mark.unit_test - def test_service_get_jobs_from_database(self) -> None: + def test_service_get_jobs_from_database(self, db_session) -> None: launcher_mock = Mock() now = datetime.utcnow() + identity_instance = Identity(id=1) fake_execution_result = [ JobResult( id=str(uuid4()), @@ -214,7 +218,7 @@ def test_service_get_jobs_from_database(self) -> None: job_status=JobStatus.SUCCESS, msg="Hello, World!", exit_code=0, - owner_id=1, + owner=identity_instance, ) ] returned_faked_execution_results = [ @@ -225,7 +229,7 @@ def test_service_get_jobs_from_database(self) -> None: msg="Hello, World!", exit_code=0, creation_date=now, - owner_id=1, + owner=identity_instance, ), JobResult( id="2", @@ -234,7 +238,7 @@ def test_service_get_jobs_from_database(self) -> None: msg="Hello, World!", exit_code=0, creation_date=now, - owner_id=1, + owner=identity_instance, ), ] all_faked_execution_results = returned_faked_execution_results + [ @@ -245,7 +249,7 @@ def test_service_get_jobs_from_database(self) -> None: msg="Hello, World!", exit_code=0, creation_date=now - timedelta(days=ORPHAN_JOBS_VISIBILITY_THRESHOLD + 1), - owner_id=1, + owner=identity_instance, ) ] launcher_mock.get_result.return_value = None @@ -256,17 +260,11 @@ def test_service_get_jobs_from_database(self) -> None: repository.find_by_study.return_value = fake_execution_result repository.get_all.return_value = all_faked_execution_results - study_service = Mock() - study_service.repository = Mock() - study_service.repository.get_list.return_value = [ - Mock( - spec=Study, - id="b", - groups=[], - owner=User(id=2), - public_mode=PublicMode.NONE, - ) - ] + study_service = Mock(spec=StudyService) + study_service.repository = StudyMetadataRepository(cache_service=Mock(spec=ICache), session=db_session) + db_session.add_all(fake_execution_result) + db_session.add_all(all_faked_execution_results) + db_session.commit() launcher_service = LauncherService( config=Config(), diff --git a/tests/locust/__init__.py b/tests/locust/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/locust/locustfile.py b/tests/locust/locustfile.py deleted file mode 100644 index 53ad82bf5f..0000000000 --- a/tests/locust/locustfile.py +++ /dev/null @@ -1,20 +0,0 
@@ -import time - -from locust import HttpUser, between, task - - -class AdminUser(HttpUser): - wait_time = between(1, 2.5) - - @task - def get_studies(self): - res = self.client.get("/studies") - studies = res.json() - for study_id in studies: - self.client.get(f"/studies/{study_id}", params={"depth": -1}) - time.sleep(0.2) - - def on_start(self): - res = self.client.post("/login", data={"username": "admin", "password": "admin"}) - credentials = res.json() - self.client.headers.update({"Authorization": f'Bearer {credentials["access_token"]}'}) diff --git a/tests/storage/business/test_autoarchive_service.py b/tests/storage/business/test_autoarchive_service.py index 3f63f41776..48e2c537b7 100644 --- a/tests/storage/business/test_autoarchive_service.py +++ b/tests/storage/business/test_autoarchive_service.py @@ -4,9 +4,12 @@ from antarest.core.config import Config, StorageConfig, WorkspaceConfig from antarest.core.exceptions import TaskAlreadyRunning +from antarest.core.interfaces.cache import ICache from antarest.core.jwt import DEFAULT_ADMIN_USER from antarest.core.requests import RequestParameters from antarest.study.model import DEFAULT_WORKSPACE_NAME, RawStudy +from antarest.study.repository import StudyMetadataRepository +from antarest.study.service import StudyService from antarest.study.storage.auto_archive_service import AutoArchiveService from antarest.study.storage.variantstudy.model.dbmodel import VariantStudy from tests.helpers import with_db_context @@ -16,53 +19,68 @@ def test_auto_archival(tmp_path: Path): workspace_path = tmp_path / "workspace_test" auto_archive_service = AutoArchiveService( - Mock(), + Mock(spec=StudyService), Config(storage=StorageConfig(workspaces={"test": WorkspaceConfig(path=workspace_path)})), ) now = datetime.datetime.now() - auto_archive_service.study_service.repository = Mock() - auto_archive_service.study_service.repository.get_all.return_value = [ - RawStudy( - id="a", - workspace="not default", - updated_at=now - datetime.timedelta(days=61), - ), - RawStudy( - id="b", - workspace=DEFAULT_WORKSPACE_NAME, - updated_at=now - datetime.timedelta(days=59), - ), - RawStudy( - id="c", - workspace=DEFAULT_WORKSPACE_NAME, - updated_at=now - datetime.timedelta(days=61), - archived=True, - ), - RawStudy( - id="d", - workspace=DEFAULT_WORKSPACE_NAME, - updated_at=now - datetime.timedelta(days=61), - archived=False, - ), - VariantStudy(id="e", updated_at=now - datetime.timedelta(days=61)), - ] - auto_archive_service.study_service.storage_service = Mock() - auto_archive_service.study_service.storage_service.variant_study_service = Mock() - auto_archive_service.study_service.archive.return_value = TaskAlreadyRunning - auto_archive_service.study_service.get_study.return_value = VariantStudy( - id="e", updated_at=now - datetime.timedelta(days=61) + repository = StudyMetadataRepository(cache_service=Mock(spec=ICache)) + + # Add some studies in the database + db_session = repository.session + db_session.add_all( + [ + RawStudy( + id="a", + workspace="not default", + updated_at=now - datetime.timedelta(days=61), + ), + RawStudy( + id="b", + workspace=DEFAULT_WORKSPACE_NAME, + updated_at=now - datetime.timedelta(days=59), + ), + RawStudy( + id="c", + workspace=DEFAULT_WORKSPACE_NAME, + updated_at=now - datetime.timedelta(days=61), + archived=True, + ), + RawStudy( + id="d", + workspace=DEFAULT_WORKSPACE_NAME, + updated_at=now - datetime.timedelta(days=61), + archived=False, + ), + VariantStudy( + id="e", + updated_at=now - datetime.timedelta(days=61), + ), + ] ) + 
db_session.commit() + + study_service = auto_archive_service.study_service + study_service.repository = repository + + study_service.storage_service = Mock() + study_service.storage_service.variant_study_service = Mock() + study_service.archive.side_effect = TaskAlreadyRunning + study_service.get_study = repository.get auto_archive_service._try_archive_studies() - auto_archive_service.study_service.archive.assert_called_once_with( - "d", params=RequestParameters(DEFAULT_ADMIN_USER) - ) - auto_archive_service.study_service.storage_service.variant_study_service.clear_snapshot.assert_called_once_with( - VariantStudy(id="e", updated_at=now - datetime.timedelta(days=61)) - ) - auto_archive_service.study_service.archive_outputs.assert_called_once_with( - "e", params=RequestParameters(DEFAULT_ADMIN_USER) - ) + # Check that the raw study "d" was about to be archived but failed because the task was already running + study_service.archive.assert_called_once_with("d", params=RequestParameters(DEFAULT_ADMIN_USER)) + + # Check that the snapshot of the variant study "e" is cleared + study_service.storage_service.variant_study_service.clear_snapshot.assert_called_once() + calls = study_service.storage_service.variant_study_service.clear_snapshot.call_args_list + assert len(calls) == 1 + clear_snapshot_call = calls[0] + actual_study = clear_snapshot_call[0][0] + assert actual_study.id == "e" + + # Check that the variant outputs are deleted for the variant study "e" + study_service.archive_outputs.assert_called_once_with("e", params=RequestParameters(DEFAULT_ADMIN_USER)) diff --git a/tests/storage/business/test_raw_study_service.py b/tests/storage/business/test_raw_study_service.py index cec56dad6f..2b5c4b2dfd 100644 --- a/tests/storage/business/test_raw_study_service.py +++ b/tests/storage/business/test_raw_study_service.py @@ -17,7 +17,6 @@ from antarest.study.model import DEFAULT_WORKSPACE_NAME, RawStudy, StudyAdditionalData from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.rawstudy.raw_study_service import RawStudyService -from antarest.study.storage.utils import get_default_workspace_path def build_config( @@ -139,7 +138,7 @@ def test_check_errors(): metadata = RawStudy( id="study", workspace=DEFAULT_WORKSPACE_NAME, - path=str(get_default_workspace_path(config) / "study"), + path=str(config.get_workspace_path() / "study"), ) assert study_service.check_errors(metadata) == ["Hello"] @@ -218,7 +217,7 @@ def test_create(tmp_path: Path, project_path: Path) -> None: metadata = RawStudy( id="study1", workspace=DEFAULT_WORKSPACE_NAME, - path=str(get_default_workspace_path(config) / "study1"), + path=str(config.get_workspace_path() / "study1"), version="720", created_at=datetime.datetime.now(), updated_at=datetime.datetime.now(), @@ -257,7 +256,7 @@ def create_study(version: str): metadata = RawStudy( id=f"study{version}", workspace=DEFAULT_WORKSPACE_NAME, - path=str(get_default_workspace_path(config) / f"study{version}"), + path=str(config.get_workspace_path() / f"study{version}"), version=version, created_at=datetime.datetime.now(), updated_at=datetime.datetime.now(), diff --git a/tests/storage/business/test_study_version_upgrader.py b/tests/storage/business/test_study_version_upgrader.py index edeab34415..357869b850 100644 --- a/tests/storage/business/test_study_version_upgrader.py +++ b/tests/storage/business/test_study_version_upgrader.py @@ -9,7 +9,7 @@ import pandas import pytest -from antarest.study.storage.rawstudy.io.reader import 
MultipleSameKeysIniReader +from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.root.settings.generaldata import DUPLICATE_KEYS from antarest.study.storage.study_upgrader import UPGRADE_METHODS, InvalidUpgrade, upgrade_study @@ -90,7 +90,7 @@ def assert_study_antares_file_is_updated(tmp_path: Path, target_version: str) -> def assert_settings_are_updated(tmp_path: Path, old_values: List[str]) -> None: general_data_path = tmp_path / "settings" / "generaldata.ini" - reader = MultipleSameKeysIniReader(DUPLICATE_KEYS) + reader = IniReader(DUPLICATE_KEYS) data = reader.read(general_data_path) general = data["general"] optimization = data["optimization"] @@ -119,7 +119,7 @@ def assert_settings_are_updated(tmp_path: Path, old_values: List[str]) -> None: def get_old_settings_values(tmp_path: Path) -> List[str]: general_data_path = tmp_path / "settings" / "generaldata.ini" - reader = MultipleSameKeysIniReader(DUPLICATE_KEYS) + reader = IniReader(DUPLICATE_KEYS) data = reader.read(general_data_path) filtering_value = data["general"]["filtering"] custom_ts_value = data["general"]["custom-ts-numbers"] @@ -170,7 +170,7 @@ def assert_inputs_are_updated(tmp_path: Path, dico: dict) -> None: for folder in areas: folder_path = Path(folder) if folder_path.is_dir(): - reader = MultipleSameKeysIniReader(DUPLICATE_KEYS) + reader = IniReader(DUPLICATE_KEYS) data = reader.read(folder_path / "adequacy_patch.ini") assert data["adequacy-patch"]["adequacy-patch-mode"] == "outside" diff --git a/tests/storage/business/test_xpansion_manager.py b/tests/storage/business/test_xpansion_manager.py index 1dc88ff31e..1703325e8b 100644 --- a/tests/storage/business/test_xpansion_manager.py +++ b/tests/storage/business/test_xpansion_manager.py @@ -12,19 +12,16 @@ from antarest.core.model import JSON from antarest.study.business.xpansion_management import ( - CutType, FileCurrentlyUsedInSettings, LinkNotFound, Master, - MaxIteration, Solver, UcType, + UpdateXpansionSettings, XpansionCandidateDTO, XpansionFileNotFoundError, XpansionManager, XpansionResourceFileType, - XpansionSensitivitySettingsDTO, - XpansionSettingsDTO, ) from antarest.study.model import RawStudy from antarest.study.storage.rawstudy.model.filesystem.config.files import build @@ -86,46 +83,30 @@ def make_link_and_areas(empty_study: FileStudy) -> None: @pytest.mark.unit_test @pytest.mark.parametrize( - "version,expected_output", + "version, expected_output", [ ( - 720, + 810, { - "settings": { - "optimality_gap": 1, - "max_iteration": "+Inf", - "uc_type": "expansion_fast", - "master": "integer", - "relaxed-optimality-gap": 1e6, - "cut-type": "yearly", - "ampl.solver": "cbc", - "ampl.presolve": 0, - "ampl.solve_bounds_frequency": 1000000, - }, - "sensitivity": {"sensitivity_in": {}}, "candidates": {}, "capa": {}, "constraints": {}, - "weights": {}, - }, - ), - ( - 810, - { + "sensitivity": {"sensitivity_in": {}}, "settings": { - "optimality_gap": 1, - "max_iteration": "+Inf", - "uc_type": "expansion_fast", "master": "integer", - "relative_gap": 1e-12, - "solver": "Cbc", - "batch_size": 0, + "uc_type": "expansion_fast", + "optimality_gap": 1, + "relative_gap": 1e-06, + "relaxed_optimality_gap": 1e-05, + "max_iteration": 1000, + "solver": "Xpress", + "log_level": 0, "separation_parameter": 0.5, + "batch_size": 96, + "yearly-weights": "", + "additional-constraints": "", + "timelimit": int(1e12), }, - 
"sensitivity": {"sensitivity_in": {}}, - "candidates": {}, - "capa": {}, - "constraints": {}, "weights": {}, }, ), @@ -144,7 +125,8 @@ def test_create_configuration(tmp_path: Path, version: int, expected_output: JSO xpansion_manager.create_xpansion_configuration(study) - assert empty_study.tree.get(["user", "expansion"], expanded=True, depth=9) == expected_output + actual = empty_study.tree.get(["user", "expansion"], expanded=True, depth=9) + assert actual == expected_output @pytest.mark.unit_test @@ -173,50 +155,23 @@ def test_delete_xpansion_configuration(tmp_path: Path) -> None: @pytest.mark.parametrize( "version, expected_output", [ - ( - 720, - { - "additional-constraints": None, - "ampl.presolve": 0, - "ampl.solve_bounds_frequency": 1000000, - "ampl.solver": "cbc", - "batch_size": 0, - "cut-type": CutType.YEARLY, - "log_level": 0, - "master": Master.INTEGER, - "max_iteration": MaxIteration.INF, - "optimality_gap": 1.0, - "relative_gap": None, - "relaxed-optimality-gap": 1000000.0, - "sensitivity_config": {"capex": False, "epsilon": 10000.0, "projection": []}, - "separation_parameter": 0.5, - "solver": None, - "timelimit": 1000000000000, - "uc_type": UcType.EXPANSION_FAST, - "yearly-weights": None, - }, - ), ( 810, { - "additional-constraints": None, - "ampl.presolve": None, - "ampl.solve_bounds_frequency": None, - "ampl.solver": None, - "batch_size": 0, - "cut-type": None, - "log_level": 0, "master": Master.INTEGER, - "max_iteration": MaxIteration.INF, + "uc_type": UcType.EXPANSION_FAST, "optimality_gap": 1.0, - "relative_gap": 1e-12, - "relaxed-optimality-gap": None, - "sensitivity_config": {"capex": False, "epsilon": 10000.0, "projection": []}, + "relative_gap": 1e-06, + "relaxed_optimality_gap": 1e-05, + "max_iteration": 1000, + "solver": Solver.XPRESS, + "log_level": 0, "separation_parameter": 0.5, - "solver": Solver.CBC, - "timelimit": 1000000000000, - "uc_type": UcType.EXPANSION_FAST, - "yearly-weights": None, + "batch_size": 96, + "yearly-weights": "", + "additional-constraints": "", + "timelimit": int(1e12), + "sensitivity_config": {"epsilon": 0, "projection": [], "capex": False}, }, ), ], @@ -237,39 +192,6 @@ def test_get_xpansion_settings(tmp_path: Path, version: int, expected_output: JS assert actual.dict(by_alias=True) == expected_output -@pytest.mark.unit_test -def test_xpansion_sensitivity_settings(tmp_path: Path) -> None: - """ - Test that attribute projection in sensitivity_config is optional - """ - - empty_study = make_empty_study(tmp_path, 720) - study = RawStudy(id="1", path=empty_study.config.study_path, version=720) - xpansion_manager = make_xpansion_manager(empty_study) - - xpansion_manager.create_xpansion_configuration(study) - expected_settings = XpansionSettingsDTO.parse_obj( - { - "optimality_gap": 1, - "max_iteration": "+Inf", - "uc_type": "expansion_fast", - "master": "integer", - "yearly_weight": None, - "additional-constraints": None, - "relaxed-optimality-gap": None, - "cut-type": None, - "ampl.solver": None, - "ampl.presolve": None, - "ampl.solve_bounds_frequency": None, - "relative_gap": 1e-12, - "solver": "Cbc", - "sensitivity_config": XpansionSensitivitySettingsDTO(epsilon=0.1, capex=False), - } - ) - xpansion_manager.update_xpansion_settings(study, expected_settings) - assert xpansion_manager.get_xpansion_settings(study) == expected_settings - - @pytest.mark.unit_test def test_update_xpansion_settings(tmp_path: Path) -> None: """ @@ -282,32 +204,43 @@ def test_update_xpansion_settings(tmp_path: Path) -> None: 
xpansion_manager.create_xpansion_configuration(study) - expected = { + new_settings_obj = { "optimality_gap": 4.0, "max_iteration": 123, "uc_type": UcType.EXPANSION_FAST, "master": Master.INTEGER, - "yearly-weights": None, - "additional-constraints": None, - "relaxed-optimality-gap": "1.2%", - "cut-type": None, - "ampl.solver": None, - "ampl.presolve": None, - "ampl.solve_bounds_frequency": None, + "yearly-weights": "", + "additional-constraints": "", + "relaxed_optimality_gap": "1.2%", # percentage "relative_gap": 1e-12, "batch_size": 4, "separation_parameter": 0.5, "solver": Solver.CBC, - "timelimit": 1000000000000, + "timelimit": int(1e12), "log_level": 0, - "sensitivity_config": {"epsilon": 10000.0, "projection": [], "capex": False}, + "sensitivity_config": {"epsilon": 10500.0, "projection": ["foo"], "capex": False}, } - new_settings = XpansionSettingsDTO(**expected) + new_settings = UpdateXpansionSettings(**new_settings_obj) - xpansion_manager.update_xpansion_settings(study, new_settings) + actual = xpansion_manager.update_xpansion_settings(study, new_settings) - actual = xpansion_manager.get_xpansion_settings(study) + expected = { + "master": Master.INTEGER, + "uc_type": UcType.EXPANSION_FAST, + "optimality_gap": 4.0, + "relative_gap": 1e-12, + "relaxed_optimality_gap": 1.2, + "max_iteration": 123, + "solver": Solver.CBC, + "log_level": 0, + "separation_parameter": 0.5, + "batch_size": 4, + "yearly-weights": "", + "additional-constraints": "", + "timelimit": int(1e12), + "sensitivity_config": {"epsilon": 10500.0, "projection": ["foo"], "capex": False}, + } assert actual.dict(by_alias=True) == expected @@ -318,7 +251,8 @@ def test_add_candidate(tmp_path: Path) -> None: xpansion_manager = make_xpansion_manager(empty_study) xpansion_manager.create_xpansion_configuration(study) - assert empty_study.tree.get(["user", "expansion", "candidates"]) == {} + actual = empty_study.tree.get(["user", "expansion", "candidates"]) + assert actual == {} new_candidate = XpansionCandidateDTO.parse_obj( { @@ -352,12 +286,14 @@ def test_add_candidate(tmp_path: Path) -> None: candidates = {"1": new_candidate.dict(by_alias=True, exclude_none=True)} - assert empty_study.tree.get(["user", "expansion", "candidates"]) == candidates + actual = empty_study.tree.get(["user", "expansion", "candidates"]) + assert actual == candidates xpansion_manager.add_candidate(study, new_candidate2) candidates["2"] = new_candidate2.dict(by_alias=True, exclude_none=True) - assert empty_study.tree.get(["user", "expansion", "candidates"]) == candidates + actual = empty_study.tree.get(["user", "expansion", "candidates"]) + assert actual == candidates @pytest.mark.unit_test @@ -519,12 +455,11 @@ def test_update_constraints(tmp_path: Path) -> None: empty_study.tree.save({"user": {"expansion": {"constraints": {"constraints.txt": b"0"}}}}) - xpansion_manager.update_xpansion_constraints_settings(study=study, constraints_file_name="constraints.txt") - - assert xpansion_manager.get_xpansion_settings(study).additional_constraints == "constraints.txt" + actual_settings = xpansion_manager.update_xpansion_constraints_settings(study, "constraints.txt") + assert actual_settings.additional_constraints == "constraints.txt" - xpansion_manager.update_xpansion_constraints_settings(study=study, constraints_file_name=None) - assert xpansion_manager.get_xpansion_settings(study).additional_constraints is None + actual_settings = xpansion_manager.update_xpansion_constraints_settings(study, "") + assert actual_settings.additional_constraints == "" 
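Aside for reviewers: `test_update_xpansion_settings` above feeds `relaxed_optimality_gap="1.2%"` into `UpdateXpansionSettings` and expects the plain float `1.2` back, i.e. percent strings are normalized to their numeric value. A minimal sketch of how such a normalization can be expressed with a pydantic v1 validator; the model below is a hypothetical stand-in for illustration, not the PR's actual implementation:

from pydantic import BaseModel, validator


class GapSettings(BaseModel):
    # Hypothetical stand-in for the real settings model.
    relaxed_optimality_gap: float = 1e-5

    @validator("relaxed_optimality_gap", pre=True)
    def _strip_percent(cls, value):
        # Accept "1.2%" as well as a plain number; keep the numeric
        # part as a percentage (1.2), as asserted in the test above.
        if isinstance(value, str) and value.strip().endswith("%"):
            return float(value.strip()[:-1])
        return value


assert GapSettings(relaxed_optimality_gap="1.2%").relaxed_optimality_gap == 1.2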
@pytest.mark.unit_test @@ -571,11 +506,15 @@ def test_add_resources(tmp_path: Path) -> None: settings = xpansion_manager.get_xpansion_settings(study) settings.yearly_weights = filename3 - xpansion_manager.update_xpansion_settings(study, settings) + update_settings = UpdateXpansionSettings(**settings.dict()) + xpansion_manager.update_xpansion_settings(study, update_settings) + with pytest.raises(FileCurrentlyUsedInSettings): xpansion_manager.delete_resource(study, XpansionResourceFileType.WEIGHTS, filename3) - settings.yearly_weights = None - xpansion_manager.update_xpansion_settings(study, settings) + + settings.yearly_weights = "" + update_settings = UpdateXpansionSettings(**settings.dict()) + xpansion_manager.update_xpansion_settings(study, update_settings) xpansion_manager.delete_resource(study, XpansionResourceFileType.WEIGHTS, filename3) diff --git a/tests/storage/integration/test_STA_mini.py b/tests/storage/integration/test_STA_mini.py index 5672740939..e83cda5847 100644 --- a/tests/storage/integration/test_STA_mini.py +++ b/tests/storage/integration/test_STA_mini.py @@ -432,7 +432,8 @@ def test_sta_mini_output(storage_service, url: str, expected_output: dict): "master": '"integer"', "yearly-weights": "None", "additional-constraints": "None", - "relaxed-optimality-gap": 1000000.0, + "relaxed_optimality_gap": 0.00001, + # legacy attributes from version < 800 "cut-type": '"average"', "ampl.solver": '"cbc"', "ampl.presolve": 0, @@ -459,8 +460,6 @@ def test_sta_mini_expansion(storage_service, url: str, expected_output: dict): @pytest.mark.integration_test def test_sta_mini_copy(storage_service) -> None: - input_link = "input/links/de/fr.txt" - source_study_name = "STA-mini" destination_study_name = "copy-STA-mini" diff --git a/tests/storage/rawstudies/samples/__init__.py b/tests/storage/rawstudies/samples/__init__.py new file mode 100644 index 0000000000..773f16ec60 --- /dev/null +++ b/tests/storage/rawstudies/samples/__init__.py @@ -0,0 +1,3 @@ +from pathlib import Path + +ASSETS_DIR = Path(__file__).parent.resolve() diff --git a/tests/storage/rawstudies/samples/v810/sample1/study.antares b/tests/storage/rawstudies/samples/v810/sample1/study.antares index ae9f93dd07..a57109d3e4 100644 --- a/tests/storage/rawstudies/samples/v810/sample1/study.antares +++ b/tests/storage/rawstudies/samples/v810/sample1/study.antares @@ -1,5 +1,5 @@ [antares] -version = 800 +version = 810 caption = renewable-2-clusters-ts-prod-factor created = 1618413128 lastsave = 1625583204 diff --git a/tests/storage/rawstudies/test_factory.py b/tests/storage/rawstudies/test_factory.py index b3fd356f4f..e2cd4391b3 100644 --- a/tests/storage/rawstudies/test_factory.py +++ b/tests/storage/rawstudies/test_factory.py @@ -1,4 +1,3 @@ -from pathlib import Path from unittest.mock import Mock from antarest.core.interfaces.cache import CacheConstants @@ -7,10 +6,11 @@ from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer from antarest.study.storage.rawstudy.model.filesystem.factory import StudyFactory from antarest.study.storage.rawstudy.model.filesystem.root.filestudytree import FileStudyTree +from tests.storage.rawstudies.samples import ASSETS_DIR -def test_renewable_subtree(): - path = Path(__file__).parent / "samples/v810/sample1" +def test_renewable_subtree() -> None: + path = ASSETS_DIR / "v810/sample1" context: ContextServer = Mock(specs=ContextServer) config = build(path, "") assert config.get_renewable_ids("area") == ["la_rochelle", "oleron"] @@ -41,8 +41,8 @@ def test_renewable_subtree(): } 
-def test_factory_cache(): - path = Path(__file__).parent / "samples/v810/sample1" +def test_factory_cache() -> None: + path = ASSETS_DIR / "v810/sample1" cache = Mock() factory = StudyFactory(matrix=Mock(), resolver=Mock(), cache=cache) diff --git a/tests/storage/repository/antares_io/reader/test_ini_reader.py b/tests/storage/repository/antares_io/reader/test_ini_reader.py index 83b78c86b6..fb87061ff7 100644 --- a/tests/storage/repository/antares_io/reader/test_ini_reader.py +++ b/tests/storage/repository/antares_io/reader/test_ini_reader.py @@ -1,15 +1,12 @@ +import io import textwrap from pathlib import Path -import pytest - -from antarest.study.storage.rawstudy.io.reader import IniReader -from antarest.study.storage.rawstudy.io.reader.ini_reader import MultipleSameKeysIniReader +from antarest.study.storage.rawstudy.ini_reader import IniReader, SimpleKeyValueReader class TestIniReader: - @pytest.mark.unit_test - def test_read(self, tmp_path: Path) -> None: + def test_read__nominal_case(self, tmp_path: Path) -> None: path = Path(tmp_path) / "test.ini" path.write_text( textwrap.dedent( @@ -17,14 +14,17 @@ def test_read(self, tmp_path: Path) -> None: [part1] key_int = 1 key_float = 2.1 + key_big_float = 1e16 + key_small_float = -1e-16 + key_inf = +Inf key_str = value1 - + key_empty = + [part2] key_bool = True - key_bool2 = False - - [[allocation]] - my_area = 2.718 + key_bool2 = false + # key_foo = False + ; key_bar = 3.14 """ ) ) @@ -33,9 +33,19 @@ def test_read(self, tmp_path: Path) -> None: actual = reader.read(path) expected = { - "part1": {"key_int": 1, "key_str": "value1", "key_float": 2.1}, - "part2": {"key_bool": True, "key_bool2": False}, - "[allocation]": {"my_area": 2.718}, + "part1": { + "key_int": 1, + "key_str": "value1", + "key_float": 2.1, + "key_big_float": 1e16, + "key_small_float": -1e-16, + "key_inf": "+Inf", # note: `+Inf` is not supported by JSON + "key_empty": "", + }, + "part2": { + "key_bool": True, + "key_bool2": False, + }, } assert actual == expected @@ -43,64 +53,222 @@ def test_read(self, tmp_path: Path) -> None: actual_from_bytes = reader.read(f) assert actual_from_bytes == expected + def test_read__without_section(self) -> None: + """ + If the file has no section, then the default section name is used. + This case is required to parse Xpansion `user/expansion/settings.ini` files + (using `SimpleKeyValueReader` subclass). + """ + reader = IniReader(section_name="config") + actual = reader.read( + io.StringIO( + """ + key_int = 1 + key_float = 2.1 + key_str = value1 + key_inf = +Inf + """ + ) + ) + expected = { + "config": { + "key_int": 1, + "key_str": "value1", + "key_float": 2.1, + "key_inf": "+Inf", # note: `+Inf` is not supported by JSON + }, + } + assert actual == expected -class TestMultipleSameKeysIniReader: - def test_read_sets_init(self, tmp_path: Path) -> None: - path = Path(tmp_path) / "test.ini" - path.write_text( - textwrap.dedent( + def test_read__duplicate_sections(self) -> None: + """ + If the file has duplicate sections, then the values are merged. + This case is required when the end-user produced an ill-formed `.ini` file. 
+ """ + reader = IniReader() + actual = reader.read( + io.StringIO( """ [part1] key_int = 1 key_float = 2.1 key_str = value1 - + + [part1] + key_str = value2 + key_bool = True + """ + ) + ) + expected = { + "part1": { + "key_int": 1, + "key_str": "value2", + "key_float": 2.1, + "key_bool": True, + }, + } + assert actual == expected + + def test_read__duplicate_keys(self) -> None: + """ + If a section has duplicate keys, then the values are merged. + This case is required, for instance, to parse `settings/generaldata.ini` files which + has duplicate keys like "playlist_year_weight", "playlist_year +", "playlist_year -", + "select_var -", "select_var +", in the `[playlist]` section. + In this case, duplicate keys must be declared in the `special_keys` argument, + to parse them as list. + """ + reader = IniReader( + special_keys=[ + "playlist_year_weight", + "playlist_year +", + "playlist_year -", + "select_var -", + "select_var +", + ] + ) + actual = reader.read( + io.StringIO( + """ + [part1] + key_int = 1 + key_int = 2 + + [playlist] + playlist_reset = false + playlist_year_weight = 1 + playlist_year + = 2015 + playlist_year + = 2016 + playlist_year + = 2017 + playlist_year - = 2018 + playlist_year - = 2019 + playlist_year - = 2020 + select_var - = 1 + select_var + = 2 + """ + ) + ) + expected = { + "part1": {"key_int": 2}, # last value is kept + "playlist": { + "playlist_reset": False, + "playlist_year_weight": [1], + "playlist_year +": [2015, 2016, 2017], + "playlist_year -": [2018, 2019, 2020], + "select_var -": [1], + "select_var +": [2], + }, + } + assert actual == expected + + def test_read__no_key(self) -> None: + """ + If a section has no key, then an empty dictionary is returned. + This case is required to parse `input/hydro/prepro/correlation.ini` files. + """ + reader = IniReader() + actual = reader.read( + io.StringIO( + """ + [part1] + key_int = 1 + [part2] - key_bool = true - key_bool = false + """ + ) + ) + expected = { + "part1": {"key_int": 1}, + "part2": {}, + } + assert actual == expected + def test_read__with_square_brackets(self) -> None: + """ + If a section name has square brackets, then they are preserved. + This case is required to parse `input/hydro/allocation/{area-id}.ini` files. + """ + reader = IniReader() + actual = reader.read( + io.StringIO( + """ + [part1] + key_int = 1 + [[allocation]] my_area = 2.718 """ ) ) - - reader = MultipleSameKeysIniReader() - actual = reader.read(path) - expected = { - "part1": {"key_int": 1, "key_str": "value1", "key_float": 2.1}, - "part2": {"key_bool": [True, False]}, + "part1": {"key_int": 1}, "[allocation]": {"my_area": 2.718}, } - assert actual == expected - with path.open() as f: - actual_from_bytes = reader.read(f) - assert actual_from_bytes == expected - - def test_read__with_special_keys(self, tmp_path: Path) -> None: - path = Path(tmp_path) / "test.ini" - path.write_text( - textwrap.dedent( + def test_read__sets(self) -> None: + """ + It is also required to parse `input/areas/sets.ini` files which have keys like "+" or "-". 
+ """ + reader = IniReader(["+", "-"]) + actual = reader.read( + io.StringIO( """ - [chap] - + = areaA - + = areaB + [all areas] + caption = All areas + comments = Spatial aggregates on all areas + + = east + + = west """ ) ) + expected = { + "all areas": { + "caption": "All areas", + "comments": "Spatial aggregates on all areas", + "+": ["east", "west"], + }, + } + assert actual == expected - reader = MultipleSameKeysIniReader(special_keys=["+"]) - actual = reader.read(path) + +class TestSimpleKeyValueReader: + def test_read(self) -> None: + # sample extracted from `user/expansion/settings.ini` + settings = textwrap.dedent( + """ + master=relaxed + uc_type=expansion_fast + optimality_gap=10001.0 + relative_gap=1e-06 + relaxed_optimality_gap=1e-05 + max_iteration=20 + solver=Xpress + log_level=3 + separation_parameter=0.66 + batch_size=102 + yearly-weights= + additional-constraints=constraintsFile.txt + timelimit=1000000000000 + """ + ) + ini_reader = SimpleKeyValueReader(section_name="dummy") + actual = ini_reader.read(io.StringIO(settings)) expected = { - "chap": {"+": ["areaA", "areaB"]}, + "master": "relaxed", + "uc_type": "expansion_fast", + "optimality_gap": 10001.0, + "relative_gap": 1e-06, + "relaxed_optimality_gap": 1e-05, + "max_iteration": 20, + "solver": "Xpress", + "log_level": 3, + "separation_parameter": 0.66, + "batch_size": 102, + "yearly-weights": "", + "additional-constraints": "constraintsFile.txt", + "timelimit": 1000000000000, } - assert actual == expected - - with path.open() as f: - actual_from_bytes = reader.read(f) - assert actual_from_bytes == expected diff --git a/tests/storage/repository/antares_io/writer/test_ini_writer.py b/tests/storage/repository/antares_io/writer/test_ini_writer.py index 7481adc412..b762469b7a 100644 --- a/tests/storage/repository/antares_io/writer/test_ini_writer.py +++ b/tests/storage/repository/antares_io/writer/test_ini_writer.py @@ -3,7 +3,7 @@ import pytest -from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter +from antarest.study.storage.rawstudy.ini_writer import IniWriter @pytest.mark.unit_test diff --git a/tests/storage/repository/filesystem/config/test_config_files.py b/tests/storage/repository/filesystem/config/test_config_files.py index 1b87692900..4f88115291 100644 --- a/tests/storage/repository/filesystem/config/test_config_files.py +++ b/tests/storage/repository/filesystem/config/test_config_files.py @@ -363,7 +363,7 @@ def test_parse_thermal_860(tmp_path: Path, version, caplog) -> None: def test_parse_renewables(tmp_path: Path) -> None: study_path = build_empty_files(tmp_path) - study_path.joinpath("study.antares").write_text("[antares] \n version = 700") + study_path.joinpath("study.antares").write_text("[antares] \n version = 810") ini_path = study_path.joinpath("input/renewables/clusters/fr/list.ini") # Error case: `input/renewables/clusters/fr` directory is missing. 
diff --git a/tests/storage/study_upgrader/test_upgrade_710.py b/tests/storage/study_upgrader/test_upgrade_710.py index 7ec0660135..a389178999 100644 --- a/tests/storage/study_upgrader/test_upgrade_710.py +++ b/tests/storage/study_upgrader/test_upgrade_710.py @@ -1,4 +1,4 @@ -from antarest.study.storage.rawstudy.io.reader import IniReader +from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.study_upgrader import upgrade_710 from tests.storage.study_upgrader.conftest import StudyAssets diff --git a/tests/storage/study_upgrader/test_upgrade_800.py b/tests/storage/study_upgrader/test_upgrade_800.py index 07924cc23a..b2c72fcee1 100644 --- a/tests/storage/study_upgrader/test_upgrade_800.py +++ b/tests/storage/study_upgrader/test_upgrade_800.py @@ -1,4 +1,4 @@ -from antarest.study.storage.rawstudy.io.reader import IniReader +from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.study_upgrader import upgrade_800 from tests.storage.study_upgrader.conftest import StudyAssets diff --git a/tests/storage/study_upgrader/test_upgrade_810.py b/tests/storage/study_upgrader/test_upgrade_810.py index ca84677c41..9639beb94f 100644 --- a/tests/storage/study_upgrader/test_upgrade_810.py +++ b/tests/storage/study_upgrader/test_upgrade_810.py @@ -1,4 +1,4 @@ -from antarest.study.storage.rawstudy.io.reader import IniReader +from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.study_upgrader import upgrade_810 from tests.storage.business.test_study_version_upgrader import are_same_dir from tests.storage.study_upgrader.conftest import StudyAssets diff --git a/tests/storage/study_upgrader/test_upgrade_820.py b/tests/storage/study_upgrader/test_upgrade_820.py index 56b2ae7ac5..730b7c0127 100644 --- a/tests/storage/study_upgrader/test_upgrade_820.py +++ b/tests/storage/study_upgrader/test_upgrade_820.py @@ -1,4 +1,4 @@ -from antarest.study.storage.rawstudy.io.reader import IniReader +from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.study_upgrader import upgrade_820 from tests.storage.business.test_study_version_upgrader import are_same_dir from tests.storage.study_upgrader.conftest import StudyAssets diff --git a/tests/storage/study_upgrader/test_upgrade_830.py b/tests/storage/study_upgrader/test_upgrade_830.py index 29503c9ab8..d0612ef889 100644 --- a/tests/storage/study_upgrader/test_upgrade_830.py +++ b/tests/storage/study_upgrader/test_upgrade_830.py @@ -1,4 +1,4 @@ -from antarest.study.storage.rawstudy.io.reader import IniReader +from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.study_upgrader import upgrade_830 from tests.storage.business.test_study_version_upgrader import are_same_dir from tests.storage.study_upgrader.conftest import StudyAssets diff --git a/tests/storage/study_upgrader/test_upgrade_840.py b/tests/storage/study_upgrader/test_upgrade_840.py index abb981fda5..4f95cef265 100644 --- a/tests/storage/study_upgrader/test_upgrade_840.py +++ b/tests/storage/study_upgrader/test_upgrade_840.py @@ -1,4 +1,4 @@ -from antarest.study.storage.rawstudy.io.reader import IniReader +from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.study_upgrader import upgrade_840 from tests.storage.study_upgrader.conftest import StudyAssets diff --git a/tests/storage/study_upgrader/test_upgrade_850.py b/tests/storage/study_upgrader/test_upgrade_850.py index 42ece33279..362cf1ab50 
100644 --- a/tests/storage/study_upgrader/test_upgrade_850.py +++ b/tests/storage/study_upgrader/test_upgrade_850.py @@ -1,4 +1,4 @@ -from antarest.study.storage.rawstudy.io.reader import IniReader +from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.study_upgrader import upgrade_850 from tests.storage.study_upgrader.conftest import StudyAssets diff --git a/tests/storage/test_service.py b/tests/storage/test_service.py index 60b35e2b5e..b509445052 100644 --- a/tests/storage/test_service.py +++ b/tests/storage/test_service.py @@ -67,6 +67,7 @@ from antarest.study.storage.variantstudy.model.dbmodel import VariantStudy from antarest.study.storage.variantstudy.variant_study_service import VariantStudyService from antarest.worker.archive_worker import ArchiveTaskArgs +from tests.db_statement_recorder import DBStatementRecorder from tests.helpers import with_db_context @@ -116,9 +117,8 @@ def study_to_dto(study: Study) -> StudyMetadataDTO: ) -# noinspection PyArgumentList @pytest.mark.unit_test -def test_study_listing() -> None: +def test_study_listing(db_session: Session) -> None: bob = User(id=2, name="bob") alice = User(id=3, name="alice") @@ -159,9 +159,9 @@ def test_study_listing() -> None: additional_data=StudyAdditionalData(), ) - # Mock - repository = Mock() - repository.get_all.return_value = [a, b, c] + # Add some studies in the database + db_session.add_all([a, b, c]) + db_session.commit() raw_study_service = Mock(spec=RawStudyService) raw_study_service.get_study_information.side_effect = study_to_dto @@ -170,40 +170,59 @@ cache.get.return_value = None config = Config(storage=StorageConfig(workspaces={DEFAULT_WORKSPACE_NAME: WorkspaceConfig()})) + repository = StudyMetadataRepository(cache_service=Mock(spec=ICache), session=db_session) service = build_study_service(raw_study_service, repository, config, cache_service=cache) - studies = service.get_studies_information( - managed=False, - name=None, - workspace=None, - folder=None, - params=RequestParameters(user=JWTUser(id=2, impersonator=2, type="users")), - ) + # use the db recorder to check that: + # 1- retrieving studies information requires only 1 query + # 2- the total number of SQL queries is exactly 1 + with DBStatementRecorder(db_session.bind) as db_recorder: + studies = service.get_studies_information( + managed=False, + name=None, + workspace=None, + folder=None, + params=RequestParameters(user=JWTUser(id=2, impersonator=2, type="users")), + ) + assert len(db_recorder.sql_statements) == 1, str(db_recorder) + # verify that we get the expected studies information expected_result = {e.id: e for e in map(lambda x: study_to_dto(x), [a, c])} assert expected_result == studies cache.get.return_value = {e.id: e for e in map(lambda x: study_to_dto(x), [a, b, c])} - studies = service.get_studies_information( - managed=False, - name=None, - workspace=None, - folder=None, - params=RequestParameters(user=JWTUser(id=2, impersonator=2, type="users")), - ) + # check that: + # 1- retrieving studies information requires no query at all (cache is used) + # 2- the `put` method of `cache` was used once + with DBStatementRecorder(db_session.bind) as db_recorder: + studies = service.get_studies_information( + managed=False, + name=None, + workspace=None, + folder=None, + params=RequestParameters(user=JWTUser(id=2, impersonator=2, type="users")), + ) + assert len(db_recorder.sql_statements) == 0, str(db_recorder) + cache.put.assert_called_once() + # verify that we get the expected studies information assert expected_result == studies - cache.put.assert_called_once() cache.get.return_value = None - studies = service.get_studies_information( - managed=True, - name=None, - workspace=None, - folder=None, - params=RequestParameters(user=JWTUser(id=2, impersonator=2, type="users")), - ) + # use the db recorder to check that: + # 1- retrieving studies information requires only 1 query (cache reset to None) + # 2- the total number of SQL queries is exactly 1 + with DBStatementRecorder(db_session.bind) as db_recorder: + studies = service.get_studies_information( + managed=True, + name=None, + workspace=None, + folder=None, + params=RequestParameters(user=JWTUser(id=2, impersonator=2, type="users")), + ) + assert len(db_recorder.sql_statements) == 1, str(db_recorder) + # verify that we get the expected studies information expected_result = {e.id: e for e in map(lambda x: study_to_dto(x), [a])} assert expected_result == studies
diff --git a/tests/study/business/areas/test_st_storage_management.py b/tests/study/business/areas/test_st_storage_management.py index 87a98d1eaf..61731718b5 100644 --- a/tests/study/business/areas/test_st_storage_management.py +++ b/tests/study/business/areas/test_st_storage_management.py @@ -19,7 +19,7 @@ from antarest.login.model import Group, User from antarest.study.business.areas.st_storage_management import STStorageManager from antarest.study.model import RawStudy, Study, StudyContentStatus -from antarest.study.storage.rawstudy.io.reader import IniReader +from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import STStorageGroup from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.rawstudy.model.filesystem.root.filestudytree import FileStudyTree diff --git a/tests/study/test_repository.py b/tests/study/test_repository.py new file mode 100644 index 0000000000..be1a763602 --- /dev/null +++ b/tests/study/test_repository.py @@ -0,0 +1,68 @@ +import typing as t +from datetime import datetime +from unittest.mock import Mock + +import pytest +from sqlalchemy.orm import Session # type: ignore + +from antarest.core.interfaces.cache import ICache +from antarest.study.model import DEFAULT_WORKSPACE_NAME, RawStudy +from antarest.study.repository import StudyMetadataRepository +from antarest.study.storage.variantstudy.model.dbmodel import VariantStudy +from tests.db_statement_recorder import DBStatementRecorder + + +@pytest.mark.parametrize( + "managed, studies_ids, exists, expected_ids", + [ + (None, None, False, {"1", "2", "3", "4", "5", "6", "7", "8"}), + (None, None, True, {"1", "2", "3", "4", "7", "8"}), + (None, [1, 3, 5, 7], False, {"1", "3", "5", "7"}), + (None, [1, 3, 5, 7], True, {"1", "3", "7"}), + (True, None, False, {"1", "2", "3", "4", "5", "8"}), + (True, None, True, {"1", "2", "3", "4", "8"}), + (True, [1, 3, 5, 7], False, {"1", "3", "5"}), + (True, [1, 3, 5, 7], True, {"1", "3"}), + (True, [2, 4, 6, 8], True, {"2", "4", "8"}), + (False, None, False, {"6", "7"}), + (False, None, True, {"7"}), + (False, [1, 3, 5, 7], False, {"7"}), + (False, [1, 3, 5, 7], True, {"7"}), + ], +) +def test_repository_get_all( + db_session: Session, + managed: t.Union[bool, None], + studies_ids: t.Union[t.List[str], None], + exists: bool, + expected_ids: set, +): + test_workspace = "test-repository" + icache: Mock = Mock(spec=ICache) + repository = StudyMetadataRepository(cache_service=icache, session=db_session) + +
study_1 = VariantStudy(id=1) + study_2 = VariantStudy(id=2) + study_3 = VariantStudy(id=3) + study_4 = VariantStudy(id=4) + study_5 = RawStudy(id=5, missing=datetime.now(), workspace=DEFAULT_WORKSPACE_NAME) + study_6 = RawStudy(id=6, missing=datetime.now(), workspace=test_workspace) + study_7 = RawStudy(id=7, missing=None, workspace=test_workspace) + study_8 = RawStudy(id=8, missing=None, workspace=DEFAULT_WORKSPACE_NAME) + + db_session.add_all([study_1, study_2, study_3, study_4, study_5, study_6, study_7, study_8]) + db_session.commit() + + # use the db recorder to check that: + # 1- retrieving all studies requires only 1 query + # 2- accessing studies attributes does not require additional queries to the db + # 3- the total number of queries is exactly 1 + with DBStatementRecorder(db_session.bind) as db_recorder: + all_studies = repository.get_all(managed=managed, studies_ids=studies_ids, exists=exists) + _ = [s.owner for s in all_studies] + _ = [s.groups for s in all_studies] + _ = [s.additional_data for s in all_studies] + assert len(db_recorder.sql_statements) == 1, str(db_recorder) + + if expected_ids is not None: + assert set([s.id for s in all_studies]) == expected_ids diff --git a/tests/study/test_service.py b/tests/study/test_service.py new file mode 100644 index 0000000000..274525021f --- /dev/null +++ b/tests/study/test_service.py @@ -0,0 +1,39 @@ +from pathlib import Path + +import pytest + +from antarest.study.service import get_disk_usage + + +def test_get_disk_usage__nominal_case(tmp_path: Path) -> None: + """ + This test ensures that the 'get_disk_usage' function handles a typical directory structure correctly. + """ + tmp_path.joinpath("input").mkdir() + ini_data = b"[config]\nkey = value" + tmp_path.joinpath("input/params.ini").write_bytes(ini_data) + tmp_path.joinpath("input/series").mkdir() + series_data = b"10\n20\n" + tmp_path.joinpath("input/series/data.tsv").write_bytes(series_data) + assert get_disk_usage(tmp_path) == len(ini_data) + len(series_data) + + +@pytest.mark.parametrize("suffix", [".zip", ".7z", ".ZIP"]) +def test_get_disk_usage__archive(tmp_path: Path, suffix: str) -> None: + """ + This test ensures that the 'get_disk_usage' function correctly handles archive files (.zip, .7z). + """ + compressed_path = tmp_path.joinpath("study").with_suffix(suffix) + compressed_data = b"dummy archive content" + compressed_path.write_bytes(compressed_data) + assert get_disk_usage(tmp_path) == len(compressed_data) + + +def test_get_disk_usage__unknown_format(tmp_path: Path) -> None: + """ + This test ensures that the 'get_disk_usage' function handles unknown directory formats appropriately.
+ """ + path = tmp_path.joinpath("study.dat") + path.touch() + with pytest.raises(NotADirectoryError): + get_disk_usage(path) diff --git a/tests/variantstudy/model/command/test_create_link.py b/tests/variantstudy/model/command/test_create_link.py index 133d8d2b7d..413e97038d 100644 --- a/tests/variantstudy/model/command/test_create_link.py +++ b/tests/variantstudy/model/command/test_create_link.py @@ -5,7 +5,7 @@ from pydantic import ValidationError from antarest.study.common.default_values import FilteringOptions, LinkProperties -from antarest.study.storage.rawstudy.io.reader import IniReader +from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.business.command_reverter import CommandReverter diff --git a/tests/variantstudy/model/command/test_manage_binding_constraints.py b/tests/variantstudy/model/command/test_manage_binding_constraints.py index 3387db8e6d..c28b50b69d 100644 --- a/tests/variantstudy/model/command/test_manage_binding_constraints.py +++ b/tests/variantstudy/model/command/test_manage_binding_constraints.py @@ -2,7 +2,7 @@ import numpy as np -from antarest.study.storage.rawstudy.io.reader import IniReader +from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.business.command_extractor import CommandExtractor diff --git a/tests/variantstudy/model/command/test_manage_district.py b/tests/variantstudy/model/command/test_manage_district.py index fee8be82fe..78d30bb19f 100644 --- a/tests/variantstudy/model/command/test_manage_district.py +++ b/tests/variantstudy/model/command/test_manage_district.py @@ -1,4 +1,4 @@ -from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader +from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.rawstudy.model.filesystem.config.files import build from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy @@ -55,7 +55,7 @@ def test_manage_district(empty_study: FileStudy, command_context: CommandContext study_data=empty_study, ) assert output_d1.status - sets_config = MultipleSameKeysIniReader(["+", "-"]).read(empty_study.config.study_path / "input/areas/sets.ini") + sets_config = IniReader(["+", "-"]).read(empty_study.config.study_path / "input/areas/sets.ini") set_config = sets_config.get("two added zone") assert set(set_config["+"]) == {area1_id, area2_id} assert set_config["output"] @@ -71,7 +71,7 @@ def test_manage_district(empty_study: FileStudy, command_context: CommandContext study_data=empty_study, ) assert output_d2.status - sets_config = MultipleSameKeysIniReader(["+", "-"]).read(empty_study.config.study_path / "input/areas/sets.ini") + sets_config = IniReader(["+", "-"]).read(empty_study.config.study_path / "input/areas/sets.ini") set_config = sets_config.get("one subtracted zone") assert set_config["-"] == [area1_id] assert set_config["apply-filter"] == "add-all" @@ -85,7 +85,7 @@ def test_manage_district(empty_study: FileStudy, command_context: CommandContext output_ud2 = 
update_district2_command.apply(study_data=empty_study) assert output_ud2.status - sets_config = MultipleSameKeysIniReader(["+", "-"]).read(empty_study.config.study_path / "input/areas/sets.ini") + sets_config = IniReader(["+", "-"]).read(empty_study.config.study_path / "input/areas/sets.ini") set_config = sets_config.get("one subtracted zone") assert set_config["+"] == [area2_id] assert set_config["apply-filter"] == "remove-all" @@ -100,7 +100,7 @@ def test_manage_district(empty_study: FileStudy, command_context: CommandContext ) assert output_d3.status assert output_d2.status - sets_config = MultipleSameKeysIniReader(["+", "-"]).read(empty_study.config.study_path / "input/areas/sets.ini") + sets_config = IniReader(["+", "-"]).read(empty_study.config.study_path / "input/areas/sets.ini") set_config = sets_config.get("empty district without output") assert not set_config["output"] @@ -115,13 +115,13 @@ def test_manage_district(empty_study: FileStudy, command_context: CommandContext remove_district3_command: ICommand = RemoveDistrict( id="empty district without output", command_context=command_context ) - sets_config = MultipleSameKeysIniReader(["+", "-"]).read(empty_study.config.study_path / "input/areas/sets.ini") + sets_config = IniReader(["+", "-"]).read(empty_study.config.study_path / "input/areas/sets.ini") assert len(sets_config.keys()) == 4 remove_output_d3 = remove_district3_command.apply( study_data=empty_study, ) assert remove_output_d3.status - sets_config = MultipleSameKeysIniReader(["+", "-"]).read(empty_study.config.study_path / "input/areas/sets.ini") + sets_config = IniReader(["+", "-"]).read(empty_study.config.study_path / "input/areas/sets.ini") assert len(sets_config.keys()) == 3 diff --git a/tests/variantstudy/model/command/test_update_config.py b/tests/variantstudy/model/command/test_update_config.py index 0b78fe41bd..99c71bd6d7 100644 --- a/tests/variantstudy/model/command/test_update_config.py +++ b/tests/variantstudy/model/command/test_update_config.py @@ -3,7 +3,7 @@ import pytest -from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader +from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError @@ -34,7 +34,7 @@ def test_update_config(empty_study: FileStudy, command_context: CommandContext): ) output = update_settings_command.apply(empty_study) assert output.status - generaldata = MultipleSameKeysIniReader().read(study_path / "settings/generaldata.ini") + generaldata = IniReader().read(study_path / "settings/generaldata.ini") assert generaldata["optimization"]["simplex-range"] == "day" assert generaldata["optimization"]["transmission-capacities"] @@ -45,7 +45,7 @@ def test_update_config(empty_study: FileStudy, command_context: CommandContext): ) output = update_settings_command.apply(empty_study) assert output.status - area_config = MultipleSameKeysIniReader().read(study_path / f"input/areas/{area1_id}/optimization.ini") + area_config = IniReader().read(study_path / f"input/areas/{area1_id}/optimization.ini") assert not area_config["nodal optimization"]["other-dispatchable-power"] # test UpdateConfig with byte object which is necessary with the API PUT /v1/studies/{uuid}/raw @@ -56,7 +56,7 @@ def test_update_config(empty_study: FileStudy, command_context: CommandContext): 
command_context=command_context, ) command.apply(empty_study) - layers = MultipleSameKeysIniReader().read(study_path / "layers/layers.ini") + layers = IniReader().read(study_path / "layers/layers.ini") assert layers == {"first_layer": {"0": "Nothing"}} new_data = json.dumps({"1": False}).encode("utf-8") command = UpdateConfig( @@ -65,7 +65,7 @@ def test_update_config(empty_study: FileStudy, command_context: CommandContext): command_context=command_context, ) command.apply(empty_study) - layers = MultipleSameKeysIniReader().read(study_path / "layers/layers.ini") + layers = IniReader().read(study_path / "layers/layers.ini") assert layers == {"first_layer": {"1": False}} diff --git a/webapp/package-lock.json b/webapp/package-lock.json index 6ce088d97a..fdeb700d83 100644 --- a/webapp/package-lock.json +++ b/webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "antares-web", - "version": "2.16.1", + "version": "2.16.2", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "antares-web", - "version": "2.16.1", + "version": "2.16.2", "dependencies": { "@emotion/react": "11.11.1", "@emotion/styled": "11.11.0", @@ -43,6 +43,7 @@ "i18next-xhr-backend": "3.2.2", "immer": "10.0.3", "js-cookie": "3.0.5", + "jsoneditor": "9.10.4", "jwt-decode": "3.1.2", "lodash": "4.17.21", "material-react-table": "2.0.5", @@ -82,9 +83,11 @@ "xml-js": "1.6.11" }, "devDependencies": { + "@babel/plugin-proposal-private-property-in-object": "7.21.11", "@total-typescript/ts-reset": "0.5.1", "@types/debug": "4.1.9", "@types/js-cookie": "3.0.4", + "@types/jsoneditor": "9.9.5", "@types/lodash": "4.14.199", "@types/ramda": "0.29.5", "@types/react-beautiful-dnd": "13.1.5", @@ -107,6 +110,7 @@ "eslint-plugin-react": "7.33.2", "eslint-plugin-react-hooks": "4.6.0", "husky": "8.0.3", + "immutable": "3.8.2", "jest-sonar-reporter": "2.0.0", "prettier": "3.0.3", "process": "0.11.10", @@ -875,9 +879,17 @@ } }, "node_modules/@babel/plugin-proposal-private-property-in-object": { - "version": "7.21.0-placeholder-for-preset-env.2", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz", - "integrity": "sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w==", + "version": "7.21.11", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.11.tgz", + "integrity": "sha512-0QZ8qP/3RLDVBwBFoWAwCtgcDZJVwA5LUJRZU8x2YFfKNuFq161wK3cuGrALu5yiPu+vzwTAg/sMWVNeWeNyaw==", + "deprecated": "This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. 
Please use @babel/plugin-transform-private-property-in-object instead.", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.18.6", + "@babel/helper-create-class-features-plugin": "^7.21.0", + "@babel/helper-plugin-utils": "^7.20.2", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5" + }, "engines": { "node": ">=6.9.0" }, @@ -1727,12 +1739,12 @@ } }, "node_modules/@babel/plugin-transform-private-property-in-object": { - "version": "7.22.11", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.22.11.tgz", - "integrity": "sha512-sSCbqZDBKHetvjSwpyWzhuHkmW5RummxJBVbYLkGkaiTOWGxml7SXt0iWa03bzxFIx7wOj3g/ILRd0RcJKBeSQ==", + "version": "7.23.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.23.4.tgz", + "integrity": "sha512-9G3K1YqTq3F4Vt88Djx1UZ79PDyj+yKRnUy7cZGSMe+a7jkwD259uKKuUzQlPkGam7R+8RJwh5z4xO27fA1o2A==", "dependencies": { "@babel/helper-annotate-as-pure": "^7.22.5", - "@babel/helper-create-class-features-plugin": "^7.22.11", + "@babel/helper-create-class-features-plugin": "^7.22.15", "@babel/helper-plugin-utils": "^7.22.5", "@babel/plugin-syntax-private-property-in-object": "^7.14.5" }, @@ -2128,6 +2140,17 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/preset-env/node_modules/@babel/plugin-proposal-private-property-in-object": { + "version": "7.21.0-placeholder-for-preset-env.2", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz", + "integrity": "sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w==", + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, "node_modules/@babel/preset-env/node_modules/semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", @@ -2736,6 +2759,26 @@ "url": "https://opencollective.com/eslint" } }, + "node_modules/@eslint/eslintrc/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@eslint/eslintrc/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, "node_modules/@eslint/js": { "version": "8.50.0", "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.50.0.tgz", @@ -4086,6 +4129,11 @@ "@sinonjs/commons": "^1.7.0" } }, + "node_modules/@sphinxxxx/color-conversion": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/@sphinxxxx/color-conversion/-/color-conversion-2.2.2.tgz", + "integrity": "sha512-XExJS3cLqgrmNBIP3bBw6+1oQ1ksGjFh0+oClDKFYpCCqx/hlqwWO5KO/S63fzUo67SxI9dMrF0y5T/Ey7h8Zw==" + }, "node_modules/@surma/rollup-plugin-off-main-thread": { "version": "2.2.3", "resolved": 
"https://registry.npmjs.org/@surma/rollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-2.2.3.tgz", @@ -4825,6 +4873,12 @@ "url": "https://opencollective.com/turf" } }, + "node_modules/@types/ace": { + "version": "0.0.52", + "resolved": "https://registry.npmjs.org/@types/ace/-/ace-0.0.52.tgz", + "integrity": "sha512-YPF9S7fzpuyrxru+sG/rrTpZkC6gpHBPF14W3x70kqVOD+ks6jkYLapk4yceh36xej7K4HYxcyz9ZDQ2lTvwgQ==", + "dev": true + }, "node_modules/@types/babel__core": { "version": "7.20.2", "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.2.tgz", @@ -5151,6 +5205,14 @@ "immutable": "~3.7.4" } }, + "node_modules/@types/draft-js/node_modules/immutable": { + "version": "3.7.6", + "resolved": "https://registry.npmjs.org/immutable/-/immutable-3.7.6.tgz", + "integrity": "sha512-AizQPcaofEtO11RZhPPHBOJRdo/20MKQF9mBLnVkBoyHi1/zXK8fzVdnEpSV9gxqtnh6Qomfp3F0xT5qP/vThw==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/@types/draftjs-to-html": { "version": "0.8.2", "resolved": "https://registry.npmjs.org/@types/draftjs-to-html/-/draftjs-to-html-0.8.2.tgz", @@ -5289,6 +5351,38 @@ "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==" }, + "node_modules/@types/jsoneditor": { + "version": "9.9.5", + "resolved": "https://registry.npmjs.org/@types/jsoneditor/-/jsoneditor-9.9.5.tgz", + "integrity": "sha512-+Wex7QCirPcG90WA8/CmvDO21KUjz63/G7Yk52Yx/NhWHw5DyeET/L+wjZHAeNeNCCnMOTEtVX5gc3F4UXwXMQ==", + "dev": true, + "dependencies": { + "@types/ace": "*", + "ajv": "^6.12.0" + } + }, + "node_modules/@types/jsoneditor/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@types/jsoneditor/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, "node_modules/@types/lodash": { "version": "4.14.199", "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.199.tgz", @@ -6091,6 +6185,11 @@ "node": ">= 0.6" } }, + "node_modules/ace-builds": { + "version": "1.32.2", + "resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.32.2.tgz", + "integrity": "sha512-mnJAc803p+7eeDt07r6XI7ufV7VdkpPq4gJZT8Jb3QsowkaBTVy4tdBgPrVT0WbXLm0toyEQXURKSVNj/7dfJQ==" + }, "node_modules/acorn": { "version": "8.10.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", @@ -6178,13 +6277,13 @@ } }, "node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", "dependencies": { "fast-deep-equal": "^3.1.1", - 
"fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", "uri-js": "^4.2.2" }, "funding": { @@ -6208,34 +6307,6 @@ } } }, - "node_modules/ajv-formats/node_modules/ajv": { - "version": "8.12.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", - "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/ajv-formats/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" - }, - "node_modules/ajv-keywords": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", - "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", - "peerDependencies": { - "ajv": "^6.9.1" - } - }, "node_modules/almost-equal": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/almost-equal/-/almost-equal-1.1.0.tgz", @@ -6702,6 +6773,34 @@ "webpack": ">=2" } }, + "node_modules/babel-loader/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/babel-loader/node_modules/ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "peerDependencies": { + "ajv": "^6.9.1" + } + }, + "node_modules/babel-loader/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, "node_modules/babel-loader/node_modules/schema-utils": { "version": "2.7.1", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz", @@ -8295,21 +8394,6 @@ } } }, - "node_modules/css-minimizer-webpack-plugin/node_modules/ajv": { - "version": "8.12.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", - "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, "node_modules/css-minimizer-webpack-plugin/node_modules/ajv-keywords": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", @@ -8321,11 +8405,6 @@ "ajv": "^8.8.2" } }, - 
"node_modules/css-minimizer-webpack-plugin/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" - }, "node_modules/css-minimizer-webpack-plugin/node_modules/schema-utils": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.2.0.tgz", @@ -9472,6 +9551,14 @@ "react-dom": "^15.0.2 || ^16.0.0-rc || ^16.0.0 || ^17.0.0 || ^18.0.0" } }, + "node_modules/draft-convert/node_modules/immutable": { + "version": "3.7.6", + "resolved": "https://registry.npmjs.org/immutable/-/immutable-3.7.6.tgz", + "integrity": "sha512-AizQPcaofEtO11RZhPPHBOJRdo/20MKQF9mBLnVkBoyHi1/zXK8fzVdnEpSV9gxqtnh6Qomfp3F0xT5qP/vThw==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/draft-js": { "version": "0.11.7", "resolved": "https://registry.npmjs.org/draft-js/-/draft-js-0.11.7.tgz", @@ -9486,6 +9573,14 @@ "react-dom": ">=0.14.0" } }, + "node_modules/draft-js/node_modules/immutable": { + "version": "3.7.6", + "resolved": "https://registry.npmjs.org/immutable/-/immutable-3.7.6.tgz", + "integrity": "sha512-AizQPcaofEtO11RZhPPHBOJRdo/20MKQF9mBLnVkBoyHi1/zXK8fzVdnEpSV9gxqtnh6Qomfp3F0xT5qP/vThw==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/draftjs-to-html": { "version": "0.9.1", "resolved": "https://registry.npmjs.org/draftjs-to-html/-/draftjs-to-html-0.9.1.tgz", @@ -10723,21 +10818,6 @@ "webpack": "^5.0.0" } }, - "node_modules/eslint-webpack-plugin/node_modules/ajv": { - "version": "8.12.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", - "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, "node_modules/eslint-webpack-plugin/node_modules/ajv-keywords": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", @@ -10762,11 +10842,6 @@ "node": "^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0" } }, - "node_modules/eslint-webpack-plugin/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" - }, "node_modules/eslint-webpack-plugin/node_modules/schema-utils": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.2.0.tgz", @@ -10799,6 +10874,26 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, + "node_modules/eslint/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/eslint/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": 
"https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, "node_modules/espree": { "version": "9.6.1", "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", @@ -11556,6 +11651,29 @@ } } }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "peerDependencies": { + "ajv": "^6.9.1" + } + }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/cosmiconfig": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-6.0.0.tgz", @@ -11585,6 +11703,11 @@ "node": ">=10" } }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/schema-utils": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.0.tgz", @@ -12888,11 +13011,11 @@ } }, "node_modules/immutable": { - "version": "3.7.6", - "resolved": "https://registry.npmjs.org/immutable/-/immutable-3.7.6.tgz", - "integrity": "sha512-AizQPcaofEtO11RZhPPHBOJRdo/20MKQF9mBLnVkBoyHi1/zXK8fzVdnEpSV9gxqtnh6Qomfp3F0xT5qP/vThw==", + "version": "3.8.2", + "resolved": "https://registry.npmjs.org/immutable/-/immutable-3.8.2.tgz", + "integrity": "sha512-15gZoQ38eYjEjxkorfbcgBKBL6R7T459OuK+CpcWt7O3KF4uPCx2tD0uFETlUDIyo+1789crbMhTvQBSR5yBMg==", "engines": { - "node": ">=0.8.0" + "node": ">=0.10.0" } }, "node_modules/import-fresh": { @@ -13685,6 +13808,11 @@ "node": ">=10" } }, + "node_modules/javascript-natural-sort": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/javascript-natural-sort/-/javascript-natural-sort-0.7.1.tgz", + "integrity": "sha512-nO6jcEfZWQXDhOiBtG2KvKyEptz7RVbpGP4vTD2hLBdmNQSsCiicO2Ioinv6UI4y9ukqnBpy+XZ9H6uLNgJTlw==" + }, "node_modules/jest": { "version": "27.5.1", "resolved": "https://registry.npmjs.org/jest/-/jest-27.5.1.tgz", @@ -14545,6 +14673,14 @@ "jiti": "bin/jiti.js" } }, + "node_modules/jmespath": { + "version": "0.16.0", + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz", + "integrity": "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==", + "engines": { + "node": ">= 0.6.0" + } + }, "node_modules/js-cookie": { "version": "3.0.5", "resolved": "https://registry.npmjs.org/js-cookie/-/js-cookie-3.0.5.tgz", @@ -14683,9 +14819,14 @@ "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==" }, 
"node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" + }, + "node_modules/json-source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/json-source-map/-/json-source-map-0.6.1.tgz", + "integrity": "sha512-1QoztHPsMQqhDq0hlXY5ZqcEdUzxQEIxgFkKl4WUp2pgShObl+9ovi4kRh2TfvAfxAoHOJ9vIMEqk3k4iex7tg==" }, "node_modules/json-stable-stringify": { "version": "1.0.2", @@ -14714,6 +14855,42 @@ "node": ">=6" } }, + "node_modules/jsoneditor": { + "version": "9.10.4", + "resolved": "https://registry.npmjs.org/jsoneditor/-/jsoneditor-9.10.4.tgz", + "integrity": "sha512-tr7dSARLHM65OQTE81zo5fQAjLzijLl+u/z+pcJaeaFzgkey59Gi8TDCYIejQ/plvm6RLVmuEeqgDhsQdayhiQ==", + "dependencies": { + "ace-builds": "^1.31.1", + "ajv": "^6.12.6", + "javascript-natural-sort": "^0.7.1", + "jmespath": "^0.16.0", + "json-source-map": "^0.6.1", + "jsonrepair": "^3.4.0", + "mobius1-selectr": "^2.4.13", + "picomodal": "^3.0.0", + "vanilla-picker": "^2.12.2" + } + }, + "node_modules/jsoneditor/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/jsoneditor/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, "node_modules/jsonfile": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", @@ -14868,6 +15045,14 @@ "node": ">=0.10.0" } }, + "node_modules/jsonrepair": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/jsonrepair/-/jsonrepair-3.5.0.tgz", + "integrity": "sha512-SavvDsUP9Xnqo2MoC6Wl6zNyX3f+I5199hRbXBtAITyP2NTPyAgyx5xM0bgcIljRjzsIvOBANbgfWe8XXlyeLA==", + "bin": { + "jsonrepair": "bin/cli.js" + } + }, "node_modules/jsx-ast-utils": { "version": "3.3.5", "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz", @@ -15396,21 +15581,6 @@ "webpack": "^5.0.0" } }, - "node_modules/mini-css-extract-plugin/node_modules/ajv": { - "version": "8.12.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", - "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, "node_modules/mini-css-extract-plugin/node_modules/ajv-keywords": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", @@ -15422,11 +15592,6 @@ 
"ajv": "^8.8.2" } }, - "node_modules/mini-css-extract-plugin/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" - }, "node_modules/mini-css-extract-plugin/node_modules/schema-utils": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.2.0.tgz", @@ -15502,6 +15667,11 @@ "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", "optional": true }, + "node_modules/mobius1-selectr": { + "version": "2.4.13", + "resolved": "https://registry.npmjs.org/mobius1-selectr/-/mobius1-selectr-2.4.13.tgz", + "integrity": "sha512-Mk9qDrvU44UUL0EBhbAA1phfQZ7aMZPjwtL7wkpiBzGh8dETGqfsh50mWoX9EkjDlkONlErWXArHCKfoxVg0Bw==" + }, "node_modules/moment": { "version": "2.29.4", "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz", @@ -16476,6 +16646,11 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/picomodal": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/picomodal/-/picomodal-3.0.0.tgz", + "integrity": "sha512-FoR3TDfuLlqUvcEeK5ifpKSVVns6B4BQvc8SDF6THVMuadya6LLtji0QgUDSStw0ZR2J7I6UGi5V2V23rnPWTw==" + }, "node_modules/pify": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", @@ -19700,6 +19875,34 @@ "url": "https://opencollective.com/webpack" } }, + "node_modules/schema-utils/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/schema-utils/node_modules/ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "peerDependencies": { + "ajv": "^6.9.1" + } + }, + "node_modules/schema-utils/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, "node_modules/screenfull": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/screenfull/-/screenfull-5.2.0.tgz", @@ -22151,6 +22354,14 @@ "node": ">= 8" } }, + "node_modules/vanilla-picker": { + "version": "2.12.2", + "resolved": "https://registry.npmjs.org/vanilla-picker/-/vanilla-picker-2.12.2.tgz", + "integrity": "sha512-dk0gNeNL9fQFGd1VEhNDQfFlbCqAiksRh1H2tVPlavkH88n/a/y30rXi9PPKrYPTK5kEfPO4xcldt4ts/1wIAg==", + "dependencies": { + "@sphinxxxx/color-conversion": "^2.2.2" + } + }, "node_modules/vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", @@ -22328,21 +22539,6 @@ "webpack": "^4.0.0 || ^5.0.0" } }, - "node_modules/webpack-dev-middleware/node_modules/ajv": { - "version": "8.12.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", - "integrity": 
"sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, "node_modules/webpack-dev-middleware/node_modules/ajv-keywords": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", @@ -22354,11 +22550,6 @@ "ajv": "^8.8.2" } }, - "node_modules/webpack-dev-middleware/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" - }, "node_modules/webpack-dev-middleware/node_modules/schema-utils": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.2.0.tgz", @@ -22435,21 +22626,6 @@ } } }, - "node_modules/webpack-dev-server/node_modules/ajv": { - "version": "8.12.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", - "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, "node_modules/webpack-dev-server/node_modules/ajv-keywords": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", @@ -22461,11 +22637,6 @@ "ajv": "^8.8.2" } }, - "node_modules/webpack-dev-server/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" - }, "node_modules/webpack-dev-server/node_modules/schema-utils": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.2.0.tgz", @@ -22778,21 +22949,6 @@ "node": ">=10.0.0" } }, - "node_modules/workbox-build/node_modules/ajv": { - "version": "8.12.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", - "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, "node_modules/workbox-build/node_modules/fs-extra": { "version": "9.1.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", @@ -22807,11 +22963,6 @@ "node": ">=10" } }, - "node_modules/workbox-build/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" - }, "node_modules/workbox-build/node_modules/source-map": { "version": "0.8.0-beta.0", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.8.0-beta.0.tgz", diff --git a/webapp/package.json b/webapp/package.json index 6b687cb3ae..1fd7eea24d 100644 
--- a/webapp/package.json +++ b/webapp/package.json @@ -1,6 +1,6 @@ { "name": "antares-web", - "version": "2.16.1", + "version": "2.16.2", "private": true, "engines": { "node": "18.16.1" @@ -41,6 +41,7 @@ "i18next-xhr-backend": "3.2.2", "immer": "10.0.3", "js-cookie": "3.0.5", + "jsoneditor": "9.10.4", "jwt-decode": "3.1.2", "lodash": "4.17.21", "material-react-table": "2.0.5", @@ -102,9 +103,11 @@ "proxy": "http://localhost:8080", "homepage": "/", "devDependencies": { + "@babel/plugin-proposal-private-property-in-object": "7.21.11", "@total-typescript/ts-reset": "0.5.1", "@types/debug": "4.1.9", "@types/js-cookie": "3.0.4", + "@types/jsoneditor": "9.9.5", "@types/lodash": "4.14.199", "@types/ramda": "0.29.5", "@types/react-beautiful-dnd": "13.1.5", @@ -127,6 +130,7 @@ "eslint-plugin-react": "7.33.2", "eslint-plugin-react-hooks": "4.6.0", "husky": "8.0.3", + "immutable": "3.8.2", "jest-sonar-reporter": "2.0.0", "prettier": "3.0.3", "process": "0.11.10", diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index 5c72efdac2..cd3534f6c6 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -190,8 +190,6 @@ "launcher.autoUnzip": "Automatically unzip", "launcher.xpress": "Xpress (>= 8.3)", "launcher.xpansion.sensitivityMode": "Sensitivity mode", - "launcher.xpansion.versionCpp": "Version C++", - "launcher.xpansion.versionR": "Version R (<= 7.x)", "study.runStudy": "Study launch", "study.otherOptions": "Other options", "study.archiveOutputMode": "Archive mode", @@ -268,13 +266,6 @@ "study.modelization.links.matrix.columns.loopFlow": "Loop flow", "study.modelization.links.matrix.columns.pShiftMin": "P.Shift Min", "study.modelization.links.matrix.columns.pShiftMax": "P.Shift Max", - "study.modelization.tableMode": "Table Mode", - "study.modelization.tableMode.template.economicOpt": "Economic Opt.", - "study.modelization.tableMode.template.geographicTrimmingAreas": "Geographic Trimming (areas)", - "study.modelization.tableMode.template.geographicTrimmingLinks": "Geographic Trimming (links)", - "study.modelization.tableMode.dialog.add.title": "Add table", - "study.modelization.tableMode.dialog.edit.title": "Edit table", - "study.modelization.tableMode.dialog.delete.text": "Are you sure you want to delete '{{0}}' table?", "study.configuration.general.legend.simulation": "Simulation", "study.configuration.general.legend.calendar": "Calendar", "study.configuration.general.legend.monteCarloScenarios": "Monte-Carlo Scenarios", @@ -339,7 +330,8 @@ "study.configuration.optimization.exportMps": "Export MPS", "study.configuration.optimization.unfeasibleProblemBehavior": "Unfeasible problem behavior", "study.configuration.optimization.simplexOptimizationRange": "Simplex optimization range", - "study.configuration.adequacyPatch.legend.general": "General", + "study.configuration.adequacyPatch.tab.general": "General", + "study.configuration.adequacyPatch.tab.perimeter": "Perimeter", "study.configuration.adequacyPatch.legend.localMatchingRule": "Local matching rule", "study.configuration.adequacyPatch.legend.curtailmentSharing": "Curtailment sharing", "study.configuration.adequacyPatch.legend.advanced": "Advanced", @@ -376,6 +368,9 @@ "study.configuration.advancedParameters.unitCommitmentMode": "Unit commitment mode", "study.configuration.advancedParameters.simulationCores": "Simulation cores", "study.configuration.advancedParameters.renewableGenerationModeling": "Renewable generation modeling", + "study.configuration.economicOpt": "Economic 
Opt.", + "study.configuration.geographicTrimmingAreas": "Geographic Trimming (areas)", + "study.configuration.geographicTrimmingLinks": "Geographic Trimming (links)", "study.modelization.properties": "Properties", "study.modelization.properties.posX": "Position X", "study.modelization.properties.posY": "Position Y", @@ -496,6 +491,10 @@ "study.modelization.bindingConst.offset": "Offset", "study.modelization.bindingConst.question.deleteConstraintTerm": "Are you sure you want to delete this constraint term?", "study.modelization.bindingConst.question.deleteBindingConstraint": "Are you sure you want to delete this binding constraint?", + "study.tableMode": "Table Mode", + "study.tableMode.dialog.add.title": "Add table", + "study.tableMode.dialog.edit.title": "Edit table", + "study.tableMode.dialog.delete.text": "Are you sure you want to delete '{{name}}' table?", "study.results.mc": "Monte-Carlo", "study.results.display": "Display", "study.results.temporality": "Temporality", @@ -503,7 +502,6 @@ "study.error.addConstraintTerm": "Failed to add constraint term", "study.error.missingData": "Error: {{0}} and/or {{1}} is missing", "study.error.termAlreadyExist": "This term already exist", - "study.error.addBindingConst": "Failed to create constraint", "study.error.addCluster": "Failed to add cluster", "study.error.deleteCluster": "Failed to delete cluster", "study.error.deleteConstraint": "Failed to delete constraint", @@ -648,30 +646,27 @@ "xpansion.indirectAlreadyLinkProfile": "Indirect already installed link profile", "xpansion.projection": "Projections", "xpansion.capex": "Compute CAPEX", - "xpansion.epsilon": "Maximum gap with the optimal solution", + "xpansion.epsilon": "Maximum Gap with the Optimal Solution", "xpansion.ucType": "UC Type", - "xpansion.master": "Master", - "xpansion.optimalityGap": "Optimality gap", - "xpansion.maxIteration": "Max iteration", - "xpansion.yearlyWeight": "Yearly weight", - "xpansion.additionalConstraints": "Additional constraints", - "xpansion.relaxedOptimalityGap": "Relaxed optimality gap", - "xpansion.cutType": "Cut type", - "xpansion.amplSolver": "Ampl solver", - "xpansion.amplPresolve": "Ampl presolve", - "xpansion.amplSolverBoundsFrequency": "Ampl solve bounds frequency", - "xpansion.relativeGap": "Relative gap", - "xpansion.batchSize": "Batch size", + "xpansion.master": "Master Problem", + "xpansion.optimalityGap": "Absolute Optimality Gap", + "xpansion.maxIteration": "Max Iterations Count", + "xpansion.yearlyWeight": "Yearly Weight", + "xpansion.additionalConstraints": "Additional Constraints", + "xpansion.relaxedOptimalityGap": "Relaxed Optimality Gap", + "xpansion.relativeGap": "Relative Optimality Gap", + "xpansion.batchSize": "Batch Size", "xpansion.solver": "Solver", - "xpansion.timeLimit": "Time limit (in hours)", - "xpansion.logLevel": "Log level", - "xpansion.separationParameter": "Separation parameter", + "xpansion.timeLimit": "Time Limit (in hours)", + "xpansion.logLevel": "Log Level", + "xpansion.separationParameter": "Separation Parameter", "xpansion.constraints": "Constraints", "xpansion.capacities": "Capacities", "xpansion.weights": "Weights", - "xpansion.fileName": "File name", + "xpansion.fileName": "File Name", "xpansion.options": "Options", - "xpansion.extra": "Extra", + "xpansion.optimization": "Optimization", + "xpansion.extra": "Modeling Add-ons", "xpansion.sensitivity": "Sensitivity mode", "xpansion.newXpansionConfig": "New Xpansion configuration", "xpansion.candidates": "Candidates", diff --git a/webapp/public/locales/fr/main.json 
b/webapp/public/locales/fr/main.json index 05c8e0a786..596c5d5c94 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -190,14 +190,11 @@ "launcher.autoUnzip": "Dézippage automatique", "launcher.xpress": "Xpress (>= 8.3)", "launcher.xpansion.sensitivityMode": "Analyse de sensibilité", - "launcher.xpansion.versionCpp": "Version C++", - "launcher.xpansion.versionR": "Version R (<= 7.x)", "study.runStudy": "Lancement d'étude", "study.otherOptions": "Autres options", "study.archiveOutputMode": "Mode archivé", "study.postProcessing": "Post-traitement", "study.timeLimit": "Limite de temps (h)", - "study.timeLimitHelper": "(heures) max: {{max}}h", "study.nbCpu": "Nombre de coeurs", "study.clusterLoad": "Charge du cluster", "study.synthesis": "Synthèse", @@ -269,13 +266,6 @@ "study.modelization.links.matrix.columns.loopFlow": "Loop flow", "study.modelization.links.matrix.columns.pShiftMin": "P.Shift Min", "study.modelization.links.matrix.columns.pShiftMax": "P.Shift Max", - "study.modelization.tableMode": "Table Mode", - "study.modelization.tableMode.template.economicOpt": "Options économiques", - "study.modelization.tableMode.template.geographicTrimmingAreas": "Filtre géographique (zones)", - "study.modelization.tableMode.template.geographicTrimmingLinks": "Filtre géographique (liens)", - "study.modelization.tableMode.dialog.add.title": "Ajouter une table", - "study.modelization.tableMode.dialog.edit.title": "Modifier une table", - "study.modelization.tableMode.dialog.delete.text": "Êtes-vous sûr de vouloir supprimer la table '{{0}}' ?", "study.configuration.general.legend.simulation": "Simulation", "study.configuration.general.legend.calendar": "Calendrier", "study.configuration.general.legend.monteCarloScenarios": "Scénarios Monte-Carlo", @@ -340,7 +330,8 @@ "study.configuration.optimization.exportMps": "Export MPS", "study.configuration.optimization.unfeasibleProblemBehavior": "Unfeasible problem behavior", "study.configuration.optimization.simplexOptimizationRange": "Simplex optimization range", - "study.configuration.adequacyPatch.legend.general": "Générale", + "study.configuration.adequacyPatch.tab.general": "Général", + "study.configuration.adequacyPatch.tab.perimeter": "Périmètre", "study.configuration.adequacyPatch.legend.localMatchingRule": "Règle de correspondance locale", "study.configuration.adequacyPatch.legend.curtailmentSharing": "Partage de réduction", "study.configuration.adequacyPatch.legend.advanced": "Avancée", @@ -377,6 +368,9 @@ "study.configuration.advancedParameters.unitCommitmentMode": "Unit commitment mode", "study.configuration.advancedParameters.simulationCores": "Simulation cores", "study.configuration.advancedParameters.renewableGenerationModeling": "Renewable generation modeling", + "study.configuration.economicOpt": "Options économiques", + "study.configuration.geographicTrimmingAreas": "Filtre géographique (zones)", + "study.configuration.geographicTrimmingLinks": "Filtre géographique (liens)", "study.modelization.properties": "Propriétés", "study.modelization.properties.posX": "Position X", "study.modelization.properties.posY": "Position Y", @@ -497,6 +491,10 @@ "study.modelization.bindingConst.offset": "Décalage", "study.modelization.bindingConst.question.deleteConstraintTerm": "Êtes-vous sûr de vouloir supprimer ce terme ?", "study.modelization.bindingConst.question.deleteBindingConstraint": "Êtes-vous sûr de vouloir supprimer cette contrainte couplante ?", + "study.tableMode": "Table Mode", + 
"study.tableMode.dialog.add.title": "Ajouter une table", + "study.tableMode.dialog.edit.title": "Modifier une table", + "study.tableMode.dialog.delete.text": "Êtes-vous sûr de vouloir supprimer la table '{{name}}' ?", "study.results.mc": "Monte-Carlo", "study.results.display": "Affichage", "study.results.temporality": "Temporalité", @@ -504,7 +502,6 @@ "study.error.addConstraintTerm": "Échec lors de l'ajout du nouveau terme", "study.error.missingData": "Erreur: {{0}} et/ou {{1}} est manquante", "study.error.termAlreadyExist": "Ce terme existe déjà", - "study.error.addBindingConst": "Échec lors de la création de la contrainte couplante", "study.error.addCluster": "Échec lors de la création du cluster", "study.error.deleteCluster": "Échec lors de la suppression du cluster", "study.error.deleteConstraint": "Échec lors de la suppression de la contrainte couplante", @@ -649,19 +646,15 @@ "xpansion.indirectAlreadyLinkProfile": "Profil de lien déjà installé indirect", "xpansion.projection": "Projections", "xpansion.capex": "Calcul du CAPEX", - "xpansion.epsilon": "Ecart maximum avec la solution optimale", + "xpansion.epsilon": "Écart maximum avec la solution optimale", "xpansion.ucType": "Type UC", - "xpansion.master": "Maître", - "xpansion.optimalityGap": "Écart d'optimalité absolu", - "xpansion.maxIteration": "Iteration max", + "xpansion.master": "Problème maître", + "xpansion.optimalityGap": "Écart d’optimalité absolu", + "xpansion.maxIteration": "Nombre d’itération max", "xpansion.yearlyWeight": "Poids annuel", "xpansion.additionalConstraints": "Contraintes additionnelles", - "xpansion.relaxedOptimalityGap": "Écart d'optimalité (Relaxé)", - "xpansion.cutType": "Type de coupe", - "xpansion.amplSolver": "Solveur Ampl", - "xpansion.amplPresolve": "Ampl presolve", - "xpansion.amplSolverBoundsFrequency": "Ampl solve bounds frequency", - "xpansion.relativeGap": "Gap d'optimalité relatif", + "xpansion.relaxedOptimalityGap": "Écart d’optimalité relaxé", + "xpansion.relativeGap": "Écart d’optimalité relatif", "xpansion.batchSize": "Taille du batch", "xpansion.solver": "Solveur", "xpansion.timeLimit": "Limite de temps (en heures)", @@ -672,7 +665,8 @@ "xpansion.weights": "Poids", "xpansion.fileName": "Nom du fichier", "xpansion.options": "Options", - "xpansion.extra": "Extra", + "xpansion.optimization": "Optimisation", + "xpansion.extra": "Compléments de Modélisation", "xpansion.sensitivity": "Calcul de sensibilité", "xpansion.newXpansionConfig": "Nouvelle configuration d'Xpansion", "xpansion.candidates": "Candidats", diff --git a/webapp/src/components/App/Settings/Groups/index.tsx b/webapp/src/components/App/Settings/Groups/index.tsx index bbabb41bb6..b662134427 100644 --- a/webapp/src/components/App/Settings/Groups/index.tsx +++ b/webapp/src/components/App/Settings/Groups/index.tsx @@ -31,7 +31,7 @@ import Header from "./Header"; import UpdateGroupDialog from "./dialog/UpdateGroupDialog"; import { getAuthUser } from "../../../../redux/selectors"; import useAppSelector from "../../../../redux/hooks/useAppSelector"; -import { isSearchMatching } from "../../../../utils/textUtils"; +import { isSearchMatching } from "../../../../utils/stringUtils"; /** * Types diff --git a/webapp/src/components/App/Settings/Tokens/index.tsx b/webapp/src/components/App/Settings/Tokens/index.tsx index fce914d821..bbbe3d192e 100644 --- a/webapp/src/components/App/Settings/Tokens/index.tsx +++ b/webapp/src/components/App/Settings/Tokens/index.tsx @@ -35,7 +35,7 @@ import Header from "./Header"; import { getAuthUser } from 
"../../../../redux/selectors"; import TokenInfoDialog from "./dialog/TokenInfoDialog"; import useAppSelector from "../../../../redux/hooks/useAppSelector"; -import { isSearchMatching } from "../../../../utils/textUtils"; +import { isSearchMatching } from "../../../../utils/stringUtils"; /** * Types diff --git a/webapp/src/components/App/Settings/Users/index.tsx b/webapp/src/components/App/Settings/Users/index.tsx index 66d63c40dd..7af7ee658d 100644 --- a/webapp/src/components/App/Settings/Users/index.tsx +++ b/webapp/src/components/App/Settings/Users/index.tsx @@ -29,7 +29,7 @@ import { RESERVED_USER_NAMES } from "../utils"; import { UserDetailsDTO } from "../../../../common/types"; import UpdateUserDialog from "./dialog/UpdateUserDialog"; import { sortByName } from "../../../../services/utils"; -import { isSearchMatching } from "../../../../utils/textUtils"; +import { isSearchMatching } from "../../../../utils/stringUtils"; /** * Types diff --git a/webapp/src/components/App/Singlestudy/explore/Configuration/AdequacyPatch/Fields.tsx b/webapp/src/components/App/Singlestudy/explore/Configuration/AdequacyPatch/Fields.tsx index e2567b0c08..ebb63fb13b 100644 --- a/webapp/src/components/App/Singlestudy/explore/Configuration/AdequacyPatch/Fields.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Configuration/AdequacyPatch/Fields.tsx @@ -17,10 +17,7 @@ function Fields() { return ( -
+
(); + const { t } = useTranslation(); //////////////////////////////////////////////////////////////// // Event Handlers @@ -27,16 +31,36 @@ function AdequacyPatch() { //////////////////////////////////////////////////////////////// return ( -
getAdequacyPatchFormFields(study.id), - }} - onSubmit={handleSubmit} - enableUndoRedo - > - - + getAdequacyPatchFormFields(study.id), + }} + onSubmit={handleSubmit} + enableUndoRedo + > + + + ), + }, + { + label: t("study.configuration.adequacyPatch.tab.perimeter"), + content: ( + + ), + }, + ]} + TabListProps={{ sx: { mt: -2 } }} + /> ); } diff --git a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ScenarioBuilderDialog/tabs/Table.tsx b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ScenarioBuilderDialog/tabs/Table.tsx index aeff62fb0d..61fc35ffe2 100644 --- a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ScenarioBuilderDialog/tabs/Table.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ScenarioBuilderDialog/tabs/Table.tsx @@ -11,7 +11,7 @@ import { getAreas, getLinks, } from "../../../../../../../../../redux/selectors"; -import FormTable from "../../../../../../../../common/FormTable"; +import TableForm from "../../../../../../../../common/TableForm"; import ConfigContext from "../ConfigContext"; import { updateScenarioBuilderConfig } from "../utils"; import { SubmitHandlerPlus } from "../../../../../../../../common/Form/types"; @@ -116,7 +116,7 @@ function Table(props: Props) { //////////////////////////////////////////////////////////////// return ( - - + diff --git a/webapp/src/components/App/Singlestudy/explore/Configuration/index.tsx b/webapp/src/components/App/Singlestudy/explore/Configuration/index.tsx index f31ec83894..bd56ed29fc 100644 --- a/webapp/src/components/App/Singlestudy/explore/Configuration/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Configuration/index.tsx @@ -3,6 +3,7 @@ import { Paper } from "@mui/material"; import * as R from "ramda"; import { useMemo, useState } from "react"; import { useOutletContext } from "react-router"; +import { useTranslation } from "react-i18next"; import { StudyMetadata } from "../../../../../common/types"; import UnderConstruction from "../../../../common/page/UnderConstruction"; import PropertiesView from "../../../../common/PropertiesView"; @@ -14,10 +15,12 @@ import General from "./General"; import Optimization from "./Optimization"; import RegionalDistricts from "./RegionalDistricts"; import TimeSeriesManagement from "./TimeSeriesManagement"; +import TableMode from "../../../../common/TableMode"; function Configuration() { const { study } = useOutletContext<{ study: StudyMetadata }>(); const [currentTabIndex, setCurrentTabIndex] = useState(0); + const { t } = useTranslation(); // TODO i18n const tabList = useMemo( @@ -29,8 +32,11 @@ function Configuration() { { id: 3, name: "Optimization preferences" }, Number(study.version) >= 830 && { id: 4, name: "Adequacy Patch" }, { id: 5, name: "Advanced parameters" }, + { id: 6, name: t("study.configuration.economicOpt") }, + { id: 7, name: t("study.configuration.geographicTrimmingAreas") }, + { id: 8, name: t("study.configuration.geographicTrimmingLinks") }, ].filter(Boolean), - [study.version], + [study.version, t], ); return ( @@ -58,6 +64,44 @@ function Configuration() { [R.equals(3), () => ], [R.equals(4), () => ], [R.equals(5), () => ], + [ + R.equals(6), + () => ( + + ), + ], + [ + R.equals(7), + () => ( + + ), + ], + [ + R.equals(8), + () => ( + + ), + ], ])(tabList[currentTabIndex].id)} } diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/AreasTab.tsx 
b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/AreasTab.tsx index 14f45e8d03..e1a5c11b73 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/AreasTab.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/AreasTab.tsx @@ -43,7 +43,9 @@ function AreasTab({ renewablesClustering }: Props) { }, [areaId, navigate, location.pathname]); const tabList = useMemo(() => { - const basePath = `/studies/${study.id}/explore/modelization/area/${areaId}`; + const basePath = `/studies/${ + study.id + }/explore/modelization/area/${encodeURI(areaId)}`; const tabs = [ { label: "study.modelization.properties", pathSuffix: "properties" }, diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddBindingConstDialog/AddBindingConstForm.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddBindingConstDialog/AddBindingConstForm.tsx deleted file mode 100644 index feef90508a..0000000000 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddBindingConstDialog/AddBindingConstForm.tsx +++ /dev/null @@ -1,105 +0,0 @@ -import { useTranslation } from "react-i18next"; -import { Box } from "@mui/material"; -import { FieldPath } from "react-hook-form"; -import { useMemo } from "react"; -import SelectFE from "../../../../../../common/fieldEditors/SelectFE"; -import { Root } from "../style"; -import SwitchFE from "../../../../../../common/fieldEditors/SwitchFE"; -import { CreateBindingConstraint } from "../../../../Commands/Edition/commandTypes"; -import StringFE from "../../../../../../common/fieldEditors/StringFE"; -import { useFormContextPlus } from "../../../../../../common/Form"; - -function AddClusterForm() { - const { control } = useFormContextPlus(); - - const { t } = useTranslation(); - const operatorOptions = useMemo( - () => - ["less", "equal", "greater", "both"].map((item) => ({ - label: t(`study.modelization.bindingConst.operator.${item}`), - value: item, - })), - [t], - ); - - const typeOptions = useMemo( - () => - ["hourly", "daily", "weekly"].map((item) => ({ - label: t(`global.time.${item}`), - value: item, - })), - [t], - ); - - //////////////////////////////////////////////////////////////// - // JSX - //////////////////////////////////////////////////////////////// - - const renderInput = ( - name: FieldPath, - transId: string, - required?: boolean, - ) => ( - - ); - - const renderSelect = ( - name: FieldPath, - options: Array<{ label: string; value: string }>, - transId?: string, - ) => ( - - - - ); - - return ( - - - {renderInput("name", "global.name", true)} - {renderInput("comments", "study.modelization.bindingConst.comments")} - {renderSelect( - "time_step", - typeOptions, - "study.modelization.bindingConst.type", - )} - {renderSelect("operator", operatorOptions)} - - ); -} - -export default AddClusterForm; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddBindingConstDialog/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddBindingConstDialog/index.tsx deleted file mode 100644 index e930e4b966..0000000000 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddBindingConstDialog/index.tsx +++ /dev/null @@ -1,76 +0,0 @@ -/* eslint-disable camelcase */ -import { AxiosError } from "axios"; -import { useTranslation } from "react-i18next"; -import { useSnackbar } from "notistack"; -import FormDialog, { - FormDialogProps, -} 
from "../../../../../../common/dialogs/FormDialog"; -import AddBindingConstForm from "./AddBindingConstForm"; -import useEnqueueErrorSnackbar from "../../../../../../../hooks/useEnqueueErrorSnackbar"; -import { appendCommands } from "../../../../../../../services/api/variant"; -import { - BindingConstraintOperator, - CommandEnum, - CreateBindingConstraint, - TimeStep, -} from "../../../../Commands/Edition/commandTypes"; -import { SubmitHandlerPlus } from "../../../../../../common/Form/types"; - -interface Props extends Omit { - studyId: string; -} - -function AddBindingConstDialog(props: Props) { - const [t] = useTranslation(); - const enqueueErrorSnackbar = useEnqueueErrorSnackbar(); - const { enqueueSnackbar } = useSnackbar(); - const { studyId, ...dialogProps } = props; - const { onCancel } = dialogProps; - const defaultValues: CreateBindingConstraint = { - name: "", - enabled: true, - time_step: TimeStep.HOURLY, - operator: BindingConstraintOperator.LESS, - coeffs: {}, - comments: "", - }; - - //////////////////////////////////////////////////////////////// - // Event Handlers - //////////////////////////////////////////////////////////////// - - const handleSubmit = async (data: SubmitHandlerPlus) => { - try { - await appendCommands(studyId, [ - { - action: CommandEnum.CREATE_BINDING_CONSTRAINT, - args: data.values, - }, - ]); - enqueueSnackbar(t("study.success.addBindingConst"), { - variant: "success", - }); - } catch (e) { - enqueueErrorSnackbar(t("study.error.addBindingConst"), e as AxiosError); - } finally { - onCancel(); - } - }; - - //////////////////////////////////////////////////////////////// - // JSX - //////////////////////////////////////////////////////////////// - - return ( - - - - ); -} - -export default AddBindingConstDialog; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddDialog.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddDialog.tsx new file mode 100644 index 0000000000..06056b3b4e --- /dev/null +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddDialog.tsx @@ -0,0 +1,140 @@ +import { useMemo } from "react"; +import { Box } from "@mui/material"; +import { useTranslation } from "react-i18next"; +import { useSnackbar } from "notistack"; +import FormDialog from "../../../../../common/dialogs/FormDialog"; +import { + BindingConstraintOperator, + TimeStep, +} from "../../../Commands/Edition/commandTypes"; +import { SubmitHandlerPlus } from "../../../../../common/Form/types"; +import { BindingConstFields } from "./BindingConstView/utils"; +import { createBindingConstraint } from "../../../../../../services/api/studydata"; +import SelectFE from "../../../../../common/fieldEditors/SelectFE"; +import StringFE from "../../../../../common/fieldEditors/StringFE"; +import SwitchFE from "../../../../../common/fieldEditors/SwitchFE"; +import { StudyMetadata } from "../../../../../../common/types"; + +interface Props { + studyId: StudyMetadata["id"]; + existingConstraints: Array; + open: boolean; + onClose: VoidFunction; +} + +function AddDialog({ studyId, existingConstraints, open, onClose }: Props) { + const [t] = useTranslation(); + const { enqueueSnackbar } = useSnackbar(); + + const defaultValues = { + name: "", + enabled: true, + time_step: TimeStep.HOURLY, + operator: BindingConstraintOperator.LESS, + comments: "", + coeffs: {}, + }; + + const operatorOptions = useMemo( + () => + ["less", "equal", "greater", "both"].map((item) => ({ + label: 
t(`study.modelization.bindingConst.operator.${item}`), + value: item, + })), + [t], + ); + + const typeOptions = useMemo( + () => + ["hourly", "daily", "weekly"].map((item) => ({ + label: t(`global.time.${item}`), + value: item, + })), + [t], + ); + + //////////////////////////////////////////////////////////////// + // Event Handlers + //////////////////////////////////////////////////////////////// + + const handleSubmit = async ( + data: SubmitHandlerPlus, + ) => { + return createBindingConstraint(studyId, data.values); + }; + + const handleSubmitSuccessful = () => { + enqueueSnackbar(t("study.success.addBindingConst"), { + variant: "success", + }); + onClose(); + }; + + //////////////////////////////////////////////////////////////// + // JSX + //////////////////////////////////////////////////////////////// + + return ( + + {({ control }) => ( + + + { + if (v.trim().length <= 0) { + return t("form.field.required"); + } + if (existingConstraints.includes(v.trim().toLowerCase())) { + return t("form.field.duplicate", { 0: v }); + } + }, + }} + /> + + + + + )} + + ); +} + +export default AddDialog; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstPropsView.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstPropsView.tsx index 34a58a1d2a..973d0639fe 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstPropsView.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstPropsView.tsx @@ -1,9 +1,8 @@ -import { useEffect, useState } from "react"; -import { useTranslation } from "react-i18next"; +import { useEffect, useMemo, useState } from "react"; import { StudyMetadata } from "../../../../../../common/types"; import PropertiesView from "../../../../../common/PropertiesView"; import ListElement from "../../common/ListElement"; -import AddBindingConstDialog from "./AddBindingConstDialog"; +import AddDialog from "./AddDialog"; import { BindingConstFields } from "./BindingConstView/utils"; interface Props { @@ -15,7 +14,6 @@ interface Props { function BindingConstPropsView(props: Props) { const { onClick, currentBindingConst, studyId, list } = props; - const [t] = useTranslation(); const [bindingConstNameFilter, setBindingConstNameFilter] = useState(); const [addBindingConst, setAddBindingConst] = useState(false); @@ -37,6 +35,11 @@ function BindingConstPropsView(props: Props) { setFilteredBindingConst(filter()); }, [list, bindingConstNameFilter]); + const existingConstraints = useMemo( + () => list.map(({ name }) => name.toLowerCase()), + [list], + ); + //////////////////////////////////////////////////////////////// // JSX //////////////////////////////////////////////////////////////// @@ -59,11 +62,11 @@ function BindingConstPropsView(props: Props) { onSearchFilterChange={(e) => setBindingConstNameFilter(e as string)} /> {addBindingConst && ( - setAddBindingConst(false)} + existingConstraints={existingConstraints} + onClose={() => setAddBindingConst(false)} /> )} diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/preview.png b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/preview.png deleted file mode 100644 index 650911d87a..0000000000 Binary files a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/preview.png and /dev/null differ diff --git 
a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/style.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/style.ts deleted file mode 100644 index 3a5e2e43cb..0000000000 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/style.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { Box, styled } from "@mui/material"; - -export const Root = styled(Box)(({ theme }) => ({ - display: "flex", - width: "100%", - flex: 1, - flexFlow: "column nowrap", - boxSizing: "border-box", -})); - -export const Content = styled(Box)(({ theme }) => ({ - width: "100%", - display: "flex", - justifyContent: "flex-start", -})); diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Data/Json.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Data/Json.tsx new file mode 100644 index 0000000000..4dce33e2ed --- /dev/null +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Data/Json.tsx @@ -0,0 +1,111 @@ +import { useState } from "react"; +import { useTranslation } from "react-i18next"; +import { AxiosError } from "axios"; +import { useSnackbar } from "notistack"; +import SaveIcon from "@mui/icons-material/Save"; +import { Box, Button, Typography } from "@mui/material"; +import { useUpdateEffect } from "react-use"; +import { + editStudy, + getStudyData, +} from "../../../../../../../services/api/study"; +import { Header, Root } from "./style"; +import SimpleLoader from "../../../../../../common/loaders/SimpleLoader"; +import JSONEditor from "../../../../../../common/JSONEditor"; +import usePromiseWithSnackbarError from "../../../../../../../hooks/usePromiseWithSnackbarError"; +import UsePromiseCond from "../../../../../../common/utils/UsePromiseCond"; +import SimpleContent from "../../../../../../common/page/SimpleContent"; +import useEnqueueErrorSnackbar from "../../../../../../../hooks/useEnqueueErrorSnackbar"; + +interface Props { + path: string; + studyId: string; +} + +function Json({ path, studyId }: Props) { + const [t] = useTranslation(); + const { enqueueSnackbar } = useSnackbar(); + const enqueueErrorSnackbar = useEnqueueErrorSnackbar(); + const [jsonData, setJsonData] = useState(null); + const [isSaveAllowed, setSaveAllowed] = useState(false); + + const res = usePromiseWithSnackbarError( + () => getStudyData(studyId, path, -1), + { + errorMessage: t("studies.error.retrieveData"), + deps: [studyId, path], + }, + ); + + /* Reset save button when path changes */ + useUpdateEffect(() => { + setSaveAllowed(false); + }, [studyId, path]); + + //////////////////////////////////////////////////////////////// + // Event Handlers + //////////////////////////////////////////////////////////////// + + const handleSaveJson = async () => { + if (isSaveAllowed && jsonData) { + try { + await editStudy(jsonData, studyId, path); + enqueueSnackbar(t("studies.success.saveData"), { + variant: "success", + }); + setSaveAllowed(false); + } catch (e) { + enqueueErrorSnackbar(t("studies.error.saveData"), e as AxiosError); + } + } + }; + + const handleJsonChange = (newJson: string) => { + setJsonData(newJson); + setSaveAllowed(true); + }; + + //////////////////////////////////////////////////////////////// + // JSX + //////////////////////////////////////////////////////////////// + + return ( + +
+ +
+ } + ifResolved={(json) => ( + + + + )} + ifRejected={(error) => } + /> +
+ ); +} + +export default Json; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Data/Matrix.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Data/Matrix.tsx new file mode 100644 index 0000000000..524c6b5117 --- /dev/null +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Data/Matrix.tsx @@ -0,0 +1,26 @@ +import { useOutletContext } from "react-router"; +import { MatrixStats, StudyMetadata } from "../../../../../../../common/types"; +import { Root, Content } from "./style"; +import MatrixInput from "../../../../../../common/MatrixInput"; + +interface Props { + path: string; +} + +function Matrix({ path }: Props) { + const { study } = useOutletContext<{ study: StudyMetadata }>(); + + //////////////////////////////////////////////////////////////// + // JSX + //////////////////////////////////////////////////////////////// + + return ( + + + + + + ); +} + +export default Matrix; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Data/Text.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Data/Text.tsx new file mode 100644 index 0000000000..d4f00ac1fc --- /dev/null +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Data/Text.tsx @@ -0,0 +1,91 @@ +import { useState } from "react"; +import { AxiosError } from "axios"; +import { useSnackbar } from "notistack"; +import { useTranslation } from "react-i18next"; +import { Button } from "@mui/material"; +import UploadOutlinedIcon from "@mui/icons-material/UploadOutlined"; +import { + getStudyData, + importFile, +} from "../../../../../../../services/api/study"; +import { Content, Header, Root } from "./style"; +import useEnqueueErrorSnackbar from "../../../../../../../hooks/useEnqueueErrorSnackbar"; +import SimpleLoader from "../../../../../../common/loaders/SimpleLoader"; +import ImportDialog from "../../../../../../common/dialogs/ImportDialog"; +import usePromiseWithSnackbarError from "../../../../../../../hooks/usePromiseWithSnackbarError"; +import SimpleContent from "../../../../../../common/page/SimpleContent"; +import UsePromiseCond from "../../../../../../common/utils/UsePromiseCond"; +import { useDebugContext } from "../DebugContext"; + +interface Props { + studyId: string; + path: string; +} + +function Text({ studyId, path }: Props) { + const [t] = useTranslation(); + const { reloadTreeData } = useDebugContext(); + const { enqueueSnackbar } = useSnackbar(); + const enqueueErrorSnackbar = useEnqueueErrorSnackbar(); + const [openImportDialog, setOpenImportDialog] = useState(false); + + const res = usePromiseWithSnackbarError(() => getStudyData(studyId, path), { + errorMessage: t("studies.error.retrieveData"), + deps: [studyId, path], + }); + + //////////////////////////////////////////////////////////////// + // Event Handlers + //////////////////////////////////////////////////////////////// + + const handleImport = async (file: File) => { + try { + await importFile(file, studyId, path); + reloadTreeData(); + enqueueSnackbar(t("studies.success.saveData"), { + variant: "success", + }); + } catch (e) { + enqueueErrorSnackbar(t("studies.error.saveData"), e as AxiosError); + } + }; + + //////////////////////////////////////////////////////////////// + // JSX + //////////////////////////////////////////////////////////////// + + return ( + +
+ +
+ } + ifResolved={(data) => ( + + {data} + + )} + ifRejected={(error) => } + /> + {openImportDialog && ( + setOpenImportDialog(false)} + onImport={handleImport} + /> + )} +
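Note how `Text` above refreshes the file tree after a successful import: `reloadTreeData` comes from the debug context via `useDebugContext`, not from props. The real context and provider appear just below (`DebugContext.ts` and `Debug/index.tsx`); the following self-contained sketch only illustrates the consumer flow, with a hypothetical `ReloadContext` standing in for the real one.

```tsx
// Self-contained sketch: a context-supplied reload callback avoids
// threading a refresh function through every intermediate component.
import { createContext, useContext } from "react";

const ReloadContext = createContext<{ reload: () => void }>({
  reload: () => {},
});

interface ImportActionProps {
  doImport: () => Promise<void>; // e.g. importFile(file, studyId, path)
}

function ImportAction({ doImport }: ImportActionProps) {
  const { reload } = useContext(ReloadContext);

  const handleClick = async () => {
    await doImport();
    reload(); // the newly uploaded file shows up in the tree
  };

  return <button onClick={handleClick}>Import</button>;
}

export default ImportAction;
```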
+ ); +} + +export default Text; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Data/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Data/index.tsx new file mode 100644 index 0000000000..0cb75a3eab --- /dev/null +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Data/index.tsx @@ -0,0 +1,24 @@ +import Text from "./Text"; +import Json from "./Json"; +import Matrix from "./Matrix"; +import { FileType } from "../utils"; + +interface Props { + studyId: string; + fileType: FileType; + filePath: string; +} + +const componentByFileType = { + matrix: Matrix, + json: Json, + file: Text, +} as const; + +function Data({ studyId, fileType, filePath }: Props) { + const DataViewer = componentByFileType[fileType]; + + return ; +} + +export default Data; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/style.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Data/style.ts similarity index 93% rename from webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/style.ts rename to webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Data/style.ts index 0f2ace5de7..b0143fdec8 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/style.ts +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Data/style.ts @@ -17,7 +17,7 @@ export const Header = styled(Box)(({ theme }) => ({ flexFlow: "row nowrap", justifyContent: "space-between", alignItems: "center", - padding: theme.spacing(0, 2), + marginBottom: theme.spacing(1), })); export const Content = styled(Paper)(({ theme }) => ({ @@ -32,5 +32,3 @@ export const Content = styled(Paper)(({ theme }) => ({ overflow: "auto", position: "relative", })); - -export default {}; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/DebugContext.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/DebugContext.ts new file mode 100644 index 0000000000..52e4c07d7f --- /dev/null +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/DebugContext.ts @@ -0,0 +1,21 @@ +import { createContext, useContext } from "react"; +import { FileType, TreeData } from "./utils"; + +interface DebugContextProps { + treeData: TreeData; + onFileSelect: (fileType: FileType, filePath: string) => void; + reloadTreeData: () => void; +} + +const initialDebugContextValue: DebugContextProps = { + treeData: {}, + onFileSelect: () => {}, + reloadTreeData: () => {}, +}; + +const DebugContext = createContext(initialDebugContextValue); + +export const useDebugContext = (): DebugContextProps => + useContext(DebugContext); + +export default DebugContext; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Tree/FileTreeItem.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Tree/FileTreeItem.tsx new file mode 100644 index 0000000000..5ae11cff9d --- /dev/null +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Tree/FileTreeItem.tsx @@ -0,0 +1,64 @@ +import { Box } from "@mui/material"; +import { TreeItem } from "@mui/x-tree-view"; +import { TreeData, determineFileType, getFileIcon } from "../utils"; +import { useDebugContext } from "../DebugContext"; + +interface Props { + name: string; + content: TreeData; + path: string; +} + +function FileTreeItem({ name, content, path }: Props) { + const { onFileSelect } = useDebugContext(); + const fullPath = 
`${path}/${name}`; + const fileType = determineFileType(content); + const FileIcon = getFileIcon(fileType); + const isFolderEmpty = !Object.keys(content).length; + + //////////////////////////////////////////////////////////////// + // Event handlers + //////////////////////////////////////////////////////////////// + + const handleClick = () => { + if (fileType !== "folder") { + onFileSelect(fileType, fullPath); + } + }; + + //////////////////////////////////////////////////////////////// + // JSX + //////////////////////////////////////////////////////////////// + + return ( + + + {name} + + } + > + {typeof content === "object" && + Object.keys(content).map((childName) => ( + + ))} + + ); +} + +export default FileTreeItem; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Tree/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Tree/index.tsx new file mode 100644 index 0000000000..faa75ab045 --- /dev/null +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/Tree/index.tsx @@ -0,0 +1,28 @@ +import { TreeView } from "@mui/x-tree-view"; +import ExpandMoreIcon from "@mui/icons-material/ExpandMore"; +import ChevronRightIcon from "@mui/icons-material/ChevronRight"; +import FileTreeItem from "./FileTreeItem"; +import { useDebugContext } from "../DebugContext"; + +function Tree() { + const { treeData } = useDebugContext(); + + //////////////////////////////////////////////////////////////// + // JSX + //////////////////////////////////////////////////////////////// + + return ( + } + defaultExpandIcon={} + > + {typeof treeData === "object" && + Object.keys(treeData).map((key) => ( + + ))} + + ); +} + +export default Tree; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/index.tsx new file mode 100644 index 0000000000..414c004093 --- /dev/null +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/index.tsx @@ -0,0 +1,94 @@ +import { useCallback, useMemo, useState } from "react"; +import { useTranslation } from "react-i18next"; +import { useOutletContext } from "react-router-dom"; +import { Box } from "@mui/material"; +import Tree from "./Tree"; +import Data from "./Data"; +import { StudyMetadata } from "../../../../../../common/types"; +import SimpleLoader from "../../../../../common/loaders/SimpleLoader"; +import UsePromiseCond from "../../../../../common/utils/UsePromiseCond"; +import SimpleContent from "../../../../../common/page/SimpleContent"; +import usePromiseWithSnackbarError from "../../../../../../hooks/usePromiseWithSnackbarError"; +import { getStudyData } from "../../../../../../services/api/study"; +import DebugContext from "./DebugContext"; +import { TreeData, filterTreeData, File } from "./utils"; + +function Debug() { + const [t] = useTranslation(); + const { study } = useOutletContext<{ study: StudyMetadata }>(); + const [selectedFile, setSelectedFile] = useState(); + + const studyTree = usePromiseWithSnackbarError( + async () => { + const treeData = await getStudyData(study.id, "", -1); + return filterTreeData(treeData); + }, + { + errorMessage: t("studies.error.retrieveData"), + deps: [study.id], + }, + ); + + const handleFileSelection = useCallback( + (fileType: File["fileType"], filePath: string) => { + setSelectedFile({ fileType, filePath }); + }, + [], + ); + + //////////////////////////////////////////////////////////////// + // Utils + 
//////////////////////////////////////////////////////////////// + + const contextValue = useMemo( + () => ({ + treeData: studyTree.data ?? {}, + onFileSelect: handleFileSelection, + reloadTreeData: studyTree.reload, + }), + [studyTree.data, studyTree.reload, handleFileSelection], + ); + + //////////////////////////////////////////////////////////////// + // JSX + //////////////////////////////////////////////////////////////// + + return ( + + + } + ifResolved={() => ( + <> + + + + + {selectedFile && ( + + )} + + + )} + ifRejected={(error) => } + /> + + + ); +} + +export default Debug; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/utils.ts new file mode 100644 index 0000000000..7fc82f087e --- /dev/null +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Debug/utils.ts @@ -0,0 +1,72 @@ +import DataObjectIcon from "@mui/icons-material/DataObject"; +import TextSnippetIcon from "@mui/icons-material/TextSnippet"; +import FolderIcon from "@mui/icons-material/Folder"; +import DatasetIcon from "@mui/icons-material/Dataset"; +import { SvgIconComponent } from "@mui/icons-material"; + +//////////////////////////////////////////////////////////////// +// Types +//////////////////////////////////////////////////////////////// + +export type FileType = "json" | "file" | "matrix"; + +export interface File { + fileType: FileType; + filePath: string; +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export type TreeData = Record | string; + +//////////////////////////////////////////////////////////////// +// Utils +//////////////////////////////////////////////////////////////// + +/** + * Maps file types and folder to their corresponding icon components. + */ +const iconByFileType: Record = { + matrix: DatasetIcon, + json: DataObjectIcon, + folder: FolderIcon, + file: TextSnippetIcon, +} as const; + +/** + * Gets the icon component for a given file type or folder. + * @param {FileType | "folder"} type - The type of the file or "folder". + * @returns {SvgIconComponent} The corresponding icon component. + */ +export const getFileIcon = (type: FileType | "folder"): SvgIconComponent => { + return iconByFileType[type] || TextSnippetIcon; +}; + +/** + * Determines the file type based on the tree data. + * @param {TreeData} treeData - The data of the tree item. + * @returns {FileType | "folder"} The determined file type or "folder". + */ +export const determineFileType = (treeData: TreeData): FileType | "folder" => { + if (typeof treeData === "string") { + if (treeData.startsWith("matrix://")) { + return "matrix"; + } + if (treeData.startsWith("json://")) { + return "json"; + } + } + return typeof treeData === "object" ? "folder" : "file"; +}; + +/** + * Filters out specific keys from the tree data. + * @param {TreeData} data - The original tree data. + * @returns {TreeData} The filtered tree data. 
+ */ +export const filterTreeData = (data: TreeData): TreeData => { + const excludedKeys = new Set(["Desktop", "study", "output", "logs"]); + + return Object.fromEntries( + Object.entries(data).filter(([key]) => !excludedKeys.has(key)), + ); +}; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/StudyFileView.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/StudyFileView.tsx deleted file mode 100644 index 1a35c18add..0000000000 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/StudyFileView.tsx +++ /dev/null @@ -1,122 +0,0 @@ -/* eslint-disable react-hooks/exhaustive-deps */ -import { useEffect, useState } from "react"; -import { AxiosError } from "axios"; -import debug from "debug"; -import { useSnackbar } from "notistack"; -import { useTranslation } from "react-i18next"; -import { Box, Button } from "@mui/material"; -import UploadOutlinedIcon from "@mui/icons-material/UploadOutlined"; -import { - getStudyData, - importFile, -} from "../../../../../../../services/api/study"; -import { Header, Root, Content } from "./style"; -import useEnqueueErrorSnackbar from "../../../../../../../hooks/useEnqueueErrorSnackbar"; -import SimpleLoader from "../../../../../../common/loaders/SimpleLoader"; -import ImportDialog from "../../../../../../common/dialogs/ImportDialog"; - -const logErr = debug("antares:createimportform:error"); - -interface PropTypes { - study: string; - url: string; - refreshView: () => void; - filterOut: Array; -} - -function StudyFileView(props: PropTypes) { - const { study, url, filterOut, refreshView } = props; - const { enqueueSnackbar } = useSnackbar(); - const enqueueErrorSnackbar = useEnqueueErrorSnackbar(); - const [t] = useTranslation(); - const [data, setData] = useState(); - const [loaded, setLoaded] = useState(false); - const [isEditable, setEditable] = useState(true); - const [formattedPath, setFormattedPath] = useState(""); - const [openImportDialog, setOpenImportDialog] = useState(false); - - const loadFileData = async () => { - setData(undefined); - setLoaded(false); - try { - const res = await getStudyData(study, url); - if (Array.isArray(res)) { - setData(res.join("\n")); - } else { - setData(res); - } - } catch (e) { - enqueueErrorSnackbar(t("studies.error.retrieveData"), e as AxiosError); - } finally { - setLoaded(true); - } - }; - - const onImport = async (file: File) => { - try { - await importFile(file, study, formattedPath); - } catch (e) { - logErr("Failed to import file", file, e); - enqueueErrorSnackbar(t("studies.error.saveData"), e as AxiosError); - } - refreshView(); - enqueueSnackbar(t("studies.success.saveData"), { - variant: "success", - }); - }; - - useEffect(() => { - const urlParts = url.split("/"); - const tmpUrl = urlParts.filter((item) => item); - setFormattedPath(tmpUrl.join("/")); - if (tmpUrl.length > 0) { - setEditable(!filterOut.includes(tmpUrl[0])); - } - if (urlParts.length < 2) { - enqueueSnackbar(t("studies.error.retrieveData"), { - variant: "error", - }); - return; - } - loadFileData(); - }, [url, filterOut]); - - return ( - <> - {data && ( - - {isEditable && ( -
- -
- )} - - {data} - -
- )} - {!loaded && ( - - - - )} - {openImportDialog && ( - setOpenImportDialog(false)} - onImport={onImport} - /> - )} - - ); -} - -export default StudyFileView; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/StudyJsonView.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/StudyJsonView.tsx deleted file mode 100644 index 37f4ac97e8..0000000000 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/StudyJsonView.tsx +++ /dev/null @@ -1,116 +0,0 @@ -/* eslint-disable react-hooks/exhaustive-deps */ -import { useState, useEffect } from "react"; -import { AxiosError } from "axios"; -import { useSnackbar } from "notistack"; -import { useTranslation } from "react-i18next"; -import ReactJson from "react-json-view"; -import SaveIcon from "@mui/icons-material/Save"; -import { Box, Button, Typography } from "@mui/material"; -import { - editStudy, - getStudyData, -} from "../../../../../../../services/api/study"; -import useEnqueueErrorSnackbar from "../../../../../../../hooks/useEnqueueErrorSnackbar"; -import { Header, Root, Content } from "./style"; -import SimpleLoader from "../../../../../../common/loaders/SimpleLoader"; - -interface PropTypes { - data: string; - study: string; - filterOut: Array; -} - -function StudyJsonView(props: PropTypes) { - const { data, study, filterOut } = props; - const { enqueueSnackbar } = useSnackbar(); - const enqueueErrorSnackbar = useEnqueueErrorSnackbar(); - const [t] = useTranslation(); - const [jsonData, setJsonData] = useState(); - const [loaded, setLoaded] = useState(false); - const [saveAllowed, setSaveAllowed] = useState(false); - const [isEditable, setEditable] = useState(true); - - const saveData = async () => { - const tmpDataPath = data.split("/").filter((item) => item); - const tmpPath = tmpDataPath.join("/"); - if (loaded && jsonData) { - try { - await editStudy(jsonData, study, tmpPath); - enqueueSnackbar(t("studies.success.saveData"), { - variant: "success", - }); - setSaveAllowed(false); - } catch (e) { - enqueueErrorSnackbar(t("studies.error.saveData"), e as AxiosError); - } - } else { - enqueueSnackbar(t("studies.error.saveData"), { variant: "error" }); - } - }; - - useEffect(() => { - (async () => { - setJsonData(undefined); - setLoaded(false); - const tmpDataPath = data.split("/").filter((item) => item); - if (tmpDataPath.length > 0) { - setEditable(!filterOut.includes(tmpDataPath[0])); - } - try { - const res = await getStudyData(study, data, -1); - setJsonData(res); - setSaveAllowed(false); - } catch (e) { - enqueueErrorSnackbar(t("studies.error.retrieveData"), e as AxiosError); - } finally { - setLoaded(true); - } - })(); - }, [data, filterOut]); - - return ( - - {isEditable && ( -
- -
- )} - - {jsonData && ( - { - setJsonData(e.updated_src); - setSaveAllowed(true); - } - : undefined - } - theme="monokai" - /> - )} - {!loaded && ( - - - - )} - -
- ); -} - -export default StudyJsonView; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/StudyMatrixView/StudyMatrixView.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/StudyMatrixView/StudyMatrixView.tsx deleted file mode 100644 index 3c6f113dca..0000000000 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/StudyMatrixView/StudyMatrixView.tsx +++ /dev/null @@ -1,217 +0,0 @@ -import { useEffect, useState } from "react"; -import { AxiosError } from "axios"; -import debug from "debug"; -import { useSnackbar } from "notistack"; -import { useTranslation } from "react-i18next"; -import { Box, Typography, Divider, ButtonGroup, Button } from "@mui/material"; -import TableViewIcon from "@mui/icons-material/TableView"; -import BarChartIcon from "@mui/icons-material/BarChart"; -import GetAppOutlinedIcon from "@mui/icons-material/GetAppOutlined"; -import { - getStudyData, - importFile, -} from "../../../../../../../../services/api/study"; -import { - MatrixType, - MatrixEditDTO, -} from "../../../../../../../../common/types"; -import { Header, Root, Content } from "../style"; -import usePromiseWithSnackbarError from "../../../../../../../../hooks/usePromiseWithSnackbarError"; -import { StyledButton } from "./style"; -import useEnqueueErrorSnackbar from "../../../../../../../../hooks/useEnqueueErrorSnackbar"; -import SimpleContent from "../../../../../../../common/page/SimpleContent"; -import ImportDialog from "../../../../../../../common/dialogs/ImportDialog"; -import SimpleLoader from "../../../../../../../common/loaders/SimpleLoader"; -import EditableMatrix from "../../../../../../../common/EditableMatrix"; -import { - editMatrix, - getStudyMatrixIndex, -} from "../../../../../../../../services/api/matrix"; - -const logErr = debug("antares:createimportform:error"); - -interface PropTypes { - study: string; - url: string; - filterOut: Array; -} - -function StudyMatrixView(props: PropTypes) { - const { study, url, filterOut } = props; - const { enqueueSnackbar } = useSnackbar(); - const enqueueErrorSnackbar = useEnqueueErrorSnackbar(); - const [t] = useTranslation(); - const [data, setData] = useState(); - const [loaded, setLoaded] = useState(false); - const [toggleView, setToggleView] = useState(true); - const [openImportDialog, setOpenImportDialog] = useState(false); - const [isEditable, setEditable] = useState(true); - const [formattedPath, setFormattedPath] = useState(""); - - const { data: matrixIndex } = usePromiseWithSnackbarError( - () => getStudyMatrixIndex(study, formattedPath), - { - errorMessage: t("matrix.error.failedToRetrieveIndex"), - deps: [study, formattedPath], - }, - ); - - //////////////////////////////////////////////////////////////// - // Utils - //////////////////////////////////////////////////////////////// - - const loadFileData = async () => { - setData(undefined); - setLoaded(false); - try { - const res = await getStudyData(study, url); - if (typeof res === "string") { - const fixed = res - .replace(/NaN/g, '"NaN"') - .replace(/Infinity/g, '"Infinity"'); - setData(JSON.parse(fixed)); - } else { - setData(res); - } - } catch (e) { - enqueueErrorSnackbar(t("data.error.matrix"), e as AxiosError); - } finally { - setLoaded(true); - } - }; - - //////////////////////////////////////////////////////////////// - // Event Handlers - //////////////////////////////////////////////////////////////// - - const handleUpdate = async (change: MatrixEditDTO[], 
source: string) => { - if (source !== "loadData" && source !== "updateData") { - try { - if (change.length > 0) { - await editMatrix(study, formattedPath, change); - enqueueSnackbar(t("matrix.success.matrixUpdate"), { - variant: "success", - }); - } - } catch (e) { - enqueueErrorSnackbar(t("matrix.error.matrixUpdate"), e as AxiosError); - } - } - }; - - const handleImport = async (file: File) => { - try { - await importFile(file, study, formattedPath); - } catch (e) { - logErr("Failed to import file", file, e); - enqueueErrorSnackbar(t("variants.error.import"), e as AxiosError); - } finally { - enqueueSnackbar(t("variants.success.import"), { - variant: "success", - }); - loadFileData(); - } - }; - - useEffect(() => { - const urlParts = url.split("/"); - const tmpUrl = urlParts.filter((item) => item); - setFormattedPath(tmpUrl.join("/")); - if (tmpUrl.length > 0) { - setEditable(!filterOut.includes(tmpUrl[0])); - } - if (urlParts.length < 2) { - enqueueSnackbar(t("studies.error.retrieveData"), { - variant: "error", - }); - return; - } - loadFileData(); - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [url, filterOut, enqueueSnackbar, t]); - - //////////////////////////////////////////////////////////////// - // JSX - //////////////////////////////////////////////////////////////// - - return ( - - -
- - {t("xpansion.timeSeries")} - - - {loaded && data && data.columns?.length > 1 && ( - - setToggleView((prev) => !prev)}> - {toggleView ? ( - - ) : ( - - )} - - - )} - {isEditable && ( - - )} - -
- - {!loaded && } - {loaded && data && data.columns?.length >= 1 ? ( - - ) : ( - loaded && ( - } - onClick={() => setOpenImportDialog(true)} - > - {t("global.import")} - - } - /> - ) - )} -
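One behavior of the removed `StudyMatrixView` worth spelling out: its `handleUpdate` (above) dropped change events whose `source` was `"loadData"` or `"updateData"`, i.e. events the grid emits when it (re)loads its own data, and only persisted genuine user edits. A standalone sketch, with a hypothetical `MatrixChange` type standing in for `MatrixEditDTO`:

```ts
// Sketch: only user-originated grid changes are sent to the API.
interface MatrixChange {
  row: number;
  col: number;
  value: number;
}

async function persistUserEdits(
  changes: MatrixChange[],
  source: string,
  save: (changes: MatrixChange[]) => Promise<void>,
): Promise<void> {
  // "loadData"/"updateData" fire when the grid renders server data;
  // persisting those would just echo server state back to the server.
  if (source === "loadData" || source === "updateData") {
    return;
  }
  if (changes.length > 0) {
    await save(changes);
  }
}
```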
- {openImportDialog && ( - setOpenImportDialog(false)} - onImport={handleImport} - /> - )} -
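Also worth recording before this file disappears: matrix payloads could arrive as raw text containing bare `NaN`/`Infinity` tokens, which `JSON.parse` rejects, so `loadFileData` above quoted them before parsing. The two `replace` calls below are taken verbatim from the deleted code; the wrapper function is illustrative:

```ts
// Sketch of the workaround: bare NaN/Infinity are not valid JSON,
// so quote them into strings before parsing.
function parseMatrixPayload(res: unknown): unknown {
  if (typeof res !== "string") {
    return res; // already deserialized by the HTTP client
  }
  const fixed = res
    .replace(/NaN/g, '"NaN"')
    .replace(/Infinity/g, '"Infinity"');
  return JSON.parse(fixed);
}

// parseMatrixPayload('{"data": [NaN, Infinity]}')
// -> { data: ["NaN", "Infinity"] }
```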
- ); -} - -export default StudyMatrixView; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/StudyMatrixView/style.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/StudyMatrixView/style.ts deleted file mode 100644 index 0e6eb5513b..0000000000 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/StudyMatrixView/style.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { styled, Button } from "@mui/material"; - -export const StyledButton = styled(Button)(({ theme }) => ({ - backgroundColor: "rgba(180, 180, 180, 0.09)", - color: "white", - borderRight: "none !important", - "&:hover": { - color: "white", - backgroundColor: theme.palette.secondary.main, - }, - "&:disabled": { - backgroundColor: theme.palette.secondary.dark, - color: "white !important", - }, -})); - -export default {}; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/index.tsx deleted file mode 100644 index 8cc91350af..0000000000 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyDataView/index.tsx +++ /dev/null @@ -1,66 +0,0 @@ -/* eslint-disable @typescript-eslint/no-explicit-any */ -import { CSSProperties, ReactNode } from "react"; -import { Box } from "@mui/material"; -import StudyFileView from "./StudyFileView"; -import StudyJsonView from "./StudyJsonView"; -import StudyMatrixView from "./StudyMatrixView/StudyMatrixView"; -import { StudyDataType } from "../../../../../../../common/types"; - -interface PropTypes { - study: string; - type: StudyDataType; - data: string; - studyData: any; - setStudyData: (elm: any) => void; -} - -interface RenderData { - css: CSSProperties; - data: ReactNode; -} - -function StudyDataView(props: PropTypes) { - const { study, type, data, studyData, setStudyData } = props; - const filterOut = ["output", "logs", "Desktop"]; - - const refreshView = () => { - setStudyData({ ...studyData }); - }; - - const renderData = (): RenderData => { - if (type === "file") { - return { - css: { overflow: "auto" }, - data: ( - - ), - }; - } - if (type === "matrix" || type === "matrixfile") { - return { - css: { overflow: "auto" }, - data: ( - - ), - }; - } - return { - css: { overflow: "hidden", paddingTop: "0px" }, - data: , - }; - }; - - const rd = renderData(); - return ( - - {rd.data} - - ); -} - -export default StudyDataView; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyTreeView/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyTreeView/index.tsx deleted file mode 100644 index fc7e2157e5..0000000000 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyTreeView/index.tsx +++ /dev/null @@ -1,91 +0,0 @@ -/* eslint-disable jsx-a11y/interactive-supports-focus */ -/* eslint-disable jsx-a11y/click-events-have-key-events */ -import { Box } from "@mui/material"; -import { TreeItem, TreeView } from "@mui/lab"; -import ExpandMoreIcon from "@mui/icons-material/ExpandMore"; -import ChevronRightIcon from "@mui/icons-material/ChevronRight"; -import { StudyDataType } from "../../../../../../../common/types"; -import { getStudyParams } from "./utils"; - -interface ItemPropTypes { - itemkey: string; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - data: any; - path?: string; - viewer: (type: StudyDataType, data: string) => void; 
-} - -function StudyTreeItem(props: ItemPropTypes) { - const { itemkey, data, path = "/", viewer } = props; - - // if not an object then it's a RawFileNode or MatrixNode - // here we have to decide which viewer to use - const params = getStudyParams(data, path, itemkey); - if (params) { - const FileIcon = params.icon; - return ( - viewer(params.type, params.data)} - > - - {itemkey} - - } - /> - ); - } - - // else this is a folder containing.. stuff (recursion) - return ( - - {Object.keys(data).map((childkey) => ( - - ))} - - ); -} - -interface PropTypes { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - data: any; - view: (type: StudyDataType, data: string) => void; -} - -function StudyTreeView(props: PropTypes) { - const { data, view } = props; - return ( - } - defaultExpandIcon={} - > - {Object.keys(data).map((key) => ( - - ))} - - ); -} - -StudyTreeItem.defaultProps = { - path: "/", -}; - -export default StudyTreeView; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyTreeView/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyTreeView/utils.ts deleted file mode 100644 index b5395ccaba..0000000000 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/StudyTreeView/utils.ts +++ /dev/null @@ -1,39 +0,0 @@ -import IntegrationInstructionsIcon from "@mui/icons-material/IntegrationInstructions"; -import TextSnippetIcon from "@mui/icons-material/TextSnippet"; -import { SvgIconComponent } from "@mui/icons-material"; -import { StudyDataType } from "../../../../../../../common/types"; - -export interface StudyParams { - type: StudyDataType; - icon: SvgIconComponent; - data: string; -} - -export const getStudyParams = ( - // eslint-disable-next-line @typescript-eslint/no-explicit-any - data: any, - path: string, - itemkey: string, -): StudyParams | undefined => { - if (typeof data === "string") { - const tmp = data.split("://"); - if (tmp && tmp.length > 0) { - if (tmp[0] === "json" || tmp[1].endsWith(".json")) { - return { - type: "json", - data: `${path}/${itemkey}`, - icon: IntegrationInstructionsIcon, - }; - } - return { - type: tmp[0] as StudyDataType, - icon: TextSnippetIcon, - data: `${path}/${itemkey}`, - }; - } - return { type: "file", icon: TextSnippetIcon, data: `${path}/${itemkey}` }; - } - return undefined; -}; - -export default {}; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/index.tsx deleted file mode 100644 index 3ba45612c8..0000000000 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/DebugView/index.tsx +++ /dev/null @@ -1,101 +0,0 @@ -import { useEffect, useState, useCallback } from "react"; -import { AxiosError } from "axios"; -import debug from "debug"; -import { useTranslation } from "react-i18next"; -import { useOutletContext } from "react-router-dom"; -import { Box } from "@mui/material"; -import { getStudyData } from "../../../../../../services/api/study"; -import StudyTreeView from "./StudyTreeView"; -import StudyDataView from "./StudyDataView"; -import { StudyDataType, StudyMetadata } from "../../../../../../common/types"; -import useEnqueueErrorSnackbar from "../../../../../../hooks/useEnqueueErrorSnackbar"; -import SimpleLoader from "../../../../../common/loaders/SimpleLoader"; - -const logError = debug("antares:studyview:error"); - -interface ElementView { - type: StudyDataType; - data: string; -} - -function DebugView() { - 
const { study } = useOutletContext<{ study: StudyMetadata }>(); - const [t] = useTranslation(); - const enqueueErrorSnackbar = useEnqueueErrorSnackbar(); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const [studyData, setStudyData] = useState(); - const [loaded, setLoaded] = useState(false); - const [elementView, setElementView] = useState(); - - const initStudyData = useCallback( - async (sid: string) => { - setLoaded(false); - try { - const data = await getStudyData(sid, "", -1); - setStudyData(data); - } catch (e) { - enqueueErrorSnackbar(t("studies.error.retrieveData"), e as AxiosError); - logError("Failed to fetch study data", sid, e); - } finally { - setLoaded(true); - } - }, - [enqueueErrorSnackbar, t], - ); - - useEffect(() => { - if (study) { - initStudyData(study.id); - } - }, [study, initStudyData]); - - return ( - - {study && studyData && ( - <> - - - {studyData && ( - setElementView({ type, data })} - /> - )} - - - - - {elementView && ( - - )} - - - - )} - {!loaded && studyData === undefined && } - - ); -} - -export default DebugView; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Map/Areas/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Map/Areas/index.tsx index 74680200cd..104dada6f6 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Map/Areas/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Map/Areas/index.tsx @@ -11,7 +11,7 @@ import { } from "../../../../../../../redux/selectors"; import useAppDispatch from "../../../../../../../redux/hooks/useAppDispatch"; import AreaConfig from "./AreaConfig"; -import { isSearchMatching } from "../../../../../../../utils/textUtils"; +import { isSearchMatching } from "../../../../../../../utils/stringUtils"; import { setCurrentArea } from "../../../../../../../redux/ducks/studySyntheses"; import { StudyMapNode } from "../../../../../../../redux/ducks/studyMaps"; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Map/MapConfig/Districts/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Map/MapConfig/Districts/index.tsx index e012a6f0d6..901f6f3dec 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Map/MapConfig/Districts/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Map/MapConfig/Districts/index.tsx @@ -10,7 +10,7 @@ import { getStudyMapDistrictsById, } from "../../../../../../../../redux/selectors"; import { SubmitHandlerPlus } from "../../../../../../../common/Form/types"; -import FormTable from "../../../../../../../common/FormTable"; +import TableForm from "../../../../../../../common/TableForm"; import CreateDistrictDialog from "./CreateDistrictDialog"; import useAppDispatch from "../../../../../../../../redux/hooks/useAppDispatch"; import { updateStudyMapDistrict } from "../../../../../../../../redux/ducks/studyMaps"; @@ -123,7 +123,7 @@ function Districts() { {columns.length > 0 && ( - {columns.length > 0 && ( - [ - ...DEFAULT_TABLE_TEMPLATES.map((tp) => ({ - ...tp, - name: t(`study.modelization.tableMode.template.${tp.name}`), - })), - ...(storage.getItem(StorageKey.StudiesModelTableModeTemplates) || []).map( - (tp) => ({ ...tp, id: uuidv4() }), - ), - ]); - - const [selectedTemplateId, setSelectedTemplateId] = useState(templates[0].id); - - const [dialog, setDialog] = useState<{ - type: "add" | "edit" | "delete"; - templateId: TableTemplate["id"]; - } | null>(null); - - const { study } = useOutletContext<{ study: 
StudyMetadata }>(); - - const selectedTemplate = - templates.find((tp) => tp.id === selectedTemplateId) || templates[0]; - - const res = usePromise(async () => { - const { type, columns } = selectedTemplate; - return api.getTableData(study.id, type, columns); - }, [selectedTemplate]); - - // Update local storage - useUpdateEffect(() => { - storage.setItem( - StorageKey.StudiesModelTableModeTemplates, - templates - .filter((tp) => !DEFAULT_TABLE_TEMPLATE_IDS.includes(tp.id)) - // It is useless to keep template ids in local storage - .map(({ id, ...rest }) => rest), - ); - }, [templates]); - - //////////////////////////////////////////////////////////////// - // Utils - //////////////////////////////////////////////////////////////// - - const closeDialog = () => setDialog(null); - - //////////////////////////////////////////////////////////////// - // Event Handlers - //////////////////////////////////////////////////////////////// - - const handleSubmit = (data: SubmitHandlerPlus) => { - return api.setTableData(study.id, selectedTemplate.type, data.dirtyValues); - }; - - const handleDeleteTemplate = () => { - setTemplates((templates) => - templates.filter((tp) => tp.id !== dialog?.templateId), - ); - closeDialog(); - }; - - //////////////////////////////////////////////////////////////// - // JSX - //////////////////////////////////////////////////////////////// - - return ( - <> - setSelectedTemplateId(id)} - contextMenuContent={({ element, close }) => { - const isNotAllowed = DEFAULT_TABLE_TEMPLATE_IDS.includes( - element.id, - ); - return ( - <> - { - event.stopPropagation(); - setDialog({ - type: "edit", - templateId: element.id, - }); - close(); - }} - disabled={isNotAllowed} - > - Edit - - { - event.stopPropagation(); - setDialog({ - type: "delete", - templateId: element.id, - }); - close(); - }} - disabled={isNotAllowed} - > - Delete - - - ); - }} - /> - } - onAdd={() => setDialog({ type: "add", templateId: "" })} - /> - } - right={ - ( - - )} - /> - } - /> - {dialog?.type === "add" && ( - - )} - {dialog?.type === "edit" && ( - tp.id === dialog.templateId) as TableTemplate - } - templates={templates} - setTemplates={setTemplates} - onCancel={closeDialog} - open - /> - )} - {dialog?.type === "delete" && ( - - {t("study.modelization.tableMode.dialog.delete.text", { - 0: templates.find((tp) => tp.id === dialog.templateId)?.name, - })} - - )} - - ); -} - -export default TableMode; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/TableMode/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/TableMode/utils.ts deleted file mode 100644 index 4c85691af3..0000000000 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/TableMode/utils.ts +++ /dev/null @@ -1,130 +0,0 @@ -import { v4 as uuidv4 } from "uuid"; - -export enum TableTemplateType { - Area = "area", - Link = "link", - Cluster = "cluster", - Renewable = "renewable", - BindingConstraint = "binding constraint", -} - -export const TABLE_TEMPLATE_TYPE_OPTIONS = Object.values(TableTemplateType); - -const TABLE_TEMPLATE_COLUMNS_BY_TYPE = { - [TableTemplateType.Area]: [ - // Optimization - Nodal optimization - "nonDispatchablePower", - "dispatchableHydroPower", - "otherDispatchablePower", - "averageUnsuppliedEnergyCost", - "spreadUnsuppliedEnergyCost", - "averageSpilledEnergyCost", - "spreadSpilledEnergyCost", - // Optimization - Filtering - "filterSynthesis", - "filterYearByYear", - // Adequacy patch - "adequacyPatchMode", - ], - [TableTemplateType.Link]: [ - "hurdlesCost", - "loopFlow", - 
"usePhaseShifter", - "transmissionCapacities", - "assetType", - "linkStyle", - "linkWidth", - "displayComments", - "filterSynthesis", - "filterYearByYear", - ], - [TableTemplateType.Cluster]: [ - "group", - "enabled", - "mustRun", - "unitCount", - "nominalCapacity", - "minStablePower", - "spinning", - "minUpTime", - "minDownTime", - "co2", - "marginalCost", - "fixedCost", - "startupCost", - "marketBidCost", - "spreadCost", - "tsGen", - "volatilityForced", - "volatilityPlanned", - "lawForced", - "lawPlanned", - ], - [TableTemplateType.Renewable]: [ - "group", - "tsInterpretation", - "enabled", - "unitCount", - "nominalCapacity", - ], - [TableTemplateType.BindingConstraint]: ["type", "operator", "enabled"], -} as const; - -export type TableTemplateColumnsForType = Array< - (typeof TABLE_TEMPLATE_COLUMNS_BY_TYPE)[T][number] ->; - -export interface TableTemplate< - T extends TableTemplateType = TableTemplateType, -> { - id: string; - name: string; - type: T; - columns: TableTemplateColumnsForType; - frozen: true; -} - -/** - * Allows to check columns validity for specified type. - */ -export function createTableTemplate( - name: string, - type: T, - columns: TableTemplateColumnsForType, -): TableTemplate { - return { id: uuidv4(), name, type, columns, frozen: true }; -} - -export const DEFAULT_TABLE_TEMPLATES: TableTemplate[] = [ - createTableTemplate("economicOpt", TableTemplateType.Area, [ - "averageUnsuppliedEnergyCost", - "spreadUnsuppliedEnergyCost", - "averageSpilledEnergyCost", - "spreadSpilledEnergyCost", - "nonDispatchablePower", - "dispatchableHydroPower", - "otherDispatchablePower", - ]), - createTableTemplate("geographicTrimmingAreas", TableTemplateType.Area, [ - "filterYearByYear", - "filterSynthesis", - ]), - createTableTemplate("geographicTrimmingLinks", TableTemplateType.Link, [ - "filterYearByYear", - "filterSynthesis", - ]), -]; - -export const DEFAULT_TABLE_TEMPLATE_IDS = DEFAULT_TABLE_TEMPLATES.map( - (t) => t.id, -); - -export function getTableColumnsForType(type: TableTemplateType): string[] { - // Arrays have a numeric index signature because of `as const` - return Object.values(TABLE_TEMPLATE_COLUMNS_BY_TYPE[type]); -} - -export type TableData = Record< - string, - Record ->; diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/index.tsx index 06b1a4da83..b034cc2c80 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/index.tsx @@ -33,7 +33,9 @@ function Modelization() { if (firstAreaId) { dispatch(setCurrentArea(firstAreaId)); - navigate(`${basePath}/area/${firstAreaId}`, { replace: true }); + navigate(`${basePath}/area/${firstAreaId}`, { + replace: true, + }); } } }; @@ -45,7 +47,7 @@ function Modelization() { }, { label: t("study.areas"), - path: `${basePath}/area/${areaId}`, + path: `${basePath}/area/${encodeURI(areaId)}`, onClick: handleAreasClick, }, { @@ -56,14 +58,6 @@ function Modelization() { label: t("study.bindingconstraints"), path: `${basePath}/bindingcontraint`, }, - { - label: t("study.debug"), - path: `${basePath}/debug`, - }, - { - label: t("study.modelization.tableMode"), - path: `${basePath}/tablemode`, - }, ]; }, [areaId, areas, dispatch, navigate, study?.id, t]); diff --git a/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx b/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx index 1f6b4a9bdc..eab7f08950 100644 
--- a/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx @@ -27,7 +27,7 @@ import { getStudyOutput, } from "../../../../../../redux/selectors"; import { getStudyData } from "../../../../../../services/api/study"; -import { isSearchMatching } from "../../../../../../utils/textUtils"; +import { isSearchMatching } from "../../../../../../utils/stringUtils"; import EditableMatrix from "../../../../../common/EditableMatrix"; import PropertiesView from "../../../../../common/PropertiesView"; import SplitLayoutView from "../../../../../common/SplitLayoutView"; diff --git a/webapp/src/components/App/Singlestudy/explore/TabWrapper.tsx b/webapp/src/components/App/Singlestudy/explore/TabWrapper.tsx index 482801c482..6188b99dc4 100644 --- a/webapp/src/components/App/Singlestudy/explore/TabWrapper.tsx +++ b/webapp/src/components/App/Singlestudy/explore/TabWrapper.tsx @@ -5,7 +5,7 @@ import { styled, SxProps, Theme } from "@mui/material"; import Tabs from "@mui/material/Tabs"; import Tab from "@mui/material/Tab"; import Box from "@mui/material/Box"; -import { Outlet, useLocation, useNavigate } from "react-router-dom"; +import { Outlet, matchPath, useLocation, useNavigate } from "react-router-dom"; import { StudyMetadata } from "../../../../common/types"; import { mergeSxProp } from "../../../../utils/muiUtils"; @@ -56,21 +56,11 @@ function TabWrapper({ const [selectedTab, setSelectedTab] = useState(0); useEffect(() => { - const getTabIndex = (): number => { - const index = tabList.findIndex( - (tab) => location.pathname.substring(0, tab.path.length) === tab.path, - ); - - if (index >= 0) { - return index; - } - return 0; - }; - - if (study) { - setSelectedTab(getTabIndex); - } - }, [location.pathname, study, tabList]); + const matchedTab = tabList.findIndex((tab) => + matchPath({ path: tab.path, end: false }, location.pathname), + ); + setSelectedTab(matchedTab >= 0 ? 
matchedTab : 0); + }, [location.pathname, tabList]); //////////////////////////////////////////////////////////////// // Event Handlers @@ -79,11 +69,7 @@ function TabWrapper({ const handleChange = (event: React.SyntheticEvent, newValue: number) => { setSelectedTab(newValue); navigate(tabList[newValue].path); - - const onTabClick = tabList[newValue].onClick; - if (onTabClick) { - onTabClick(); - } + tabList[newValue].onClick?.(); }; //////////////////////////////////////////////////////////////// diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/TableMode/dialogs/CreateTemplateTableDialog.tsx b/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/CreateTemplateTableDialog.tsx similarity index 84% rename from webapp/src/components/App/Singlestudy/explore/Modelization/TableMode/dialogs/CreateTemplateTableDialog.tsx rename to webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/CreateTemplateTableDialog.tsx index 24c4aea21c..24f7accc77 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/TableMode/dialogs/CreateTemplateTableDialog.tsx +++ b/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/CreateTemplateTableDialog.tsx @@ -1,14 +1,10 @@ import { useTranslation } from "react-i18next"; import AddCircleIcon from "@mui/icons-material/AddCircle"; -import { - createTableTemplate, - TableTemplate, - TableTemplateType, -} from "../utils"; +import { createTableTemplate, type TableTemplate } from "../utils"; import TableTemplateFormDialog, { TableTemplateFormDialogProps, } from "./TableTemplateFormDialog"; -import { SubmitHandlerPlus } from "../../../../../../common/Form/types"; +import { SubmitHandlerPlus } from "../../../../../common/Form/types"; interface Props extends Pick { @@ -42,12 +38,12 @@ function CreateTemplateTableDialog(props: Props) { return ( resetField("columns")} name="type" diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/TableMode/dialogs/UpdateTemplateTableDialog.tsx b/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/UpdateTemplateTableDialog.tsx similarity index 90% rename from webapp/src/components/App/Singlestudy/explore/Modelization/TableMode/dialogs/UpdateTemplateTableDialog.tsx rename to webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/UpdateTemplateTableDialog.tsx index 31238d8b91..ba8569d0a3 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/TableMode/dialogs/UpdateTemplateTableDialog.tsx +++ b/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/UpdateTemplateTableDialog.tsx @@ -4,7 +4,7 @@ import TableTemplateFormDialog, { TableTemplateFormDialogProps, } from "./TableTemplateFormDialog"; import { TableTemplate } from "../utils"; -import { SubmitHandlerPlus } from "../../../../../../common/Form/types"; +import { SubmitHandlerPlus } from "../../../../../common/Form/types"; interface Props extends Pick { @@ -36,7 +36,7 @@ function UpdateTemplateTableDialog(props: Props) { return ( (() => { + const list = + storage.getItem(StorageKey.StudiesModelTableModeTemplates) || []; + return list.map((tp) => ({ ...tp, id: uuidv4() })); + }); + + const [selectedTemplateId, setSelectedTemplateId] = useState< + TableTemplate["id"] | undefined + >(templates[0]?.id); + + const [dialog, setDialog] = useState<{ + type: "add" | "edit" | "delete"; + templateId: TableTemplate["id"]; + } | null>(null); + + const { study } = useOutletContext<{ study: StudyMetadata }>(); + const selectedTemplate = templates.find((tp) 
=> tp.id === selectedTemplateId); + const dialogTemplate = + dialog && templates.find((tp) => tp.id === dialog.templateId); + + // Update local storage + useUpdateEffect(() => { + storage.setItem( + StorageKey.StudiesModelTableModeTemplates, + templates + // It is useless to keep template ids in local storage + .map(({ id, ...rest }) => rest), + ); + }, [templates]); + + //////////////////////////////////////////////////////////////// + // Utils + //////////////////////////////////////////////////////////////// + + const closeDialog = () => setDialog(null); + + //////////////////////////////////////////////////////////////// + // Event Handlers + //////////////////////////////////////////////////////////////// + + const handleDeleteTemplate = () => { + setTemplates((templates) => + templates.filter((tp) => tp.id !== dialog?.templateId), + ); + closeDialog(); + }; + + //////////////////////////////////////////////////////////////// + // JSX + //////////////////////////////////////////////////////////////// + + return ( + <> + setSelectedTemplateId(id)} + contextMenuContent={({ element, close }) => ( + <> + { + event.stopPropagation(); + setDialog({ + type: "edit", + templateId: element.id, + }); + close(); + }} + > + Edit + + { + event.stopPropagation(); + setDialog({ + type: "delete", + templateId: element.id, + }); + close(); + }} + > + Delete + + + )} + /> + } + onAdd={() => setDialog({ type: "add", templateId: "" })} + /> + } + right={ + selectedTemplate && ( + + ) + } + /> + {dialog?.type === "add" && ( + + )} + {dialog?.type === "edit" && dialogTemplate && ( + + )} + {dialog?.type === "delete" && dialogTemplate && ( + + {t("study.tableMode.dialog.delete.text", { + name: dialogTemplate.name, + })} + + )} + + ); +} + +export default TableModeList; diff --git a/webapp/src/components/App/Singlestudy/explore/TableModeList/utils.ts b/webapp/src/components/App/Singlestudy/explore/TableModeList/utils.ts new file mode 100644 index 0000000000..bcd307a954 --- /dev/null +++ b/webapp/src/components/App/Singlestudy/explore/TableModeList/utils.ts @@ -0,0 +1,37 @@ +import { v4 as uuidv4 } from "uuid"; +import { + TableModeColumnsForType, + TableModeType, +} from "../../../../../services/api/studies/tableMode/type"; +import { TABLE_MODE_COLUMNS_BY_TYPE } from "../../../../../services/api/studies/tableMode/constants"; + +//////////////////////////////////////////////////////////////// +// Types +//////////////////////////////////////////////////////////////// + +export interface TableTemplate { + id: string; + name: string; + type: T; + columns: TableModeColumnsForType; +} + +//////////////////////////////////////////////////////////////// +// Functions +//////////////////////////////////////////////////////////////// + +/** + * Allows to check columns validity for specified type. 
+ */ +export function createTableTemplate( + name: string, + type: T, + columns: TableModeColumnsForType, +): TableTemplate { + return { id: uuidv4(), name, type, columns }; +} + +export function getTableColumnsForType(type: TableModeType): readonly string[] { + // Arrays have a numeric index signature because of `as const` + return TABLE_MODE_COLUMNS_BY_TYPE[type]; +} diff --git a/webapp/src/components/App/Singlestudy/explore/Xpansion/Settings/SettingsForm.tsx b/webapp/src/components/App/Singlestudy/explore/Xpansion/Settings/SettingsForm.tsx index cfb0e73c99..085917473a 100644 --- a/webapp/src/components/App/Singlestudy/explore/Xpansion/Settings/SettingsForm.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Xpansion/Settings/SettingsForm.tsx @@ -34,7 +34,6 @@ function SettingsForm(props: PropType) { const ucType = ["expansion_fast", "expansion_accurate"]; const master = ["relaxed", "integer"]; const solver = ["Cbc", "Xpress"]; - const cutType = ["yearly", "weekly", "average"]; const handleChange = (key: string, value: string | number) => { setSaveAllowed(true); @@ -72,7 +71,7 @@ function SettingsForm(props: PropType) { justifyContent="space-between" alignItems="flex-end" > - {t("global.settings")} + {t("xpansion.optimization")} )} {onDelete && (
+ +
+ ); } -export default FormTable; +export default TableForm; diff --git a/webapp/src/components/common/FormTable/utils.ts b/webapp/src/components/common/TableForm/utils.ts similarity index 100% rename from webapp/src/components/common/FormTable/utils.ts rename to webapp/src/components/common/TableForm/utils.ts diff --git a/webapp/src/components/common/TableMode.tsx b/webapp/src/components/common/TableMode.tsx new file mode 100644 index 0000000000..a96780f43d --- /dev/null +++ b/webapp/src/components/common/TableMode.tsx @@ -0,0 +1,56 @@ +import { StudyMetadata } from "../../common/types"; +import usePromise from "../../hooks/usePromise"; +import { + getTableMode, + setTableMode, +} from "../../services/api/studies/tableMode"; +import { + TableData, + TableModeColumnsForType, + TableModeType, +} from "../../services/api/studies/tableMode/type"; +import { SubmitHandlerPlus } from "./Form/types"; +import TableForm from "./TableForm"; +import UsePromiseCond from "./utils/UsePromiseCond"; + +export interface TableModeProps { + studyId: StudyMetadata["id"]; + type: T; + columns: TableModeColumnsForType; +} + +function TableMode(props: TableModeProps) { + const { studyId, type, columns } = props; + + const res = usePromise(async () => { + return getTableMode(studyId, type, columns); + }, [studyId, type, JSON.stringify(columns)]); + + //////////////////////////////////////////////////////////////// + // Event Handlers + //////////////////////////////////////////////////////////////// + + const handleSubmit = (data: SubmitHandlerPlus) => { + return setTableMode(studyId, type, data.dirtyValues); + }; + + //////////////////////////////////////////////////////////////// + // JSX + //////////////////////////////////////////////////////////////// + + return ( + ( + + )} + /> + ); +} + +export default TableMode; diff --git a/webapp/src/components/common/TabsView.tsx b/webapp/src/components/common/TabsView.tsx new file mode 100644 index 0000000000..3c58e07c3c --- /dev/null +++ b/webapp/src/components/common/TabsView.tsx @@ -0,0 +1,60 @@ +/* eslint-disable react/no-array-index-key */ +import { TabContext, TabList, TabListProps, TabPanel } from "@mui/lab"; +import { Tab } from "@mui/material"; +import { useState } from "react"; +import { mergeSxProp } from "../../utils/muiUtils"; + +interface TabsViewProps { + items: Array<{ + label: string; + content?: React.ReactNode; + }>; + TabListProps?: TabListProps; +} + +function TabsView({ items, TabListProps }: TabsViewProps) { + const [value, setValue] = useState("0"); + + //////////////////////////////////////////////////////////////// + // Event Handlers + //////////////////////////////////////////////////////////////// + + const handleChange = (event: React.SyntheticEvent, newValue: string) => { + setValue(newValue); + TabListProps?.onChange?.(event, newValue); + }; + + //////////////////////////////////////////////////////////////// + // JSX + //////////////////////////////////////////////////////////////// + + return ( + + + {items.map(({ label }, index) => ( + + ))} + + {items.map(({ content }, index) => ( + + {content} + + ))} + + ); +} + +export default TabsView; diff --git a/webapp/src/components/common/dialogs/FormDialog.tsx b/webapp/src/components/common/dialogs/FormDialog.tsx index 8c1097f4e9..8f0ab7673f 100644 --- a/webapp/src/components/common/dialogs/FormDialog.tsx +++ b/webapp/src/components/common/dialogs/FormDialog.tsx @@ -3,6 +3,9 @@ import { Button } from "@mui/material"; import { useId, useState } from "react"; import { FieldValues, FormState 
} from "react-hook-form"; import { useTranslation } from "react-i18next"; +import { LoadingButton } from "@mui/lab"; +import * as RA from "ramda-adjunct"; +import SaveIcon from "@mui/icons-material/Save"; import BasicDialog, { BasicDialogProps } from "./BasicDialog"; import Form, { FormProps } from "../Form"; @@ -23,6 +26,7 @@ export interface FormDialogProps< > extends SuperType { cancelButtonText?: string; onCancel: VoidFunction; + isCreationForm?: boolean; } // TODO: `formState.isSubmitting` doesn't update when auto submit enabled @@ -44,6 +48,8 @@ function FormDialog< onClose, cancelButtonText, submitButtonText, + submitButtonIcon, + isCreationForm = false, ...dialogProps } = props; @@ -59,7 +65,7 @@ function FormDialog< const { t } = useTranslation(); const formId = useId(); const [isSubmitting, setIsSubmitting] = useState(false); - const [isSubmitAllowed, setIsSubmitAllowed] = useState(false); + const [isSubmitAllowed, setIsSubmitAllowed] = useState(isCreationForm); //////////////////////////////////////////////////////////////// // Event Handlers @@ -69,7 +75,7 @@ function FormDialog< const { isSubmitting, isDirty } = formState; onStateChange?.(formState); setIsSubmitting(isSubmitting); - setIsSubmitAllowed(isDirty && !isSubmitting); + setIsSubmitAllowed((isDirty || isCreationForm) && !isSubmitting); }; //////////////////////////////////////////////////////////////// @@ -97,14 +103,23 @@ function FormDialog< {cancelButtonText || t("button.close")} {!autoSubmit && ( - + )} } diff --git a/webapp/src/services/api/constants.ts b/webapp/src/services/api/constants.ts new file mode 100644 index 0000000000..421ab814ed --- /dev/null +++ b/webapp/src/services/api/constants.ts @@ -0,0 +1,4 @@ +const API_URL_BASE = "v1"; +const STUDIES_API_URL = `${API_URL_BASE}/studies/{studyId}`; + +export const TABLE_MODE_API_URL = `${STUDIES_API_URL}/tablemode`; diff --git a/webapp/src/services/api/forms/tableMode.ts b/webapp/src/services/api/forms/tableMode.ts deleted file mode 100644 index ab146fa1fc..0000000000 --- a/webapp/src/services/api/forms/tableMode.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { snakeCase } from "lodash"; -import { DeepPartial } from "react-hook-form"; -import { StudyMetadata } from "../../../common/types"; -import { - TableData, - TableTemplateColumnsForType, - TableTemplateType, -} from "../../../components/App/Singlestudy/explore/Modelization/TableMode/utils"; -import client from "../client"; - -function makeRequestURL(studyId: StudyMetadata["id"]): string { - return `v1/studies/${studyId}/tablemode/form`; -} - -export async function getTableData( - studyId: StudyMetadata["id"], - type: T, - columns: TableTemplateColumnsForType, -): Promise { - const res = await client.get(makeRequestURL(studyId), { - params: { - table_type: type, - columns: columns.map(snakeCase).join(","), - }, - }); - return res.data; -} - -export function setTableData( - studyId: StudyMetadata["id"], - type: TableTemplateType, - data: DeepPartial, -): Promise { - return client.put(makeRequestURL(studyId), data, { - params: { - table_type: type, - }, - }); -} diff --git a/webapp/src/services/api/studies/tableMode/constants.ts b/webapp/src/services/api/studies/tableMode/constants.ts new file mode 100644 index 0000000000..70526c7484 --- /dev/null +++ b/webapp/src/services/api/studies/tableMode/constants.ts @@ -0,0 +1,73 @@ +const AREA = "area"; +const LINK = "link"; +const CLUSTER = "cluster"; +const RENEWABLE = "renewable"; +const BINDING_CONSTRAINT = "binding constraint"; + +export const TABLE_MODE_TYPES = [ + 
AREA, + LINK, + CLUSTER, + RENEWABLE, + BINDING_CONSTRAINT, +] as const; + +export const TABLE_MODE_COLUMNS_BY_TYPE = { + [AREA]: [ + // Optimization - Nodal optimization + "nonDispatchablePower", + "dispatchableHydroPower", + "otherDispatchablePower", + "averageUnsuppliedEnergyCost", + "spreadUnsuppliedEnergyCost", + "averageSpilledEnergyCost", + "spreadSpilledEnergyCost", + // Optimization - Filtering + "filterSynthesis", + "filterYearByYear", + // Adequacy patch + "adequacyPatchMode", + ], + [LINK]: [ + "hurdlesCost", + "loopFlow", + "usePhaseShifter", + "transmissionCapacities", + "assetType", + "linkStyle", + "linkWidth", + "displayComments", + "filterSynthesis", + "filterYearByYear", + ], + [CLUSTER]: [ + "group", + "enabled", + "mustRun", + "unitCount", + "nominalCapacity", + "minStablePower", + "spinning", + "minUpTime", + "minDownTime", + "co2", + "marginalCost", + "fixedCost", + "startupCost", + "marketBidCost", + "spreadCost", + "tsGen", + "volatilityForced", + "volatilityPlanned", + "lawForced", + "lawPlanned", + ], + [RENEWABLE]: [ + "group", + "tsInterpretation", + "enabled", + "unitCount", + "nominalCapacity", + ], + [BINDING_CONSTRAINT]: ["type", "operator", "enabled"], +} as const; diff --git a/webapp/src/services/api/studies/tableMode/index.ts b/webapp/src/services/api/studies/tableMode/index.ts new file mode 100644 index 0000000000..cd77a71891 --- /dev/null +++ b/webapp/src/services/api/studies/tableMode/index.ts @@ -0,0 +1,35 @@ +import { snakeCase } from "lodash"; +import { DeepPartial } from "react-hook-form"; +import { StudyMetadata } from "../../../../common/types"; +import client from "../../client"; +import { format } from "../../../../utils/stringUtils"; +import { TABLE_MODE_API_URL } from "../../constants"; +import type { TableData, TableModeColumnsForType, TableModeType } from "./type"; + +export async function getTableMode( + studyId: StudyMetadata["id"], + type: T, + columns: TableModeColumnsForType, +): Promise { + const url = format(TABLE_MODE_API_URL, { studyId }); + const res = await client.get(url, { + params: { + table_type: type, + columns: columns.map(snakeCase).join(","), + }, + }); + return res.data; +} + +export function setTableMode( + studyId: StudyMetadata["id"], + type: TableModeType, + data: DeepPartial, +): Promise { + const url = format(TABLE_MODE_API_URL, { studyId }); + return client.put(url, data, { + params: { + table_type: type, + }, + }); +} diff --git a/webapp/src/services/api/studies/tableMode/type.ts b/webapp/src/services/api/studies/tableMode/type.ts new file mode 100644 index 0000000000..71b751d875 --- /dev/null +++ b/webapp/src/services/api/studies/tableMode/type.ts @@ -0,0 +1,12 @@ +import { TABLE_MODE_COLUMNS_BY_TYPE, TABLE_MODE_TYPES } from "./constants"; + +export type TableModeType = (typeof TABLE_MODE_TYPES)[number]; + +export type TableModeColumnsForType = Array< + (typeof TABLE_MODE_COLUMNS_BY_TYPE)[T][number] +>; + +export type TableData = Record< + string, + Record +>; diff --git a/webapp/src/services/api/studydata.ts b/webapp/src/services/api/studydata.ts index 424f0e972f..cef29e4e7e 100644 --- a/webapp/src/services/api/studydata.ts +++ b/webapp/src/services/api/studydata.ts @@ -4,6 +4,7 @@ import { LinkInfoWithUI, UpdateAreaUi, } from "../../common/types"; +import { CreateBindingConstraint } from "../../components/App/Singlestudy/Commands/Edition/commandTypes"; import { BindingConstFields, BindingConstFieldsDTO, @@ -143,6 +144,17 @@ export const updateBindingConstraint = async ( return res.data; }; +export const 
createBindingConstraint = async ( + studyId: string, + data: CreateBindingConstraint, +): Promise => { + const res = await client.post( + `/v1/studies/${studyId}/bindingconstraints`, + data, + ); + return res.data; +}; + export const getClustersAndLinks = async ( uuid: string, ): Promise => { diff --git a/webapp/src/services/utils/localStorage.ts b/webapp/src/services/utils/localStorage.ts index ef01aae017..ee8b599ded 100644 --- a/webapp/src/services/utils/localStorage.ts +++ b/webapp/src/services/utils/localStorage.ts @@ -1,7 +1,7 @@ import * as RA from "ramda-adjunct"; import packages from "../../../package.json"; import { UserInfo } from "../../common/types"; -import { TableTemplate } from "../../components/App/Singlestudy/explore/Modelization/TableMode/utils"; +import { TableTemplate } from "../../components/App/Singlestudy/explore/TableModeList/utils"; import { StudiesSortConf, StudiesState } from "../../redux/ducks/studies"; import { UIState } from "../../redux/ducks/ui"; diff --git a/webapp/src/utils/textUtils.ts b/webapp/src/utils/stringUtils.ts similarity index 56% rename from webapp/src/utils/textUtils.ts rename to webapp/src/utils/stringUtils.ts index d5ad945296..1b83ea6b6d 100644 --- a/webapp/src/utils/textUtils.ts +++ b/webapp/src/utils/stringUtils.ts @@ -9,3 +9,12 @@ export const isSearchMatching = R.curry( return RA.ensureArray(values).find(isMatching); }, ); + +/** + * Formats a string with values. + * @example + * format("Hello {name}", { name: "John" }); // returns "Hello John" + */ +export function format(str: string, values: Record): string { + return str.replace(/{([a-zA-Z0-9]+)}/g, (_, key) => values[key]); +} diff --git a/webapp/src/utils/studiesUtils.ts b/webapp/src/utils/studiesUtils.ts index a4184f5a24..ca3fc3da57 100644 --- a/webapp/src/utils/studiesUtils.ts +++ b/webapp/src/utils/studiesUtils.ts @@ -3,7 +3,7 @@ import * as R from "ramda"; import * as RA from "ramda-adjunct"; import { StudyMetadata, StudyType } from "../common/types"; import { StudiesSortConf, StudyFilters } from "../redux/ducks/studies"; -import { isSearchMatching } from "./textUtils"; +import { isSearchMatching } from "./stringUtils"; //////////////////////////////////////////////////////////////// // Sort
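To close the loop on the API refactor above: endpoint templates in `services/api/constants.ts` carry `{placeholder}` segments that the new `format` helper from `stringUtils` fills in at call time, as `getTableMode`/`setTableMode` do. A usage sketch (the study id is made up; import paths assume a module sitting next to `services/api`):

```ts
import { format } from "../../utils/stringUtils";
import { TABLE_MODE_API_URL } from "./constants";

// TABLE_MODE_API_URL === "v1/studies/{studyId}/tablemode"
const url = format(TABLE_MODE_API_URL, { studyId: "0b1c2d3e" });
// -> "v1/studies/0b1c2d3e/tablemode"
```

Keeping the placeholder in a shared constant means study-id interpolation lives in one place instead of being re-spelled at every call site.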