Skip to content

Commit

Permalink
Showing 231 changed files with 23,193 additions and 3,503 deletions.
46 changes: 46 additions & 0 deletions .github/ISSUE_TEMPLATE/bug_report.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: 'bug'
assignees: ''

---

## Description

Provide a clear and concise description of the issue.
## Steps to Reproduce

### Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
## Actual Behavior

Describe what actually happened.
## Expected Behavior

Describe what you expected to happen.
## Screenshots

If applicable, add screenshots to help explain your problem.
## Possible Solution

If you have a solution in mind, describe it here.
## Environment

- [ ] Production
- [ ] Staging
## Additional Information
### Add any other context about the problem here.

- If applicable, provide the study ID that this issue relates to:
- Any relevant logs or error messages: [e.g. console outputs, stack traces]
- Any relevant code snippets: [e.g. relevant function calls, configuration files]
- Any relevant library versions: [e.g. React 17.0.1]
- The date and time when the issue occurred:
- Browser and version: [e.g. Chrome 88, Safari 14]
- Operating System: [e.g. Windows 10, macOS 11.2]

29 changes: 29 additions & 0 deletions .github/ISSUE_TEMPLATE/feature_request.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: 'feature request'
assignees: ''

---

## Problem Statement

A concise statement that clearly defines the problem the feature is trying to solve, or user problem it addresses:

## Proposed Solution

A high-level description of the proposed solution, including:

- The desired outcome

- Screenshots or mockups that illustrate the desired outcome (if available)

- Any specific requirements or constraints for the feature

- Add any other context or examples to help explain the requested feature

## Developer Notes

- Any potential roadblocks or concerns: [e.g. dependencies on other features, limitations in technology]

9 changes: 0 additions & 9 deletions .github/issue-branch.yml

This file was deleted.

4 changes: 2 additions & 2 deletions .github/workflows/compatibility.yml
Original file line number Diff line number Diff line change
@@ -10,7 +10,7 @@ jobs:
strategy:
max-parallel: 9
matrix:
os: [windows-latest, ubuntu-latest, macOS-latest]
os: [windows-latest, ubuntu-20.04, macOS-latest]
python-version: [ 3.8 ]

steps:
@@ -35,7 +35,7 @@ jobs:
strategy:
max-parallel: 9
matrix:
os: [ windows-latest, ubuntu-latest, macOS-latest ]
os: [ windows-latest, ubuntu-20.04, macOS-latest ]
node-version: [ 10.x, 14.x ]
steps:
- name: Checkout github repo
11 changes: 3 additions & 8 deletions .github/workflows/deploy.yml
Original file line number Diff line number Diff line change
@@ -11,7 +11,7 @@ jobs:
strategy:
max-parallel: 3
matrix:
os: [windows-latest, ubuntu-latest]
os: [windows-latest, ubuntu-20.04]

steps:
- name: Checkout github repo (+ download lfs dependencies)
@@ -46,18 +46,13 @@ jobs:
- name: Install Windows dependencies
if: matrix.os == 'windows-latest'
run: pip install -r requirements-windows.txt
- name: Fix symlink for windows
if: matrix.os == 'windows-latest'
run: |
rm antareslauncher
ln -s antares-launcher\antareslauncher antareslauncher
- name: Generate Windows binary
if: matrix.os == 'windows-latest'
run: |
git log -1 HEAD --format=%H > .\resources\commit_id
pyinstaller AntaresWebWin.spec
- name: Generate linux binary
if: matrix.os == 'ubuntu-latest'
if: matrix.os == 'ubuntu-20.04'
run: |
git log -1 HEAD --format=%H > .\resources\commit_id
pyinstaller AntaresWebLinux.spec
@@ -67,7 +62,7 @@ jobs:
git log -1 HEAD --format=%H > .\resources\commit_id
pyinstaller -F antarest\worker\archive_worker_service.py -n AntaresWebWorker --add-data ".\resources;.\resources"
- name: Generate Linux worker binary
if: matrix.os == 'ubuntu-latest'
if: matrix.os == 'ubuntu-20.04'
run: |
git log -1 HEAD --format=%H > .\resources\commit_id
pyinstaller -F antarest/worker/archive_worker_service.py -n AntaresWebWorker --add-data resources:resources
19 changes: 7 additions & 12 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
@@ -6,7 +6,7 @@ on:

jobs:
python-lint:
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
steps:
- name: Checkout github repo (+ download lfs dependencies)
uses: actions/checkout@v2
@@ -36,7 +36,7 @@ jobs:
strategy:
max-parallel: 9
matrix:
os: [windows-latest, ubuntu-latest]
os: [windows-latest, ubuntu-20.04]

steps:
- name: Checkout github repo (+ download lfs dependencies)
@@ -47,11 +47,6 @@ jobs:
uses: actions/setup-python@v1
with:
python-version: 3.8
- name: Fix symlink for windows
if: matrix.os == 'windows-latest'
run: |
rm antareslauncher
ln -s antares-launcher\antareslauncher antareslauncher
- name: Install dependencies
run: |
python -m pip install --upgrade pip
@@ -60,11 +55,11 @@ jobs:
run: |
pytest --cov antarest --cov-report xml
- name: Fix code coverage paths
if: matrix.os == 'ubuntu-latest'
if: matrix.os == 'ubuntu-20.04'
run: |
sed -i 's/\/home\/runner\/work\/AntaREST\/AntaREST/\/github\/workspace/g' coverage.xml
- name: Archive code coverage results
if: matrix.os == 'ubuntu-latest'
if: matrix.os == 'ubuntu-20.04'
uses: actions/upload-artifact@v3
with:
name: python-code-coverage-report
@@ -74,7 +69,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ ubuntu-latest ]
os: [ ubuntu-20.04 ]
steps:
- name: Checkout github repo
uses: actions/checkout@v1
@@ -106,7 +101,7 @@ jobs:
path: webapp/coverage/lcov.info

sonarcloud:
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
needs: [python-test, npm-test]
steps:
- uses: actions/checkout@v2
@@ -123,4 +118,4 @@ jobs:
uses: sonarsource/sonarcloud-github-action@master
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
13 changes: 9 additions & 4 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -13,10 +13,15 @@ COPY ./scripts /scripts
COPY ./alembic /alembic
COPY ./alembic.ini /alembic.ini

COPY ./antares-launcher /antares-launcher
RUN ln -s /antares-launcher/antareslauncher /antareslauncher
RUN mkdir /conf/antares-launcher
RUN cp /antares-launcher/requirements.txt /conf/antares-launcher/requirements.txt
# > IMPORTANT: The `antares-launcher` project (source files) is no longer needed,
# > because the `Antares-Launcher` Python library is now declared as a dependency
# > in the `requirements.txt` file.
# > In other words, we can dispense with the creation of the symbolic link.

# COPY ./antares-launcher /antares-launcher
# RUN ln -s /antares-launcher/antareslauncher /antareslauncher
# RUN mkdir /conf/antares-launcher
# RUN cp /antares-launcher/requirements.txt /conf/antares-launcher/requirements.txt

RUN ./scripts/install-debug.sh

1 change: 0 additions & 1 deletion antareslauncher

This file was deleted.

13 changes: 12 additions & 1 deletion antarest/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,16 @@
__version__ = "2.12.2"
"""
Antares Web
This module contains the project metadata.
"""
from pathlib import Path

# Standard project metadata

__version__ = "2.13.0"
__author__ = "RTE, Antares Web Team"
__date__ = "2023-03-09"
# noinspection SpellCheckingInspection
__credits__ = "(c) Réseau de Transport de l’Électricité (RTE)"

ROOT_DIR: Path = Path(__file__).resolve().parent
6 changes: 4 additions & 2 deletions antarest/core/cache/business/local_chache.py
Original file line number Diff line number Diff line change
@@ -20,11 +20,13 @@ class LocalCacheElement(BaseModel):

class LocalCache(ICache):
def __init__(self, config: CacheConfig = CacheConfig()):
self.cache: Dict[str, LocalCacheElement] = dict()
self.cache: Dict[str, LocalCacheElement] = {}
self.lock = threading.Lock()
self.checker_delay = config.checker_delay
self.checker_thread = threading.Thread(
target=self.checker, daemon=True
target=self.checker,
name=self.__class__.__name__,
daemon=True,
)

def start(self) -> None:
9 changes: 5 additions & 4 deletions antarest/core/config.py
Original file line number Diff line number Diff line change
@@ -73,9 +73,9 @@ class WorkspaceConfig:
def from_dict(data: JSON) -> "WorkspaceConfig":
return WorkspaceConfig(
path=Path(data["path"]),
groups=data.get("groups", list()),
groups=data.get("groups", []),
filter_in=data.get("filter_in", [".*"]),
filter_out=data.get("filter_out", list()),
filter_out=data.get("filter_out", []),
)


@@ -254,7 +254,7 @@ class LoggingConfig:
@staticmethod
def from_dict(data: JSON) -> "LoggingConfig":
logging_config: Dict[str, Any] = data or {}
logfile: Optional[str] = logging_config.get("logfile", None)
logfile: Optional[str] = logging_config.get("logfile")
return LoggingConfig(
logfile=Path(logfile) if logfile is not None else None,
json=logging_config.get("json", False),
@@ -287,6 +287,7 @@ class EventBusConfig:
Sub config object dedicated to eventbus module
"""

# noinspection PyUnusedLocal
@staticmethod
def from_dict(data: JSON) -> "EventBusConfig":
return EventBusConfig()
@@ -298,7 +299,7 @@ class CacheConfig:
Sub config object dedicated to cache module
"""

checker_delay: float = 0.2 # in ms
checker_delay: float = 0.2 # in seconds

@staticmethod
def from_dict(data: JSON) -> "CacheConfig":
8 changes: 8 additions & 0 deletions antarest/core/exceptions.py
Original file line number Diff line number Diff line change
@@ -216,3 +216,11 @@ def __init__(self, *district_ids: str):
class BadEditInstructionException(HTTPException):
def __init__(self, message: str) -> None:
super().__init__(HTTPStatus.BAD_REQUEST, message)


class CannotScanInternalWorkspace(HTTPException):
def __init__(self) -> None:
super().__init__(
HTTPStatus.BAD_REQUEST,
"You cannot scan the default internal workspace",
)
28 changes: 18 additions & 10 deletions antarest/core/interfaces/eventbus.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
from abc import ABC, abstractmethod
from enum import Enum
from typing import Any, Callable, Optional, List, Awaitable

from pydantic import BaseModel
from typing import Any, Awaitable, Callable, List, Optional

from antarest.core.model import PermissionInfo
from pydantic import BaseModel


class EventType(str, Enum):
@@ -49,8 +48,8 @@ class EventChannelDirectory:
class Event(BaseModel):
type: EventType
payload: Any
permissions: PermissionInfo = PermissionInfo()
channel: Optional[str] = None
permissions: PermissionInfo
channel: str = ""


class IEventBus(ABC):
@@ -79,13 +78,22 @@ def add_listener(
type_filter: Optional[List[EventType]] = None,
) -> str:
"""
Add an event listener listener
@param listener listener callback
@param type_filter list of event types to listen to (or None to catch all)
Add a new event listener in the event bus.
The listener can listen to several types of events, depending on the filter
list. If not specified, the listener will listen to all event types.
Beware of the fact that in gunicorn, listeners will be called on the same event as many as there is workers
Note:
Be aware that in `gunicorn`, the listeners will be called on the same
event as many times as there are workers.
Args:
listener: callback of the listener
type_filter: list of event types to listen to (or `None` to catch everything).
Returns:
Listener registration ID (usually a UUID).
"""
pass

@abstractmethod
def remove_listener(self, listener_id: str) -> None:
6 changes: 5 additions & 1 deletion antarest/core/interfaces/service.py
Original file line number Diff line number Diff line change
@@ -4,7 +4,11 @@

class IService(ABC):
def __init__(self) -> None:
self.thread = threading.Thread(target=self._loop, daemon=True)
self.thread = threading.Thread(
target=self._loop,
name=self.__class__.__name__,
daemon=True,
)

def start(self, threaded: bool = True) -> None:
if threaded:
17 changes: 7 additions & 10 deletions antarest/core/maintenance/model.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,14 @@
from enum import Enum
from typing import Optional

from pydantic import BaseModel
from sqlalchemy import Column, String, Integer, Boolean, DateTime, ForeignKey, Sequence # type: ignore
from sqlalchemy.orm import relationship # type: ignore


class MaintenanceMode(str, Enum):
NORMAL_MODE = "NORMAL"
MAINTENANCE_MODE = "MAINTENANCE"

@staticmethod
def to_str(element: bool) -> str:
if element:
return MaintenanceMode.MAINTENANCE_MODE.value
return MaintenanceMode.NORMAL_MODE.value
@classmethod
def from_bool(cls, flag: bool) -> "MaintenanceMode":
return {False: cls.NORMAL_MODE, True: cls.MAINTENANCE_MODE}[flag]

def __bool__(self) -> bool:
cls = self.__class__
return {cls.NORMAL_MODE: False, cls.MAINTENANCE_MODE: True}[self]
40 changes: 19 additions & 21 deletions antarest/core/maintenance/service.py
Original file line number Diff line number Diff line change
@@ -2,27 +2,18 @@
import shutil
import time
from threading import Thread
from typing import Optional, Callable
from typing import Callable, Optional

from fastapi import HTTPException

from antarest.core.config import Config
from antarest.core.configdata.model import ConfigDataAppKeys
from antarest.core.interfaces.cache import ICache
from antarest.core.interfaces.eventbus import (
IEventBus,
EventType,
Event,
)
from antarest.core.maintenance.model import (
MaintenanceMode,
)
from antarest.core.interfaces.eventbus import Event, EventType, IEventBus
from antarest.core.maintenance.model import MaintenanceMode
from antarest.core.maintenance.repository import MaintenanceRepository
from antarest.core.model import PermissionInfo, PublicMode
from antarest.core.requests import (
RequestParameters,
UserHasNotPermissionError,
)
from antarest.core.requests import RequestParameters, UserHasNotPermissionError

logger = logging.getLogger(__name__)

@@ -42,7 +33,11 @@ def __init__(
self._init()

def _init(self) -> None:
self.thread = Thread(target=self.check_disk_usage, daemon=True)
self.thread = Thread(
target=self.check_disk_usage,
name=self.__class__.__name__,
daemon=True,
)
self.thread.start()

def check_disk_usage(self) -> None:
@@ -51,7 +46,8 @@ def check_disk_usage(self) -> None:
try:
usage = shutil.disk_usage(workspace.path)
logger.info(
f"Disk usage for {name}: {(100 * usage.used / usage.total):.2f}% ({(usage.free / 1000000000):.3f}GB free)"
f"Disk usage for {name}: {(100 * usage.used / usage.total):.2f}%"
f" ({(usage.free / 1000000000):.3f}GB free)"
)
except Exception as e:
logger.error(
@@ -113,16 +109,19 @@ def _set_maintenance_data(
except Exception as e:
cache_save_error = f"Failed to put {cache_id} in cache"
logger.error(cache_save_error, exc_info=e)
raise HTTPException(status_code=500, detail=cache_save_error)
raise HTTPException(
status_code=500,
detail=cache_save_error,
) from e

def set_maintenance_status(
self,
data: bool,
request_params: RequestParameters,
) -> None:
maintenance_mode = MaintenanceMode.to_str(data)
maintenance_mode = MaintenanceMode.from_bool(data)
self._set_maintenance_data(
data=maintenance_mode,
data=maintenance_mode.value,
cache_id=ConfigDataAppKeys.MAINTENANCE_MODE.value,
db_call=lambda x: self.repo.save_maintenance_mode(x),
request_params=request_params,
@@ -141,15 +140,14 @@ def get_maintenance_status(self) -> bool:
db_call=lambda: self.repo.get_maintenance_mode(),
default_value=MaintenanceMode.NORMAL_MODE.value,
)
return data == MaintenanceMode.MAINTENANCE_MODE.value
return bool(MaintenanceMode(data))

def set_message_info(
self,
data: str,
request_params: RequestParameters,
) -> None:
message = "" if data.replace("\t", "").replace(" ", "") == "" else data
message = message.strip()
message = data.strip()
self._set_maintenance_data(
data=message,
cache_id=ConfigDataAppKeys.MESSAGE_INFO.value,
3 changes: 1 addition & 2 deletions antarest/core/maintenance/web.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
import logging
from typing import Any

from fastapi import APIRouter, Depends, Body
from fastapi import APIRouter, Body, Depends

from antarest.core.config import Config
from antarest.core.jwt import JWTUser
from antarest.core.maintenance.model import MaintenanceMode
from antarest.core.maintenance.service import MaintenanceService
from antarest.core.requests import RequestParameters
from antarest.login.auth import Auth
21 changes: 19 additions & 2 deletions antarest/core/model.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,12 @@
import enum
from typing import Any, Dict, List, Union, Optional
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union

from pydantic import BaseModel

if TYPE_CHECKING:
# These dependencies are only used for type checking with mypy.
from antarest.study.model import Study, StudyMetadataDTO

JSON = Dict[str, Any]
ELEMENT = Union[str, int, float, bool, bytes]
SUB_JSON = Union[ELEMENT, JSON, List, None]
@@ -29,5 +34,17 @@ class StudyPermissionType(str, enum.Enum):

class PermissionInfo(BaseModel):
owner: Optional[int] = None
groups: List[str] = list()
groups: List[str] = []
public_mode: PublicMode = PublicMode.NONE

@classmethod
def from_study(
cls, study: Union["Study", "StudyMetadataDTO"]
) -> "PermissionInfo":
return cls(
owner=None if study.owner is None else study.owner.id,
groups=[g.id for g in study.groups if g.id is not None],
public_mode=PublicMode.NONE
if study.public_mode is None
else PublicMode(study.public_mode),
)
8 changes: 5 additions & 3 deletions antarest/core/permissions.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import logging

from antarest.core.jwt import JWTUser
from antarest.core.model import PermissionInfo, StudyPermissionType, PublicMode
from antarest.core.model import PermissionInfo, PublicMode, StudyPermissionType
from antarest.core.roles import RoleType

logger = logging.getLogger(__name__)
@@ -70,8 +70,9 @@ def check_permission(
):
return True

allowed_roles = permission_matrix[permission]["roles"]
group_permission = any(
role in permission_matrix[permission]["roles"] # type: ignore
role in allowed_roles # type: ignore
for role in [
group.role
for group in (user.groups or [])
@@ -81,4 +82,5 @@ def check_permission(
if group_permission:
return True

return permission_info.public_mode in permission_matrix[permission]["public_modes"] # type: ignore
allowed_public_modes = permission_matrix[permission]["public_modes"]
return permission_info.public_mode in allowed_public_modes # type: ignore
82 changes: 47 additions & 35 deletions antarest/core/tasks/service.py
Original file line number Diff line number Diff line change
@@ -1,38 +1,36 @@
import asyncio
import datetime
import logging
import time
from abc import ABC, abstractmethod
from concurrent.futures import ThreadPoolExecutor, Future
from enum import Enum
from concurrent.futures import Future, ThreadPoolExecutor
from http import HTTPStatus
from typing import Callable, Optional, List, Dict, Awaitable, Union, cast
from typing import Awaitable, Callable, Dict, List, Optional, Union

from fastapi import HTTPException

from antarest.core.config import Config
from antarest.core.interfaces.eventbus import (
IEventBus,
Event,
EventType,
EventChannelDirectory,
EventType,
IEventBus,
)
from antarest.core.jwt import DEFAULT_ADMIN_USER
from antarest.core.model import PermissionInfo
from antarest.core.model import PermissionInfo, PublicMode
from antarest.core.requests import (
RequestParameters,
MustBeAuthenticatedError,
RequestParameters,
UserHasNotPermissionError,
)
from antarest.core.tasks.model import (
CustomTaskEventMessages,
TaskDTO,
TaskListFilter,
TaskEventPayload,
TaskJob,
TaskStatus,
TaskJobLog,
TaskListFilter,
TaskResult,
CustomTaskEventMessages,
TaskEventPayload,
TaskStatus,
TaskType,
)
from antarest.core.tasks.repository import TaskJobRepository
@@ -93,6 +91,7 @@ def await_task(
raise NotImplementedError()


# noinspection PyUnusedLocal
def noop_notifier(message: str) -> None:
pass

@@ -139,7 +138,8 @@ async def _await_task_end(event: Event) -> None:

return _await_task_end

def _send_worker_task(logger: TaskUpdateNotifier) -> TaskResult:
# todo: Is `logger_` parameter required? (consider refactoring)
def _send_worker_task(logger_: TaskUpdateNotifier) -> TaskResult:
listener_id = self.event_bus.add_listener(
_create_awaiter(task_result_wrapper),
[EventType.WORKER_TASK_ENDED],
@@ -152,6 +152,8 @@ def _send_worker_task(logger: TaskUpdateNotifier) -> TaskResult:
task_type=task_type,
task_args=task_args,
),
# Use `NONE` for internal events
permissions=PermissionInfo(public_mode=PublicMode.NONE),
),
task_type,
)
@@ -163,10 +165,7 @@ def _send_worker_task(logger: TaskUpdateNotifier) -> TaskResult:
return _send_worker_task

def check_remote_worker_for_queue(self, task_queue: str) -> bool:
for rw in self.remote_workers:
if task_queue in rw.queues:
return True
return False
return any(task_queue in rw.queues for rw in self.remote_workers)

def add_worker_task(
self,
@@ -279,7 +278,12 @@ def _cancel_task(self, task_id: str, dispatch: bool = False) -> None:
self.repo.save(task)
elif dispatch:
self.event_bus.push(
Event(type=EventType.TASK_CANCEL_REQUEST, payload=task_id)
Event(
type=EventType.TASK_CANCEL_REQUEST,
payload=task_id,
# Use `NONE` for internal events
permissions=PermissionInfo(public_mode=PublicMode.NONE),
)
)

def status_task(
@@ -290,14 +294,13 @@ def status_task(
) -> TaskDTO:
if not request_params.user:
raise MustBeAuthenticatedError()

task = self.repo.get(task_id)
if not task:
if task := self.repo.get(task_id):
return task.to_dto(with_logs)
else:
raise HTTPException(
status_code=HTTPStatus.NOT_FOUND,
detail=f"Failed to retrieve task {task_id} in db",
)
return task.to_dto(with_logs)

def list_tasks(
self, task_filter: TaskListFilter, request_params: RequestParameters
@@ -312,12 +315,12 @@ def list_db_tasks(
) -> List[TaskJob]:
if not request_params.user:
raise MustBeAuthenticatedError()
return self.repo.list(
task_filter,
request_params.user.impersonator
if not request_params.user.is_site_admin()
else None,
user = (
None
if request_params.user.is_site_admin()
else request_params.user.impersonator
)
return self.repo.list(task_filter, user)

def await_task(
self, task_id: str, timeout_sec: Optional[int] = None
@@ -357,6 +360,7 @@ def _run_task(
if custom_event_messages is not None
else f"Task {task_id} is running",
).dict(),
permissions=PermissionInfo(public_mode=PublicMode.READ),
channel=EventChannelDirectory.TASK + task_id,
)
)
@@ -392,24 +396,32 @@ def _run_task(
if custom_event_messages is not None
else f'Task {task_id} {"completed" if result.success else "failed"}',
).dict(),
permissions=PermissionInfo(public_mode=PublicMode.READ),
channel=EventChannelDirectory.TASK + task_id,
)
)
except Exception as e:
logger.error(f"Exception when running task {task_id}", exc_info=e)
except Exception as exc:
err_msg = f"Task {task_id} failed: Unhandled exception {exc}"
logger.error(err_msg, exc_info=exc)
with db():
self._update_task_status(
task_id, TaskStatus.FAILED, False, repr(e)
task_id,
TaskStatus.FAILED,
False,
f"{err_msg}\nSee the logs for detailed information and the error traceback.",
)
message = (
err_msg
if custom_event_messages is None
else custom_event_messages.end
)
self.event_bus.push(
Event(
type=EventType.TASK_FAILED,
payload=TaskEventPayload(
id=task_id,
message=custom_event_messages.end
if custom_event_messages is not None
else f"Task {task_id} failed",
id=task_id, message=message
).dict(),
permissions=PermissionInfo(public_mode=PublicMode.READ),
channel=EventChannelDirectory.TASK + task_id,
)
)
@@ -453,5 +465,5 @@ def _update_task_status(
task.result_status = result
task.result = command_result
if status.is_final():
task.completion_date = datetime.datetime.utcnow()
task.completion_date = datetime.datetime.now(datetime.timezone.utc)
self.repo.save(task)
1 change: 0 additions & 1 deletion antarest/eventbus/business/redis_eventbus.py
Original file line number Diff line number Diff line change
@@ -6,7 +6,6 @@
from redis.client import Redis

from antarest.core.interfaces.eventbus import Event
from antarest.core.model import PermissionInfo
from antarest.eventbus.business.interfaces import IEventBusBackend

logger = logging.getLogger(__name__)
3 changes: 1 addition & 2 deletions antarest/eventbus/main.py
Original file line number Diff line number Diff line change
@@ -4,7 +4,6 @@
from redis import Redis

from antarest.core.config import Config
from antarest.core.interfaces.eventbus import IEventBus
from antarest.eventbus.business.local_eventbus import LocalEventBus
from antarest.eventbus.business.redis_eventbus import RedisEventBus
from antarest.eventbus.service import EventBusService
@@ -16,7 +15,7 @@ def build_eventbus(
config: Config,
autostart: bool = True,
redis_client: Optional[Redis] = None, # type: ignore
) -> IEventBus:
) -> EventBusService:
eventbus = EventBusService(
RedisEventBus(redis_client)
if redis_client is not None
20 changes: 11 additions & 9 deletions antarest/eventbus/service.py
Original file line number Diff line number Diff line change
@@ -3,11 +3,10 @@
import random
import threading
import time
from typing import List, Callable, Optional, Dict, Awaitable, Any, cast
from uuid import uuid4
from typing import Awaitable, Callable, Dict, List, Optional
import uuid

from antarest.core.interfaces.eventbus import Event, IEventBus, EventType
from antarest.core.utils.utils import suppress_exception
from antarest.core.interfaces.eventbus import Event, EventType, IEventBus
from antarest.eventbus.business.interfaces import IEventBusBackend

logger = logging.getLogger(__name__)
@@ -39,7 +38,7 @@ def add_queue_consumer(
self, listener: Callable[[Event], Awaitable[None]], queue: str
) -> str:
with self.lock:
listener_id = str(uuid4())
listener_id = str(uuid.uuid4())
if queue not in self.consumers:
self.consumers[queue] = {}
self.consumers[queue][listener_id] = listener
@@ -57,7 +56,7 @@ def add_listener(
type_filter: Optional[List[EventType]] = None,
) -> str:
with self.lock:
listener_id = str(uuid4())
listener_id = str(uuid.uuid4())
types = type_filter or [EventType.ANY]
for listener_type in types:
self.listeners[listener_type][listener_id] = listener
@@ -76,7 +75,7 @@ async def _run_loop(self) -> None:
await self._on_events()
except Exception as e:
logger.error(
f"Unexpected error when processing events", exc_info=e
"Unexpected error when processing events", exc_info=e
)

async def _on_events(self) -> None:
@@ -125,8 +124,11 @@ def _async_loop(self, new_loop: bool = True) -> None:

def start(self, threaded: bool = True) -> None:
if threaded:
t = threading.Thread(target=self._async_loop)
t.setDaemon(True)
t = threading.Thread(
target=self._async_loop,
name=self.__class__.__name__,
daemon=True,
)
logger.info("Starting event bus")
t.start()
else:
38 changes: 16 additions & 22 deletions antarest/eventbus/web.py
Original file line number Diff line number Diff line change
@@ -3,19 +3,18 @@
import logging
from enum import Enum
from http import HTTPStatus
from typing import List, Dict, Optional, Tuple

from fastapi import FastAPI, Query, HTTPException, Depends
from fastapi_jwt_auth import AuthJWT # type: ignore
from pydantic import BaseModel
from starlette.websockets import WebSocket, WebSocketDisconnect
from typing import List, Optional

from antarest.core.config import Config
from antarest.core.interfaces.eventbus import IEventBus, Event
from antarest.core.jwt import JWTUser, DEFAULT_ADMIN_USER
from antarest.core.model import PermissionInfo, StudyPermissionType, PublicMode
from antarest.core.interfaces.eventbus import Event, IEventBus
from antarest.core.jwt import DEFAULT_ADMIN_USER, JWTUser
from antarest.core.model import PermissionInfo, StudyPermissionType
from antarest.core.permissions import check_permission
from antarest.login.auth import Auth
from fastapi import Depends, FastAPI, HTTPException, Query
from fastapi_jwt_auth import AuthJWT # type: ignore
from pydantic import BaseModel
from starlette.websockets import WebSocket, WebSocketDisconnect

logger = logging.getLogger(__name__)

@@ -63,9 +62,7 @@ def disconnect(self, websocket: WebSocket) -> None:
if connection_to_remove is not None:
self.active_connections.remove(connection_to_remove)

def process_message(
self, message: str, websocket: WebSocket, user: JWTUser
) -> None:
def process_message(self, message: str, websocket: WebSocket) -> None:
connection = self._get_connection(websocket)
if not connection:
return
@@ -79,17 +76,16 @@ def process_message(
connection.channel_subscriptions.remove(ws_message.payload)

async def broadcast(
self, message: str, permissions: PermissionInfo, channel: Optional[str]
self, message: str, permissions: PermissionInfo, channel: str
) -> None:
for connection in self.active_connections:
if channel is not None or check_permission(
# if is subscribed to chanel and has permission, send message to websocket
if (
not channel or channel in connection.channel_subscriptions
) and check_permission(
connection.user, permissions, StudyPermissionType.READ
):
if (
channel is None
or channel in connection.channel_subscriptions
):
await connection.websocket.send_text(message)
await connection.websocket.send_text(message)


def configure_websockets(
@@ -130,9 +126,7 @@ async def connect(
while True:
message = await websocket.receive_text()
try:
manager.process_message(
message, websocket, user or DEFAULT_ADMIN_USER
)
manager.process_message(message, websocket)
except Exception as e:
logger.error(
f"Failed to process websocket message {message}",
29 changes: 25 additions & 4 deletions antarest/launcher/adapters/abstractlauncher.py
Original file line number Diff line number Diff line change
@@ -1,23 +1,40 @@
import logging
import os
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Callable, NamedTuple, Optional, Dict, List
from typing import Callable, Dict, List, NamedTuple, Optional

from antarest.core.config import Config
from antarest.core.interfaces.cache import ICache
from antarest.core.interfaces.eventbus import (
Event,
EventType,
EventChannelDirectory,
EventType,
IEventBus,
)
from antarest.core.model import PermissionInfo, PublicMode
from antarest.core.requests import RequestParameters
from antarest.launcher.adapters.log_parser import LaunchProgressDTO, LogParser
from antarest.launcher.model import JobStatus, LauncherParametersDTO, LogType


class LauncherInitException(Exception):
pass
"""
Exception raised during local or SLURM launcher initialisation
when a required parameter is not set in the application configuration.
In Docker environment, the configuration path is `/resources/application.yaml`.
"""

def __init__(self, reason: str) -> None:
from antarest.core.utils import utils

if config_path := (
os.getenv("ANTAREST_CONF") or utils.get_default_config_path()
):
msg = f"Invalid configuration '{config_path}': {reason}"
else:
msg = f"Invalid configuration: {reason}"
super().__init__(msg)


class LauncherCallbacks(NamedTuple):
@@ -74,6 +91,7 @@ def update_log(log_line: str) -> None:
"log": log_line,
"job_id": job_id,
},
permissions=PermissionInfo(public_mode=PublicMode.READ),
channel=EventChannelDirectory.JOB_LOGS + job_id,
)
)
@@ -99,6 +117,9 @@ def update_log(log_line: str) -> None:
"progress": launch_progress_dto.progress,
"message": "",
},
permissions=PermissionInfo(
public_mode=PublicMode.READ
),
channel=EventChannelDirectory.JOB_STATUS + job_id,
)
)
48 changes: 23 additions & 25 deletions antarest/launcher/adapters/local_launcher/local_launcher.py
Original file line number Diff line number Diff line change
@@ -6,7 +6,7 @@
import threading
import time
from pathlib import Path
from typing import Dict, Optional, Tuple, Callable, cast, IO
from typing import IO, Callable, Dict, Optional, Tuple, cast
from uuid import UUID

from antarest.core.config import Config
@@ -15,8 +15,8 @@
from antarest.core.requests import RequestParameters
from antarest.launcher.adapters.abstractlauncher import (
AbstractLauncher,
LauncherInitException,
LauncherCallbacks,
LauncherInitException,
)
from antarest.launcher.adapters.log_manager import LogTailManager
from antarest.launcher.model import JobStatus, LauncherParametersDTO, LogType
@@ -37,26 +37,28 @@ def __init__(
cache: ICache,
) -> None:
super().__init__(config, callbacks, event_bus, cache)
if self.config.launcher.local is None:
raise LauncherInitException("Missing parameter 'launcher.local'")
self.tmpdir = config.storage.tmp_dir
self.job_id_to_study_id: Dict[ # type: ignore
str, Tuple[str, Path, subprocess.Popen]
] = {}
self.logs: Dict[str, str] = {}

def _select_best_binary(self, version: str) -> Path:
if self.config.launcher.local is None:
raise LauncherInitException()

if version in self.config.launcher.local.binaries:
antares_solver_path = self.config.launcher.local.binaries[version]
local = self.config.launcher.local
if local is None:
raise LauncherInitException("Missing parameter 'launcher.local'")
elif version in local.binaries:
antares_solver_path = local.binaries[version]
else:
# sourcery skip: extract-method, max-min-default
# fixme: `version` must remain a string, consider using a `Version` class
version_int = int(version)
keys = list(map(int, self.config.launcher.local.binaries.keys()))
keys = list(map(int, local.binaries.keys()))
keys_sup = [k for k in keys if k > version_int]
best_existing_version = min(keys_sup) if keys_sup else max(keys)
antares_solver_path = self.config.launcher.local.binaries[
str(best_existing_version)
]
antares_solver_path = local.binaries[str(best_existing_version)]
logger.warning(
f"Version {version} is not available. Version {best_existing_version} has been selected instead"
)
@@ -70,9 +72,6 @@ def run_study(
launcher_parameters: LauncherParametersDTO,
params: RequestParameters,
) -> None:
if self.config.launcher.local is None:
raise LauncherInitException()

antares_solver_path = self._select_best_binary(version)

job = threading.Thread(
@@ -84,6 +83,7 @@ def run_study(
job_id,
launcher_parameters,
),
name=f"{self.__class__.__name__}-JobRunner",
)
job.start()

@@ -144,23 +144,21 @@ def stop_reading_output() -> bool:
stop_reading_output,
None,
),
name=f"{self.__class__.__name__}-LogsWatcher",
daemon=True,
)
thread.start()

while True:
if process.poll() is not None:
break
while process.poll() is None:
time.sleep(1)

if launcher_parameters is not None:
if (
launcher_parameters.post_processing
or launcher_parameters.adequacy_patch is not None
):
subprocess.run(
["Rscript", "post-processing.R"], cwd=export_path
)
if launcher_parameters is not None and (
launcher_parameters.post_processing
or launcher_parameters.adequacy_patch is not None
):
subprocess.run(
["Rscript", "post-processing.R"], cwd=export_path
)

output_id: Optional[str] = None
try:
1 change: 1 addition & 0 deletions antarest/launcher/adapters/log_manager.py
Original file line number Diff line number Diff line change
@@ -32,6 +32,7 @@ def track(
target=lambda: self._follow(
log_path, handler, self._stop_tracking(str(log_path))
),
name=f"{self.__class__.__name__}-LogsWatcher",
daemon=True,
)
self.tracked_logs[str(log_path)] = thread
394 changes: 223 additions & 171 deletions antarest/launcher/adapters/slurm_launcher/slurm_launcher.py

Large diffs are not rendered by default.

38 changes: 18 additions & 20 deletions antarest/launcher/service.py
Original file line number Diff line number Diff line change
@@ -5,7 +5,7 @@
from functools import reduce
from http import HTTPStatus
from pathlib import Path
from typing import List, Optional, cast, Dict
from typing import Dict, List, Optional, cast
from uuid import UUID, uuid4

from fastapi import HTTPException
@@ -16,29 +16,24 @@
from antarest.core.filetransfer.service import FileTransferManager
from antarest.core.interfaces.cache import ICache
from antarest.core.interfaces.eventbus import (
IEventBus,
Event,
EventType,
EventChannelDirectory,
EventType,
IEventBus,
)
from antarest.core.jwt import JWTUser, DEFAULT_ADMIN_USER
from antarest.core.model import (
StudyPermissionType,
)
from antarest.core.requests import (
RequestParameters,
UserHasNotPermissionError,
)
from antarest.core.jwt import DEFAULT_ADMIN_USER, JWTUser
from antarest.core.model import PermissionInfo, PublicMode, StudyPermissionType
from antarest.core.requests import RequestParameters, UserHasNotPermissionError
from antarest.core.tasks.model import TaskResult, TaskType
from antarest.core.tasks.service import TaskUpdateNotifier, ITaskService
from antarest.core.tasks.service import ITaskService, TaskUpdateNotifier
from antarest.core.utils.fastapi_sqlalchemy import db
from antarest.core.utils.utils import (
concat_files,
zip_dir,
StopWatch,
concat_files,
concat_files_to_str,
is_zip,
read_in_zip,
concat_files_to_str,
zip_dir,
)
from antarest.launcher.adapters.abstractlauncher import LauncherCallbacks
from antarest.launcher.adapters.factory_launcher import FactoryLauncher
@@ -47,19 +42,18 @@
)
from antarest.launcher.extensions.interface import ILauncherExtension
from antarest.launcher.model import (
JobResult,
JobStatus,
JobLog,
JobLogType,
JobResult,
JobStatus,
LauncherParametersDTO,
XpansionParametersDTO,
LogType,
XpansionParametersDTO,
)
from antarest.launcher.repository import JobResultRepository
from antarest.study.service import StudyService
from antarest.study.storage.utils import (
assert_permission,
create_permission_from_study,
extract_output_name,
find_single_output_path,
)
@@ -200,6 +194,9 @@ def update(
if final_status
else EventType.STUDY_JOB_STATUS_UPDATE,
payload=job_result.to_dto().dict(),
permissions=PermissionInfo(
public_mode=PublicMode.READ
),
channel=EventChannelDirectory.JOB_STATUS
+ job_result.id,
)
@@ -281,7 +278,7 @@ def run_study(
Event(
type=EventType.STUDY_JOB_STARTED,
payload=job_status.to_dto().dict(),
permissions=create_permission_from_study(study_info),
permissions=PermissionInfo.from_study(study_info),
)
)
return job_uuid
@@ -314,6 +311,7 @@ def kill_job(self, job_id: str, params: RequestParameters) -> JobResult:
Event(
type=EventType.STUDY_JOB_CANCELLED,
payload=job_status.to_dto().dict(),
permissions=PermissionInfo.from_study(study),
channel=EventChannelDirectory.JOB_STATUS + job_result.id,
)
)
6 changes: 6 additions & 0 deletions antarest/study/business/area_management.py
Original file line number Diff line number Diff line change
@@ -168,6 +168,12 @@ def get_layers(self, study: RawStudy) -> List[LayerInfoDTO]:
if len(file_study.config.areas)
else {}
)

# if there is only 1 area, the area_ui object is not a dict keyed by area_id
area_list = list(file_study.config.areas.keys())
if len(area_list) == 1:
areas_ui = {area_list[0]: areas_ui}

if len(layers) == 0:
layers["0"] = "All"
layers_with_items = [
23 changes: 15 additions & 8 deletions antarest/study/business/binding_constraint_management.py
Original file line number Diff line number Diff line change
@@ -133,6 +133,20 @@ def process_constraint(
continue
return new_config

@staticmethod
def constraints_to_coeffs(
constraint: BindingConstraintDTO,
) -> Dict[str, List[float]]:
coeffs: Dict[str, List[float]] = {}
if constraint.constraints is not None:
for term in constraint.constraints:
if term.id is not None and term.weight is not None:
coeffs[term.id] = [term.weight]
if term.offset is not None:
coeffs[term.id].append(term.offset)

return coeffs

def get_binding_constraint(
self, study: Study, constraint_id: Optional[str]
) -> Union[BindingConstraintDTO, List[BindingConstraintDTO], None]:
@@ -173,13 +187,6 @@ def update_binding_constraint(
if not isinstance(constraint, BindingConstraintDTO):
raise NoBindingConstraintError(study.id)

coeffs = {}
if constraint.constraints is not None:
for term in constraint.constraints:
coeffs[term.id] = [term.weight]
if term.offset is not None:
coeffs[term.id].append(term.offset)

command = UpdateBindingConstraint(
id=constraint.id,
enabled=data.value
@@ -191,7 +198,7 @@ def update_binding_constraint(
operator=data.value
if data.key == "operator"
else constraint.operator,
coeffs=coeffs,
coeffs=BindingConstraintManager.constraints_to_coeffs(constraint),
values=constraint.values,
filter_year_by_year=data.value
if data.key == "filterByYear"
6 changes: 1 addition & 5 deletions antarest/study/business/hydro_management.py
Original file line number Diff line number Diff line change
@@ -5,6 +5,7 @@
from antarest.study.business.utils import (
FormFieldsBaseModel,
execute_or_add_commands,
FieldInfo,
)
from antarest.study.model import Study
from antarest.study.storage.storage_service import StudyStorageService
@@ -30,11 +31,6 @@ class ManagementOptionsFormFields(FormFieldsBaseModel):
pumping_efficiency: Optional[Union[StrictFloat, StrictInt]]


class FieldInfo(TypedDict, total=False):
path: str
default_value: Any


HYDRO_PATH = "input/hydro/hydro"

FIELDS_INFO: Dict[str, FieldInfo] = {
48 changes: 45 additions & 3 deletions antarest/study/business/optimization_management.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from enum import Enum
from typing import Optional, Union, Literal, List, Any, Dict, TypedDict
from typing import Optional, Union, List, Any, Dict

from pydantic.types import StrictBool
from pydantic.types import StrictBool, StrictFloat, StrictInt

from antarest.study.business.utils import (
FormFieldsBaseModel,
@@ -45,6 +45,11 @@ class SimplexOptimizationRange(str, Enum):
WEEK = "week"


class PriceTakingOrder(str, Enum):
DENS = "DENS"
LOAD = "Load"


class OptimizationFormFields(FormFieldsBaseModel):
binding_constraints: Optional[StrictBool]
hurdle_costs: Optional[StrictBool]
@@ -71,6 +76,13 @@ class OptimizationFormFields(FormFieldsBaseModel):
StrictBool
]
ntc_between_physical_areas_out_adequacy_patch: Optional[StrictBool]
# version 850
price_taking_order: Optional[PriceTakingOrder]
include_hurdle_cost_csr: Optional[StrictBool]
check_csr_cost_function: Optional[StrictBool]
threshold_initiate_curtailment_sharing_rule: Optional[StrictFloat]
threshold_display_local_matching_rule_violations: Optional[StrictFloat]
threshold_csr_variable_bounds_relaxation: Optional[StrictInt]


OPTIMIZATION_PATH = f"{GENERAL_DATA_PATH}/optimization"
@@ -151,6 +163,36 @@ class OptimizationFormFields(FormFieldsBaseModel):
"default_value": True,
"start_version": 830,
},
"price_taking_order": {
"path": f"{ADEQUACY_PATCH_PATH}/price-taking-order",
"default_value": "DENS",
"start_version": 850,
},
"include_hurdle_cost_csr": {
"path": f"{ADEQUACY_PATCH_PATH}/include-hurdle-cost-csr",
"default_value": False,
"start_version": 850,
},
"check_csr_cost_function": {
"path": f"{ADEQUACY_PATCH_PATH}/check-csr-cost-function",
"default_value": False,
"start_version": 850,
},
"threshold_initiate_curtailment_sharing_rule": {
"path": f"{ADEQUACY_PATCH_PATH}/threshold-initiate-curtailment-sharing-rule",
"default_value": 0.0,
"start_version": 850,
},
"threshold_display_local_matching_rule_violations": {
"path": f"{ADEQUACY_PATCH_PATH}/threshold-display-local-matching-rule-violations",
"default_value": 0.0,
"start_version": 850,
},
"threshold_csr_variable_bounds_relaxation": {
"path": f"{ADEQUACY_PATCH_PATH}/threshold-csr-variable-bounds-relaxation",
"default_value": 3,
"start_version": 850,
},
}


@@ -205,7 +247,7 @@ def set_field_values(
)
)

if len(commands) > 0:
if commands:
file_study = self.storage_service.get_storage(study).get_raw(study)
execute_or_add_commands(
study, file_study, commands, self.storage_service
115 changes: 115 additions & 0 deletions antarest/study/business/renewable_management.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
from enum import Enum
from pathlib import Path, PurePosixPath
from typing import Union, Optional, Dict, TypedDict, Any, List

from pydantic import StrictFloat, StrictInt, StrictStr, StrictBool

from antarest.study.business.utils import (
execute_or_add_commands,
FormFieldsBaseModel,
FieldInfo,
)
from antarest.study.model import Study
from antarest.study.storage.storage_service import StudyStorageService
from antarest.study.storage.variantstudy.model.command.update_config import (
UpdateConfig,
)


class TimeSeriesInterpretation(str, Enum):
    """Allowed values of a renewable cluster's `ts-interpretation` INI key."""

    POWER_GENERATION = "power-generation"
    PRODUCTION_FACTOR = "production-factor"


RENEWABLE_PATH = "input/renewables/clusters/{area}/list/{cluster}"


class RenewableFormFields(FormFieldsBaseModel):
    """
    Editable properties of a renewable cluster.

    All fields are optional: only the fields provided by the caller are
    written back to the study (see `RenewableManager.set_field_values`).
    """

    group: Optional[StrictStr]
    name: Optional[StrictStr]
    ts_interpretation: Optional[TimeSeriesInterpretation]
    unit_count: Optional[StrictInt]
    enabled: Optional[StrictBool]
    # NOTE(review): StrictInt rejects fractional capacities — confirm the
    # INI key `nominalcapacity` is really integer-valued.
    nominal_capacity: Optional[StrictInt]


# Maps each form field to its study-tree location (`path`, containing
# `{area}`/`{cluster}` placeholders resolved by `format_path`) and the
# value returned when the corresponding INI key is absent (`default_value`).
FIELDS_INFO: Dict[str, FieldInfo] = {
    "group": {
        "path": f"{RENEWABLE_PATH}/group",
        "default_value": "",
    },
    "name": {
        "path": f"{RENEWABLE_PATH}/name",
        "default_value": "",
    },
    "ts_interpretation": {
        "path": f"{RENEWABLE_PATH}/ts-interpretation",
        "default_value": TimeSeriesInterpretation.POWER_GENERATION.value,
    },
    "unit_count": {
        "path": f"{RENEWABLE_PATH}/unitcount",
        "default_value": 0,
    },
    "enabled": {
        "path": f"{RENEWABLE_PATH}/enabled",
        "default_value": True,
    },
    "nominal_capacity": {
        "path": f"{RENEWABLE_PATH}/nominalcapacity",
        "default_value": 0,
    },
}


def format_path(path: str, area_id: str, cluster_id: str) -> str:
    """Fill the `{area}` and `{cluster}` placeholders of a study-tree path template."""
    placeholders = {"area": area_id, "cluster": cluster_id}
    return path.format(**placeholders)


class RenewableManager:
    """Read and update the renewable-cluster form fields of a study."""

    def __init__(self, storage_service: StudyStorageService):
        self.storage_service = storage_service

    def get_field_values(
        self, study: Study, area_id: str, cluster_id: str
    ) -> RenewableFormFields:
        """Return the cluster's properties, falling back to each field's default value."""
        file_study = self.storage_service.get_storage(study).get_raw(study)
        cluster_data = file_study.tree.get(
            format_path(RENEWABLE_PATH, area_id, cluster_id).split("/")
        )

        def lookup(info: FieldInfo) -> Any:
            # The INI key is the last component of the field's path.
            key = PurePosixPath(info["path"]).name
            return cluster_data.get(key, info["default_value"])

        field_values = {
            name: lookup(info) for name, info in FIELDS_INFO.items()
        }
        # `construct` bypasses validation: values come straight from the study tree.
        return RenewableFormFields.construct(**field_values)

    def set_field_values(
        self,
        study: Study,
        area_id: str,
        cluster_id: str,
        field_values: RenewableFormFields,
    ) -> None:
        """Write every non-None field back to the study as `UpdateConfig` commands."""
        context = (
            self.storage_service.variant_study_service.command_factory.command_context
        )
        commands = [
            UpdateConfig(
                target=format_path(
                    FIELDS_INFO[name]["path"], area_id, cluster_id
                ),
                data=value,
                command_context=context,
            )
            for name, value in field_values.__iter__()
            if value is not None
        ]

        if commands:
            file_study = self.storage_service.get_storage(study).get_raw(study)
            execute_or_add_commands(
                study, file_study, commands, self.storage_service
            )
417 changes: 277 additions & 140 deletions antarest/study/business/table_mode_management.py

Large diffs are not rendered by default.

194 changes: 194 additions & 0 deletions antarest/study/business/thermal_management.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,194 @@
from enum import Enum
from pathlib import PurePosixPath
from typing import Optional, Dict, Any, List

from pydantic import StrictStr, StrictBool

from antarest.study.business.utils import (
FormFieldsBaseModel,
FieldInfo,
execute_or_add_commands,
)
from antarest.study.model import Study
from antarest.study.storage.storage_service import StudyStorageService
from antarest.study.storage.variantstudy.model.command.update_config import (
UpdateConfig,
)


class TimeSeriesGenerationOption(str, Enum):
    """Allowed values of a thermal cluster's `gen-ts` INI key."""

    USE_GLOBAL_PARAMETER = "use global parameter"
    FORCE_NO_GENERATION = "force no generation"
    FORCE_GENERATION = "force generation"


class LawOption(str, Enum):
    """Allowed values of the `law.forced` and `law.planned` INI keys."""

    UNIFORM = "uniform"
    GEOMETRIC = "geometric"


THERMAL_PATH = "input/thermal/clusters/{area}/list/{cluster}"


class ThermalFormFields(FormFieldsBaseModel):
    """
    Editable properties of a thermal cluster.

    All fields are optional: only the fields provided by the caller are
    written back to the study (see `ThermalManager.set_field_values`).
    """

    group: Optional[StrictStr]
    name: Optional[StrictStr]
    # NOTE(review): plain `int` (unlike StrictInt used in the renewable
    # form) silently coerces "3"/3.0, and several of these fields (costs,
    # spinning, co2) look float-valued — confirm the intended types.
    unit_count: Optional[int]
    enabled: Optional[StrictBool]
    nominal_capacity: Optional[int]
    gen_ts: Optional[TimeSeriesGenerationOption]
    min_stable_power: Optional[int]
    min_up_time: Optional[int]
    min_down_time: Optional[int]
    must_run: Optional[StrictBool]
    spinning: Optional[int]
    co2: Optional[int]
    volatility_forced: Optional[int]
    volatility_planned: Optional[int]
    law_forced: Optional[LawOption]
    law_planned: Optional[LawOption]
    marginal_cost: Optional[int]
    spread_cost: Optional[int]
    fixed_cost: Optional[int]
    startup_cost: Optional[int]
    market_bid_cost: Optional[int]


# Maps each form field to its study-tree location (`path`, containing
# `{area}`/`{cluster}` placeholders resolved by `format_path`) and the
# value returned when the corresponding INI key is absent (`default_value`).
FIELDS_INFO: Dict[str, FieldInfo] = {
    "group": {
        "path": f"{THERMAL_PATH}/group",
        "default_value": "",
    },
    "name": {
        "path": f"{THERMAL_PATH}/name",
        "default_value": "",
    },
    "unit_count": {
        "path": f"{THERMAL_PATH}/unitcount",
        "default_value": 0,
    },
    "enabled": {
        "path": f"{THERMAL_PATH}/enabled",
        "default_value": True,
    },
    "nominal_capacity": {
        "path": f"{THERMAL_PATH}/nominalcapacity",
        "default_value": 0,
    },
    "gen_ts": {
        "path": f"{THERMAL_PATH}/gen-ts",
        "default_value": TimeSeriesGenerationOption.USE_GLOBAL_PARAMETER.value,
    },
    "min_stable_power": {
        "path": f"{THERMAL_PATH}/min-stable-power",
        "default_value": 0,
    },
    "min_up_time": {
        "path": f"{THERMAL_PATH}/min-up-time",
        "default_value": 1,
    },
    "min_down_time": {
        "path": f"{THERMAL_PATH}/min-down-time",
        "default_value": 1,
    },
    "must_run": {
        "path": f"{THERMAL_PATH}/must-run",
        "default_value": False,
    },
    "spinning": {
        "path": f"{THERMAL_PATH}/spinning",
        "default_value": 0,
    },
    "co2": {
        "path": f"{THERMAL_PATH}/co2",
        "default_value": 0,
    },
    "volatility_forced": {
        "path": f"{THERMAL_PATH}/volatility.forced",
        "default_value": 0,
    },
    "volatility_planned": {
        "path": f"{THERMAL_PATH}/volatility.planned",
        "default_value": 0,
    },
    "law_forced": {
        "path": f"{THERMAL_PATH}/law.forced",
        "default_value": LawOption.UNIFORM.value,
    },
    "law_planned": {
        "path": f"{THERMAL_PATH}/law.planned",
        "default_value": LawOption.UNIFORM.value,
    },
    "marginal_cost": {
        "path": f"{THERMAL_PATH}/marginal-cost",
        "default_value": 0,
    },
    "spread_cost": {
        "path": f"{THERMAL_PATH}/spread-cost",
        "default_value": 0,
    },
    "fixed_cost": {
        "path": f"{THERMAL_PATH}/fixed-cost",
        "default_value": 0,
    },
    "startup_cost": {
        "path": f"{THERMAL_PATH}/startup-cost",
        "default_value": 0,
    },
    "market_bid_cost": {
        "path": f"{THERMAL_PATH}/market-bid-cost",
        "default_value": 0,
    },
}


def format_path(path: str, area_id: str, cluster_id: str) -> str:
    """Resolve a study-tree path template against concrete area/cluster ids."""
    return path.format(**{"area": area_id, "cluster": cluster_id})


class ThermalManager:
    """Read and update the thermal-cluster form fields of a study."""

    def __init__(self, storage_service: StudyStorageService):
        self.storage_service = storage_service

    def get_field_values(
        self, study: Study, area_id: str, cluster_id: str
    ) -> ThermalFormFields:
        """Return the cluster's properties, falling back to each field's default value."""
        file_study = self.storage_service.get_storage(study).get_raw(study)
        cluster_data = file_study.tree.get(
            format_path(THERMAL_PATH, area_id, cluster_id).split("/")
        )

        def lookup(info: FieldInfo) -> Any:
            # The INI key is the last component of the field's path.
            key = PurePosixPath(info["path"]).name
            return cluster_data.get(key, info["default_value"])

        values = {name: lookup(info) for name, info in FIELDS_INFO.items()}
        # `construct` bypasses validation: values come straight from the study tree.
        return ThermalFormFields.construct(**values)

    def set_field_values(
        self,
        study: Study,
        area_id: str,
        cluster_id: str,
        field_values: ThermalFormFields,
    ) -> None:
        """Write every non-None field back to the study as `UpdateConfig` commands."""
        context = (
            self.storage_service.variant_study_service.command_factory.command_context
        )
        commands = [
            UpdateConfig(
                target=format_path(
                    FIELDS_INFO[name]["path"], area_id, cluster_id
                ),
                data=value,
                command_context=context,
            )
            for name, value in field_values.__iter__()
            if value is not None
        ]

        if commands:
            file_study = self.storage_service.get_storage(study).get_raw(study)
            execute_or_add_commands(
                study, file_study, commands, self.storage_service
            )
2 changes: 1 addition & 1 deletion antarest/study/business/xpansion_management.py
Original file line number Diff line number Diff line change
@@ -64,7 +64,7 @@ class MaxIteration(str, Enum):

class XpansionSensitivitySettingsDTO(BaseModel):
epsilon: float
projection: List[str]
projection: Optional[List[str]]
capex: bool = False


41 changes: 30 additions & 11 deletions antarest/study/model.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,26 @@
import enum
import uuid
from dataclasses import dataclass
from datetime import timedelta, datetime
from datetime import datetime, timedelta
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple

from pydantic import BaseModel
from sqlalchemy import Column, String, Integer, DateTime, Table, ForeignKey, Enum, Boolean # type: ignore
from sqlalchemy.orm import relationship # type: ignore

from antarest.core.exceptions import ShouldNotHappenException
from antarest.core.model import PublicMode
from antarest.core.persistence import Base
from antarest.login.model import Group, Identity, GroupDTO
from antarest.login.model import Group, GroupDTO, Identity
from pydantic import BaseModel
from sqlalchemy import ( # type: ignore
Boolean,
Column,
DateTime,
Enum,
ForeignKey,
Integer,
String,
Table,
)
from sqlalchemy.orm import relationship # type: ignore

DEFAULT_WORKSPACE_NAME = "default"

@@ -35,9 +43,10 @@
"820": "empty_study_820.zip",
"830": "empty_study_830.zip",
"840": "empty_study_840.zip",
"850": "empty_study_850.zip",
}

NEW_DEFAULT_STUDY_VERSION: str = "840"
NEW_DEFAULT_STUDY_VERSION: str = "850"


class StudyContentStatus(enum.Enum):
@@ -118,7 +127,17 @@ class Study(Base): # type: ignore
__mapper_args__ = {"polymorphic_identity": "study", "polymorphic_on": type}

def __str__(self) -> str:
return f"[Study] id={self.id}, type={self.type}, name={self.name}, version={self.version}, updated_at={self.updated_at}, last_access={self.last_access}, owner={self.owner}, groups={[str(u) + ',' for u in self.groups]}"
return (
f"[Study]"
f" id={self.id},"
f" type={self.type},"
f" name={self.name},"
f" version={self.version},"
f" updated_at={self.updated_at},"
f" last_access={self.last_access},"
f" owner={self.owner},"
f" groups={[str(u) + ',' for u in self.groups]}"
)

def __eq__(self, other: Any) -> bool:
if not isinstance(other, Study):
@@ -206,7 +225,7 @@ class PatchCluster(BaseModel):
class Config:
@classmethod
def alias_generator(cls, string: str) -> str:
return "-".join(word for word in string.split("_"))
return "-".join(string.split("_"))


class PatchOutputs(BaseModel):
@@ -349,13 +368,13 @@ class MatrixIndex(BaseModel):
class TimeSerie(BaseModel):
name: str
unit: str
data: List[Optional[float]] = list()
data: List[Optional[float]] = []


class TimeSeriesData(BaseModel):
type: StudyDownloadType
name: str
data: Dict[str, List[TimeSerie]] = dict()
data: Dict[str, List[TimeSerie]] = {}


class MatrixAggregationResultDTO(BaseModel):
6 changes: 6 additions & 0 deletions antarest/study/repository.py
Original file line number Diff line number Diff line change
@@ -45,9 +45,15 @@ def refresh(self, metadata: Study) -> None:
db.session.refresh(metadata)

def get(self, id: str) -> Optional[Study]:
"""Get the study by ID or return `None` if not found in database."""
metadata: Study = db.session.query(Study).get(id)
return metadata

def one(self, id: str) -> Study:
"""Get the study by ID or raise `sqlalchemy.exc.NoResultFound` if not found in database."""
study: Study = db.session.query(Study).filter_by(id=id).one()
return study

def get_list(self, study_id: List[str]) -> List[Study]:
studies: List[Study] = (
db.session.query(Study).where(Study.id.in_(study_id)).all()
315 changes: 184 additions & 131 deletions antarest/study/service.py

Large diffs are not rendered by default.

12 changes: 3 additions & 9 deletions antarest/study/storage/abstract_storage_service.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import logging
import os
import shutil
import tempfile
from abc import ABC
@@ -10,32 +9,28 @@
from antarest.core.config import Config
from antarest.core.exceptions import BadOutputError, StudyOutputNotFoundError
from antarest.core.interfaces.cache import CacheConstants, ICache
from antarest.core.model import JSON, PublicMode
from antarest.core.model import JSON
from antarest.core.utils.utils import (
extract_zip,
StopWatch,
assert_this,
zip_dir,
unzip,
)
from antarest.login.model import GroupDTO
from antarest.study.common.studystorage import IStudyStorageService, T
from antarest.study.common.utils import get_study_information
from antarest.study.model import (
StudyMetadataDTO,
StudySimResultDTO,
StudySimSettingsDTO,
PatchOutputs,
OwnerInfo,
DEFAULT_WORKSPACE_NAME,
PatchStudy,
StudyMetadataPatchDTO,
Patch,
StudyAdditionalData,
)
from antarest.study.storage.patch_service import PatchService
from antarest.study.storage.rawstudy.model.filesystem.config.files import (
ConfigPathBuilder,
get_playlist,
)
from antarest.study.storage.rawstudy.model.filesystem.config.model import (
Simulation,
@@ -171,8 +166,7 @@ def get_study_sim_result(
playlist=[
year
for year in (
ConfigPathBuilder.get_playlist(file_metadata)
or {}
get_playlist(file_metadata) or {}
).keys()
],
)
71 changes: 71 additions & 0 deletions antarest/study/storage/antares_configparser.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
import configparser
from typing import Any, Dict, Union, Optional


def _convert_value(value: Any) -> str:
"""
Convert a value to a string using the specific format of Antares INI files.
- strings are preserved,
- ``None`` is converted to an empty string,
- booleans are converted to "true"/"false" in lower case.
- numbers are converted to strings using the :class:`str` function.
"""
if value is None:
return ""
elif value is True or value is False:
return str(value).lower()
else:
return str(value)


class AntaresSectionProxy(configparser.SectionProxy):
    """
    A :class:`configparser.SectionProxy` that accepts non-string values
    (``None``, booleans, numbers) and stores them converted to the
    Antares INI string format.
    """

    def __repr__(self) -> str:
        # Debug-friendly representation: class name + section name.
        return f"<{type(self).__name__}: {self._name}>"

    def __setitem__(self, key: str, value: Any) -> None:
        """Store *value* under *key*, converted to its Antares INI string form."""
        super().__setitem__(key, _convert_value(value))


class AntaresConfigParser(configparser.RawConfigParser):
    """
    This class extends the :class:`configparser.RawConfigParser` class in order
    to store strings or other types of values by converting them according
    to the rules of Antares INI files.

    All section proxies are replaced by :class:`AntaresSectionProxy` so
    that item assignment also goes through the value conversion.
    """

    # Narrowed types of the attributes inherited from RawConfigParser.
    _proxies: Dict[str, AntaresSectionProxy]
    _sections: Dict[str, Optional[Union[bool, int, float, str]]]

    def __init__(self, *args, **kwargs) -> None:  # type: ignore
        super().__init__(*args, **kwargs)
        # Replace the default-section proxy created by the base class so
        # writes to the default section are converted as well.
        self._proxies[self.default_section] = AntaresSectionProxy(
            self, self.default_section
        )

    def add_section(self, section: str) -> None:
        # Expose newly added sections through AntaresSectionProxy.
        super().add_section(section)
        self._proxies[section] = AntaresSectionProxy(self, section)

    def set(self, section: str, option: str, value: Any = None) -> None:
        # Convert the value to its Antares INI string form before storing.
        super().set(section, option, _convert_value(value))

    def _read(self, fp, fpname):  # type: ignore
        # noinspection PyProtectedMember
        super()._read(fp, fpname)  # type: ignore
        # cast section proxies to AntaresSectionProxy
        # NOTE(review): `_sections` values are per-section option dicts, so
        # this isinstance check looks always False and the loop presumably
        # rebuilds every proxy unconditionally — confirm intent.
        proxies = self._proxies
        for name, proxy in self._sections.items():
            if not isinstance(proxy, AntaresSectionProxy):
                proxies[name] = AntaresSectionProxy(self, name)
9 changes: 4 additions & 5 deletions antarest/study/storage/patch_service.py
Original file line number Diff line number Diff line change
@@ -27,13 +27,12 @@ def get(
self, study: Union[RawStudy, VariantStudy], get_from_file: bool = False
) -> Patch:
if not get_from_file:
try:
return Patch.parse_raw(study.additional_data.patch)
except Exception as e:
logger.warning("Failed to parse patch data", exc_info=e)
# the `study.additional_data.patch` field is optional
if patch_data := study.additional_data.patch:
return Patch.parse_raw(patch_data)

patch = Patch()
patch_path = (Path(study.path)) / "patch.json"
patch_path = Path(study.path) / "patch.json"
if patch_path.exists():
patch = Patch.parse_file(patch_path)

81 changes: 37 additions & 44 deletions antarest/study/storage/rawstudy/io/reader/ini_reader.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
import configparser
import contextlib
import re
from abc import ABC, abstractmethod
from pathlib import Path
from typing import List, Optional, Union

from antarest.core.model import ELEMENT, JSON, SUB_JSON
from antarest.core.model import JSON, SUB_JSON


class IReader(ABC):
@@ -32,8 +33,7 @@ class IniReader(IReader):

@staticmethod
def _parse_bool(value: str) -> Optional[bool]:
value = value.lower()
return bool(value == "true") if value in ["true", "false"] else None
return {"true": True, "false": False}.get(value.lower())

@staticmethod
def _parse_int(value: str) -> Optional[int]:
@@ -50,15 +50,14 @@ def _parse_float(value: str) -> Optional[float]:
return None

@staticmethod
def parse_value(value: str) -> SUB_JSON:
parsed: Union[str, int, float, bool, None] = IniReader._parse_bool(
value
)
parsed = parsed if parsed is not None else IniReader._parse_int(value)
parsed = (
parsed if parsed is not None else IniReader._parse_float(value)
)
return parsed if parsed is not None else value
def parse_value(value: str) -> Union[bool, int, float, str]:
def strict_bool(v: str) -> bool:
return {"true": True, "false": False}[v.lower()]

for parser in [strict_bool, int, float]:
with contextlib.suppress(KeyError, ValueError):
return parser(value) # type: ignore
return value

@staticmethod
def _parse_json(json: configparser.SectionProxy) -> JSON:
@@ -89,6 +88,7 @@ def _parse_inf(value: str) -> Optional[str]:
except ValueError:
return None

# noinspection PyProtectedMember
@staticmethod
def parse_value(value: str) -> SUB_JSON:
parsed: Union[
@@ -109,9 +109,9 @@ def _parse_json(json: JSON) -> JSON:
}

def read(self, path: Path) -> JSON:
with open(path, "r") as f:
json = {}
for line in f.readlines():
json = {}
with open(path, "r") as fd:
for line in fd:
line = line.strip()
if line and not line.startswith("#"):
key, value = line.split("=")
@@ -121,6 +121,7 @@ def read(self, path: Path) -> JSON:


class IniConfigParser(configparser.RawConfigParser):
# noinspection SpellCheckingInspection
def optionxform(self, optionstr: str) -> str:
return optionstr

@@ -129,7 +130,7 @@ class MultipleSameKeysIniReader(IReader):
"""
Custom .ini reader for inputs/sets.ini file.
This file has format :
``` python
```python
[chap]
+ = areaA
+ = areaB
@@ -142,35 +143,27 @@ def __init__(self, special_keys: Optional[List[str]] = None) -> None:
self.special_keys = special_keys or []
super().__init__()

@staticmethod
def fetch_cleaned_lines(path: Path) -> List[str]:
return [l for l in path.read_text().split("\n") if l.strip() != ""]

def read(self, path: Path) -> JSON:
data: JSON = dict()
curr_part = ""
lines = MultipleSameKeysIniReader.fetch_cleaned_lines(path)

for l in lines:
line = l.strip()
regex = re.search("^\[(.*)\]$", line)
if regex:
curr_part = regex.group(1)
data[curr_part] = dict()
else:
elements = re.split("\s+=\s*", line)
key = elements[0]
value = None
if len(elements) == 2:
value = IniReader.parse_value(elements[1].strip())
if key not in data[curr_part]:
if key in self.special_keys:
data[curr_part][key] = [value]
data: JSON = {}
section = ""
with path.open(encoding="utf-8") as lines:
for line in lines:
line = line.strip()
if match := re.fullmatch(r"\[(.*)]", line):
section = match[1]
data[section] = {}
elif "=" in line:
key, arg = map(str.strip, line.split("=", 1))
value = IniReader.parse_value(arg)
group = data[section]
if key in group:
if isinstance(group[key], list):
group[key].append(value)
else:
group[key] = [group[key], value]
elif key in self.special_keys:
group[key] = [value]
else:
data[curr_part][key] = value
else:
if not isinstance(data[curr_part][key], list):
data[curr_part][key] = [data[curr_part][key]]
data[curr_part][key].append(value)
group[key] = value

return data
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
from pathlib import Path
from typing import cast


class BaseConfigError(Exception):
    """Base class of the configuration errors raised while parsing a study."""


class SimulationParsingError(BaseConfigError):
    """Raised when a simulation output file cannot be parsed."""

    def __init__(self, output_path: Path, reason: str):
        # Keep both values in `args` so the exception pickles correctly.
        super().__init__(output_path, reason)

    @property
    def output_path(self) -> Path:
        """Path of the simulation output that failed to parse."""
        return cast(Path, self.args[0])

    @property
    def reason(self) -> str:
        """Explanation of the parsing failure."""
        return cast(str, self.args[1])

    def __str__(self) -> str:
        return (
            f"Fail to parse the simulation file"
            f" '{self.output_path}': {self.reason}"
        )


class XpansionParsingError(BaseConfigError):
def __init__(self, xpansion_json: Path, reason: str):
super().__init__(xpansion_json, reason)

@property
def xpansion_json(self) -> Path:
return cast(Path, self.args[0])

@property
def reason(self) -> str:
return cast(str, self.args[1])

def __str__(self) -> str:
xpansion_json = self.xpansion_json
reason = self.reason
return f"Fail to parse the Xpansion file '{xpansion_json}': {reason}"
711 changes: 354 additions & 357 deletions antarest/study/storage/rawstudy/model/filesystem/config/files.py

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -91,7 +91,7 @@ class Simulation(BaseModel):
error: bool
playlist: Optional[List[int]]
archived: bool = False
xpansion: bool = False
xpansion: str

def get_file(self) -> str:
modes = {"economy": "eco", "adequacy": "adq", "draft": "dft"}
6 changes: 2 additions & 4 deletions antarest/study/storage/rawstudy/model/filesystem/factory.py
Original file line number Diff line number Diff line change
@@ -9,9 +9,7 @@
from antarest.matrixstore.uri_resolver_service import (
UriResolverService,
)
from antarest.study.storage.rawstudy.model.filesystem.config.files import (
ConfigPathBuilder,
)
from antarest.study.storage.rawstudy.model.filesystem.config.files import build
from antarest.study.storage.rawstudy.model.filesystem.config.model import (
FileStudyTreeConfig,
FileStudyTreeConfigDTO,
@@ -63,7 +61,7 @@ def create_from_fs(
).to_build_config()
return FileStudy(config, FileStudyTree(self.context, config))
start_time = time.time()
config = ConfigPathBuilder.build(path, study_id, output_path)
config = build(path, study_id, output_path)
duration = "{:.3f}".format(time.time() - start_time)
logger.info(f"Study {study_id} config built in {duration}s")
result = FileStudy(config, FileStudyTree(self.context, config))
Original file line number Diff line number Diff line change
@@ -66,7 +66,10 @@ def async_denormalize(self) -> Thread:
logger.info(
f"Denormalizing (async) study data for study {self.config.study_id}"
)
thread = Thread(target=self._threaded_denormalize)
thread = Thread(
target=self._threaded_denormalize,
name=f"{self.__class__.__name__}-Denormalizer",
)
thread.start()
return thread

Original file line number Diff line number Diff line change
@@ -106,36 +106,48 @@ class GeneralData(IniFileNode):

def __init__(self, context: ContextServer, config: FileStudyTreeConfig):
types = deepcopy(GeneralData.TYPES)
general = types["general"]
optimization = types["optimization"]
other_preferences = types["other preferences"]
if config.version >= 650:
types["other preferences"]["initial-reservoir-levels"] = str
other_preferences["initial-reservoir-levels"] = str
if config.version >= 700:
types["optimization"]["link-type"] = str
optimization["link-type"] = str
if config.version >= 710:
types["general"]["thematic-trimming"] = bool
types["general"]["geographic-trimming"] = bool
del types["general"]["filtering"]
general["thematic-trimming"] = bool
general["geographic-trimming"] = bool
del general["filtering"]
if config.version >= 720:
types["other preferences"]["hydro-pricing-mode"] = str
other_preferences["hydro-pricing-mode"] = str
if config.version >= 800:
types["other preferences"]["hydro-heuristic-policy"] = str
types["optimization"]["include-exportstructure"] = bool
types["optimization"]["include-unfeasible-problem-behavior"] = str
types["general"]["custom-scenario"] = bool
del types["general"]["custom-ts-numbers"]
other_preferences["hydro-heuristic-policy"] = str
optimization["include-exportstructure"] = bool
optimization["include-unfeasible-problem-behavior"] = str
general["custom-scenario"] = bool
del general["custom-ts-numbers"]
if config.version >= 810:
types["other preferences"]["renewable-generation-modelling"] = str
other_preferences["renewable-generation-modelling"] = str
if config.version >= 830:
types["adequacy patch"] = {
"include-adq-patch": bool,
"set-to-null-ntc-from-physical-out-to-physical-in-for-first-step": bool,
"set-to-null-ntc-between-physical-out-for-first-step": bool,
}
types["optimization"]["include-split-exported-mps"] = bool
types["optimization"][
"include-exportmps"
] = str # none, optim-1, optim-2, both-optims
optimization["include-split-exported-mps"] = bool
# include-exportmps: none, optim-1, optim-2, both-optims
optimization["include-exportmps"] = str
if config.version >= 840:
del types["optimization"]["include-split-exported-mps"]
del optimization["include-split-exported-mps"]
if config.version >= 850:
# fmt: off
adequacy = types["adequacy patch"]
adequacy["price-taking-order"] = str
adequacy["include-hurdle-cost-csr"] = bool
adequacy["check-csr-cost-function"] = bool
adequacy["threshold-initiate-curtailment-sharing-rule"] = float
adequacy["threshold-display-local-matching-rule-violations"] = float
adequacy["threshold-csr-variable-bounds-relaxation"] = int
# fmt: on

IniFileNode.__init__(
self,
4 changes: 2 additions & 2 deletions antarest/study/storage/rawstudy/model/helpers.py
Original file line number Diff line number Diff line change
@@ -3,7 +3,7 @@
from antarest.core.model import JSON
from antarest.core.utils.utils import assert_this
from antarest.study.storage.rawstudy.model.filesystem.config.files import (
ConfigPathBuilder,
get_playlist,
)
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy

@@ -32,7 +32,7 @@ def get_playlist(
study: FileStudy, output_id: Optional[str] = None
) -> Optional[Dict[int, float]]:
config = FileStudyHelpers.get_config(study, output_id)
return ConfigPathBuilder.get_playlist(config)
return get_playlist(config)

@staticmethod
def set_playlist(
1 change: 1 addition & 0 deletions antarest/study/storage/rawstudy/raw_study_service.py
Original file line number Diff line number Diff line change
@@ -73,6 +73,7 @@ def __init__(
self.path_resources: Path = path_resources
self.cleanup_thread = Thread(
target=RawStudyService.cleanup_lazynode_zipfilelist_cache,
name=f"{self.__class__.__name__}-Cleaner",
daemon=True,
)
self.cleanup_thread.start()
3 changes: 3 additions & 0 deletions antarest/study/storage/rawstudy/watcher.py
Original file line number Diff line number Diff line change
@@ -12,6 +12,7 @@
from filelock import FileLock

from antarest.core.config import Config
from antarest.core.exceptions import CannotScanInternalWorkspace
from antarest.core.interfaces.service import IService
from antarest.core.requests import RequestParameters
from antarest.core.tasks.model import TaskResult, TaskType
@@ -204,6 +205,8 @@ def scan(
studies: List[StudyFolder] = list()
directory_path: Optional[Path] = None
if workspace_directory_path is not None and workspace_name:
if workspace_name == DEFAULT_WORKSPACE_NAME:
raise CannotScanInternalWorkspace
try:
workspace = self.config.storage.workspaces[workspace_name]
except KeyError:
195 changes: 195 additions & 0 deletions antarest/study/storage/study_upgrader/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,195 @@
from http import HTTPStatus
from http.client import HTTPException
from pathlib import Path
from typing import NamedTuple, Callable
import logging
import re
import shutil
import tempfile
import time

from antarest.core.exceptions import StudyValidationError

from .upgrader_710 import upgrade_710
from .upgrader_720 import upgrade_720
from .upgrader_800 import upgrade_800
from .upgrader_810 import upgrade_810
from .upgrader_820 import upgrade_820
from .upgrader_830 import upgrade_830
from .upgrader_840 import upgrade_840
from .upgrader_850 import upgrade_850


logger = logging.getLogger(__name__)


class UpgradeMethod(NamedTuple):
    """Raw study upgrade method (old version, new version, upgrade function)."""

    # Source version number, e.g. "710".
    old: str
    # Target version number reached after running `method`.
    new: str
    # Callable that mutates the study directory in place.
    method: Callable[[Path], None]


# Ordered chain of single-step upgrades: each step's `new` version is the
# next step's `old` version.  `can_upgrade_version` and `_do_upgrade` walk
# this list in order, so keep it sorted by version.
UPGRADE_METHODS = [
    UpgradeMethod("700", "710", upgrade_710),
    UpgradeMethod("710", "720", upgrade_720),
    UpgradeMethod("720", "800", upgrade_800),
    UpgradeMethod("800", "810", upgrade_810),
    UpgradeMethod("810", "820", upgrade_820),
    UpgradeMethod("820", "830", upgrade_830),
    UpgradeMethod("830", "840", upgrade_840),
    UpgradeMethod("840", "850", upgrade_850),
]


class InvalidUpgrade(HTTPException):
    """Raised when a study upgrade request cannot be satisfied (HTTP 422).

    NOTE(review): the base class here is `http.client.HTTPException`, which
    carries no HTTP response semantics by itself — confirm that a web-framework
    `HTTPException` (e.g. FastAPI's) was not intended instead.
    """

    def __init__(self, message: str) -> None:
        super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message)


def find_next_version(from_version: str) -> str:
    """
    Find the next study version from the given version.

    Args:
        from_version: The current version as a string.

    Returns:
        The next version as a string, or an empty string when
        `from_version` is not the source of any registered upgrade.
    """
    for upgrade in UPGRADE_METHODS:
        if upgrade.old == from_version:
            return upgrade.new
    return ""


def upgrade_study(study_path: Path, target_version: str) -> None:
    """
    Upgrade a study in place to `target_version`.

    The study is first copied to a temporary directory created next to it;
    every upgrade step runs on that copy, which replaces the original only
    on success, so a failed upgrade leaves the study untouched.

    Args:
        study_path: Path to the study directory.
        target_version: Version to upgrade to, e.g. "850".

    Raises:
        InvalidUpgrade: If the requested upgrade path is not supported.
        StudyValidationError: If the current version cannot be determined.
    """
    tmp_dir = Path(
        tempfile.mkdtemp(
            suffix=".upgrade.tmp", prefix="~", dir=study_path.parent
        )
    )
    shutil.copytree(study_path, tmp_dir, dirs_exist_ok=True)
    try:
        src_version = get_current_version(tmp_dir)
        can_upgrade_version(src_version, target_version)
        _do_upgrade(tmp_dir, src_version, target_version)
    except (StudyValidationError, InvalidUpgrade) as e:
        # Expected validation failures: drop the working copy, log quietly.
        shutil.rmtree(tmp_dir)
        logger.warning(str(e))
        raise
    except Exception as e:
        # Unexpected failure: drop the working copy, keep the stack trace.
        shutil.rmtree(tmp_dir)
        logger.error(f"Unhandled exception : {e}", exc_info=True)
        raise
    else:
        # Success: swap the upgraded copy in place of the original study.
        # The original is renamed to a fresh backup path until the swap is
        # done, then removed (best effort).
        backup_dir = Path(
            tempfile.mkdtemp(
                suffix=".backup.tmp", prefix="~", dir=study_path.parent
            )
        )
        backup_dir.rmdir()
        study_path.rename(backup_dir)
        tmp_dir.rename(study_path)
        shutil.rmtree(backup_dir, ignore_errors=True)


def get_current_version(study_path: Path) -> str:
    """
    Get the current version of a study.

    Args:
        study_path: Path to the study directory.

    Returns:
        The version number read from the `study.antares` file.

    Raises:
        StudyValidationError: If no line of `study.antares` matches the
        expected `version = NNN` format.
    """
    antares_path = study_path / "study.antares"
    version_regex = re.compile(r"version\s*=\s*([\w.-]+)\s*")
    with antares_path.open(encoding="utf-8") as stream:
        for raw_line in stream:
            found = version_regex.fullmatch(raw_line)
            if found:
                return found.group(1).rstrip()
    raise StudyValidationError(
        f"File parsing error: the version number is not found in '{antares_path}'"
        f" or does not match the expected '{version_regex.pattern}' format."
    )


def can_upgrade_version(from_version: str, to_version: str) -> None:
    """
    Check that a study can be upgraded from `from_version` to `to_version`.

    Args:
        from_version: The current version of the study.
        to_version: The target version of the study.

    Raises:
        InvalidUpgrade: If the upgrade is not possible.
    """
    if from_version == to_version:
        raise InvalidUpgrade(
            f"Your study is already in version '{to_version}'"
        )

    # Map each source version to the version it upgrades to, preserving
    # the declaration order of `UPGRADE_METHODS`.
    next_versions = {meth.old: meth.new for meth in UPGRADE_METHODS}
    if from_version not in next_versions:
        raise InvalidUpgrade(
            f"Version '{from_version}' unknown: possible versions are {', '.join(next_versions)}"
        )

    targets = list(next_versions.values())
    if to_version not in targets:
        raise InvalidUpgrade(
            f"Version '{to_version}' unknown: possible versions are {', '.join(targets)}"
        )

    # Follow the upgrade chain starting at `from_version`; the walk must
    # reach `to_version`, otherwise the chain is broken.
    version = from_version
    while version in next_versions:
        version = next_versions[version]
        if version == to_version:
            return

    # This code must be unreachable!
    raise InvalidUpgrade(
        f"Impossible to upgrade from version '{from_version}'"
        f" to version '{to_version}':"
        f" missing value in `UPGRADE_METHODS`."
    )


def _update_study_antares_file(target_version: str, study_path: Path) -> None:
    """Rewrite the `version` and `lastsave` fields of `study.antares` in place."""
    antares_file = study_path / "study.antares"
    text = antares_file.read_text(encoding="utf-8")
    replacements = [
        (r"^version\s*=.*$", f"version = {target_version}"),
        (r"^lastsave\s*=.*$", f"lastsave = {int(time.time())}"),
    ]
    for pattern, replacement in replacements:
        text = re.sub(pattern, replacement, text, flags=re.MULTILINE)
    antares_file.write_text(text, encoding="utf-8")


def _do_upgrade(
    study_path: Path, src_version: str, target_version: str
) -> None:
    """Run every upgrade step from `src_version` up to `target_version`."""
    _update_study_antares_file(target_version, study_path)
    version = src_version
    for step in UPGRADE_METHODS:
        # Apply a step only when it continues the chain and the target
        # has not been reached yet.
        if version != step.old or version == target_version:
            continue
        step.method(study_path)
        version = step.new
30 changes: 30 additions & 0 deletions antarest/study/storage/study_upgrader/upgrader_710.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
from pathlib import Path
from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader
from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter
from antarest.study.storage.rawstudy.model.filesystem.root.settings.generaldata import (
DUPLICATE_KEYS,
)

GENERAL_DATA_PATH = "settings/generaldata.ini"


def upgrade_710(study_path: Path) -> None:
    """
    Upgrade the study configuration to version 710.

    Renames `general/filtering` to `general/geographic-trimming` and seeds
    the new 7.1 settings with their default values.

    NOTE:
        The file `study.antares` is not upgraded here.

    Args:
        study_path: path to the study directory.
    """
    ini_path = study_path / GENERAL_DATA_PATH
    data = MultipleSameKeysIniReader(DUPLICATE_KEYS).read(ini_path)
    general = data["general"]
    # `filtering` is renamed: its value moves to `geographic-trimming`.
    general["geographic-trimming"] = general.pop("filtering")
    general["thematic-trimming"] = False
    data["optimization"]["link-type"] = "local"
    data["other preferences"]["hydro-pricing-mode"] = "fast"
    IniWriter(special_keys=DUPLICATE_KEYS).write(data, ini_path)
6 changes: 6 additions & 0 deletions antarest/study/storage/study_upgrader/upgrader_720.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
from pathlib import Path


def upgrade_720(study_path: Path) -> None:
    """Upgrade the study configuration to version 720 (no-op)."""
    # There is no input modification between the 7.1.0 and the 7.2.0 version
    pass
34 changes: 34 additions & 0 deletions antarest/study/storage/study_upgrader/upgrader_800.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
from pathlib import Path
from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader
from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter
from antarest.study.storage.rawstudy.model.filesystem.root.settings.generaldata import (
DUPLICATE_KEYS,
)

GENERAL_DATA_PATH = "settings/generaldata.ini"


def upgrade_800(study_path: Path) -> None:
    """
    Upgrade the study configuration to version 800.

    Moves `general/custom-ts-numbers` to `general/custom-scenario` and
    seeds the new 8.0 settings with their default values.

    NOTE:
        The file `study.antares` is not upgraded here.

    Args:
        study_path: path to the study directory.
    """
    ini_path = study_path / GENERAL_DATA_PATH
    data = MultipleSameKeysIniReader(DUPLICATE_KEYS).read(ini_path)
    preferences = data["other preferences"]
    preferences["hydro-heuristic-policy"] = "accommodate rule curves"
    optimization = data["optimization"]
    optimization["include-exportstructure"] = False
    optimization["include-unfeasible-problem-behavior"] = "error-verbose"
    general = data["general"]
    # `custom-ts-numbers` is renamed: its value moves to `custom-scenario`.
    general["custom-scenario"] = general.pop("custom-ts-numbers")
    IniWriter(special_keys=DUPLICATE_KEYS).write(data, ini_path)
28 changes: 28 additions & 0 deletions antarest/study/storage/study_upgrader/upgrader_810.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
from pathlib import Path
from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader
from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter
from antarest.study.storage.rawstudy.model.filesystem.root.settings.generaldata import (
DUPLICATE_KEYS,
)

GENERAL_DATA_PATH = "settings/generaldata.ini"


def upgrade_810(study_path: Path) -> None:
    """
    Upgrade the study configuration to version 810.

    Enables the renewable generation setting and creates the empty
    `input/renewables` folder structure.

    NOTE:
        The file `study.antares` is not upgraded here.

    Args:
        study_path: path to the study directory.
    """
    ini_path = study_path / GENERAL_DATA_PATH
    data = MultipleSameKeysIniReader(DUPLICATE_KEYS).read(ini_path)
    data["other preferences"]["renewable-generation-modelling"] = "aggregated"
    IniWriter(special_keys=DUPLICATE_KEYS).write(data, ini_path)
    renewables_dir = study_path / "input" / "renewables"
    for sub_dir in ("clusters", "series"):
        (renewables_dir / sub_dir).mkdir(parents=True)
50 changes: 50 additions & 0 deletions antarest/study/storage/study_upgrader/upgrader_820.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
import glob
from pathlib import Path

import numpy
import pandas # type: ignore


def upgrade_820(study_path: Path) -> None:
    """
    Upgrade the study configuration to version 820.

    For every link matrix `input/links/<area>/<name>.txt`, splits the
    columns into `<name>_parameters.txt` (columns 2-7) and per-direction
    capacity files under a new `capacities/` sub-folder, then removes the
    original file.

    NOTE:
        The file `study.antares` is not upgraded here.

    Args:
        study_path: path to the study directory.
    """
    for folder in glob.glob(str(study_path / "input" / "links" / "*")):
        folder_path = Path(folder)
        txt_files = glob.glob(str(folder_path / "*.txt"))
        if not txt_files:
            continue
        capacities_dir = folder_path / "capacities"
        capacities_dir.mkdir()
        for txt in txt_files:
            name = Path(txt).stem
            frame = pandas.read_csv(txt, sep="\t", header=None)
            outputs = [
                (folder_path / f"{name}_parameters.txt", frame.iloc[:, 2:8]),
                (capacities_dir / f"{name}_direct.txt", frame.iloc[:, 0]),
                (capacities_dir / f"{name}_indirect.txt", frame.iloc[:, 1]),
            ]
            for target, columns in outputs:
                numpy.savetxt(
                    target, columns.values, delimiter="\t", fmt="%.6f"
                )
            (folder_path / f"{name}.txt").unlink()
42 changes: 42 additions & 0 deletions antarest/study/storage/study_upgrader/upgrader_830.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
import glob
from pathlib import Path

from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader
from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter
from antarest.study.storage.rawstudy.model.filesystem.root.settings.generaldata import (
DUPLICATE_KEYS,
)

GENERAL_DATA_PATH = "settings/generaldata.ini"


def upgrade_830(study_path: Path) -> None:
    """
    Upgrade the study configuration to version 830.

    Adds the `adequacy patch` section to the general data and writes a
    default `adequacy_patch.ini` file in every area folder.

    NOTE:
        The file `study.antares` is not upgraded here.

    Args:
        study_path: path to the study directory.
    """
    ini_path = study_path / GENERAL_DATA_PATH
    data = MultipleSameKeysIniReader(DUPLICATE_KEYS).read(ini_path)
    data["adequacy patch"] = {
        "include-adq-patch": False,
        "set-to-null-ntc-between-physical-out-for-first-step": True,
        "set-to-null-ntc-from-physical-out-to-physical-in-for-first-step": True,
    }
    data["optimization"]["include-split-exported-mps"] = False
    IniWriter(special_keys=DUPLICATE_KEYS).write(data, ini_path)
    for folder in glob.glob(str(study_path / "input" / "areas" / "*")):
        area_dir = Path(folder)
        if not area_dir.is_dir():
            continue
        writer = IniWriter(special_keys=DUPLICATE_KEYS)
        writer.write(
            {"adequacy-patch": {"adequacy-patch-mode": "outside"}},
            area_dir / "adequacy_patch.ini",
        )
36 changes: 36 additions & 0 deletions antarest/study/storage/study_upgrader/upgrader_840.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
from pathlib import Path
from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader
from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter
from antarest.study.storage.rawstudy.model.filesystem.root.settings.generaldata import (
DUPLICATE_KEYS,
)

GENERAL_DATA_PATH = "settings/generaldata.ini"
# Maps the pre-8.4 `transmission-capacities` value to its 8.4 string form.
# NOTE(review): the keys are mixed bool/str — presumably the ini reader
# yields booleans for true/false and leaves other values such as
# "infinite" as strings; confirm against the ini value parser.
MAPPING_TRANSMISSION_CAPACITIES = {
    True: "local-values",
    False: "null-for-all-links",
    "infinite": "infinite-for-all-links",
}


def upgrade_840(study_path: Path) -> None:
    """
    Upgrade the study configuration to version 840.

    Translates `optimization/transmission-capacities` to its new string
    form and removes `include-split-exported-mps`.

    NOTE:
        The file `study.antares` is not upgraded here.

    Args:
        study_path: path to the study directory.
    """
    ini_path = study_path / GENERAL_DATA_PATH
    data = MultipleSameKeysIniReader(DUPLICATE_KEYS).read(ini_path)
    optimization = data["optimization"]
    legacy_value = optimization["transmission-capacities"]
    optimization["transmission-capacities"] = (
        MAPPING_TRANSMISSION_CAPACITIES[legacy_value]
    )
    del optimization["include-split-exported-mps"]
    IniWriter(special_keys=DUPLICATE_KEYS).write(data, ini_path)
34 changes: 34 additions & 0 deletions antarest/study/storage/study_upgrader/upgrader_850.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
from pathlib import Path
from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader
from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter
from antarest.study.storage.rawstudy.model.filesystem.root.settings.generaldata import (
DUPLICATE_KEYS,
)

# noinspection SpellCheckingInspection
GENERAL_DATA_PATH = "settings/generaldata.ini"


def upgrade_850(study_path: Path) -> None:
    """
    Upgrade the study configuration to version 850: seed the new
    `adequacy patch` settings with their default values.

    NOTE:
        The file `study.antares` is not upgraded here.

    Args:
        study_path: path to the study directory.
    """
    ini_path = study_path / GENERAL_DATA_PATH
    data = MultipleSameKeysIniReader(DUPLICATE_KEYS).read(ini_path)
    data["adequacy patch"].update(
        {
            "price-taking-order": "DENS",
            "include-hurdle-cost-csr": False,
            "check-csr-cost-function": False,
            "threshold-initiate-curtailment-sharing-rule": 0.0,
            "threshold-display-local-matching-rule-violations": 0.0,
            "threshold-csr-variable-bounds-relaxation": 3,
        }
    )
    IniWriter(special_keys=DUPLICATE_KEYS).write(data, ini_path)
358 changes: 0 additions & 358 deletions antarest/study/storage/study_version_upgrader.py
Original file line number Diff line number Diff line change
@@ -1,358 +0,0 @@
import glob
import json
import os
import typing
from datetime import datetime
from http import HTTPStatus
from http.client import HTTPException
from pathlib import Path
from typing import Optional

import numpy
import pandas # type: ignore

from antarest.core.exceptions import StudyValidationError
from antarest.study.storage.rawstudy.io.reader import MultipleSameKeysIniReader
from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter
from antarest.study.storage.rawstudy.model.filesystem.root.settings.generaldata import (
DUPLICATE_KEYS,
)


def modify_file(
    study_path: str,
    file_path: str,
    key: str,
    parameter_to_add: Optional[str],
    value: typing.Any,
    parameter_to_delete: Optional[str],
) -> None:
    """
    Read an .ini file, add and/or remove one parameter in section `key`,
    then write the file back in place.

    Args:
        study_path: path of the study directory.
        file_path: glob pattern of the .ini file, relative to `study_path`.
        key: section name to modify.
        parameter_to_add: parameter set to `value`, or None to add nothing.
        value: value stored under `parameter_to_add`.
        parameter_to_delete: parameter removed from the section, or None to
            delete nothing.

    Note:
        Raises IndexError if the glob pattern matches no file, and KeyError
        if `parameter_to_delete` is absent from an existing section.
    """
    reader = MultipleSameKeysIniReader(DUPLICATE_KEYS)
    # Only the first glob match is used; `file_path` may contain wildcards.
    file = glob.glob(os.path.join(study_path, file_path))[0]
    path = Path(file)
    data = reader.read(path)
    if key in data:
        if parameter_to_add is not None:
            data[key][parameter_to_add] = value
        if parameter_to_delete is not None:
            del data[key][parameter_to_delete]
    else:
        # A missing section is only created when there is something to add;
        # a deletion request against a missing section is silently ignored.
        if parameter_to_add is not None:
            data[key] = {parameter_to_add: value}
    writer = IniWriter(special_keys=DUPLICATE_KEYS)
    writer.write(data, path)


def find_value_in_file(
    study_path: str, file_path: str, key: str, parameter_to_check: str
) -> typing.Any:
    """
    Return the value of `parameter_to_check` in section `key` of the first
    .ini file matching `file_path` under `study_path`.
    """
    ini_file = Path(glob.glob(os.path.join(study_path, file_path))[0])
    content = MultipleSameKeysIniReader(DUPLICATE_KEYS).read(ini_file)
    return content[key][parameter_to_check]


# Platform path separator, interpolated into glob patterns below.
sep = os.sep
# NOTE(review): historical misspelling of "preferences" kept as-is because
# this variable is used as the section name throughout the module.
other_preferencies = "other preferences"
general_data_path = f"settings{sep}generaldata.ini"
adequacy_patch = "adequacy patch"
# Maps the pre-8.4 `transmission-capacities` value to its 8.4 string form.
mapping_transmission_capacities = {
    True: "local-values",
    False: "null-for-all-links",
    "infinite": "infinite-for-all-links",
}


def upgrade_700(study_path: str) -> None:
    """Upgrade to version 700: nothing to do (7.0.0 is the base case)."""
    # It's the basecase study so we pass
    pass


def upgrade_710(study_path: str) -> None:
    """
    Upgrade a study to version 710.

    Renames `general/filtering` to `general/geographic-trimming` and seeds
    the new 7.1 settings with their default values.

    Args:
        study_path: path to the study directory.
    """
    # Read the old value first: the `geographic-trimming` call below also
    # deletes the `filtering` key it was read from.
    geographical_trimming = find_value_in_file(
        study_path, general_data_path, "general", "filtering"
    )
    modify_file(
        study_path,
        general_data_path,
        "optimization",
        "link-type",
        "local",
        None,
    )
    modify_file(
        study_path,
        general_data_path,
        "general",
        "geographic-trimming",
        geographical_trimming,
        "filtering",
    )
    modify_file(
        study_path,
        general_data_path,
        "general",
        "thematic-trimming",
        False,
        None,
    )
    modify_file(
        study_path,
        general_data_path,
        other_preferencies,
        "hydro-pricing-mode",
        "fast",
        None,
    )


def upgrade_720(study_path: str) -> None:
    """Upgrade a study to version 720 (no input changes required)."""
    # There is no input modification between the 7.1.0 and the 7.2.0 version
    pass


def upgrade_800(study_path: str) -> None:
    """
    Upgrade a study to version 800.

    Moves `general/custom-ts-numbers` to `general/custom-scenario` and
    seeds the new 8.0 settings with their default values.

    Args:
        study_path: path to the study directory.
    """
    # Read the old value first: the `custom-scenario` call below also
    # deletes the `custom-ts-numbers` key it was read from.
    custom_ts_numbers_value = find_value_in_file(
        study_path, general_data_path, "general", "custom-ts-numbers"
    )
    modify_file(
        study_path,
        general_data_path,
        other_preferencies,
        "hydro-heuristic-policy",
        "accommodate rule curves",
        None,
    )
    modify_file(
        study_path,
        general_data_path,
        "optimization",
        "include-exportstructure",
        False,
        None,
    )
    modify_file(
        study_path,
        general_data_path,
        "optimization",
        "include-unfeasible-problem-behavior",
        "error-verbose",
        None,
    )
    modify_file(
        study_path,
        general_data_path,
        "general",
        "custom-scenario",
        custom_ts_numbers_value,
        "custom-ts-numbers",
    )


def upgrade_810(study_path: str) -> None:
    """
    Upgrade a study to version 810.

    Enables the renewable generation setting and creates the empty
    `input/renewables` folder structure.

    Args:
        study_path: path to the study directory.
    """
    modify_file(
        study_path,
        general_data_path,
        other_preferencies,
        "renewable-generation-modelling",
        "aggregated",
        None,
    )
    # `study_path + f"{sep}input"` is plain string concatenation of the
    # separator, not os.path.join of the two components.
    os.mkdir(os.path.join(study_path + f"{sep}input", "renewables"))
    os.mkdir(
        os.path.join(study_path + f"{sep}input{sep}renewables", "clusters")
    )
    os.mkdir(os.path.join(study_path + f"{sep}input{sep}renewables", "series"))
    # TODO Cannot update study with renewables clusters for the moment


def upgrade_820(study_path: str) -> None:
    """
    Upgrade a study to version 820.

    For every link matrix `input/links/<area>/<name>.txt`, splits the
    columns into `<name>_parameters.txt` (columns 2-7) and per-direction
    capacity files under a new `capacities/` sub-folder, then removes the
    original file.

    Args:
        study_path: path to the study directory.
    """
    links = glob.glob(os.path.join(study_path, f"input{sep}links{sep}*"))
    for folder in links:
        all_txt = glob.glob(os.path.join(folder, "*.txt"))
        if len(all_txt) > 0:
            os.mkdir(os.path.join(folder, "capacities"))
            for txt in all_txt:
                df = pandas.read_csv(txt, sep="\t", header=None)
                df_parameters = df.iloc[:, 2:8]
                df_direct = df.iloc[:, 0]
                df_indirect = df.iloc[:, 1]
                # `Path.stem` replaces the previous hand-rolled parsing
                # that reversed the whole path string and scanned for the
                # separator to strip the ".txt" suffix.
                name = Path(txt).stem
                numpy.savetxt(
                    folder + f"{sep}{name}_parameters.txt",
                    df_parameters.values,
                    delimiter="\t",
                    fmt="%.6f",
                )
                numpy.savetxt(
                    folder + f"{sep}capacities{sep}{name}_direct.txt",
                    df_direct.values,
                    delimiter="\t",
                    fmt="%.6f",
                )
                numpy.savetxt(
                    folder + f"{sep}capacities{sep}{name}_indirect.txt",
                    df_indirect.values,
                    delimiter="\t",
                    fmt="%.6f",
                )
                os.remove(folder + f"{sep}{name}.txt")


def upgrade_830(study_path: str) -> None:
    """
    Upgrade a study to version 830.

    Adds the `adequacy patch` settings to the general data and writes a
    default `adequacy_patch.ini` file in every area folder.

    Args:
        study_path: path to the study directory.
    """
    modify_file(
        study_path,
        general_data_path,
        "optimization",
        "include-split-exported-mps",
        False,
        None,
    )
    modify_file(
        study_path,
        general_data_path,
        adequacy_patch,
        "include-adq-patch",
        False,
        None,
    )
    modify_file(
        study_path,
        general_data_path,
        adequacy_patch,
        "set-to-null-ntc-between-physical-out-for-first-step",
        True,
        None,
    )
    modify_file(
        study_path,
        general_data_path,
        adequacy_patch,
        "set-to-null-ntc-from-physical-out-to-physical-in-for-first-step",
        True,
        None,
    )
    areas = glob.glob(os.path.join(study_path, f"input{sep}areas{sep}*"))
    if len(areas) > 0:
        for folder in areas:
            if Path(folder).is_dir():
                writer = IniWriter()
                writer.write(
                    {"adequacy-patch": {"adequacy-patch-mode": "outside"}},
                    Path(folder) / "adequacy_patch.ini",
                )


def upgrade_840(study_path: str) -> None:
    """
    Upgrade a study to version 840.

    Translates `optimization/transmission-capacities` to its new string
    form and drops `include-split-exported-mps`.

    Args:
        study_path: path to the study directory.
    """
    # Read the legacy value (a bool, or the string "infinite") before the
    # file is rewritten below.
    old_value = find_value_in_file(
        study_path,
        general_data_path,
        "optimization",
        "transmission-capacities",
    )
    modify_file(
        study_path,
        general_data_path,
        "optimization",
        None,
        None,
        "include-split-exported-mps",
    )
    modify_file(
        study_path,
        general_data_path,
        "optimization",
        "transmission-capacities",
        mapping_transmission_capacities[old_value],
        None,
    )


# Maps each supported version number to the function that upgrades a study
# *to* that version.  `do_upgrade` walks the keys in insertion order to
# build the upgrade chain, so keep them sorted ascending.
upgrade_methods = {
    700: upgrade_700,
    710: upgrade_710,
    720: upgrade_720,
    800: upgrade_800,
    810: upgrade_810,
    820: upgrade_820,
    830: upgrade_830,
    840: upgrade_840,
}


class InvalidUpgrade(HTTPException):
    """Raised when a study upgrade request cannot be satisfied (HTTP 422).

    NOTE(review): the base class here is `http.client.HTTPException`, which
    carries no HTTP response semantics by itself — confirm that a web-framework
    `HTTPException` was not intended instead.
    """

    def __init__(self, message: str) -> None:
        super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message)


def upgrade_study(study_path: str, new_version: int) -> None:
    """
    Validate then apply all upgrade steps from the study's current version
    up to `new_version`, rewriting the study in place.

    Raises:
        StudyValidationError: if the current version cannot be read.
        InvalidUpgrade: if the requested upgrade path is not supported.
    """
    old_version = get_current_version(study_path)
    check_upgrade_is_possible(old_version, new_version)
    return do_upgrade(study_path, old_version, new_version)


def get_current_version(study_path: str) -> int:
    """
    Return the study version number read from the `study.antares` file.

    Args:
        study_path: path of the study directory.

    Raises:
        StudyValidationError: if the file is missing or contains no
        parsable `version = NNN` line.
    """
    files = glob.glob(os.path.join(study_path, "study.antares"))
    if len(files) != 1:
        raise StudyValidationError("The path of your study is not valid")
    # Use a context manager: the previous implementation leaked the file
    # handle, matched any line merely *containing* "version", and sliced
    # the value at a hard-coded offset.
    with open(files[0]) as f:
        for line in f:
            key, eq, value = line.partition("=")
            if eq and key.strip() == "version":
                return int(value)
    raise StudyValidationError(
        "Your study.antares file is not in the good format"
    )


def check_upgrade_is_possible(old_version: int, new_version: int) -> None:
    """
    Validate that upgrading from `old_version` to `new_version` is allowed.

    Raises:
        InvalidUpgrade: if the target version is unknown, either version
        predates 7.0.0, or the change is a downgrade or a no-op.
    """
    if new_version not in upgrade_methods:
        raise InvalidUpgrade(f"The version {new_version} is not supported")
    if old_version < 700 or new_version < 700:
        raise InvalidUpgrade(
            "Sorry the first version we deal with is the 7.0.0"
        )
    if old_version > new_version:
        raise InvalidUpgrade("Cannot downgrade your study version")
    if old_version == new_version:
        raise InvalidUpgrade(
            "The version you asked for is the one you currently have"
        )


def update_study_antares_file(new_version: int, study_path: str) -> None:
    """
    Bump the `version` and `lastsave` fields of the study's `study.antares`
    file.

    NOTE(review): relies on the fixed `study.antares` layout where the
    version is on line 2 and `lastsave` on line 5 — confirm this holds for
    every supported study.

    Args:
        new_version: version number to write.
        study_path: path of the study directory.
    """
    epoch_time = datetime(1970, 1, 1)
    delta = int((datetime.now() - epoch_time).total_seconds())
    file = glob.glob(os.path.join(study_path, "study.antares"))[0]
    with open(file, "r") as f:
        lines = f.readlines()
    lines[1] = f"version = {new_version}\n"
    lines[4] = f"lastsave = {delta}\n"
    with open(file, "w") as f:
        # `writelines` replaces the manual write loop; the trailing
        # `f.close()` of the old code was redundant after `with`.
        f.writelines(lines)


def do_upgrade(study_path: str, old_version: int, new_version: int) -> None:
    """
    Apply every upgrade step strictly after `old_version` and up to
    `new_version`, after bumping the `study.antares` metadata.

    NOTE(review): if either version is absent from `upgrade_methods`, the
    linear scans below run off the end of the list and raise IndexError —
    callers are presumably expected to run `check_upgrade_is_possible`
    first; confirm.
    """
    update_study_antares_file(new_version, study_path)
    possibilities = list(upgrade_methods.keys())
    start = 0
    end = len(possibilities) - 1
    # Locate `old_version` scanning forward and `new_version` scanning
    # backward; the steps in (start, end] are the ones to run.
    while possibilities[start] != old_version:
        start += 1
    while possibilities[end] != new_version:
        end -= 1
    return recursive_changes(possibilities[start + 1 : end + 1], study_path)


def recursive_changes(update_list: typing.List[int], study_path: str) -> None:
    """
    Apply each upgrade method named in `update_list`, in order.

    Despite the historical name, this is now a plain loop: the previous
    implementation recursed with list slicing, costing O(n²) copies and
    one stack frame per upgrade step for the same result.

    Args:
        update_list: version numbers whose upgrade methods must run.
        study_path: path of the study directory.
    """
    for version in update_list:
        upgrade_methods[version](study_path)
24 changes: 6 additions & 18 deletions antarest/study/storage/utils.py
Original file line number Diff line number Diff line change
@@ -4,11 +4,11 @@
import shutil
import tempfile
import time
from datetime import timedelta, datetime
from datetime import datetime, timedelta
from math import ceil
from pathlib import Path
from time import strptime
from typing import Optional, Union, cast, Callable, List
from typing import Callable, List, Optional, Union, cast
from uuid import uuid4
from zipfile import ZipFile

@@ -19,17 +19,17 @@
)
from antarest.core.interfaces.cache import CacheConstants, ICache
from antarest.core.jwt import JWTUser
from antarest.core.model import PermissionInfo, StudyPermissionType, PublicMode
from antarest.core.model import PermissionInfo, PublicMode, StudyPermissionType
from antarest.core.permissions import check_permission
from antarest.core.requests import UserHasNotPermissionError
from antarest.core.utils.utils import StopWatch
from antarest.study.model import (
DEFAULT_WORKSPACE_NAME,
Study,
STUDY_REFERENCE_TEMPLATES,
StudyMetadataDTO,
MatrixIndex,
Study,
StudyDownloadLevelDTO,
StudyMetadataDTO,
)
from antarest.study.storage.rawstudy.io.reader import IniReader
from antarest.study.storage.rawstudy.io.writer.ini_writer import IniWriter
@@ -208,18 +208,6 @@ def create_new_empty_study(
zip_output.extractall(path=path_study)


def create_permission_from_study(
study: Union[Study, StudyMetadataDTO]
) -> PermissionInfo:
return PermissionInfo(
owner=study.owner.id if study.owner is not None else None,
groups=[g.id for g in study.groups if g.id is not None],
public_mode=PublicMode(study.public_mode)
if study.public_mode is not None
else PublicMode.NONE,
)


def study_matcher(
name: Optional[str], workspace: Optional[str], folder: Optional[str]
) -> Callable[[StudyMetadataDTO], bool]:
@@ -262,7 +250,7 @@ def assert_permission(
logger.error("FAIL permission: study not exist")
raise ValueError("Metadata is None")

permission_info = create_permission_from_study(study)
permission_info = PermissionInfo.from_study(study)
ok = check_permission(user, permission_info, permission_type)
if raising and not ok:
logger.error(
Original file line number Diff line number Diff line change
@@ -8,13 +8,12 @@
from antarest.matrixstore.service import ISimpleMatrixService
from antarest.study.storage.patch_service import PatchService
from antarest.study.storage.rawstudy.model.filesystem.config.files import (
ConfigPathBuilder,
get_playlist,
)
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
from antarest.study.storage.rawstudy.model.filesystem.root.filestudytree import (
FileStudyTree,
)
from antarest.study.storage.rawstudy.model.helpers import FileStudyHelpers
from antarest.study.storage.variantstudy.business.matrix_constants_generator import (
GeneratorMatrixConstants,
)
@@ -569,7 +568,7 @@ def generate_update_playlist(
study_tree: FileStudyTree,
) -> ICommand:
config = study_tree.get(["settings", "generaldata"])
playlist = ConfigPathBuilder.get_playlist(config)
playlist = get_playlist(config)
return UpdatePlaylist(
items=playlist.keys() if playlist else None,
weights={year for year, weight in playlist.items() if weight != 1}
Loading

0 comments on commit e7c9ceb

Please sign in to comment.