Draft: custom parsing, serialization and matching in INI file nodes #2310

Closed · wants to merge 13 commits
4 changes: 2 additions & 2 deletions .github/workflows/commitlint.yml
@@ -1,12 +1,12 @@
- name: Lint Commit Messages
+ name: commitlint
on: [pull_request]

permissions:
contents: read
pull-requests: read

jobs:
- commitlint:
+ commit-messages-lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
2 changes: 1 addition & 1 deletion .github/workflows/deploy.yml
@@ -27,7 +27,7 @@ jobs:
- name: 💚 Set up Node.js
uses: actions/setup-node@v4
with:
- node-version: 18.16.1
+ node-version: 22.13.0

- name: 💚 Install dependencies
run: npm install
@@ -1,4 +1,4 @@
- name: check license headers
+ name: license-header
on:
push:
branches:
8 changes: 4 additions & 4 deletions .github/workflows/main.yml
@@ -69,7 +69,7 @@ jobs:
- name: Set up Node.js
uses: actions/setup-node@v4
with:
- node-version: 18.16.1
+ node-version: 22.13.0
- name: Cache node modules
uses: actions/cache@v4
with:
@@ -92,7 +92,7 @@ jobs:
- name: Set up Node.js
uses: actions/setup-node@v4
with:
- node-version: 18.16.1
+ node-version: 22.13.0
- name: Restore node modules
uses: actions/cache@v4
with:
@@ -112,7 +112,7 @@ jobs:
- name: Set up Node.js
uses: actions/setup-node@v4
with:
- node-version: 18.16.1
+ node-version: 22.13.0
- name: Restore node modules
uses: actions/cache@v4
with:
@@ -132,7 +132,7 @@ jobs:
- name: Set up Node.js
uses: actions/setup-node@v4
with:
- node-version: 18.16.1
+ node-version: 22.13.0
- name: Restore node modules
uses: actions/cache@v4
with:
13 changes: 13 additions & 0 deletions .github/workflows/pr-title.yml
@@ -0,0 +1,13 @@
+ name: pr-title
+ on:
+ pull_request:
+ types: ['opened', 'edited', 'reopened', 'synchronize']
+
+ jobs:
+ pr-title-lint:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Install Dependencies
+ run: npm install @commitlint/[email protected]
+ - uses: JulienKode/[email protected]
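Note: with @commitlint/config-conventional, PR titles must follow the Conventional Commits shape "type(scope): subject". A rough sketch of the accepted shape (an assumption for illustration; the pinned linter applies the full rule set, not just this regex):

    import re

    # Conventional Commits outline: a known type, an optional scope, a subject.
    CONVENTIONAL = re.compile(
        r"^(build|chore|ci|docs|feat|fix|perf|refactor|revert|style|test)"
        r"(\([\w.-]+\))?!?: .+"
    )

    assert CONVENTIONAL.match("feat(ini): add custom matrix parsing")
    assert not CONVENTIONAL.match("Update stuff")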
5 changes: 5 additions & 0 deletions antarest/core/exceptions.py
@@ -545,6 +545,11 @@ def __init__(self, message: str) -> None:
super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message)


+ class MatrixImportFailed(HTTPException):
+ def __init__(self, message: str) -> None:
+ super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message)


class ConstraintTermNotFound(HTTPException):
"""
Exception raised when a constraint term is not found.
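Note: the new MatrixImportFailed mirrors the neighbouring exception classes and maps to HTTP 422. A minimal sanity check (assumes this branch of antarest is installed):

    from http import HTTPStatus
    from antarest.core.exceptions import MatrixImportFailed

    exc = MatrixImportFailed("Could not parse the given matrix")
    assert exc.status_code == HTTPStatus.UNPROCESSABLE_ENTITY  # 422
    assert exc.detail == "Could not parse the given matrix"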
5 changes: 3 additions & 2 deletions antarest/study/business/areas/renewable_management.py
@@ -187,7 +187,8 @@ def get_all_renewables_props(
renewables_by_areas: t.MutableMapping[str, t.MutableMapping[str, RenewableClusterOutput]]
renewables_by_areas = collections.defaultdict(dict)
for area_id, cluster_obj in clusters.items():
- for cluster_id, cluster in cluster_obj.items():
+ for cluster_name, cluster in cluster_obj.items():
+ cluster_id = transform_name_to_id(cluster_name)
renewables_by_areas[area_id][cluster_id] = create_renewable_output(study.version, cluster_id, cluster)

return renewables_by_areas
@@ -223,7 +224,7 @@ def _make_create_cluster_cmd(
) -> CreateRenewablesCluster:
command = CreateRenewablesCluster(
area_id=area_id,
- cluster_name=cluster.id,
+ cluster_name=cluster.name,
parameters=cluster.model_dump(mode="json", by_alias=True, exclude={"id"}),
command_context=self.storage_service.variant_study_service.command_factory.command_context,
study_version=study_version,
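Note: cluster ids are now derived from the INI section names via transform_name_to_id instead of being taken as-is. A hedged sketch of what such a transform does (assumed behaviour; the real implementation in filesystem.config.field_validators is the authority):

    import re

    # Assumed: drop characters outside Antares' allowed id set, trim, lower-case.
    _INVALID_CHARS = re.compile(r"[^a-zA-Z0-9_\-,()& ]")

    def transform_name_to_id(name: str) -> str:
        return _INVALID_CHARS.sub("", name).strip().lower()

    assert transform_name_to_id("Gas Cluster (1)") == "gas cluster (1)"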
27 changes: 15 additions & 12 deletions antarest/study/business/areas/st_storage_management.py
@@ -225,15 +225,17 @@ def validate_rule_curve(self) -> "STStorageMatrices":
# ============================


- _STORAGE_LIST_PATH = "input/st-storage/clusters/{area_id}/list/{storage_id}"
+ _AREA_STORAGES_PATH = "input/st-storage/clusters/{area_id}/list"
+ _STORAGE_PATH = _AREA_STORAGES_PATH + "/{storage_id}"
_STORAGE_SERIES_PATH = "input/st-storage/series/{area_id}/{storage_id}/{ts_name}"
_ALL_STORAGE_PATH = "input/st-storage/clusters"


- def _get_values_by_ids(file_study: FileStudy, area_id: str) -> t.Mapping[str, t.Mapping[str, t.Any]]:
- path = _STORAGE_LIST_PATH.format(area_id=area_id, storage_id="")[:-1]
+ def _get_values_by_ids(file_study: FileStudy, area_id: str) -> t.Dict[str, t.Dict[str, t.Any]]:
+ path = _AREA_STORAGES_PATH.format(area_id=area_id)
try:
- return CaseInsensitiveDict(file_study.tree.get(path.split("/"), depth=3))
+ values = file_study.tree.get(path.split("/"), depth=3)
+ return {transform_name_to_id(name): cluster for name, cluster in values.items()}
except ChildNotFoundError:
raise AreaNotFound(area_id) from None
except KeyError:
@@ -328,7 +330,7 @@ def get_storages(
"""

file_study = self._get_file_study(study)
- path = _STORAGE_LIST_PATH.format(area_id=area_id, storage_id="")[:-1]
+ path = _STORAGE_PATH.format(area_id=area_id, storage_id="")[:-1]
try:
config = file_study.tree.get(path.split("/"), depth=3)
except ChildNotFoundError:
@@ -373,7 +375,8 @@ def get_all_storages_props(
storages_by_areas: t.MutableMapping[str, t.MutableMapping[str, STStorageOutput]]
storages_by_areas = collections.defaultdict(dict)
for area_id, cluster_obj in storages.items():
- for cluster_id, cluster in cluster_obj.items():
+ for cluster_name, cluster in cluster_obj.items():
+ cluster_id = transform_name_to_id(cluster_name)
storages_by_areas[area_id][cluster_id] = create_storage_output(study_version, cluster_id, cluster)

return storages_by_areas
@@ -404,7 +407,7 @@ def update_storages_props(
study_version,
**new_cluster.model_dump(mode="json", by_alias=False, exclude_none=True),
)
- path = _STORAGE_LIST_PATH.format(area_id=area_id, storage_id=storage_id)
+ path = _STORAGE_PATH.format(area_id=area_id, storage_id=storage_id)
cmd = UpdateConfig(
target=path,
data=properties.model_dump(mode="json", by_alias=True, exclude={"id"}),
@@ -437,11 +440,11 @@ def get_storage(
"""

file_study = self._get_file_study(study)
- path = _STORAGE_LIST_PATH.format(area_id=area_id, storage_id=storage_id)
+ path = _STORAGE_PATH.format(area_id=area_id, storage_id=storage_id)
try:
config = file_study.tree.get(path.split("/"), depth=1)
except KeyError:
- raise STStorageNotFound(path, storage_id) from None
+ raise STStorageNotFound(path, storage_id)
return create_storage_output(StudyVersion.parse(study.version), storage_id, config)

def update_storage(
@@ -472,7 +475,7 @@ def update_storage(

values = values_by_ids.get(storage_id)
if values is None:
- path = _STORAGE_LIST_PATH.format(area_id=area_id, storage_id=storage_id)
+ path = _STORAGE_PATH.format(area_id=area_id, storage_id=storage_id)
raise STStorageNotFound(path, storage_id)
old_config = create_st_storage_config(study_version, **values)

@@ -490,7 +493,7 @@ def update_storage(

# create the update config commands with the modified data
command_context = self.storage_service.variant_study_service.command_factory.command_context
- path = _STORAGE_LIST_PATH.format(area_id=area_id, storage_id=storage_id)
+ path = _STORAGE_PATH.format(area_id=area_id, storage_id=storage_id)
commands = [
UpdateConfig(
target=f"{path}/{key}", data=value, command_context=command_context, study_version=study_version
@@ -521,7 +524,7 @@ def delete_storages(

for storage_id in storage_ids:
if storage_id not in values_by_ids:
- path = _STORAGE_LIST_PATH.format(area_id=area_id, storage_id=storage_id)
+ path = _STORAGE_PATH.format(area_id=area_id, storage_id=storage_id)
raise STStorageNotFound(path, storage_id)

command_context = self.storage_service.variant_study_service.command_factory.command_context
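Note: splitting the old _STORAGE_LIST_PATH into _AREA_STORAGES_PATH plus a composed _STORAGE_PATH lets _get_values_by_ids address an area's list directly, instead of formatting with an empty storage_id and slicing off the trailing slash. A quick check with the constants exactly as they appear in the diff:

    _AREA_STORAGES_PATH = "input/st-storage/clusters/{area_id}/list"
    _STORAGE_PATH = _AREA_STORAGES_PATH + "/{storage_id}"

    # Listing an area no longer needs the trailing-slash trick:
    assert _AREA_STORAGES_PATH.format(area_id="de") == "input/st-storage/clusters/de/list"
    assert _STORAGE_PATH.format(area_id="de", storage_id="gas storage") == (
        "input/st-storage/clusters/de/list/gas storage"
    )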
2 changes: 1 addition & 1 deletion antarest/study/business/areas/thermal_management.py
@@ -317,7 +317,7 @@ def _make_create_cluster_cmd(
# between the cluster name and the cluster ID (which is a section name).
args = {
"area_id": area_id,
"cluster_name": cluster.id,
"cluster_name": cluster.name,
"parameters": cluster.model_dump(mode="json", by_alias=True, exclude={"id"}),
"command_context": self.storage_service.variant_study_service.command_factory.command_context,
"study_version": study_version,
17 changes: 9 additions & 8 deletions antarest/study/business/table_mode_management.py
@@ -29,6 +29,7 @@
from antarest.study.business.link_management import LinkManager
from antarest.study.business.model.link_model import LinkBaseDTO
from antarest.study.model import STUDY_VERSION_8_2, RawStudy
+ from antarest.study.storage.rawstudy.model.filesystem.config.field_validators import transform_name_to_id

_TableIndex = str # row name
_TableColumn = str # column name
@@ -218,10 +219,9 @@ def update_table_data(
thermals_by_areas: t.MutableMapping[str, t.MutableMapping[str, ThermalClusterInput]]
thermals_by_areas = collections.defaultdict(dict)
for key, values in data.items():
- area_id, cluster_id = key.split(" / ")
- # Thermal clusters ids were not lowered at the time.
- # So to ensure this endpoint still works with old scripts we have to lower the id at first.
- thermals_by_areas[area_id][cluster_id.lower()] = ThermalClusterInput(**values)
+ area_id, cluster_name = key.split(" / ")
+ cluster_id = transform_name_to_id(cluster_name)
+ thermals_by_areas[area_id][cluster_id] = ThermalClusterInput(**values)
thermals_map = self._thermal_manager.update_thermals_props(study, thermals_by_areas)
data = {
f"{area_id} / {cluster_id}": cluster.model_dump(by_alias=True, exclude={"id", "name"})
@@ -233,9 +233,9 @@
renewables_by_areas: t.MutableMapping[str, t.MutableMapping[str, RenewableClusterInput]]
renewables_by_areas = collections.defaultdict(dict)
for key, values in data.items():
- area_id, cluster_id = key.split(" / ")
- # Same reason as for thermal clusters
- renewables_by_areas[area_id][cluster_id.lower()] = RenewableClusterInput(**values)
+ area_id, cluster_name = key.split(" / ")
+ cluster_id = transform_name_to_id(cluster_name)
+ renewables_by_areas[area_id][cluster_id] = RenewableClusterInput(**values)
renewables_map = self._renewable_manager.update_renewables_props(study, renewables_by_areas)
data = {
f"{area_id} / {cluster_id}": cluster.model_dump(by_alias=True, exclude={"id", "name"})
@@ -247,7 +247,8 @@
storages_by_areas: t.MutableMapping[str, t.MutableMapping[str, STStorageInput]]
storages_by_areas = collections.defaultdict(dict)
for key, values in data.items():
- area_id, cluster_id = key.split(" / ")
+ area_id, cluster_name = key.split(" / ")
+ cluster_id = transform_name_to_id(cluster_name)
storages_by_areas[area_id][cluster_id] = STStorageInput(**values)
storages_map = self._st_storage_manager.update_storages_props(study, storages_by_areas)
data = {
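Note: table-mode row keys have the shape "area_id / cluster name"; the id is now derived with transform_name_to_id rather than a bare .lower(), so names are normalised the same way everywhere. A small illustration (the cluster name is a made-up example):

    key = "de / Gas Cluster (1)"
    area_id, cluster_name = key.split(" / ")
    assert (area_id, cluster_name) == ("de", "Gas Cluster (1)")
    # cluster_id = transform_name_to_id(cluster_name)  # e.g. "gas cluster (1)"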
31 changes: 17 additions & 14 deletions antarest/study/service.py
@@ -13,7 +13,6 @@
import base64
import collections
import contextlib
- import csv
import http
import io
import logging
@@ -25,6 +24,7 @@
from uuid import uuid4

import numpy as np
+ import numpy.typing as npt
import pandas as pd
from antares.study.version import StudyVersion
from fastapi import HTTPException, UploadFile
@@ -39,6 +39,7 @@
CommandApplicationError,
FolderCreationNotAllowed,
IncorrectPathError,
+ MatrixImportFailed,
NotAManagedStudyException,
OutputAlreadyArchived,
OutputAlreadyUnarchived,
@@ -197,6 +198,20 @@ def get_disk_usage(path: t.Union[str, Path]) -> int:
return total_size


+ def _imports_matrix_from_bytes(data: bytes) -> npt.NDArray[np.float64]:
+ """Tries to convert bytes to a numpy array when importing a matrix"""
+ str_data = data.decode("utf-8")
+ if not str_data:
+ return np.zeros(shape=(0, 0))
+ for delimiter in [",", ";", "\t"]:
+ with contextlib.suppress(Exception):
+ df = pd.read_csv(io.BytesIO(data), delimiter=delimiter, header=None).replace(",", ".", regex=True)
+ df = df.dropna(axis=1, how="all")  # We want to remove columns full of NaN at the import
+ matrix = df.to_numpy(dtype=np.float64)
+ return matrix
+ raise MatrixImportFailed("Could not parse the given matrix")


def _get_path_inside_user_folder(
path: str, exception_class: t.Type[t.Union[FolderCreationNotAllowed, ResourceDeletionNotAllowed]]
) -> str:
@@ -1591,19 +1606,7 @@ def _create_edit_study_command(
elif isinstance(tree_node, InputSeriesMatrix):
if isinstance(data, bytes):
# noinspection PyTypeChecker
- str_data = data.decode("utf-8")
- if not str_data:
- matrix = np.zeros(shape=(0, 0))
- else:
- size_to_check = min(len(str_data), 64)  # sniff a chunk only to speed up the code
- try:
- delimiter = csv.Sniffer().sniff(str_data[:size_to_check], delimiters=r"[,;\t]").delimiter
- except csv.Error:
- # Can happen with data with only one column. In this case, we don't care about the delimiter.
- delimiter = "\t"
- df = pd.read_csv(io.BytesIO(data), delimiter=delimiter, header=None).replace(",", ".", regex=True)
- df = df.dropna(axis=1, how="all")  # We want to remove columns full of NaN at the import
- matrix = df.to_numpy(dtype=np.float64)
+ matrix = _imports_matrix_from_bytes(data)
matrix = matrix.reshape((1, 0)) if matrix.size == 0 else matrix
return ReplaceMatrix(
target=url, matrix=matrix.tolist(), command_context=context, study_version=study_version
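Note: the inline csv.Sniffer logic is replaced by _imports_matrix_from_bytes, which simply tries ",", ";" and "\t" in turn and raises the new MatrixImportFailed (HTTP 422) when nothing parses. A round-trip sketch (assumes this branch of antarest is installed):

    import numpy as np
    from antarest.core.exceptions import MatrixImportFailed
    from antarest.study.service import _imports_matrix_from_bytes

    raw = b"1,5;2,0\n3,5;4,0\n"  # ';'-separated cells with decimal commas
    matrix = _imports_matrix_from_bytes(raw)
    assert matrix.shape == (2, 2)
    assert np.allclose(matrix, [[1.5, 2.0], [3.5, 4.0]])

    assert _imports_matrix_from_bytes(b"").shape == (0, 0)  # empty upload

    try:
        _imports_matrix_from_bytes(b"not a matrix")
    except MatrixImportFailed:
        pass  # unparsable input now fails loudly instead of guessing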