Skip to content

Commit

Permalink
v2.16.4
Browse files Browse the repository at this point in the history
Merge pull request #1930 from AntaresSimulatorTeam/release/2.16.4
  • Loading branch information
laurent-laporte-pro authored Feb 12, 2024
2 parents f2ef5f8 + e334999 commit 7e69104
Show file tree
Hide file tree
Showing 284 changed files with 12,972 additions and 21,297 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/compatibility.yml
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ jobs:
with:
node-version: ${{ matrix.node-version }}
- name: Install dependencies
run: npm install --legacy-peer-deps
run: npm install
working-directory: webapp
- name: Build
run: npm run build
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ jobs:
node-version: 18.16.1

- name: 💚 Install dependencies
run: npm install --legacy-peer-deps
run: npm install
working-directory: webapp

- name: 💚 Build webapp
Expand Down
17 changes: 1 addition & 16 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ jobs:
with:
node-version: 18.16.1
- name: Install dependencies
run: npm install --legacy-peer-deps
run: npm install
working-directory: webapp
- name: Build
run: npm run build
Expand All @@ -89,16 +89,6 @@ jobs:
- name: Lint
run: npm run lint
working-directory: webapp
- name: Run tests
run: npm run test-coverage
working-directory: webapp
env:
CI: true
- name: Archive code coverage results
uses: actions/upload-artifact@v3
with:
name: js-code-coverage-report
path: webapp/coverage/lcov.info

sonarcloud:
runs-on: ubuntu-20.04
Expand All @@ -109,11 +99,6 @@ jobs:
uses: actions/download-artifact@v3
with:
name: python-code-coverage-report
- name: Download js coverage report
uses: actions/download-artifact@v3
with:
name: js-code-coverage-report
path: webapp/coverage
- name: SonarCloud Scan
uses: sonarsource/sonarcloud-github-action@master
env:
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ Install the front-end dependencies:

```shell script
cd webapp
npm install --legacy-peer-deps
npm install
cd ..
```

Expand Down
71 changes: 71 additions & 0 deletions alembic/versions/1f5db5dfad80_add_indexes_to_study_tables.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
# noinspection SpellCheckingInspection
"""
Add indexes to Study tables
The goal of this migration is to add indexes on the `study`, `rawstudy` and `study_additional_data` tables,
in order to speed up data search queries for the search engine.
Revision ID: 1f5db5dfad80
Revises: 782a481f3414
Create Date: 2024-01-19 18:37:34.155199
"""
from alembic import op
import sqlalchemy as sa # type: ignore


# revision identifiers, used by Alembic.
# noinspection SpellCheckingInspection
revision = "1f5db5dfad80"
down_revision = "782a481f3414"
branch_labels = None
depends_on = None


# noinspection SpellCheckingInspection
def upgrade() -> None:
    """
    Add search-oriented indexes to the `study`, `rawstudy` and
    `study_additional_data` tables, and make `rawstudy.workspace` NOT NULL.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("rawstudy", schema=None) as batch_op:
        batch_op.alter_column("workspace", existing_type=sa.VARCHAR(length=255), nullable=False)
        for column in ("missing", "workspace"):
            batch_op.create_index(batch_op.f(f"ix_rawstudy_{column}"), [column], unique=False)

    # Columns of `study` used as search/sort criteria by the study listing.
    study_columns = (
        "archived",
        "created_at",
        "folder",
        "name",
        "owner_id",
        "parent_id",
        "type",
        "updated_at",
        "version",
    )
    with op.batch_alter_table("study", schema=None) as batch_op:
        for column in study_columns:
            batch_op.create_index(batch_op.f(f"ix_study_{column}"), [column], unique=False)

    with op.batch_alter_table("study_additional_data", schema=None) as batch_op:
        batch_op.create_index(batch_op.f("ix_study_additional_data_patch"), ["patch"], unique=False)

    # ### end Alembic commands ###


# noinspection SpellCheckingInspection
def downgrade() -> None:
    """
    Drop the indexes created by `upgrade` (in reverse order) and make
    `rawstudy.workspace` nullable again.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("study_additional_data", schema=None) as batch_op:
        batch_op.drop_index(batch_op.f("ix_study_additional_data_patch"))

    # Reverse order of the creation sequence in `upgrade`.
    study_columns = (
        "version",
        "updated_at",
        "type",
        "parent_id",
        "owner_id",
        "name",
        "folder",
        "created_at",
        "archived",
    )
    with op.batch_alter_table("study", schema=None) as batch_op:
        for column in study_columns:
            batch_op.drop_index(batch_op.f(f"ix_study_{column}"))

    with op.batch_alter_table("rawstudy", schema=None) as batch_op:
        batch_op.drop_index(batch_op.f("ix_rawstudy_workspace"))
        batch_op.drop_index(batch_op.f("ix_rawstudy_missing"))
        batch_op.alter_column("workspace", existing_type=sa.VARCHAR(length=255), nullable=True)

    # ### end Alembic commands ###
58 changes: 58 additions & 0 deletions alembic/versions/3c70366b10ea_add_tag_and_study_tag_tables.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
"""Add tag and study_tag tables
Revision ID: 3c70366b10ea
Revises: 1f5db5dfad80
Create Date: 2024-02-02 13:06:47.627554
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "3c70366b10ea"
down_revision = "1f5db5dfad80"
branch_labels = None
depends_on = None


def upgrade() -> None:
    """
    Create the `tag` table and the `study_tag` association table (with their
    indexes), linking studies to user-defined tags.

    Note: the `-> None` annotation is added for consistency with the sibling
    migration `1f5db5dfad80`, which annotates both `upgrade` and `downgrade`.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "tag",
        sa.Column("label", sa.String(length=40), nullable=False),
        sa.Column("color", sa.String(length=20), nullable=True),
        sa.PrimaryKeyConstraint("label"),
    )
    with op.batch_alter_table("tag", schema=None) as batch_op:
        batch_op.create_index(batch_op.f("ix_tag_color"), ["color"], unique=False)
        batch_op.create_index(batch_op.f("ix_tag_label"), ["label"], unique=False)

    op.create_table(
        "study_tag",
        sa.Column("study_id", sa.String(length=36), nullable=False),
        sa.Column("tag_label", sa.String(length=40), nullable=False),
        # Cascade deletes so removing a study or a tag cleans up the association rows.
        sa.ForeignKeyConstraint(["study_id"], ["study.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["tag_label"], ["tag.label"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("study_id", "tag_label"),
    )
    with op.batch_alter_table("study_tag", schema=None) as batch_op:
        batch_op.create_index(batch_op.f("ix_study_tag_study_id"), ["study_id"], unique=False)
        batch_op.create_index(batch_op.f("ix_study_tag_tag_label"), ["tag_label"], unique=False)

    # ### end Alembic commands ###


def downgrade() -> None:
    """
    Drop the `study_tag` association table and the `tag` table, undoing
    `upgrade` (indexes are dropped before their tables).

    Note: the `-> None` annotation is added for consistency with the sibling
    migration `1f5db5dfad80`, which annotates both `upgrade` and `downgrade`.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("study_tag", schema=None) as batch_op:
        batch_op.drop_index(batch_op.f("ix_study_tag_tag_label"))
        batch_op.drop_index(batch_op.f("ix_study_tag_study_id"))

    op.drop_table("study_tag")
    with op.batch_alter_table("tag", schema=None) as batch_op:
        batch_op.drop_index(batch_op.f("ix_tag_label"))
        batch_op.drop_index(batch_op.f("ix_tag_color"))

    op.drop_table("tag")
    # ### end Alembic commands ###
4 changes: 2 additions & 2 deletions antarest/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@

# Standard project metadata

__version__ = "2.16.3"
__version__ = "2.16.4"
__author__ = "RTE, Antares Web Team"
__date__ = "2024-01-17"
__date__ = "2024-02-14"
# noinspection SpellCheckingInspection
__credits__ = "(c) Réseau de Transport de l’Électricité (RTE)"

Expand Down
2 changes: 2 additions & 0 deletions antarest/core/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -254,6 +254,7 @@ class SlurmConfig:
default_time_limit: int = 0
default_json_db_name: str = ""
slurm_script_path: str = ""
partition: str = ""
max_cores: int = 64
antares_versions_on_remote_server: List[str] = field(default_factory=list)
enable_nb_cores_detection: bool = False
Expand Down Expand Up @@ -290,6 +291,7 @@ def from_dict(cls, data: JSON) -> "SlurmConfig":
default_time_limit=data.get("default_time_limit", defaults.default_time_limit),
default_json_db_name=data.get("default_json_db_name", defaults.default_json_db_name),
slurm_script_path=data.get("slurm_script_path", defaults.slurm_script_path),
partition=data.get("partition", defaults.partition),
antares_versions_on_remote_server=data.get(
"antares_versions_on_remote_server",
defaults.antares_versions_on_remote_server,
Expand Down
2 changes: 1 addition & 1 deletion antarest/core/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ def __init__(self, message: str) -> None:
super().__init__(HTTPStatus.BAD_REQUEST, message)


class NoBindingConstraintError(HTTPException):
class BindingConstraintNotFoundError(HTTPException):
def __init__(self, message: str) -> None:
super().__init__(HTTPStatus.NOT_FOUND, message)

Expand Down
3 changes: 0 additions & 3 deletions antarest/core/interfaces/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,13 +19,10 @@ class CacheConstants(Enum):
This cache is used by the `create_from_fs` function when retrieving the configuration
of a study from the data on the disk.
- `STUDY_LISTING`: variable used to store objects of type `StudyMetadataDTO`.
This cache is used by the `get_studies_information` function to store the list of studies.
"""

RAW_STUDY = "RAW_STUDY"
STUDY_FACTORY = "STUDY_FACTORY"
STUDY_LISTING = "STUDY_LISTING"


class ICache:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -182,6 +182,7 @@ def _init_launcher_parameters(self, local_workspace: Optional[Path] = None) -> M
json_dir=local_workspace or self.slurm_config.local_workspace,
default_json_db_name=self.slurm_config.default_json_db_name,
slurm_script_path=self.slurm_config.slurm_script_path,
partition=self.slurm_config.partition,
antares_versions_on_remote_server=self.slurm_config.antares_versions_on_remote_server,
default_ssh_dict={
"username": self.slurm_config.username,
Expand Down
53 changes: 52 additions & 1 deletion antarest/launcher/model.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
import enum
import json
import typing as t
from datetime import datetime

from pydantic import BaseModel
from pydantic import BaseModel, Field
from sqlalchemy import Column, DateTime, Enum, ForeignKey, Integer, Sequence, String # type: ignore
from sqlalchemy.orm import relationship # type: ignore

from antarest.core.persistence import Base
from antarest.core.utils.string import to_camel_case
from antarest.login.model import Identity, UserInfo


Expand All @@ -32,6 +34,15 @@ class LauncherParametersDTO(BaseModel):
other_options: t.Optional[str] = None
# add extensions field here

@classmethod
def from_launcher_params(cls, params: t.Optional[str]) -> "LauncherParametersDTO":
    """
    Build a `LauncherParametersDTO` from a JSON-encoded string.

    A `None` input yields a DTO populated with default values.
    """
    return cls() if params is None else cls.parse_obj(json.loads(params))


class LogType(str, enum.Enum):
STDOUT = "STDOUT"
Expand Down Expand Up @@ -214,3 +225,43 @@ class JobCreationDTO(BaseModel):

class LauncherEnginesDTO(BaseModel):
engines: t.List[str]


class LauncherLoadDTO(
    BaseModel,
    extra="forbid",
    validate_assignment=True,
    allow_population_by_field_name=True,
    # Serialized field names are camelCase (e.g. `allocatedCpuRate`) for the front end.
    alias_generator=to_camel_case,
):
    """
    DTO representing the load of the SLURM cluster or local machine.

    Validation is enforced on assignment, unknown fields are rejected, and
    fields can be populated by either their snake_case name or camelCase alias.

    Attributes:
        allocated_cpu_rate: The rate of allocated CPU, in range (0, 100).
        cluster_load_rate: The rate of cluster load, in range (0, 100).
        nb_queued_jobs: The number of queued jobs.
        launcher_status: The status of the launcher: "SUCCESS" or "FAILED".
    """

    allocated_cpu_rate: float = Field(
        description="The rate of allocated CPU, in range (0, 100)",
        ge=0,
        le=100,
        title="Allocated CPU Rate",
    )
    cluster_load_rate: float = Field(
        description="The rate of cluster load, in range (0, 100)",
        ge=0,
        le=100,
        title="Cluster Load Rate",
    )
    nb_queued_jobs: int = Field(
        description="The number of queued jobs",
        ge=0,
        title="Number of Queued Jobs",
    )
    launcher_status: str = Field(
        description="The status of the launcher: 'SUCCESS' or 'FAILED'",
        title="Launcher Status",
    )
Loading

0 comments on commit 7e69104

Please sign in to comment.