Skip to content

Commit

Permalink
fix(bc): sonar warning fix
Browse files Browse the repository at this point in the history
  • Loading branch information
TheoPascoli committed Jan 30, 2025
1 parent 4d67159 commit 55eceee
Show file tree
Hide file tree
Showing 45 changed files with 124 additions and 123 deletions.
2 changes: 1 addition & 1 deletion antarest/core/filesystem_blueprint.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,7 +203,7 @@ async def from_path(cls, full_path: Path, *, details: bool = False) -> "FileInfo
return obj


async def _calc_details(full_path: t.Union[str, Path]) -> t.Tuple[int, int]:
async def _calc_details(full_path: str | Path) -> t.Tuple[int, int]:
"""Calculate the number of files and the total size of a directory recursively."""

full_path = Path(full_path)
Expand Down
2 changes: 1 addition & 1 deletion antarest/core/permissions.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
logger = logging.getLogger(__name__)


permission_matrix: t.Dict[str, t.Dict[str, t.Sequence[t.Union[RoleType, PublicMode]]]] = {
permission_matrix: t.Dict[str, t.Dict[str, t.Sequence[RoleType | PublicMode]]] = {
StudyPermissionType.READ.value: {
"roles": [
RoleType.ADMIN,
Expand Down
2 changes: 1 addition & 1 deletion antarest/core/serialization/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
# Since pydantic v2 is written in RUST it's way faster.


def from_json(data: t.Union[str, bytes, bytearray]) -> t.Dict[str, t.Any]:
def from_json(data: str | bytes | bytearray) -> t.Dict[str, t.Any]:
return ADAPTER.validate_json(data) # type: ignore


Expand Down
6 changes: 3 additions & 3 deletions antarest/core/tasks/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ def add_worker_task(
self,
task_type: TaskType,
task_queue: str,
task_args: t.Dict[str, t.Union[int, float, bool, str]],
task_args: t.Dict[str, int | float | bool | str],
name: t.Optional[str],
ref_id: t.Optional[str],
request_params: RequestParameters,
Expand Down Expand Up @@ -178,7 +178,7 @@ def _create_worker_task(
self,
task_id: str,
task_type: str,
task_args: t.Dict[str, t.Union[int, float, bool, str]],
task_args: t.Dict[str, int | float | bool | str],
) -> Task:
task_result_wrapper: t.List[TaskResult] = []

Expand Down Expand Up @@ -227,7 +227,7 @@ def add_worker_task(
self,
task_type: TaskType,
task_queue: str,
task_args: t.Dict[str, t.Union[int, float, bool, str]],
task_args: t.Dict[str, int | float | bool | str],
name: t.Optional[str],
ref_id: t.Optional[str],
request_params: RequestParameters,
Expand Down
2 changes: 1 addition & 1 deletion antarest/launcher/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ class LauncherParametersDTO(AntaresBaseModel):
nb_cpu: t.Optional[int] = None
post_processing: bool = False
time_limit: int = 240 * 3600 # Default value set to 240 hours (in seconds)
xpansion: t.Union[XpansionParametersDTO, bool, None] = None
xpansion: XpansionParametersDTO | bool | None = None
xpansion_r_version: bool = False
archive_output: bool = True
auto_unzip: bool = True
Expand Down
4 changes: 2 additions & 2 deletions antarest/matrixstore/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -237,8 +237,8 @@ class MatrixContent(AntaresBaseModel):
"""

data: t.List[t.List[MatrixData]]
index: t.List[t.Union[int, str]]
columns: t.List[t.Union[int, str]]
index: t.List[int | str]
columns: t.List[int | str]


class MatrixDataSetUpdateDTO(AntaresBaseModel):
Expand Down
2 changes: 1 addition & 1 deletion antarest/matrixstore/repository.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,7 +181,7 @@ def exists(self, matrix_hash: str) -> bool:
matrix_file = self.bucket_dir.joinpath(f"{matrix_hash}.tsv")
return matrix_file.exists()

def save(self, content: t.Union[t.List[t.List[MatrixData]], npt.NDArray[np.float64]]) -> str:
def save(self, content: t.List[t.List[MatrixData]] | npt.NDArray[np.float64]) -> str:
"""
Saves the content of a matrix as a TSV file in the bucket directory
and returns its SHA256 hash.
Expand Down
8 changes: 4 additions & 4 deletions antarest/matrixstore/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def __init__(self, matrix_content_repository: MatrixContentRepository) -> None:
self.matrix_content_repository = matrix_content_repository

@abstractmethod
def create(self, data: t.Union[t.List[t.List[MatrixData]], npt.NDArray[np.float64]]) -> str:
def create(self, data: t.List[t.List[MatrixData]] | npt.NDArray[np.float64]) -> str:
raise NotImplementedError()

@abstractmethod
Expand All @@ -87,7 +87,7 @@ def exists(self, matrix_id: str) -> bool:
def delete(self, matrix_id: str) -> None:
raise NotImplementedError()

def get_matrix_id(self, matrix: t.Union[t.List[t.List[float]], str]) -> str:
def get_matrix_id(self, matrix: t.List[t.List[float]] | str) -> str:
"""
Get the matrix ID from a matrix or a matrix link.
Expand All @@ -114,7 +114,7 @@ def __init__(self, matrix_content_repository: MatrixContentRepository):
super().__init__(matrix_content_repository=matrix_content_repository)

@override
def create(self, data: t.Union[t.List[t.List[MatrixData]], npt.NDArray[np.float64]]) -> str:
def create(self, data: t.List[t.List[MatrixData]] | npt.NDArray[np.float64]) -> str:
return self.matrix_content_repository.save(data)

@override
Expand Down Expand Up @@ -171,7 +171,7 @@ def _from_dto(dto: MatrixDTO) -> t.Tuple[Matrix, MatrixContent]:
return matrix, content

@override
def create(self, data: t.Union[t.List[t.List[MatrixData]], npt.NDArray[np.float64]]) -> str:
def create(self, data: t.List[t.List[MatrixData]] | npt.NDArray[np.float64]) -> str:
"""
Creates a new matrix object with the specified data.
Expand Down
2 changes: 1 addition & 1 deletion antarest/study/business/aggregator_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,7 @@ def __init__(
self,
study_path: Path,
output_id: str,
query_file: t.Union[MCIndAreasQueryFile, MCAllAreasQueryFile, MCIndLinksQueryFile, MCAllLinksQueryFile],
query_file: MCIndAreasQueryFile | MCAllAreasQueryFile | MCIndLinksQueryFile | MCAllLinksQueryFile,
frequency: MatrixFrequency,
ids_to_consider: t.Sequence[str],
columns_names: t.Sequence[str],
Expand Down
2 changes: 1 addition & 1 deletion antarest/study/business/areas/st_storage_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -584,7 +584,7 @@ def duplicate_cluster(self, study: Study, area_id: str, source_id: str, new_clus
]

# Prepare and execute commands
commands: t.List[t.Union[CreateSTStorage, ReplaceMatrix]] = [create_cluster_cmd]
commands: t.List[CreateSTStorage | ReplaceMatrix] = [create_cluster_cmd]
storage_service = self.storage_service.get_storage(study)
command_context = self.storage_service.variant_study_service.command_factory.command_context
for source_path, new_path in zip(source_paths, new_paths):
Expand Down
2 changes: 1 addition & 1 deletion antarest/study/business/areas/thermal_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -464,7 +464,7 @@ def duplicate_cluster(
new_paths.append(f"input/thermal/series/{area_id}/{lower_new_id}/fuelCost")

# Prepare and execute commands
commands: t.List[t.Union[CreateCluster, ReplaceMatrix]] = [create_cluster_cmd]
commands: t.List[CreateCluster | ReplaceMatrix] = [create_cluster_cmd]
storage_service = self.storage_service.get_storage(study)
command_context = self.storage_service.variant_study_service.command_factory.command_context
for source_path, new_path in zip(source_paths, new_paths):
Expand Down
6 changes: 3 additions & 3 deletions antarest/study/business/binding_constraint_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ class ConstraintTerm(AntaresBaseModel):
id: t.Optional[str] = None
weight: t.Optional[float] = None
offset: t.Optional[int] = None
data: t.Optional[t.Union[LinkTerm, ClusterTerm]] = None
data: t.Optional[LinkTerm | ClusterTerm] = None

@field_validator("id")
def id_to_lower(cls, v: t.Optional[str]) -> t.Optional[str]:
Expand Down Expand Up @@ -348,7 +348,7 @@ class ConstraintOutput870(ConstraintOutput830):

# WARNING: Do not change the order of the following line, it is used to determine
# the type of the output constraint in the FastAPI endpoint.
ConstraintOutput = t.Union[ConstraintOutputBase, ConstraintOutput830, ConstraintOutput870]
ConstraintOutput = ConstraintOutputBase | ConstraintOutput830 | ConstraintOutput870

OPERATOR_MATRIX_FILE_MAP = {
BindingConstraintOperator.EQUAL: ["{bc_id}_eq"],
Expand Down Expand Up @@ -1177,7 +1177,7 @@ def _replace_matrices_according_to_frequency_and_version(


def check_attributes_coherence(
data: t.Union[ConstraintCreation, ConstraintInput],
data: ConstraintCreation | ConstraintInput,
study_version: StudyVersion,
operator: BindingConstraintOperator,
) -> None:
Expand Down
6 changes: 3 additions & 3 deletions antarest/study/business/scenario_builder_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@

_HYDRO_LEVEL_PERCENT = 100

_Section: te.TypeAlias = t.MutableMapping[str, t.Union[int, float]]
_Section: te.TypeAlias = t.MutableMapping[str, int | float]
_Sections: te.TypeAlias = t.MutableMapping[str, _Section]

Ruleset: te.TypeAlias = t.MutableMapping[str, t.Any]
Expand Down Expand Up @@ -92,11 +92,11 @@ def _get_ruleset_config(
file_study: FileStudy,
ruleset_name: str,
symbol: str = "",
) -> t.Dict[str, t.Union[int, float]]:
) -> t.Dict[str, int | float]:
try:
suffix = f"/{symbol}" if symbol else ""
url = f"settings/scenariobuilder/{ruleset_name}{suffix}".split("/")
ruleset_cfg = t.cast(t.Dict[str, t.Union[int, float]], file_study.tree.get(url))
ruleset_cfg = t.cast(t.Dict[str, int | float], file_study.tree.get(url))
except KeyError:
ruleset_cfg = {}
return ruleset_cfg
Expand Down
2 changes: 1 addition & 1 deletion antarest/study/business/xpansion_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -703,7 +703,7 @@ def get_resource_content(
study: Study,
resource_type: XpansionResourceFileType,
filename: str,
) -> t.Union[JSON, bytes]:
) -> JSON | bytes:
        logger.info(f"Getting xpansion {resource_type} resource file '{filename}' from study '{study.id}'")
file_study = self.study_storage_service.get_storage(study).get_raw(study)
return file_study.tree.get(self._raw_file_dir(resource_type) + [filename])
Expand Down
2 changes: 1 addition & 1 deletion antarest/study/common/studystorage.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ def patch_update_study_metadata(self, study: T, metadata: StudyMetadataPatchDTO)
def import_output(
self,
study: T,
output: t.Union[t.BinaryIO, Path],
output: t.BinaryIO | Path,
output_name: t.Optional[str] = None,
) -> t.Optional[str]:
"""
Expand Down
2 changes: 1 addition & 1 deletion antarest/study/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -431,7 +431,7 @@ class StudyMetadataDTO(AntaresBaseModel):
tags: t.List[str] = []

@field_validator("horizon", mode="before")
def transform_horizon_to_str(cls, val: t.Union[str, int, None]) -> t.Optional[str]:
def transform_horizon_to_str(cls, val: str | int | None) -> t.Optional[str]:
# horizon can be an int.
return str(val) if val else val # type: ignore

Expand Down
2 changes: 1 addition & 1 deletion antarest/study/repository.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ class AccessPermissions(AntaresBaseModel, frozen=True, extra="forbid"):
user_groups: t.Sequence[str] = ()

@classmethod
def from_params(cls, params: t.Union[RequestParameters, JWTUser]) -> "AccessPermissions":
def from_params(cls, params: RequestParameters | JWTUser) -> "AccessPermissions":
"""
This function makes it easier to pass on user ids and groups into the repository filtering function by
extracting the associated `AccessPermissions` object.
Expand Down
20 changes: 10 additions & 10 deletions antarest/study/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,7 +181,7 @@
MAX_MISSING_STUDY_TIMEOUT = 2 # days


def get_disk_usage(path: t.Union[str, Path]) -> int:
def get_disk_usage(path: str | Path) -> int:
"""Calculate the total disk usage (in bytes) of a study in a compressed file or directory."""
path = Path(path)
if is_archive_format(path.suffix.lower()):
Expand Down Expand Up @@ -213,7 +213,7 @@ def _imports_matrix_from_bytes(data: bytes) -> npt.NDArray[np.float64]:


def _get_path_inside_user_folder(
path: str, exception_class: t.Type[t.Union[FolderCreationNotAllowed, ResourceDeletionNotAllowed]]
path: str, exception_class: t.Type[FolderCreationNotAllowed | ResourceDeletionNotAllowed]
) -> str:
"""
Retrieves the path inside the `user` folder for a given user path
Expand Down Expand Up @@ -498,7 +498,7 @@ def aggregate_output_data(
self,
uuid: str,
output_id: str,
query_file: t.Union[MCIndAreasQueryFile, MCAllAreasQueryFile, MCIndLinksQueryFile, MCAllLinksQueryFile],
query_file: MCIndAreasQueryFile | MCAllAreasQueryFile | MCIndLinksQueryFile | MCAllLinksQueryFile,
frequency: MatrixFrequency,
columns_names: t.Sequence[str],
ids_to_consider: t.Sequence[str],
Expand Down Expand Up @@ -594,7 +594,7 @@ def save_logs(
)
stopwatch.log_elapsed(lambda d: logger.info(f"Saved logs for job {job_id} in {d}s"))

def get_comments(self, study_id: str, params: RequestParameters) -> t.Union[str, JSON]:
def get_comments(self, study_id: str, params: RequestParameters) -> str | JSON:
"""
Get the comments of a study.
Expand Down Expand Up @@ -1360,7 +1360,7 @@ def download_outputs(
filetype: ExportFormat,
params: RequestParameters,
tmp_export_file: t.Optional[Path] = None,
) -> t.Union[Response, FileDownloadTaskDTO, FileResponse]:
) -> Response | FileDownloadTaskDTO | FileResponse:
"""
Download outputs
Args:
Expand Down Expand Up @@ -1551,7 +1551,7 @@ def import_study(
def import_output(
self,
uuid: str,
output: t.Union[t.BinaryIO, Path],
output: t.BinaryIO | Path,
params: RequestParameters,
output_name_suffix: t.Optional[str] = None,
auto_unzip: bool = True,
Expand Down Expand Up @@ -1896,7 +1896,7 @@ def get_all_areas(
area_type: t.Optional[AreaType],
ui: bool,
params: RequestParameters,
) -> t.Union[t.List[AreaInfoDTO], t.Dict[str, t.Any]]:
) -> t.List[AreaInfoDTO] | t.Dict[str, t.Any]:
study = self.get_study(uuid)
assert_permission(params.user, study, StudyPermissionType.READ)
return self.areas.get_all_areas_ui_info(study) if ui else self.areas.get_all_areas(study, area_type)
Expand Down Expand Up @@ -2817,9 +2817,9 @@ def create_user_folder(self, study_id: str, path: str, current_user: JWTUser) ->
def _alter_user_folder(
self,
study_id: str,
command_data: t.Union[CreateUserResourceData, RemoveUserResourceData],
command_class: t.Type[t.Union[CreateUserResource, RemoveUserResource]],
exception_class: t.Type[t.Union[FolderCreationNotAllowed, ResourceDeletionNotAllowed]],
command_data: CreateUserResourceData | RemoveUserResourceData,
command_class: t.Type[CreateUserResource | RemoveUserResource],
exception_class: t.Type[FolderCreationNotAllowed | ResourceDeletionNotAllowed],
current_user: JWTUser,
) -> None:
study = self.get_study(study_id)
Expand Down
2 changes: 1 addition & 1 deletion antarest/study/storage/abstract_storage_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -254,7 +254,7 @@ def get_study_sim_result(
def import_output(
self,
metadata: T,
output: t.Union[t.BinaryIO, Path],
output: t.BinaryIO | Path,
output_name: t.Optional[str] = None,
) -> t.Optional[str]:
"""
Expand Down
2 changes: 1 addition & 1 deletion antarest/study/storage/df_download.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def suffix(self) -> str:
def export_table(
self,
df: pd.DataFrame,
export_path: t.Union[str, Path],
export_path: str | Path,
*,
with_index: bool = True,
with_header: bool = True,
Expand Down
6 changes: 3 additions & 3 deletions antarest/study/storage/patch_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ class PatchService:
def __init__(self, repository: t.Optional[StudyMetadataRepository] = None):
self.repository = repository

def get(self, study: t.Union[RawStudy, VariantStudy], get_from_file: bool = False) -> Patch:
def get(self, study: RawStudy | VariantStudy, get_from_file: bool = False) -> Patch:
if not get_from_file and study.additional_data is not None:
# the `study.additional_data.patch` field is optional
if study.additional_data.patch:
Expand All @@ -52,7 +52,7 @@ def get_from_filestudy(self, file_study: FileStudy) -> Patch:

def set_reference_output(
self,
study: t.Union[RawStudy, VariantStudy],
study: RawStudy | VariantStudy,
output_id: str,
status: bool = True,
) -> None:
Expand All @@ -63,7 +63,7 @@ def set_reference_output(
patch.outputs = PatchOutputs(reference=output_id)
self.save(study, patch)

def save(self, study: t.Union[RawStudy, VariantStudy], patch: Patch) -> None:
def save(self, study: RawStudy | VariantStudy, patch: Patch) -> None:
if self.repository:
study.additional_data = study.additional_data or StudyAdditionalData()
study.additional_data.patch = patch.model_dump_json()
Expand Down
8 changes: 4 additions & 4 deletions antarest/study/storage/rawstudy/ini_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,13 @@
from antarest.core.model import JSON


def convert_value(value: str) -> t.Union[str, int, float, bool]:
def convert_value(value: str) -> str | int | float | bool:
"""Convert value to the appropriate type for JSON."""

try:
# Infinity values are not supported by JSON, so we use a string instead.
mapping = {"true": True, "false": False, "+inf": "+Inf", "-inf": "-Inf", "inf": "+Inf"}
return t.cast(t.Union[str, int, float, bool], mapping[value.lower()])
return t.cast(str | int | float | bool, mapping[value.lower()])
except KeyError:
try:
return int(value)
Expand Down Expand Up @@ -56,8 +56,8 @@ def from_kwargs(
cls,
section: str = "",
option: str = "",
section_regex: t.Optional[t.Union[str, t.Pattern[str]]] = None,
option_regex: t.Optional[t.Union[str, t.Pattern[str]]] = None,
section_regex: t.Optional[str | t.Pattern[str]] = None,
option_regex: t.Optional[str | t.Pattern[str]] = None,
**_unused: t.Any, # ignore unknown options
) -> "IniFilter":
"""
Expand Down
Loading

0 comments on commit 55eceee

Please sign in to comment.