diff --git a/alembic/versions/d495746853cc_add_owner_id_to_jobresult.py b/alembic/versions/d495746853cc_add_owner_id_to_jobresult.py
new file mode 100644
index 0000000000..f88b7906af
--- /dev/null
+++ b/alembic/versions/d495746853cc_add_owner_id_to_jobresult.py
@@ -0,0 +1,27 @@
+"""Add owner_id to JobResult
+
+Revision ID: d495746853cc
+Revises: e65e0c04606b
+Create Date: 2023-10-19 13:16:29.969047
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = "d495746853cc"
+down_revision = "e65e0c04606b"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # Batch mode is required for SQLite, which cannot add a foreign-key
+    # constraint to an existing table with a plain ALTER TABLE.
+    with op.batch_alter_table("job_result", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("owner_id", sa.Integer(), default=None))
+        batch_op.create_foreign_key("fk_job_result_owner_id", "identities", ["owner_id"], ["id"], ondelete="SET NULL")
+
+
+def downgrade() -> None:
+    with op.batch_alter_table("job_result", schema=None) as batch_op:
+        batch_op.drop_column("owner_id")
diff --git a/antarest/launcher/model.py b/antarest/launcher/model.py
index a9bf0f6fde..cc283c57d4 100644
--- a/antarest/launcher/model.py
+++ b/antarest/launcher/model.py
@@ -7,7 +7,7 @@
 from sqlalchemy.orm import relationship  # type: ignore
 
 from antarest.core.persistence import Base
-from antarest.core.utils.utils import DTO
+from antarest.login.model import Identity
 
 
 class XpansionParametersDTO(BaseModel):
@@ -23,7 +23,7 @@ class LauncherParametersDTO(BaseModel):
     adequacy_patch: Optional[Dict[str, Any]] = None
     nb_cpu: Optional[int] = None
     post_processing: bool = False
-    time_limit: Optional[int] = None  # 3600 <= time_limit < 864000 (10 days)
+    time_limit: Optional[int] = None  # 3600 ≤ time_limit < 864000 (10 days)
     xpansion: Union[XpansionParametersDTO, bool, None] = None
     xpansion_r_version: bool = False
     archive_output: bool = True
@@ -51,7 +51,7 @@ def to_suffix(self) -> str:
             return "out.log"
         elif self == LogType.STDERR:
             return "err.log"
-        else:
+        else:  # pragma: no cover
            return "out.log"
@@ -68,6 +68,24 @@ class JobLogType(str, enum.Enum):
 
 
 class JobResultDTO(BaseModel):
+    """
+    A data transfer object (DTO) representing the job result.
+
+    - id: The unique identifier for the task (UUID).
+    - study_id: The unique identifier for the Antares study (UUID).
+    - launcher: The name of the launcher for a simulation task, with possible values "local", "slurm" or `None`.
+    - launcher_params: Parameters related to the launcher.
+    - status: The status of the task. It can be one of the following: "pending", "failed", "success", or "running".
+    - creation_date: The date of creation of the task.
+    - completion_date: The date of completion of the task, if available.
+    - msg: A message associated with the task, either for the user or for error description.
+    - output_id: The identifier of the simulation results.
+    - exit_code: The exit code associated with the task.
+    - solver_stats: Global statistics related to the simulation, including processing time,
+      call count, optimization issues, and study-specific statistics (INI file-like format).
+    - owner_id: The unique identifier of the user or bot that executed the task.
+    """
+
     id: str
     study_id: str
     launcher: Optional[str]
@@ -79,47 +97,52 @@ class JobResultDTO(BaseModel):
     output_id: Optional[str]
     exit_code: Optional[int]
     solver_stats: Optional[str]
+    owner_id: Optional[int]
 
 
-class JobLog(DTO, Base):  # type: ignore
+class JobLog(Base):  # type: ignore
     __tablename__ = "launcherjoblog"
 
-    id = Column(Integer(), Sequence("launcherjoblog_id_sequence"), primary_key=True)
-    message = Column(String, nullable=False)
-    job_id = Column(
+    id: int = Column(Integer(), Sequence("launcherjoblog_id_sequence"), primary_key=True)
+    message: str = Column(String, nullable=False)
+    job_id: str = Column(
         String(),
         ForeignKey("job_result.id", name="fk_log_job_result_id"),
     )
-    log_type = Column(String, nullable=False)
-
-    def __eq__(self, other: Any) -> bool:
-        if not isinstance(other, JobLog):
-            return False
-        return bool(
-            other.id == self.id
-            and other.message == self.message
-            and other.log_type == self.log_type
-            and other.job_id == self.job_id
-        )
+    log_type: str = Column(String, nullable=False)
+
+    # SQLAlchemy provides its own way to handle object comparison, which ensures
+    # that the comparison is based on the database identity of the objects.
+    # So, implementing `__eq__` and `__ne__` is not necessary.
+
+    def __str__(self) -> str:
+        return f"Job log #{self.id} {self.log_type}: '{self.message}'"
 
     def __repr__(self) -> str:
-        return f"id={self.id}, message={self.message}, log_type={self.log_type}, job_id={self.job_id}"
+        return (
+            f"<JobLog(id={self.id},"
+            f" message={self.message!r},"
+            f" job_id={self.job_id!r},"
+            f" log_type={self.log_type!r})>"
+        )
 
 
-class JobResult(DTO, Base):  # type: ignore
+class JobResult(Base):  # type: ignore
     __tablename__ = "job_result"
 
-    id = Column(String(36), primary_key=True)
-    study_id = Column(String(36))
-    launcher = Column(String)
-    launcher_params = Column(String, nullable=True)
-    job_status = Column(Enum(JobStatus))
+    id: str = Column(String(36), primary_key=True)
+    study_id: str = Column(String(36))
+    launcher: Optional[str] = Column(String)
+    launcher_params: Optional[str] = Column(String, nullable=True)
+    job_status: JobStatus = Column(Enum(JobStatus))
     creation_date = Column(DateTime, default=datetime.utcnow)
     completion_date = Column(DateTime)
-    msg = Column(String())
-    output_id = Column(String())
-    exit_code = Column(Integer)
-    solver_stats = Column(String(), nullable=True)
+    msg: Optional[str] = Column(String())
+    output_id: Optional[str] = Column(String())
+    exit_code: Optional[int] = Column(Integer)
+    solver_stats: Optional[str] = Column(String(), nullable=True)
+    owner_id: Optional[int] = Column(Integer(), ForeignKey(Identity.id, ondelete="SET NULL"), nullable=True)
+
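+    # The cascade below ensures that a job's logs are deleted together with the
+    # parent `JobResult` (including orphaned logs); the tests in
+    # `tests/launcher/test_model.py` rely on this behavior.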
+    logs = relationship(JobLog, uselist=True, cascade="all, delete, delete-orphan")
 
     def to_dto(self) -> JobResultDTO:
@@ -135,18 +158,31 @@ def to_dto(self) -> JobResultDTO:
             output_id=self.output_id,
             exit_code=self.exit_code,
             solver_stats=self.solver_stats,
+            owner_id=self.owner_id,
         )
 
-    def __eq__(self, o: Any) -> bool:
-        if not isinstance(o, JobResult):
-            return False
-        return o.to_dto().dict() == self.to_dto().dict()
+    # SQLAlchemy provides its own way to handle object comparison, which ensures
+    # that the comparison is based on the database identity of the objects.
+    # So, implementing `__eq__` and `__ne__` is not necessary.
 
     def __str__(self) -> str:
-        return str(self.to_dto().dict())
+        return f"Job result #{self.id} (study '{self.study_id}'): {self.job_status}"
 
     def __repr__(self) -> str:
-        return self.__str__()
+        return (
+            f"<JobResult(id={self.id!r},"
+            f" study_id={self.study_id!r},"
+            f" job_status={self.job_status},"
+            f" owner_id={self.owner_id})>"
+        )
 
 
 class JobCreationDTO(BaseModel):
diff --git a/antarest/launcher/service.py b/antarest/launcher/service.py
index f1fb1e8907..514cc54115 100644
--- a/antarest/launcher/service.py
+++ b/antarest/launcher/service.py
@@ -225,12 +225,16 @@ def run_study(
             study=study_info,
             permission_type=StudyPermissionType.RUN,
         )
+        # A bot runs on behalf of a user: in that case, record the impersonated
+        # user (`impersonator`) as the owner rather than the bot itself.
+        owner_id: int = 0
+        if params.user:
+            owner_id = params.user.impersonator if params.user.type == "bots" else params.user.id
         job_status = JobResult(
             id=job_uuid,
             study_id=study_uuid,
             job_status=JobStatus.PENDING,
             launcher=launcher,
             launcher_params=launcher_parameters.json() if launcher_parameters else None,
+            owner_id=(owner_id or None),
         )
         self.job_result_repository.save(job_status)
@@ -254,9 +258,10 @@ def run_study(
     def kill_job(self, job_id: str, params: RequestParameters) -> JobResult:
         logger.info(f"Trying to cancel job {job_id}")
         job_result = self.job_result_repository.get(job_id)
-        assert job_result
+        if job_result is None:
+            raise ValueError(f"Job {job_id} not found")
+
         study_uuid = job_result.study_id
-        launcher = job_result.launcher
         study = self.study_service.get_study(study_uuid)
         assert_permission(
             user=params.user,
@@ -264,15 +269,22 @@
             permission_type=StudyPermissionType.RUN,
         )
+        launcher = job_result.launcher
+        if launcher is None:
+            raise ValueError(f"Job {job_id} has no launcher")
         self._assert_launcher_is_initialized(launcher)
         self.launchers[launcher].kill_job(job_id=job_id)
+        owner_id = 0
+        if params.user:
+            owner_id = params.user.impersonator if params.user.type == "bots" else params.user.id
         job_status = JobResult(
             id=str(job_id),
             study_id=study_uuid,
             job_status=JobStatus.FAILED,
             launcher=launcher,
+            owner_id=(owner_id or None),
         )
         self.job_result_repository.save(job_status)
         self.event_bus.push(
@@ -373,6 +385,8 @@ def get_log(self, job_id: str, log_type: LogType, params: RequestParameters) ->
                 or ""
             )
         else:
+            if job_result.launcher is None:
+                raise ValueError(f"Job {job_id} has no launcher")
             self._assert_launcher_is_initialized(job_result.launcher)
             launcher_logs = str(self.launchers[job_result.launcher].get_log(job_id, log_type) or "")
         if log_type == LogType.STDOUT:
@@ -667,5 +681,7 @@ def get_launch_progress(self, job_id: str, params: RequestParameters) -> float:
             permission_type=StudyPermissionType.READ,
         )
 
+        if launcher is None:
+            raise ValueError(f"Job {job_id} has no launcher")
         launch_progress_json = self.launchers[launcher].cache.get(id=f"Launch_Progress_{job_id}") or {"progress": 0}
         return launch_progress_json.get("progress", 0)
diff --git a/docs/architecture/1-database.md b/docs/architecture/1-database.md
index f7bccad0b9..85b2cdb624 100644
--- a/docs/architecture/1-database.md
+++ b/docs/architecture/1-database.md
@@ -1,71 +1,67 @@
-# Database management
+# Database Management
 
-We support two database types :
-- postgresql (for production deployment)
-- sqlite (for the local desktop application)
+We support two types of databases:
+- PostgreSQL (for production deployment)
+- SQLite (for the local desktop application)
 
 ## SQLAlchemy & Alembic
 
-We use [sqlalchemy](https://www.sqlalchemy.org/) and [alembic](https://alembic.sqlalchemy.org/en/latest/)
-to manage database and database entities.
+We utilize [SQLAlchemy](https://www.sqlalchemy.org/) and [Alembic](https://alembic.sqlalchemy.org/en/latest/) for managing databases and their entities.
 
-Schema is described by sqlalchemy models that are grouped and imported within
-the file [dbmodel.py](https://github.com/AntaresSimulatorTeam/AntaREST/blob/master/antarest/dbmodel.py).
-This file is then used by alembic [env file](https://github.com/AntaresSimulatorTeam/AntaREST/blob/master/alembic/env.py)
-to create the [database migration scripts](https://github.com/AntaresSimulatorTeam/AntaREST/blob/master/alembic/versions).
+The schema is described by SQLAlchemy models that are organized and imported within the file [dbmodel.py](https://github.com/AntaresSimulatorTeam/AntaREST/blob/master/antarest/dbmodel.py).
+This file is then used by the Alembic [env file](https://github.com/AntaresSimulatorTeam/AntaREST/blob/master/alembic/env.py) to create the [database migration scripts](https://github.com/AntaresSimulatorTeam/AntaREST/blob/master/alembic/versions).
 
-These migration scripts are used by alembic to update a target database defined in the env file which
-uses the database url defined in an [application config]('../install/2-CONFIG.md'), whether on command line
-(this is the method used on production deployment):
-```
-export ANTAREST_CONF=
+These migration scripts are used by Alembic to update a target database defined in the env file, which uses the database URL defined in the [application config](../install/2-CONFIG.md). This can be done either on the command line (the method used in production deployment):
+
+```shell
+export ANTAREST_CONF=/path/to/your/application.yaml
 alembic upgrade head
 ```
-or within the application launch (see [this file](https://github.com/AntaresSimulatorTeam/AntaREST/blob/master/antarest/core/persistence.py)) :
-```
-python antarest/main.py --auto-upgrade-db
-# or with the gui (default auto upgrade)
-python antarest/gui.py
+
+or within the application launch (refer to [this file](https://github.com/AntaresSimulatorTeam/AntaREST/blob/master/antarest/core/persistence.py)):
+
+```shell
+python3 antarest/main.py --auto-upgrade-db
+# or with the GUI (default auto-upgrade)
+python3 antarest/gui.py
 ```
 
-### How to update the schema
+### How to Update the Schema
 
-When developing for antarest we use a development configuration file that target
-a development database (usually sqlite but could be postgresql). After a first successful launch the database
-schema is migrated to the latest version.
-The schema version is stored in a table named `alembic_version` that contains the revision id of the last migration file.
-This information should match with the result of the command `alembic show head` that display the last revision id of the migration file tree.
+When developing for AntaREST, we use a development configuration file that targets a development database (usually SQLite but could be PostgreSQL).
+After a successful initial launch, the database schema is migrated to the latest version.
+The schema version is stored in a table named `alembic_version`, which contains the revision ID of the last migration file.
+This information should match the result of the command `alembic show head`, which displays the last revision ID of the migration file tree.
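+For example, after this PR's migration is applied, the two commands agree (a sketch; the exact output layout may vary slightly between Alembic versions):
+
+```shell
+$ alembic current
+d495746853cc (head)
+$ alembic show head
+Rev: d495746853cc (head)
+Parent: e65e0c04606b
+Path: alembic/versions/d495746853cc_add_owner_id_to_jobresult.py
+
+    Add owner_id to JobResult
+```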
-or create **new models in a separate file that will need to be added to the [dbmodel.py](https://github.com/AntaresSimulatorTeam/AntaREST/blob/master/antarest/dbmodel.py) file**. -Most of the unit test that create the database from scratch using `sqlalchemy.sql.schema.MetaData.create_all` will do just fine but the integration tests (`tests/integration`) will probably -fail since they use the alembic migration files process. +First, we make the modifications we want in the existing models (e.g., in `study/model.py`, `login/model.py`, etc.) or create **new models in a separate file that will need to be added to the [dbmodel.py](https://github.com/AntaresSimulatorTeam/AntaREST/blob/master/antarest/dbmodel.py) file**. +Most of the unit tests that create the database from scratch using `sqlalchemy.sql.schema.MetaData.create_all` will work fine, but the integration tests (`tests/integration`) will probably fail since they use the alembic migration files process. -So second step is to create the migration file corresponding to the model change. We could create one from scratch, but most of the time, -the script [create_db_migration.sh](https://github.com/AntaresSimulatorTeam/AntaREST/blob/master/scripts/create_db_migration.sh) (that just wraps the `alembic revision` command) will do: -``` -export ANTAREST_CONF= +So the second step is to create the migration file corresponding to the model change. +We could create one from scratch, but most of the time, the script [create_db_migration.sh](https://github.com/AntaresSimulatorTeam/AntaREST/blob/master/scripts/create_db_migration.sh) (that just wraps the `alembic revision` command) will do: + +```shell +export ANTAREST_CONF=/path/to/your/application.yaml ./script/create_db_migration.sh ``` -This will create a new migration file in `alembic/versions` that contains two prefilled methods `upgrade` and `downgrade`. -Though for a newly created model the edition of this file should be minimal or nul, edition is sometimes required, especially in these cases: -- handling compatibility/specificity of the databases (eg. adding a sequence `alembic/versions/2ed6bf9f1690_add_tasks.py`) -- migrating data (eg. renaming/moving a field `alembic/versions/0146b79f723c_update_study.py`) -The `create_db_migration.sh` script will also update the `scripts/rollback.sh` which (as the name indicated) is used to rollback the database to a previous schema. +This will create a new migration file in `alembic/versions` that contains two prefilled methods `upgrade` and `downgrade`. +However, for a newly created model, the editing of this file should be minimal or null. +Editing is sometimes required, especially in these cases: +- handling compatibility/specificity of the databases (e.g., adding a sequence `alembic/versions/2ed6bf9f1690_add_tasks.py`) +- migrating data (e.g., renaming/moving a field `alembic/versions/0146b79f723c_update_study.py`) + +The `create_db_migration.sh` script will also update the `scripts/rollback.sh` which (as the name indicates) is used to roll back the database to a previous schema. -At this point the development database is not yet migrated. It is only after launching the app (or calling `alembic upgrade head`) that our -development database will be upgraded. +At this point, the development database is not yet migrated. +It is only after launching the app (or calling `alembic upgrade head`) that our development database will be upgraded. 
+
 Now if we want to:
 - modify the model
-- checkout an other branch to test the application prior to this schema update
+- checkout another branch to test the application prior to this schema update
 
-we need to apply the `rollback.sh` script that will revert our local dev database to its previous schema.
-Then we will be able to either launch the app at a previous database schema or continue modifying the model and reapply
-the migration file creation process (in that case we should delete the now obsolete migration file lastly created).
+we need to apply the `rollback.sh` script, which will revert our local dev database to its previous schema.
+Then we will be able to either launch the app at a previous database schema or continue modifying the model and reapply the migration file creation process (in that case, we should delete the now obsolete migration file created last).
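+For example (assuming `ANTAREST_CONF` points to your development configuration, as in the script's usage header below):
+
+```shell
+export ANTAREST_CONF=/path/to/your/application.yaml
+bash ./scripts/rollback.sh
+```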
 
-/!\ Note that when deploying in production a new version with multiple database migration file, the revision id in `rollback.sh` file
-should be the last revision id of the deployed application schema.
-
\ No newline at end of file
+⚠️ Note that when deploying a new version in production with multiple database migration files, the revision ID in the `rollback.sh` file should be the last revision ID of the deployed application schema.
diff --git a/scripts/create_db_migration.sh b/scripts/create_db_migration.sh
index e3e8d243ec..7f68cb2e0b 100755
--- a/scripts/create_db_migration.sh
+++ b/scripts/create_db_migration.sh
@@ -1,10 +1,18 @@
 #!/bin/bash
+# This script creates a new database migration.
+#
+# usage:
+#
+#   export ANTAREST_CONF=/path/to/application.yaml
+#   bash ./scripts/create_db_migration.sh
 
-CURDIR=$(cd `dirname $0` && pwd)
-BASEDIR=`dirname $CURDIR`
+set -e
 
-pushd $BASEDIR
+CUR_DIR=$(cd "$(dirname "$0")" && pwd)
+BASE_DIR=$(dirname "$CUR_DIR")
+
+pushd "$BASE_DIR"
 
 if [ -n "$1" ] ; then
   alembic revision --autogenerate -m "$1"
@@ -13,6 +21,6 @@ else
 fi
 
 CURRENT_VERSION=$(alembic current)
-sed -i "s/alembic downgrade .*/alembic downgrade $CURRENT_VERSION/g" $CURDIR/rollback.sh
+sed -i "s/alembic downgrade .*/alembic downgrade $CURRENT_VERSION/g" "$CUR_DIR/rollback.sh"
 
-popd
\ No newline at end of file
+popd
diff --git a/scripts/linter.sh b/scripts/linter.sh
index 7e8e257897..a214a6c8d3 100755
--- a/scripts/linter.sh
+++ b/scripts/linter.sh
@@ -1,7 +1,7 @@
 #!/bin/bash
 
-BASEDIR=$(dirname "$0")
-PROJECT_DIR=$BASEDIR/..
+BASE_DIR=$(dirname "$0")
+PROJECT_DIR=$BASE_DIR/..
 
 cd "$PROJECT_DIR" || exit
 
diff --git a/scripts/pre-start.sh b/scripts/pre-start.sh
index e2ff85b128..f96a2435b9 100755
--- a/scripts/pre-start.sh
+++ b/scripts/pre-start.sh
@@ -2,12 +2,12 @@
 
 set -e
 
-CURDIR=$(cd `dirname $0` && pwd)
-BASEDIR=`dirname $CURDIR`
+CUR_DIR=$(cd "$(dirname "$0")" && pwd)
+BASE_DIR=$(dirname "$CUR_DIR")
 
-cd $BASEDIR
+cd "$BASE_DIR"
 alembic upgrade head
 cd -
 
-export PYTHONPATH=$BASEDIR
-python3 $BASEDIR/antarest/tools/admin.py clean-locks -c $ANTAREST_CONF
\ No newline at end of file
+export PYTHONPATH=$BASE_DIR
+python3 "$BASE_DIR/antarest/tools/admin.py" clean-locks -c "$ANTAREST_CONF"
diff --git a/scripts/rollback.sh b/scripts/rollback.sh
index 54b7bb9844..8d353593fb 100755
--- a/scripts/rollback.sh
+++ b/scripts/rollback.sh
@@ -1,8 +1,16 @@
 #!/bin/bash
+# This script downgrades the database version to the previous state.
+#
+# usage:
+#
+#   export ANTAREST_CONF=/path/to/application.yaml
+#   bash ./scripts/rollback.sh
 
-CURDIR=$(cd `dirname $0` && pwd)
-BASEDIR=`dirname $CURDIR`
+set -e
 
-cd $BASEDIR
-alembic downgrade 26c50ef2a0e1
+CUR_DIR=$(cd "$(dirname "$0")" && pwd)
+BASE_DIR=$(dirname "$CUR_DIR")
+
+cd "$BASE_DIR"
+alembic downgrade e65e0c04606b
 cd -
diff --git a/scripts/start-dev.sh b/scripts/start-dev.sh
index 120d6f1eda..e9f938a5fc 100755
--- a/scripts/start-dev.sh
+++ b/scripts/start-dev.sh
@@ -1,13 +1,15 @@
 #!/bin/bash
 
-BASEDIR=$(dirname "$0")
-PROJECT_DIR=$BASEDIR/..
+set -e
 
-cd "$PROJECT_DIR" || exit
+BASE_DIR=$(dirname "$0")
+PROJECT_DIR=$BASE_DIR/..
+
+cd "$PROJECT_DIR"
 
 source ./venv/bin/activate
 
 export PYTHONPATH=$PYTHONPATH:.
-sh $BASEDIR/pre-start.sh
+sh "$BASE_DIR/pre-start.sh"
 
 python ./antarest/main.py -c ./resources/application.yaml
diff --git a/scripts/start.sh b/scripts/start.sh
index 966fa187e0..aedcca2202 100755
--- a/scripts/start.sh
+++ b/scripts/start.sh
@@ -2,13 +2,13 @@
 
 set -e
 
-CURDIR=$(cd `dirname $0` && pwd)
-BASEDIR=`dirname $CURDIR`
+CUR_DIR=$(cd "$(dirname "$0")" && pwd)
+BASE_DIR=$(dirname "$CUR_DIR")
 
 if [ -z "$1" ] ; then
-  sh $CURDIR/pre-start.sh
-  gunicorn --config $BASEDIR/conf/gunicorn.py --worker-class=uvicorn.workers.UvicornWorker antarest.wsgi:app
+  sh "$CUR_DIR/pre-start.sh"
+  gunicorn --config "$BASE_DIR/conf/gunicorn.py" --worker-class=uvicorn.workers.UvicornWorker antarest.wsgi:app
 else
-  export PYTHONPATH=$BASEDIR
-  python3 $BASEDIR/antarest/main.py -c $ANTAREST_CONF --module "$1"
+  export PYTHONPATH=$BASE_DIR
+  python3 "$BASE_DIR/antarest/main.py" -c "$ANTAREST_CONF" --module "$1"
 fi
\ No newline at end of file
diff --git a/tests/conftest_db.py b/tests/conftest_db.py
index bcb4177766..f22ab16d9b 100644
--- a/tests/conftest_db.py
+++ b/tests/conftest_db.py
@@ -21,6 +21,7 @@ def db_engine_fixture() -> Generator[Engine, None, None]:
         An instance of the created SQLite database engine.
     """
     engine = create_engine("sqlite:///:memory:")
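+    # SQLite enforces foreign-key constraints (such as `ondelete="SET NULL"`)
+    # only when this PRAGMA is enabled; the in-memory engine reuses a single
+    # connection per thread, so enabling it once here is enough for these tests.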
""" engine = create_engine("sqlite:///:memory:") + engine.execute("PRAGMA foreign_keys = ON") Base.metadata.create_all(engine) yield engine engine.dispose() diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py index dd1d2b3179..b2cec0cae6 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -65,11 +65,16 @@ def test_main(client: TestClient, admin_access_token: str, study_id: str) -> Non # login with new user # TODO mock ldap connector and test user login res = client.post("/v1/login", json={"username": "George", "password": "mypass"}) + res.raise_for_status() george_credentials = res.json() + res = client.post("/v1/login", json={"username": "Fred", "password": "mypass"}) + res.raise_for_status() fred_credentials = res.json() + fred_id = fred_credentials["user"] + res = client.post("/v1/login", json={"username": "Harry", "password": "mypass"}) - harry_credentials = res.json() + res.raise_for_status() # reject user creation from non admin res = client.post( @@ -343,7 +348,9 @@ def test_main(client: TestClient, admin_access_token: str, study_id: str) -> Non f"/v1/launcher/jobs?study_id={study_id}", headers={"Authorization": f'Bearer {fred_credentials["access_token"]}'}, ) - assert res.json()[0]["id"] == job_id + job_info = res.json()[0] + assert job_info["id"] == job_id + assert job_info["owner_id"] == fred_id # update metadata res = client.put( diff --git a/tests/launcher/test_model.py b/tests/launcher/test_model.py new file mode 100644 index 0000000000..3f05dcc314 --- /dev/null +++ b/tests/launcher/test_model.py @@ -0,0 +1,207 @@ +import re +import uuid + +from sqlalchemy.orm.session import Session # type: ignore + +from antarest.launcher.model import JobLog, JobLogType, JobResult, JobStatus, LogType +from antarest.login.model import Identity + + +class TestLogType: + def test_from_filename(self) -> None: + """ + Test the `from_filename` method of `LogType`. + """ + assert LogType.from_filename("antares-err.log") == LogType.STDERR + assert LogType.from_filename("antares-out.log") == LogType.STDOUT + assert LogType.from_filename("antares-xxx.log") is None + + def test_to_suffix(self) -> None: + """ + Test the `to_suffix` method of `LogType`. + """ + assert LogType.STDERR.to_suffix() == "err.log" + assert LogType.STDOUT.to_suffix() == "out.log" + + +class TestJobResult: + def test_create(self, db_session: Session) -> None: + """ + Test the creation of a `JobResult` instance in the database. + """ + job_result_id = str(uuid.uuid4()) + study_id = str(uuid.uuid4()) + with db_session as db: + job_result = JobResult( + id=job_result_id, + study_id=study_id, + job_status=JobStatus.RUNNING, + msg="Running", + exit_code=0, + ) + db.add(job_result) + db.commit() + + with db_session as db: + jr = db.query(JobResult).one() + assert jr.id == job_result_id + assert jr.study_id == study_id + assert jr.launcher is None + assert jr.launcher_params is None + assert jr.job_status is JobStatus.RUNNING + assert jr.creation_date is not None + assert jr.completion_date is None + assert jr.msg == "Running" + assert jr.output_id is None + assert jr.exit_code == 0 + assert jr.solver_stats is None + assert jr.logs == [] + assert jr.owner_id is None + assert re.match(rf"Job\s*result\s+#{jr.id}", str(jr), flags=re.I) + assert re.fullmatch(rf"", repr(jr), flags=re.I) + + def test_create_with_owner(self, db_session: Session) -> None: + """ + Test the creation of a `JobResult` instance associated with an owner in the database. 
+ """ + with db_session as db: + identity = Identity() + db.add(identity) + db.commit() + owner_id = identity.id + + job_result = JobResult(id=str(uuid.uuid4()), owner_id=owner_id) + db.add(job_result) + db.commit() + job_result_id = job_result.id + + with db_session as db: + jr = db.get(JobResult, job_result_id) + assert jr.owner_id == owner_id + + def test_update_with_owner(self, db_session: Session) -> None: + """ + Test the update of a `JobResult` instance with an owner in the database. + """ + with db_session as db: + # Create a job result without owner + job_result = JobResult(id=str(uuid.uuid4())) + db.add(job_result) + db.commit() + job_result_id = job_result.id + + with db_session as db: + # Create an owner identity + identity = Identity() + db.add(identity) + db.commit() + owner_id = identity.id + + with db_session as db: + # Update the job result with the owner + job_result = db.get(JobResult, job_result_id) + job_result.owner_id = owner_id + db.commit() + + with db_session as db: + jr = db.get(JobResult, job_result_id) + assert jr.owner_id == owner_id + + def test_delete_with_owner(self, db_session: Session) -> None: + """ + Test the deletion of an owner and check if the associated `JobResult`'s `owner_id` is set to None. + """ + with db_session as db: + identity = Identity() + db.add(identity) + db.commit() + owner_id = identity.id + + job_result = JobResult(id=str(uuid.uuid4()), owner_id=owner_id) + db.add(job_result) + db.commit() + job_result_id = job_result.id + + with db_session as db: + identity = db.get(Identity, owner_id) + db.delete(identity) + db.commit() + + with db_session as db: + # check `ondelete="SET NULL"` + jr = db.get(JobResult, job_result_id) + assert jr.owner_id is None + + +class TestJobLog: + def test_create(self, db_session: Session) -> None: + """ + Test the creation of a `JobResult` instance in the database. + """ + job_result_id = str(uuid.uuid4()) + study_id = str(uuid.uuid4()) + with db_session as db: + job_result = JobResult( + id=job_result_id, + study_id=study_id, + job_status=JobStatus.RUNNING, + msg="Running", + exit_code=0, + ) + db.add(job_result) + db.commit() + job_result_id = job_result.id + + job_log = JobLog( + message="Log message", + job_id=job_result_id, + log_type=JobLogType.BEFORE, + ) + db.add(job_log) + db.commit() + + with db_session as db: + jl = db.query(JobLog).one() + assert jl.id == 1 + assert jl.message == "Log message" + assert jl.job_id == job_result_id + assert jl.log_type == JobLogType.BEFORE + assert re.match(rf"Job\s*log\s+#{jl.id}", str(jl), flags=re.I) + assert re.fullmatch(rf"", repr(jl), flags=re.I) + + def test_delete_job_result(self, db_session: Session) -> None: + """ + Test the creation of a `JobResult` instance in the database. 
+ """ + job_result_id = str(uuid.uuid4()) + study_id = str(uuid.uuid4()) + with db_session as db: + job_result = JobResult( + id=job_result_id, + study_id=study_id, + job_status=JobStatus.RUNNING, + msg="Running", + exit_code=0, + ) + db.add(job_result) + db.commit() + job_result_id = job_result.id + + job_log = JobLog( + message="Log message", + job_id=job_result_id, + log_type=JobLogType.BEFORE, + ) + db.add(job_log) + db.commit() + job_log_id = job_log.id + + with db_session as db: + jr = db.get(JobResult, job_result_id) + db.delete(jr) + db.commit() + + with db_session as db: + # check `cascade="all, delete, delete-orphan"` + jl = db.get(JobLog, job_log_id) + assert jl is None diff --git a/tests/launcher/test_repository.py b/tests/launcher/test_repository.py index 570a2e073c..1bee383473 100644 --- a/tests/launcher/test_repository.py +++ b/tests/launcher/test_repository.py @@ -3,12 +3,11 @@ from uuid import uuid4 import pytest -from sqlalchemy import create_engine -from antarest.core.persistence import Base -from antarest.core.utils.fastapi_sqlalchemy import DBSessionMiddleware, db +from antarest.core.utils.fastapi_sqlalchemy import db from antarest.launcher.model import JobLog, JobLogType, JobResult, JobStatus from antarest.launcher.repository import JobResultRepository +from antarest.login.model import Identity from antarest.study.model import RawStudy from antarest.study.repository import StudyMetadataRepository from tests.helpers import with_db_context @@ -93,67 +92,59 @@ def test_job_result() -> None: @pytest.mark.unit_test +@with_db_context def test_update_object(): - engine = create_engine("sqlite:///:memory:", echo=False) - Base.metadata.create_all(engine) - # noinspection SpellCheckingInspection - DBSessionMiddleware( - None, - custom_engine=engine, - session_args={"autocommit": False, "autoflush": False}, + identity = Identity(id=1, name="test") + db.session.add(identity) + db.session.commit() + owner_id = identity.id + + a = JobResult( + id=str(uuid4()), + study_id="a", + job_status=JobStatus.SUCCESS, + msg="Hello, World!", + exit_code=0, + owner_id=owner_id, + ) + b = JobResult( + id=str(uuid4()), + study_id="b", + job_status=JobStatus.FAILED, + msg="You failed !!", + exit_code=1, + owner_id=owner_id, ) - with db(): - repo = JobResultRepository() - uuid = str(uuid4()) - a = JobResult( - id=uuid, - study_id="a", - job_status=JobStatus.SUCCESS, - msg="Hello, World!", - exit_code=0, - ) - b = JobResult( - id=uuid, - study_id="b", - job_status=JobStatus.FAILED, - msg="You failed !!", - exit_code=1, - ) - - c = repo.save(a) - d = repo.save(b) - assert c != d + repo = JobResultRepository() + c = repo.save(a) + d = repo.save(b) + assert c != d +@pytest.mark.unit_test +@with_db_context def test_logs(): - engine = create_engine("sqlite:///:memory:", echo=False) - Base.metadata.create_all(engine) - # noinspection SpellCheckingInspection - DBSessionMiddleware( - None, - custom_engine=engine, - session_args={"autocommit": False, "autoflush": False}, + repo = JobResultRepository() + uuid = str(uuid4()) + a = JobResult( + id=uuid, + study_id="a", + job_status=JobStatus.SUCCESS, + msg="Hello, World!", + exit_code=0, ) - with db(): - repo = JobResultRepository() - uuid = str(uuid4()) - a = JobResult( - id=uuid, - study_id="a", - job_status=JobStatus.SUCCESS, - msg="Hello, World!", - exit_code=0, - ) - - repo.save(a) - a.logs.append(JobLog(job_id=uuid, message="a", log_type=str(JobLogType.BEFORE))) - repo.save(a) - job_log_id = a.logs[0].id - a.logs.append(JobLog(job_id=uuid, message="b", 
-        a.logs.append(JobLog(job_id=uuid, message="b", log_type=str(JobLogType.BEFORE)))
-        a.logs.append(JobLog(job_id=uuid, message="c", log_type=str(JobLogType.AFTER)))
-        b = repo.save(a)
-        c = repo.get(uuid)
-        assert b.logs == c.logs
-        assert repr(b.logs[0]) == f"id={job_log_id}, message=a, log_type=JobLogType.BEFORE, job_id={uuid}"
+    repo.save(a)
+    a.logs.append(JobLog(job_id=uuid, message="a", log_type=JobLogType.BEFORE))
+    repo.save(a)
+    job_log_id = a.logs[0].id
+    a.logs.append(JobLog(job_id=uuid, message="b", log_type=JobLogType.BEFORE))
+    a.logs.append(JobLog(job_id=uuid, message="c", log_type=JobLogType.AFTER))
+    b = repo.save(a)
+    c = repo.get(uuid)
+    assert b.logs == c.logs
+    assert b.logs[0].id == job_log_id
+    assert b.logs[0].message == "a"
+    assert b.logs[0].log_type == JobLogType.BEFORE
+    assert b.logs[0].job_id == uuid
diff --git a/tests/launcher/test_service.py b/tests/launcher/test_service.py
index a6177c5e61..5f0f1bd9b7 100644
--- a/tests/launcher/test_service.py
+++ b/tests/launcher/test_service.py
@@ -94,6 +94,7 @@ def test_service_run_study(self, get_current_user_mock) -> None:
         )
 
         launcher_service._generate_new_id = lambda: str(uuid)
+        storage_service_mock.get_user_name.return_value = "fake_user"
         job_id = launcher_service.run_study(
             "study_uuid",
             "local",
@@ -108,7 +109,17 @@ def test_service_run_study(self, get_current_user_mock) -> None:
         )
 
         assert job_id == str(uuid)
-        repository.save.assert_called_once_with(pending)
+
+        repository.save.assert_called_once()
+
+        # SQLAlchemy provides its own way to handle object comparison, which ensures
+        # that the comparison is based on the database identity of the objects.
+        # But here, in this unit test, objects are not in a database session,
+        # so we need to compare them manually.
+        mock_call = repository.save.mock_calls[0]
+        actual_obj: JobResult = mock_call.args[0]
+        assert actual_obj.to_dto().dict() == pending.to_dto().dict()
+
         event_bus.push.assert_called_once_with(
             Event(
                 type=EventType.STUDY_JOB_STARTED,
@@ -127,6 +138,7 @@ def test_service_get_result_from_launcher(self) -> None:
             msg="Hello, World!",
             exit_code=0,
             launcher="local",
+            owner_id=1,
         )
         factory_launcher_mock = Mock()
         factory_launcher_mock.build_launcher.return_value = {"local": launcher_mock}
@@ -163,6 +175,7 @@ def test_service_get_result_from_database(self) -> None:
             job_status=JobStatus.SUCCESS,
             msg="Hello, World!",
             exit_code=0,
+            owner_id=1,
         )
         launcher_mock.get_result.return_value = None
         factory_launcher_mock = Mock()
@@ -201,6 +214,7 @@ def test_service_get_jobs_from_database(self) -> None:
                 job_status=JobStatus.SUCCESS,
                 msg="Hello, World!",
                 exit_code=0,
+                owner_id=1,
             )
         ]
         returned_faked_execution_results = [
@@ -211,6 +225,7 @@ def test_service_get_jobs_from_database(self) -> None:
                 msg="Hello, World!",
                 exit_code=0,
                 creation_date=now,
+                owner_id=1,
             ),
             JobResult(
                 id="2",
@@ -219,6 +234,7 @@ def test_service_get_jobs_from_database(self) -> None:
                 msg="Hello, World!",
                 exit_code=0,
                 creation_date=now,
+                owner_id=1,
             ),
         ]
         all_faked_execution_results = returned_faked_execution_results + [
@@ -229,6 +245,7 @@ def test_service_get_jobs_from_database(self) -> None:
                 msg="Hello, World!",
                 exit_code=0,
                 creation_date=now - timedelta(days=ORPHAN_JOBS_VISIBILITY_THRESHOLD + 1),
+                owner_id=1,
             )
         ]
         launcher_mock.get_result.return_value = None
@@ -565,6 +582,7 @@ def test_service_kill_job(self, tmp_path: Path) -> None:
         job_result_mock.id = job_id
         job_result_mock.study_id = "study_id"
         job_result_mock.launcher = launcher
+        job_result_mock.owner_id = 36
         launcher_service.job_result_repository.get.return_value = job_result_mock
 
         launcher_service.launchers = {"slurm": Mock()}
@@ -822,50 +840,67 @@ def test_save_solver_stats(self, tmp_path: Path) -> None:
         job_id = "job_id"
         study_id = "study_id"
-        job_result = JobResult(id=job_id, study_id=study_id, job_status=JobStatus.SUCCESS)
+        job_result = JobResult(
+            id=job_id,
+            study_id=study_id,
+            job_status=JobStatus.SUCCESS,
+            owner_id=1,
+        )
         output_path = tmp_path / "some-output"
         output_path.mkdir()
         launcher_service._save_solver_stats(job_result, output_path)
-        launcher_service.job_result_repository.save.assert_not_called()
+        repository = launcher_service.job_result_repository
+        repository.save.assert_not_called()
 
         expected_saved_stats = """#item  duration_ms  NbOccurences
-        mc_years  216328  1
-        study_loading  4304  1
-        survey_report  158  1
-        total  244581  1
-        tsgen_hydro  1683  1
-        tsgen_load  2702  1
-        tsgen_solar  21606  1
-        tsgen_thermal  407  2
-        tsgen_wind  2500  1
-        """
+            mc_years  216328  1
+            study_loading  4304  1
+            survey_report  158  1
+            total  244581  1
+            tsgen_hydro  1683  1
+            tsgen_load  2702  1
+            tsgen_solar  21606  1
+            tsgen_thermal  407  2
+            tsgen_wind  2500  1
+            """
         (output_path / EXECUTION_INFO_FILE).write_text(expected_saved_stats)
 
         launcher_service._save_solver_stats(job_result, output_path)
-        launcher_service.job_result_repository.save.assert_called_with(
-            JobResult(
-                id=job_id,
-                study_id=study_id,
-                job_status=JobStatus.SUCCESS,
-                solver_stats=expected_saved_stats,
-            )
+        assert repository.save.call_count == 1
+
+        # SQLAlchemy provides its own way to handle object comparison, which ensures
+        # that the comparison is based on the database identity of the objects.
+        # But here, in this unit test, objects are not in a database session,
+        # so we need to compare them manually.
+        mock_call = repository.save.mock_calls[0]
+        actual_obj: JobResult = mock_call.args[0]
+        expected_obj = JobResult(
+            id=job_id,
+            study_id=study_id,
+            job_status=JobStatus.SUCCESS,
+            solver_stats=expected_saved_stats,
+            owner_id=1,
         )
+        assert actual_obj.to_dto().dict() == expected_obj.to_dto().dict()
 
         zip_file = tmp_path / "test.zip"
         with ZipFile(zip_file, "w", ZIP_DEFLATED) as output_data:
             output_data.writestr(EXECUTION_INFO_FILE, "0\n1")
 
         launcher_service._save_solver_stats(job_result, zip_file)
-        launcher_service.job_result_repository.save.assert_called_with(
-            JobResult(
-                id=job_id,
-                study_id=study_id,
-                job_status=JobStatus.SUCCESS,
-                solver_stats="0\n1",
-            )
+        assert repository.save.call_count == 2
+
+        mock_call = repository.save.mock_calls[-1]
+        actual_obj: JobResult = mock_call.args[0]
+        expected_obj = JobResult(
+            id=job_id,
+            study_id=study_id,
+            job_status=JobStatus.SUCCESS,
+            solver_stats="0\n1",
+            owner_id=1,
        )
+        assert actual_obj.to_dto().dict() == expected_obj.to_dto().dict()
 
     def test_get_load(self, tmp_path: Path) -> None:
         study_service = Mock()
diff --git a/tests/launcher/test_web.py b/tests/launcher/test_web.py
index 1a40f2aa7a..99799abbde 100644
--- a/tests/launcher/test_web.py
+++ b/tests/launcher/test_web.py
@@ -63,6 +63,7 @@ def test_result() -> None:
         job_status=JobStatus.SUCCESS,
         msg="hello world",
         exit_code=0,
+        owner_id=1,
     )
 
     service = Mock()
@@ -87,6 +88,7 @@ def test_jobs() -> None:
         job_status=JobStatus.SUCCESS,
         msg="hello world",
         exit_code=0,
+        owner_id=1,
     )
 
     service = Mock()