From 43cbd2364ba9d75cd8ca18d77d94b57e8389bc50 Mon Sep 17 00:00:00 2001
From: Paul Bui-Quang
Date: Tue, 20 Jul 2021 13:50:56 +0200
Subject: [PATCH] Add alembic db management (#421)

---
 .github/workflows/deploy.yml                  |   2 +-
 Dockerfile                                    |   5 +-
 README.md                                     |  65 +++----
 alembic.ini                                   |  89 ++++++++++
 alembic/README                                |   1 +
 alembic/env.py                                |  91 ++++++++++
 alembic/script.py.mako                        |  24 +++
 alembic/versions/47ab888dc44d_add_dataset.py  |  94 +++++++++++
 alembic/versions/6a04e38b8704_base.py         | 159 ++++++++++++++++++
 antarest/core/config.py                       |   2 +
 antarest/core/persistence.py                  |  30 ----
 antarest/core/utils/utils.py                  |  30 +++-
 antarest/launcher/model.py                    |   3 +-
 antarest/main.py                              |  31 ++--
 .../rawstudy/model/filesystem/config/model.py |   2 +-
 pyproject.toml                                |   2 +-
 requirements.txt                              |   4 +-
 scripts/create_db_migration.sh                |  18 ++
 scripts/it_examples.py                        | 125 --------------
 scripts/pre-start.sh                          |   8 +
 scripts/rollback.sh                           |   8 +
 scripts/server-docker.sh                      |  17 --
 scripts/{server.sh => start-dev.sh}           |   3 +
 scripts/start.sh                              |  10 ++
 tests/conftest.py                             |   3 +-
 tests/integration/conftest.py                 |  16 ++
 26 files changed, 620 insertions(+), 222 deletions(-)
 create mode 100644 alembic.ini
 create mode 100644 alembic/README
 create mode 100644 alembic/env.py
 create mode 100644 alembic/script.py.mako
 create mode 100644 alembic/versions/47ab888dc44d_add_dataset.py
 create mode 100644 alembic/versions/6a04e38b8704_base.py
 create mode 100755 scripts/create_db_migration.sh
 delete mode 100644 scripts/it_examples.py
 create mode 100755 scripts/pre-start.sh
 create mode 100755 scripts/rollback.sh
 delete mode 100755 scripts/server-docker.sh
 rename scripts/{server.sh => start-dev.sh} (88%)
 create mode 100755 scripts/start.sh

diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index d161cfcb1e..0f45545649 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -38,7 +38,7 @@ jobs:
       run: |
         python -m pip install --upgrade pip
         pip install pydantic --no-binary pydantic
-        pip install -r requirements-dev.txt
+        pip install -r requirements.txt
     - name: Generate binary Unix
       if: matrix.os != 'windows-latest'
       run: |
diff --git a/Dockerfile b/Dockerfile
index 09191c73c5..5a02fccf9d 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -10,6 +10,9 @@ RUN mkdir -p examples/studies
 COPY ./requirements.txt /conf/
 COPY ./antarest /antarest
 COPY ./resources /resources
+COPY ./scripts /scripts
+COPY ./alembic /alembic
+COPY ./alembic.ini /alembic.ini
 COPY ./antares-launcher /antares-launcher
 
 RUN ln -s /antares-launcher/antareslauncher /antareslauncher
@@ -19,4 +22,4 @@ RUN cp /antares-launcher/requirements.txt /conf/antares-launcher/requirements.tx
 RUN pip3 install --upgrade pip \
     && pip3 install -r /conf/requirements.txt
 
-ENTRYPOINT gunicorn --config /conf/gunicorn.py --worker-class=uvicorn.workers.UvicornWorker antarest.wsgi:app
+ENTRYPOINT ./scripts/start.sh
diff --git a/README.md b/README.md
index 11ffe1bdc2..3d6df51e0c 100644
--- a/README.md
+++ b/README.md
@@ -11,22 +11,44 @@ First clone the project:
 
 ```shell script
-git clone https://github.com/AntaresSimulatorTeam/api-iso-antares.git
-cd api-iso-antares
+git clone https://github.com/AntaresSimulatorTeam/AntaREST.git
+cd AntaREST
+git submodule init
+git submodule update
 ```
 
+Install back-end dependencies
+
+```shell script
+python -m pip install --upgrade pip
+pip install pydantic --no-binary pydantic
+pip install -r requirements.txt # use requirements-dev.txt if building a single binary with pyinstaller
+```
+
+Build the front end
+
+```shell script
+cd webapp
+npm install
+cd ..
+NODE_OPTIONS="--max-old-space-size=8192" ./scripts/build-front.sh
+```
+
 
 ### Using pyinstaller
 
 Linux system:
 
 ```shell script
-pyinstaller -F api_iso_antares/main.py -n server --add-data resources:resources
+git log -1 HEAD --format=%H > ./resources/commit_id
+pyinstaller -F antarest/main.py -n server --additional-hooks-dir extra-hooks --add-data resources:resources
 ```
 
 Windows system:
 
 ```shell script
-pyinstaller -F api_iso_antares\main.py -n server --add-data ".\resources;.\resources"
+git log -1 HEAD --format=%H > .\resources\commit_id
+pyinstaller -F antarest\main.py -n server --additional-hooks-dir extra-hooks --add-data ".\resources;.\resources"
 ```
 
 You can test that the build is OK using:
 
@@ -41,7 +63,7 @@ dist\server.exe -v # Windows system
 
 To build the docker image, use the following command:
 
 ```shell script
-docker build --tag api-iso-antares -f docker/Dockerfile .
+docker build --tag antarest -f docker/Dockerfile .
 ```
 
 ## Start the API
 
@@ -62,7 +84,7 @@ docker run \
     -p 80:5000 \
     -e GUNICORN_WORKERS=4 \
     -v $STUDIES_ABSOLUTE_PATH:/studies \
-    api-iso-antares
+    antarest
 ```
 
 * Setting the environment variable GUNICORN_WORKERS to *ALL_AVAILABLE* will make GUNICORN use 2 * nb_cpu +1 workers
 
@@ -115,13 +137,13 @@ The address (the port mostly) depends on the way you started the server. If you
 
 To test the server, you can list the available studies in your workspace using:
 
 ```shell script
-curl http://0.0.0.0:8080/studies
+curl http://localhost:8080/v1/studies
 ```
 
 Or data of a specific study using:
 
 ```shell script
-curl http://0.0.0.0:8080/studies/{study_uuid}
+curl http://localhost:8080/v1/studies/{study_uuid}
 ```
 
 The current API handles hundreds of HTTP endpoints (GET and POST) to manipulate your studies.
@@ -129,28 +151,9 @@ The best way to discover the API is using its Swagger documentation (see below)
 
 ## Swagger
 
-The ANTARES API do not have a public UI swagger available for the moment.
-Use the following command to save the swagger metadata of the API ANTARES into a json file.
+The ANTARES API doc is available within the application (open your browser to `http://localhost:8080`).
+You can also fetch the raw OpenAPI spec:
 
 ```shell script
-curl http://0.0.0.0:8080/swagger > swagger.json
-```
-
-Then, use the script *script/swagger-ui.sh* to start a Swagger UI.
-
-```shell script
-chmod a+x ./script/swagger-ui.sh
-./script/swagger-ui.sh
-```
-
-Do not forget to start the API ANTARES alongside (and to modify the port you decide to use into the Swagger UI).
-
-
-## Strategies for JssonSchema Engines
-
-| Strategy | Description |
-|----------|---------------------------------------|
-| S1       | Mix folder with complete set of zones |
-| S...     |                                       |
-| Sn       |                                       |
-
+curl http://localhost:8080/openapi.json > swagger.json
+```
\ No newline at end of file
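This patch replaces the startup-time `Base.metadata.create_all(engine)` with Alembic-managed migrations: the Docker ENTRYPOINT becomes `scripts/start.sh`, which runs `scripts/pre-start.sh` (`alembic upgrade head`) before launching gunicorn, and `alembic/env.py` resolves the database URL from the ANTAREST config file. A minimal sketch of the resulting startup flow, assuming a PostgreSQL database; the config path and connection URLs are placeholders, and only the `db.url`/`db.admin_url` keys and the `ANTAREST_CONF` variable come from this patch:

```shell script
# Relevant fragment of an ANTAREST config file (all other required sections omitted);
# the key names mirror antarest/core/config.py, the URLs are illustrative:
#
#   db:
#     url: postgresql://antarest:secret@localhost:5432/antarest        # runtime connection
#     admin_url: postgresql://postgres:secret@localhost:5432/antarest  # optional; alembic/env.py prefers it when set

# What the new Docker ENTRYPOINT (scripts/start.sh) boils down to:
ANTAREST_CONF=/path/to/config.yaml sh scripts/pre-start.sh   # runs `alembic upgrade head`
gunicorn --config ./conf/gunicorn.py --worker-class=uvicorn.workers.UvicornWorker antarest.wsgi:app
```
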
diff --git a/alembic.ini b/alembic.ini
new file mode 100644
index 0000000000..d17e355e31
--- /dev/null
+++ b/alembic.ini
@@ -0,0 +1,89 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = alembic
+
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date
+# within the migration file as well as the filename.
+# string value is passed to dateutil.tz.gettz()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the
+# "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; this defaults
+# to alembic/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path
+# version_locations = %(here)s/bar %(here)s/bat alembic/versions
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+sqlalchemy.url =
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts. See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks=black
+# black.type=console_scripts
+# black.entrypoint=black
+# black.options=-l 79
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/alembic/README b/alembic/README
new file mode 100644
index 0000000000..98e4f9c44e
--- /dev/null
+++ b/alembic/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/alembic/env.py b/alembic/env.py
new file mode 100644
index 0000000000..7a8a468c26
--- /dev/null
+++ b/alembic/env.py
@@ -0,0 +1,91 @@
+from logging.config import fileConfig
+import os
+from pathlib import Path
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from alembic import context
+from antarest.core.config import Config
+from antarest import main
+
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+# uncomment the next line to have the loggers configured from alembic.ini
+#fileConfig(config.config_file_name)
+
+config_path = os.getenv('ANTAREST_CONF') or main.get_default_config_path()
+if config_path and Path(config_path).exists():
+    antarest_conf = Config.from_yaml_file(config_path)
+    config.set_main_option("sqlalchemy.url", antarest_conf.db_admin_url or antarest_conf.db_url)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+
+
+target_metadata = main.Base.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline():
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well.
By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + render_as_batch=True, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata, render_as_batch=True, + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000000..2c0156303a --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/47ab888dc44d_add_dataset.py b/alembic/versions/47ab888dc44d_add_dataset.py new file mode 100644 index 0000000000..19261fc63b --- /dev/null +++ b/alembic/versions/47ab888dc44d_add_dataset.py @@ -0,0 +1,94 @@ +"""add_dataset + +Revision ID: 47ab888dc44d +Revises: 6a04e38b8704 +Create Date: 2021-07-20 13:41:42.420567 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '47ab888dc44d' +down_revision = '6a04e38b8704' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('dataset', + sa.Column('id', sa.String(length=36), nullable=False), + sa.Column('name', sa.String(), nullable=True), + sa.Column('owner_id', sa.Integer(), nullable=True), + sa.Column('public', sa.Boolean(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=True), + sa.Column('updated_at', sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(['owner_id'], ['identities.id'], name='fk_matrixdataset_identities_id'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('id'), + sa.UniqueConstraint('id') + ) + op.create_table('dataset_matrices', + sa.Column('dataset_id', sa.String(), nullable=False), + sa.Column('matrix_id', sa.String(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['dataset_id'], ['dataset.id'], name='fk_matrixdatasetrelation_dataset_id'), + sa.ForeignKeyConstraint(['matrix_id'], ['matrix.id'], name='fk_matrixdatasetrelation_matrix_id'), + sa.PrimaryKeyConstraint('dataset_id', 'matrix_id', 'name') + ) + op.create_table('matrix_dataset_group', + sa.Column('dataset_id', sa.String(length=64), nullable=False), + sa.Column('group_id', sa.String(length=36), nullable=False), + sa.ForeignKeyConstraint(['dataset_id'], ['dataset.id'], ), + sa.ForeignKeyConstraint(['group_id'], ['groups.id'], ), + sa.PrimaryKeyConstraint('dataset_id', 'group_id') + ) + op.drop_table('matrix_user_metadata') + op.drop_table('matrix_group') + op.drop_table('matrix_metadata') + with op.batch_alter_table('matrix', schema=None) as batch_op: + batch_op.add_column(sa.Column('width', sa.Integer(), nullable=True)) + batch_op.add_column(sa.Column('height', sa.Integer(), nullable=True)) + batch_op.drop_column('freq') + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('matrix', schema=None) as batch_op: + batch_op.add_column(sa.Column('freq', sa.VARCHAR(length=7), nullable=True)) + batch_op.drop_column('height') + batch_op.drop_column('width') + + op.create_table('matrix_metadata', + sa.Column('matrix_id', sa.VARCHAR(length=64), nullable=False), + sa.Column('owner_id', sa.INTEGER(), nullable=False), + sa.Column('key', sa.VARCHAR(), nullable=False), + sa.Column('value', sa.VARCHAR(), nullable=True), + sa.ForeignKeyConstraint(['matrix_id'], ['matrix.id'], ), + sa.ForeignKeyConstraint(['owner_id'], ['identities.id'], ), + sa.PrimaryKeyConstraint('matrix_id', 'owner_id', 'key') + ) + op.create_table('matrix_group', + sa.Column('matrix_id', sa.VARCHAR(length=64), nullable=False), + sa.Column('owner_id', sa.INTEGER(), nullable=False), + sa.Column('group_id', sa.VARCHAR(length=36), nullable=False), + sa.ForeignKeyConstraint(['group_id'], ['groups.id'], ), + sa.ForeignKeyConstraint(['matrix_id'], ['matrix.id'], ), + sa.ForeignKeyConstraint(['owner_id'], ['identities.id'], ), + sa.PrimaryKeyConstraint('matrix_id', 'owner_id', 'group_id') + ) + op.create_table('matrix_user_metadata', + sa.Column('matrix_id', sa.VARCHAR(length=64), nullable=False), + sa.Column('owner_id', sa.INTEGER(), nullable=False), + sa.ForeignKeyConstraint(['matrix_id'], ['matrix.id'], name='fk_matrix_user_metadata_matrix_id'), + sa.ForeignKeyConstraint(['owner_id'], ['identities.id'], name='fk_matrix_user_metadata_identities_id'), + sa.PrimaryKeyConstraint('matrix_id', 'owner_id') + ) + op.drop_table('matrix_dataset_group') + op.drop_table('dataset_matrices') + op.drop_table('dataset') + # ### end Alembic commands ### diff --git a/alembic/versions/6a04e38b8704_base.py b/alembic/versions/6a04e38b8704_base.py new file mode 100644 index 0000000000..cc3e3d3421 --- /dev/null +++ b/alembic/versions/6a04e38b8704_base.py @@ -0,0 +1,159 @@ +"""base + +Revision ID: 6a04e38b8704 +Revises: +Create Date: 2021-07-13 15:42:32.381300 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '6a04e38b8704' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('groups', + sa.Column('id', sa.String(length=36), nullable=False), + sa.Column('name', sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('id'), + sa.UniqueConstraint('id') + ) + op.create_table('identities', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=True), + sa.Column('type', sa.String(length=50), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('job_result', + sa.Column('id', sa.String(length=36), nullable=False), + sa.Column('study_id', sa.String(length=36), nullable=True), + sa.Column('launcher', sa.String(), nullable=True), + sa.Column('job_status', sa.Enum('PENDING', 'FAILED', 'SUCCESS', 'RUNNING', name='jobstatus'), nullable=True), + sa.Column('creation_date', sa.DateTime(), nullable=True), + sa.Column('completion_date', sa.DateTime(), nullable=True), + sa.Column('msg', sa.String(), nullable=True), + sa.Column('output_id', sa.String(), nullable=True), + sa.Column('exit_code', sa.Integer(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('matrix', + sa.Column('id', sa.String(length=64), nullable=False), + sa.Column('freq', sa.Enum('HOURLY', 'DAILY', 'WEEKLY', 'MONTHLY', 'ANNUAL', name='matrixfreq'), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('matrix_group', + sa.Column('matrix_id', sa.String(length=64), nullable=False), + sa.Column('owner_id', sa.Integer(), nullable=False), + sa.Column('group_id', sa.String(length=36), nullable=False), + sa.ForeignKeyConstraint(['group_id'], ['groups.id'], ), + sa.ForeignKeyConstraint(['matrix_id'], ['matrix.id'], ), + sa.ForeignKeyConstraint(['owner_id'], ['identities.id'], ), + sa.PrimaryKeyConstraint('matrix_id', 'owner_id', 'group_id') + ) + op.create_table('matrix_metadata', + sa.Column('matrix_id', sa.String(length=64), nullable=False), + sa.Column('owner_id', sa.Integer(), nullable=False), + sa.Column('key', sa.String(), nullable=False), + sa.Column('value', sa.String(), nullable=True), + sa.ForeignKeyConstraint(['matrix_id'], ['matrix.id'], ), + sa.ForeignKeyConstraint(['owner_id'], ['identities.id'], ), + sa.PrimaryKeyConstraint('matrix_id', 'owner_id', 'key') + ) + op.create_table('matrix_user_metadata', + sa.Column('matrix_id', sa.String(length=64), nullable=False), + sa.Column('owner_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['matrix_id'], ['matrix.id'], name='fk_matrix_user_metadata_matrix_id'), + sa.ForeignKeyConstraint(['owner_id'], ['identities.id'], name='fk_matrix_user_metadata_identities_id'), + sa.PrimaryKeyConstraint('matrix_id', 'owner_id') + ) + op.create_table('roles', + sa.Column('type', sa.Enum('ADMIN', 'RUNNER', 'WRITER', 'READER', name='roletype'), nullable=True), + sa.Column('identity_id', sa.Integer(), nullable=False), + sa.Column('group_id', sa.String(length=36), nullable=False), + sa.ForeignKeyConstraint(['group_id'], ['groups.id'], ), + sa.ForeignKeyConstraint(['identity_id'], ['identities.id'], ), + sa.PrimaryKeyConstraint('identity_id', 'group_id') + ) + op.create_table('study', + sa.Column('id', sa.String(length=36), nullable=False), + sa.Column('name', sa.String(length=255), nullable=True), + sa.Column('type', sa.String(length=50), nullable=True), + sa.Column('version', sa.String(length=255), nullable=True), + sa.Column('author', sa.String(length=255), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=True), + 
sa.Column('updated_at', sa.DateTime(), nullable=True), + sa.Column('public_mode', sa.Enum('NONE', 'READ', 'EXECUTE', 'EDIT', 'FULL', name='publicmode'), nullable=True), + sa.Column('owner_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['owner_id'], ['identities.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('id'), + sa.UniqueConstraint('id') + ) + op.create_table('users', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('_pwd', sa.String(length=255), nullable=True), + sa.ForeignKeyConstraint(['id'], ['identities.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('users_ldap', + sa.Column('id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['id'], ['identities.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('bots', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('owner', sa.Integer(), nullable=True), + sa.Column('is_author', sa.Boolean(), nullable=True), + sa.ForeignKeyConstraint(['id'], ['identities.id'], ), + sa.ForeignKeyConstraint(['owner'], ['users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('group_metadata', + sa.Column('group_id', sa.String(length=36), nullable=True), + sa.Column('study_id', sa.String(length=36), nullable=True), + sa.ForeignKeyConstraint(['group_id'], ['groups.id'], ), + sa.ForeignKeyConstraint(['study_id'], ['study.id'], ) + ) + op.create_table('rawstudy', + sa.Column('id', sa.String(length=36), nullable=False), + sa.Column('content_status', sa.Enum('VALID', 'WARNING', 'ERROR', name='studycontentstatus'), nullable=True), + sa.Column('workspace', sa.String(length=255), nullable=True), + sa.Column('path', sa.String(length=255), nullable=True), + sa.ForeignKeyConstraint(['id'], ['study.id'], ), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('rawstudy') + op.drop_table('group_metadata') + op.drop_table('bots') + op.drop_table('users_ldap') + op.drop_table('users') + op.drop_table('study') + op.drop_table('roles') + op.drop_table('matrix_user_metadata') + op.drop_table('matrix_metadata') + op.drop_table('matrix_group') + op.drop_table('matrix') + op.drop_table('job_result') + op.drop_table('identities') + op.drop_table('groups') + # ### end Alembic commands ### + + op.execute("DROP TYPE jobstatus;") + op.execute("DROP TYPE matrixfreq;") + op.execute("DROP TYPE publicmode;") + op.execute("DROP TYPE roletype;") + op.execute("DROP TYPE studycontentstatus;") + diff --git a/antarest/core/config.py b/antarest/core/config.py index 9ff30ac147..276aa06042 100644 --- a/antarest/core/config.py +++ b/antarest/core/config.py @@ -246,6 +246,7 @@ class Config: matrixstore: MatrixStoreConfig = MatrixStoreConfig() tmp_dir: Path = Path(tempfile.gettempdir()) db_url: str = "" + db_admin_url: Optional[str] = None logging: LoggingConfig = LoggingConfig() debug: bool = True resources_path: Path = Path() @@ -269,6 +270,7 @@ def from_dict(data: JSON, res: Optional[Path] = None) -> "Config": launcher=LauncherConfig.from_dict(data["launcher"]), matrixstore=MatrixStoreConfig.from_dict(data["matrixstore"]), db_url=data["db"]["url"], + db_admin_url=data["db"].get("admin_url", None), logging=LoggingConfig.from_dict(data["logging"]), debug=data["debug"], resources_path=res or Path(), diff --git a/antarest/core/persistence.py b/antarest/core/persistence.py index fc50c777a4..93b07a8d44 100644 --- a/antarest/core/persistence.py +++ b/antarest/core/persistence.py @@ -1,33 +1,3 @@ -from typing import Any - from sqlalchemy.ext.declarative import declarative_base # type: ignore Base = declarative_base() - - -class DTO: - """ - Implement basic method for DTO objects - """ - - def __hash__(self) -> int: - return hash(tuple(sorted(self.__dict__.items()))) - - def __eq__(self, other: Any) -> bool: - return ( - isinstance(other, type(self)) and self.__dict__ == other.__dict__ - ) - - def __str__(self) -> str: - return "{}({})".format( - type(self).__name__, - ", ".join( - [ - "{}={}".format(k, str(self.__dict__[k])) - for k in sorted(self.__dict__) - ] - ), - ) - - def __repr__(self) -> str: - return self.__str__() diff --git a/antarest/core/utils/utils.py b/antarest/core/utils/utils.py index 2b8c0568df..36d618c07d 100644 --- a/antarest/core/utils/utils.py +++ b/antarest/core/utils/utils.py @@ -1,10 +1,38 @@ from pathlib import Path -from typing import IO +from typing import IO, Any from zipfile import ZipFile, BadZipFile from antarest.core.exceptions import BadZipBinary +class DTO: + """ + Implement basic method for DTO objects + """ + + def __hash__(self) -> int: + return hash(tuple(sorted(self.__dict__.items()))) + + def __eq__(self, other: Any) -> bool: + return ( + isinstance(other, type(self)) and self.__dict__ == other.__dict__ + ) + + def __str__(self) -> str: + return "{}({})".format( + type(self).__name__, + ", ".join( + [ + "{}={}".format(k, str(self.__dict__[k])) + for k in sorted(self.__dict__) + ] + ), + ) + + def __repr__(self) -> str: + return self.__str__() + + def extract_zip(stream: IO[bytes], dst: Path) -> None: """ Extract zip archive diff --git a/antarest/launcher/model.py b/antarest/launcher/model.py index d1ca3bb275..2cb0aae610 100644 --- a/antarest/launcher/model.py +++ b/antarest/launcher/model.py @@ -5,7 +5,8 @@ from sqlalchemy import Integer, Column, Enum, String, DateTime # type: ignore from antarest.core.custom_types 
import JSON -from antarest.core.persistence import Base, DTO +from antarest.core.persistence import Base +from antarest.core.utils.utils import DTO class LogType(enum.Enum): diff --git a/antarest/main.py b/antarest/main.py index 2b4ebb63dc..e98ae1fa30 100644 --- a/antarest/main.py +++ b/antarest/main.py @@ -11,19 +11,20 @@ from fastapi import FastAPI, HTTPException from fastapi_jwt_auth import AuthJWT # type: ignore from pydantic.main import BaseModel -from sqlalchemy import create_engine from starlette.middleware.cors import CORSMiddleware from starlette.requests import Request from starlette.responses import JSONResponse from starlette.staticfiles import StaticFiles from starlette.templating import Jinja2Templates -from antarest import __version__ from antarest.core.config import Config from antarest.core.core_blueprint import create_utils_routes from antarest.core.persistence import Base from antarest.core.utils.fastapi_sqlalchemy import DBSessionMiddleware from antarest.core.utils.web import tags_metadata +from sqlalchemy import create_engine + +from antarest import __version__ from antarest.eventbus.main import build_eventbus from antarest.launcher.main import build_launcher from antarest.login.auth import Auth @@ -58,7 +59,7 @@ def parse_arguments() -> argparse.Namespace: return parser.parse_args() -def get_default_config_path() -> Path: +def get_default_config_path() -> Optional[Path]: config = Path("config.yaml") if config.exists(): return config @@ -66,10 +67,16 @@ def get_default_config_path() -> Path: config = Path.home() / ".antares/config.yaml" if config.exists(): return config + return None - raise ValueError( - "Config file not found. Set it by '-c' with command line or place it at ./config.yaml or ~/.antares/config.yaml" - ) + +def get_default_config_path_or_raise() -> Path: + config_path = get_default_config_path() + if not config_path: + raise ValueError( + "Config file not found. 
Set it by '-c' with command line or place it at ./config.yaml or ~/.antares/config.yaml" + ) + return config_path def get_arguments() -> Tuple[Path, bool, bool]: @@ -79,7 +86,9 @@ def get_arguments() -> Tuple[Path, bool, bool]: if display_version: return Path("."), display_version, arguments.no_front - config_file = Path(arguments.config_file or get_default_config_path()) + config_file = Path( + arguments.config_file or get_default_config_path_or_raise() + ) return config_file, display_version, arguments.no_front @@ -124,18 +133,20 @@ def fastapi_app( ) -> FastAPI: res = resource_path or get_local_path() / "resources" config = Config.from_yaml_file(res=res, file=config_file) - configure_logger(config) logging.getLogger(__name__).info("Initiating application") # Database + connect_args = {} + if config.db_url.startswith("sqlite"): + connect_args["check_same_thread"] = False + engine = create_engine( config.db_url, echo=config.debug, - connect_args={"check_same_thread": False}, + connect_args=connect_args, ) - Base.metadata.create_all(engine) application = FastAPI( title="AntaREST", diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/model.py b/antarest/study/storage/rawstudy/model/filesystem/config/model.py index c72cfeda7a..7ecdba5644 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/model.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/model.py @@ -2,7 +2,7 @@ from typing import Optional, List, Dict from antarest.core.custom_types import JSON -from antarest.core.persistence import DTO +from antarest.core.utils.utils import DTO class ThermalCluster(DTO): diff --git a/pyproject.toml b/pyproject.toml index b9bf8a2282..dafe49cb52 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,4 +1,4 @@ [tool.black] target-version = ["py36"] line-length = 79 -exclude = "antares-?launcher/*" \ No newline at end of file +exclude = "(antares-?launcher/*|alembic/*)" \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 2079761a07..a7202e4a49 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,4 +23,6 @@ locust~=1.5.1 MarkupSafe~=1.1.1 checksumdir~=1.2.0 pydantic~=1.8.2 -gunicorn \ No newline at end of file +gunicorn +alembic +psycopg2-binary diff --git a/scripts/create_db_migration.sh b/scripts/create_db_migration.sh new file mode 100755 index 0000000000..e3e8d243ec --- /dev/null +++ b/scripts/create_db_migration.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +CURDIR=$(cd `dirname $0` && pwd) +BASEDIR=`dirname $CURDIR` + + +pushd $BASEDIR + +if [ -n "$1" ] ; then + alembic revision --autogenerate -m "$1" +else + alembic revision --autogenerate +fi + +CURRENT_VERSION=$(alembic current) +sed -i "s/alembic downgrade .*/alembic downgrade $CURRENT_VERSION/g" $CURDIR/rollback.sh + +popd \ No newline at end of file diff --git a/scripts/it_examples.py b/scripts/it_examples.py deleted file mode 100644 index 8ca985b7e2..0000000000 --- a/scripts/it_examples.py +++ /dev/null @@ -1,125 +0,0 @@ -import hashlib -import json -import shutil -from pathlib import Path -from typing import Optional - -import requests - -OKGREEN = "\033[92m" -FAIL = "\033[91m" -ENDC = "\033[0m" - - -def export(host: str, study: str) -> Optional[bytes]: - """ - Send exportation request. 
- - Args: - host: server url - study: study id - - Returns: zip file if request success - - """ - res = requests.get(f"{host}/studies/{study}/export?compact") - if res.status_code == 200: - print(f"{OKGREEN}EXPORT SUCCESS{ENDC}", end=" | ") - return res.content - else: - print(f"{FAIL}FAIL to EXPORT{ENDC}") - return None - - -def importation(host: str, study: bytes) -> Optional[str]: - """ - Send importation request. - - Args: - host: url server - study: zip file - - Returns: study id if request request - - """ - headers = {"Content-Length": str(len(study))} - res = requests.post( - f"{host}/studies", headers=headers, files={"study": study} - ) - if res.status_code == 201: - url: str = res.json() - print(f"{OKGREEN}IMPORT SUCCESS{ENDC}", end=" | ") - return url[len("/studies/") :] - else: - print(f"{FAIL}FAIL to EXPORT{ENDC}") - return None - - -def excluded(name: str) -> bool: - # some file as ghost like sets.ini -> ._sets.ini - if name[:2] == "._": - return True - - # reference file not used - if name == "reference": - return True - - return False - - -def compare(origin: Path, copy: Path) -> bool: - """ - Compare file by file to folder. - - Args: - origin: origin folder - copy: copy folder to compare - - Returns: True if all files and folders are same - - """ - if origin.is_file(): - if not copy.is_file(): - print(f"{FAIL}file {origin} not present in copy{ENDC}") - return False - return True - - else: - if not copy.is_dir(): - print(f"{FAIL}file {origin} not present in copy{ENDC}") - return False - return all( - compare(child, copy / child.name) - for child in origin.iterdir() - if not excluded(child.name) - ) - - -def main(path: Path, host: str) -> None: - res = requests.get(f"{host}/studies") - - if res.status_code != 200: - print( - f"{FAIL}Error to fetch studies list return code {res.status_code}{ENDC}" - ) - return - - studies = res.json() - print(f"There are {len(studies)} studies to test") - - for study in studies: - print(f"{study[:3]}", end="\t") - - data = export(host, study) - if data: - uuid = importation(host, data) - if uuid: - res = compare(origin=path / study, copy=path / uuid) - print(f"{OKGREEN}COMPARE SUCCESS{ENDC}") if res else print("") - shutil.rmtree(path / uuid) - - -if __name__ == "__main__": - path = Path("/Volumes/Crucial X8/antares/short-tests") - host = "http://localhost:8080" - main(path=path, host=host) diff --git a/scripts/pre-start.sh b/scripts/pre-start.sh new file mode 100755 index 0000000000..d530e95a4c --- /dev/null +++ b/scripts/pre-start.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +CURDIR=$(cd `dirname $0` && pwd) +BASEDIR=`dirname $CURDIR` + +cd $BASEDIR +alembic upgrade head +cd - \ No newline at end of file diff --git a/scripts/rollback.sh b/scripts/rollback.sh new file mode 100755 index 0000000000..ca1eaacfc0 --- /dev/null +++ b/scripts/rollback.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +CURDIR=$(cd `dirname $0` && pwd) +BASEDIR=`dirname $CURDIR` + +cd $BASEDIR +alembic downgrade 6a04e38b8704 +cd - diff --git a/scripts/server-docker.sh b/scripts/server-docker.sh deleted file mode 100755 index a7e791ec79..0000000000 --- a/scripts/server-docker.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash - -set -x - -BASEDIR=$(dirname "$0") -PROJECT_DIR=$BASEDIR/.. - -cd "$PROJECT_DIR" || exit - -docker build --tag antarest . 
- -STUDIES_ABSOLUTE_PATH=$(realpath "$PROJECT_DIR"/examples/studies) - -docker run \ - -p 80:5000 \ - -v "$STUDIES_ABSOLUTE_PATH":/studies \ - antarest \ No newline at end of file diff --git a/scripts/server.sh b/scripts/start-dev.sh similarity index 88% rename from scripts/server.sh rename to scripts/start-dev.sh index c220edc2e9..120d6f1eda 100755 --- a/scripts/server.sh +++ b/scripts/start-dev.sh @@ -7,4 +7,7 @@ cd "$PROJECT_DIR" || exit source ./venv/bin/activate export PYTHONPATH=$PYTHONPATH:. + +sh $BASEDIR/pre-start.sh + python ./antarest/main.py -c ./resources/application.yaml diff --git a/scripts/start.sh b/scripts/start.sh new file mode 100755 index 0000000000..85fc47f466 --- /dev/null +++ b/scripts/start.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +set -e + +CURDIR=$(cd `dirname $0` && pwd) +BASEDIR=`dirname $CURDIR` + +sh $CURDIR/pre-start.sh + +gunicorn --config $BASEDIR/conf/gunicorn.py --worker-class=uvicorn.workers.UvicornWorker antarest.wsgi:app \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py index 9aaf25f46f..5762b0b6ca 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,7 +1,6 @@ import sys -from functools import wraps from pathlib import Path -from typing import Any, Callable +from typing import Any import pytest diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index e58e737b6e..fbfab957c1 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -5,8 +5,13 @@ import jinja2 import pytest +from alembic import command +from alembic.config import Config +from sqlalchemy import create_engine +from antarest import main from antarest.main import fastapi_app +from tests.conftest import project_dir @pytest.fixture @@ -44,6 +49,17 @@ def app(tmp_path: str, sta_mini_zip_path: Path, project_path: Path): ) ) + alembic_cfg = Config() + alembic_cfg.set_main_option( + "script_location", str(project_dir / "alembic") + ) + alembic_cfg.set_main_option("sqlalchemy.url", db_url) + command.upgrade(alembic_cfg, "head") + + engine = create_engine( + db_url, + ) + return fastapi_app( config_path, project_path / "resources", mount_front=False )
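The new helper scripts also give developers a short loop around model changes. A sketch of the intended usage, assuming the database referenced by your config is reachable; the revision message below is an example:

```shell script
# 1. Edit the SQLAlchemy models, then autogenerate a revision under alembic/versions/:
./scripts/create_db_migration.sh "add_my_table"   # alembic revision --autogenerate -m "add_my_table"

# 2. Apply it to the database referenced by the config (same command scripts/pre-start.sh runs):
alembic upgrade head

# 3. If needed, revert: step 1 recorded the pre-migration revision in rollback.sh via sed:
./scripts/rollback.sh                             # alembic downgrade <pinned revision>
```

The integration tests follow the same path programmatically: as shown in `tests/integration/conftest.py` above, the fixture calls `command.upgrade(alembic_cfg, "head")` against a temporary database URL before building the FastAPI app.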