From 328f1ec3fd1bec38d8e23231cba825c6644c03b0 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 13 Mar 2023 11:44:46 +0100 Subject: [PATCH 001/142] refactor api rest --- backend/gn_modulator/blueprint.py | 3 + backend/gn_modulator/module/config/base.py | 1 - backend/gn_modulator/module/config/utils.py | 27 -- backend/gn_modulator/routes/__init__.py | 0 backend/gn_modulator/routes/exports.py | 43 ++ backend/gn_modulator/routes/rest.py | 72 ++++ backend/gn_modulator/routes/utils/__init__.py | 0 .../gn_modulator/routes/utils/decorators.py | 46 +++ backend/gn_modulator/routes/utils/params.py | 52 +++ .../gn_modulator/routes/utils/repository.py | 154 +++++++ backend/gn_modulator/schema/__init__.py | 2 - backend/gn_modulator/schema/api.py | 391 ------------------ backend/gn_modulator/schema/base.py | 25 ++ .../schema/models/column_properties.py | 3 +- .../schema/repositories/filters.py | 100 ----- backend/gn_modulator/tests/test_rest_api.py | 25 ++ backend/gn_modulator/tests/utils/rest.py | 119 ++++++ backend/gn_modulator/utils/filters.py | 104 +++++ doc/changelog.md | 4 + frontend/app/services/config.service.ts | 4 +- frontend/app/services/data.service.ts | 2 +- 21 files changed, 652 insertions(+), 525 deletions(-) create mode 100644 backend/gn_modulator/routes/__init__.py create mode 100644 backend/gn_modulator/routes/exports.py create mode 100644 backend/gn_modulator/routes/rest.py create mode 100644 backend/gn_modulator/routes/utils/__init__.py create mode 100644 backend/gn_modulator/routes/utils/decorators.py create mode 100644 backend/gn_modulator/routes/utils/params.py create mode 100644 backend/gn_modulator/routes/utils/repository.py delete mode 100644 backend/gn_modulator/schema/api.py create mode 100644 backend/gn_modulator/tests/test_rest_api.py create mode 100644 backend/gn_modulator/tests/utils/rest.py create mode 100644 backend/gn_modulator/utils/filters.py diff --git a/backend/gn_modulator/blueprint.py b/backend/gn_modulator/blueprint.py index 
4df159ab..ade1e1e6 100644 --- a/backend/gn_modulator/blueprint.py +++ b/backend/gn_modulator/blueprint.py @@ -16,6 +16,9 @@ blueprint = Blueprint(MODULE_CODE.lower(), __name__) +from gn_modulator.routes.rest import * # noqa +from gn_modulator.routes.exports import * # noqa + # Creation des commandes pour modules blueprint.cli.short_help = "Commandes pour l' administration du module MODULES" for cmd in commands: diff --git a/backend/gn_modulator/module/config/base.py b/backend/gn_modulator/module/config/base.py index e4dc6eb4..ae8fcafa 100644 --- a/backend/gn_modulator/module/config/base.py +++ b/backend/gn_modulator/module/config/base.py @@ -78,7 +78,6 @@ def init_module_config(cls, module_code): if module_config.get("registred"): cls.process_module_params(module_code) - cls.process_module_api(module_code) return module_config diff --git a/backend/gn_modulator/module/config/utils.py b/backend/gn_modulator/module/config/utils.py index 60cab22d..f5065106 100644 --- a/backend/gn_modulator/module/config/utils.py +++ b/backend/gn_modulator/module/config/utils.py @@ -242,30 +242,3 @@ def process_module_objects(cls, module_code): # # mise en cache pour pouvoir s'en reservir par ailleurs # set_global_cache(["exports", export_code], export_definition) - - @classmethod - def process_module_api(cls, module_code): - """ - ouvre les routes pour un module - """ - - module_config = cls.module_config(module_code) - - bp = Blueprint(module_code, __name__) - - # pour tous les object d'un module - for object_code, object_definition in module_config["objects"].items(): - # on récupère schema methodes - sm = SchemaMethods(object_definition["schema_code"]) - - # ouverture des routes pour ce schema - # - avec les options:'object_definition' - # en particulier le cruved - sm.register_api(bp, module_code, object_code, copy.deepcopy(object_definition)) - - # les prefiltres définis dans les objects ne servent que dans les ouverture de route ??? 
- if "prefilters" in object_definition: - del object_definition["prefilters"] - - # enregistrement du blueprint pour ce module - current_app.register_blueprint(bp, url_prefix=f"/{module_code.lower()}") diff --git a/backend/gn_modulator/routes/__init__.py b/backend/gn_modulator/routes/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/gn_modulator/routes/exports.py b/backend/gn_modulator/routes/exports.py new file mode 100644 index 00000000..943490b2 --- /dev/null +++ b/backend/gn_modulator/routes/exports.py @@ -0,0 +1,43 @@ +from gn_modulator import DefinitionMethods, ModuleMethods, SchemaMethods +from gn_modulator.blueprint import blueprint +from .utils.decorators import check_rest_route +from .utils.params import parse_request_args + + +@blueprint.route("/exports///", methods=["GET"]) +@check_rest_route("E") +def api_export(module_code, object_code, export_code): + """ + Route pour les exports + """ + + # récupération de la configuration de l'export + export_definition = DefinitionMethods.get_definition("export", export_code) + + object_definition = ModuleMethods.object_config(module_code, object_code) + schema_code = ModuleMethods.schema_code(module_code, object_code) + # renvoie une erreur si l'export n'est pas trouvé + if export_definition is None: + return "L'export correspondant au code {export_code} n'existe pas", 403 + + # definitions des paramètres + + # - query params + object_definition + params = parse_request_args(object_definition) + + # - export_definition + # - on force fields a être + # - TODO faire l'intersection de params['fields'] et export_definition['fields'] (si params['fields'] est défini) + params["fields"] = export_definition["fields"] + # - TODO autres paramètres ???? 
+ + cruved_type = params.get("cruved_type") or "R" + + # recupération de la liste + sm = SchemaMethods(schema_code) + + query_list = sm.query_list(module_code=module_code, cruved_type=cruved_type, params=params) + + # on assume qu'il n'y que des export csv + # TODO ajouter query param export_type (csv, shape, geosjon, etc) et traiter les différents cas + return sm.process_export_csv(module_code, query_list, params) diff --git a/backend/gn_modulator/routes/rest.py b/backend/gn_modulator/routes/rest.py new file mode 100644 index 00000000..07c6542b --- /dev/null +++ b/backend/gn_modulator/routes/rest.py @@ -0,0 +1,72 @@ +from gn_modulator.blueprint import blueprint +from .utils.decorators import check_rest_route +from .utils.repository import ( + get_list_rest, + get_one_rest, + get_page_number_and_list, + post_rest, + patch_rest, + delete_rest, +) + + +@blueprint.route("/rest///", methods=["GET"]) +@check_rest_route("R") +def api_rest_get_list(module_code, object_code): + """ + Route pour récupérer les listes + """ + + return get_list_rest(module_code, object_code) + + +@blueprint.route("/rest///", methods=["GET"]) +@check_rest_route("R") +def api_rest_get_one(module_code, object_code, value): + """ + Route pour récupérer une ligne + """ + + return get_one_rest(module_code, object_code, value) + + +@blueprint.route("/page_number_and_list///", methods=["GET"]) +@check_rest_route("R") +def api_rest_get_page_number_and_list(module_code, object_code, value): + """ + Route pour récupérer une liste à partir d'un ligne + dont on va chercher le numero de page + et renvoyer la liste de la page + """ + + return get_page_number_and_list(module_code, object_code, value) + + +@blueprint.route("/rest///", methods=["POST"]) +@check_rest_route("C") +def api_rest_post(module_code, object_code): + """ + Route pour créer une nouvelle ligne + """ + + return post_rest(module_code, object_code) + + +@blueprint.route("/rest///", methods=["PATCH"]) +@check_rest_route("U") +def 
api_rest_patch(module_code, object_code, value): + """ + Route pour modifier une ligne + """ + + return patch_rest(module_code, object_code, value) + + +@blueprint.route("/rest///", methods=["DELETE"]) +@check_rest_route("D") +def api_rest_delete(module_code, object_code, value): + """ + Route pour supprimer une ligne + """ + + return delete_rest(module_code, object_code, value) diff --git a/backend/gn_modulator/routes/utils/__init__.py b/backend/gn_modulator/routes/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/gn_modulator/routes/utils/decorators.py b/backend/gn_modulator/routes/utils/decorators.py new file mode 100644 index 00000000..939091de --- /dev/null +++ b/backend/gn_modulator/routes/utils/decorators.py @@ -0,0 +1,46 @@ +from functools import wraps +from gn_modulator import ModuleMethods +from geonature.core.gn_permissions.decorators import check_cruved_scope +from werkzeug.exceptions import Forbidden + + +def check_rest_route(action): + def _check_rest_route(fn): + """ + decorateur qui va vérifier si la route est bien définie + pour un module un object et un action (CRUVED) donnés + puis effectue check_cruved_scope pour vérifier le droit de l'utilateur à accéder à cette route + """ + + @wraps(fn) + def __check_rest_route(*args, **kwargs): + module_code = kwargs["module_code"] + object_code = kwargs["object_code"] + + module_config = ModuleMethods.module_config(module_code) + + if not module_config: + raise Forbidden(description=f"Module {module_code} does not exists") + + if not module_config.get("registred"): + raise Forbidden(description=f"Module {module_code} is not registred") + + object_config = ModuleMethods.object_config(module_code, object_code) + + if not object_config: + raise Forbidden( + description=f"object {object_code} of module {module_code} is not defined" + ) + + cruved = object_config.get("cruved", "") + + if action not in cruved: + raise Forbidden( + description=f"action {action} is not defined for 
object {object_code} of module {module_code}" + ) + + return check_cruved_scope(action, module_code=module_code)(fn)(*args, **kwargs) + + return __check_rest_route + + return _check_rest_route diff --git a/backend/gn_modulator/routes/utils/params.py b/backend/gn_modulator/routes/utils/params.py new file mode 100644 index 00000000..739542d7 --- /dev/null +++ b/backend/gn_modulator/routes/utils/params.py @@ -0,0 +1,52 @@ +import json +from flask import request +from gn_modulator.utils.filters import parse_filters + + +def parse_request_args(object_definition={}): + params = { + "as_geojson": load_param(request.args.get("as_geojson", "false")), + "flat_keys": load_param(request.args.get("flat_keys", "false")), + "compress": load_param(request.args.get("compress", "false")), + "fields": load_array_param(request.args.get("fields")), + "field_name": load_param(request.args.get("field_name", "null")), + "filters": parse_filters(request.args.get("filters")), + "prefilters": parse_filters(request.args.get("prefilters")), + "page": load_param(request.args.get("page", "null")), + "page_size": load_param(request.args.get("page_size", "null")), + "sort": load_array_param(request.args.get("sort")), + "value": load_param(request.args.get("value", "null")), + "as_csv": load_param(request.args.get("as_csv", "false")), + "cruved_type": load_param(request.args.get("cruved_type", "null")), + "sql": "sql" in request.args, + "test": load_param(request.args.get("test", "null")), + } + + if "prefilters" in object_definition: + params["prefilters"] = ( + parse_filters(object_definition["prefilters"]) + params["prefilters"] + ) + + return params + + +def load_array_param(param): + """ + pour les cas ou params est une chaine de caractère séparée par des ',' + """ + + if not param: + return [] + + return param.split(",") + + +def load_param(param): + if param == "undefined": + return None + + # pour traiter les true false + try: + return json.loads(param) + except Exception: + return param 
diff --git a/backend/gn_modulator/routes/utils/repository.py b/backend/gn_modulator/routes/utils/repository.py new file mode 100644 index 00000000..b19798fb --- /dev/null +++ b/backend/gn_modulator/routes/utils/repository.py @@ -0,0 +1,154 @@ +from flask import request, make_response +from gn_modulator import ModuleMethods, SchemaMethods +from .params import parse_request_args +from sqlalchemy import orm + + +def get_list_rest(module_code, object_code, additional_params={}): + object_definition = ModuleMethods.object_config(module_code, object_code) + schema_code = ModuleMethods.schema_code(module_code, object_code) + sm = SchemaMethods(schema_code) + + params = { + **parse_request_args(object_definition), + **additional_params, + } + + cruved_type = params.get("cruved_type") or "R" + query_infos = sm.get_query_infos( + module_code=module_code, + cruved_type=cruved_type, + params=params, + url=request.url, + ) + + query_list = sm.query_list(module_code=module_code, cruved_type=cruved_type, params=params) + + if params.get("sql"): + sql_txt = sm.cls.query_as_txt(query_list) + response = make_response( + sm.cls.pprint_sql(sql_txt), + 200, + ) + response.mimetype = "text/plain" + return response + + res_list = query_list.all() + + out = { + **query_infos, + "data": sm.serialize_list( + res_list, + fields=params.get("fields"), + as_geojson=params.get("as_geojson"), + flat_keys=params.get("flat_keys"), + ), + } + + return out + + +def get_one_rest(module_code, object_code, value): + object_definition = ModuleMethods.object_config(module_code, object_code) + schema_code = ModuleMethods.schema_code(module_code, object_code) + sm = SchemaMethods(schema_code) + + params = parse_request_args(object_definition) + + try: + m = sm.get_row( + value, + field_name=params.get("field_name"), + module_code=module_code, + cruved_type="R", + params=params, + ).one() + + except sm.errors.SchemaUnsufficientCruvedRigth as e: + return f"Erreur Cruved : {str(e)}", 403 + + except 
orm.exc.NoResultFound as e: + return ( + f"Pas de resultats trouvé pour {schema_code} avec ({params.get('field_name') or sm.pk_field_name()})=({value})", + 404, + ) + + return sm.serialize(m, fields=params.get("fields"), as_geojson=params.get("as_geojson")) + + +def post_rest(module_code, object_code): + object_definition = ModuleMethods.object_config(module_code, object_code) + schema_code = ModuleMethods.schema_code(module_code, object_code) + sm = SchemaMethods(schema_code) + + data = request.get_json() + params = parse_request_args(object_definition) + + try: + m = sm.insert_row(data) + + except sm.errors.SchemaUnsufficientCruvedRigth as e: + return "Erreur Cruved : {}".format(str(e)), 403 + + return sm.serialize(m, fields=params.get("fields"), as_geojson=params.get("as_geojson")) + + +def patch_rest(module_code, object_code, value): + object_definition = ModuleMethods.object_config(module_code, object_code) + schema_code = ModuleMethods.schema_code(module_code, object_code) + sm = SchemaMethods(schema_code) + + data = request.get_json() + params = parse_request_args(object_definition) + + try: + m, _ = sm.update_row( + value, + data, + field_name=params.get("field_name"), + module_code=module_code, + params=params, + ) + + except sm.errors.SchemaUnsufficientCruvedRigth as e: + return "Erreur Cruved : {}".format(str(e)), 403 + + return sm.serialize(m, fields=params.get("fields"), as_geojson=params.get("as_geojson")) + + +def delete_rest(module_code, object_code, value): + object_definition = ModuleMethods.object_config(module_code, object_code) + schema_code = ModuleMethods.schema_code(module_code, object_code) + sm = SchemaMethods(schema_code) + + params = parse_request_args(object_definition) + + m = sm.get_row( + value, + field_name=params.get("field_name"), + module_code=module_code, + cruved_type="D", + params=params, + ).one() + dict_out = sm.serialize(m, fields=params.get("fields"), as_geojson=params.get("as_geojson")) + + try: + sm.delete_row(value, 
field_name=params.get("field_name")) + + except sm.errors.SchemaUnsufficientCruvedRigth as e: + return "Erreur Cruved : {}".format(str(e)), 403 + + return dict_out + + pass + + +def get_page_number_and_list(module_code, object_code, value): + object_definition = ModuleMethods.object_config(module_code, object_code) + schema_code = ModuleMethods.schema_code(module_code, object_code) + sm = SchemaMethods(schema_code) + + params = parse_request_args(object_definition) + page_number = sm.get_page_number(value, module_code, params.get("cruved_type") or "R", params) + + return get_list_rest(module_code, object_code, additional_params={"page": page_number}) diff --git a/backend/gn_modulator/schema/__init__.py b/backend/gn_modulator/schema/__init__.py index 742f114c..d2055ce7 100644 --- a/backend/gn_modulator/schema/__init__.py +++ b/backend/gn_modulator/schema/__init__.py @@ -4,7 +4,6 @@ class gathering methods from mixins """ -from .api import SchemaApi from .auto import SchemaAuto from .base import SchemaBase from .commands import SchemaCommands @@ -23,7 +22,6 @@ class gathering methods from mixins class SchemaMethods( - SchemaApi, SchemaAuto, SchemaBase, SchemaCommands, diff --git a/backend/gn_modulator/schema/api.py b/backend/gn_modulator/schema/api.py deleted file mode 100644 index d8ccaa62..00000000 --- a/backend/gn_modulator/schema/api.py +++ /dev/null @@ -1,391 +0,0 @@ -""" - SchemaMethods : api -""" - -import json -from flask.views import MethodView -from flask import request, make_response -from geonature.core.gn_permissions import decorators as permissions -from geonature.utils.env import db - -# from geonature.utils.config import config -from gn_modulator import MODULE_CODE -from gn_modulator.definition import DefinitionMethods - - -class SchemaApi: - """ - class for schema api processing - - doc: - - https://flask.palletsprojects.com/en/2.0.x/views/ - - """ - - def method_view_name(self, module_code, object_code, view_type): - object_code_undot = 
object_code.replace(".", "_") - return f"MV_{module_code}_{object_code_undot}_{view_type}" - - def view_name(self, module_code, object_code, view_type): - """ """ - object_code_undot = object_code.replace(".", "_") - return f"MV_{module_code}_{object_code_undot}_{view_type}" - - @classmethod - def base_url(cls): - """ - base url (may differ with apps (GN, UH, TH, ...)) - - TODO process apps ? - - """ - from geonature.utils.config import config - - return "{}/{}".format(config["API_ENDPOINT"], MODULE_CODE.lower()) - - def url(self, post_url, full_url=False): - """ - /{schema_code}{post_url} - - - full/url renvoie l'url complet - - TODO gérer par type d'url ? - """ - - url = self.attr("meta.url", "/{}{}".format(self.schema_code(), post_url)) - - if full_url: - url = "{}{}".format(self.cls.base_url(), url) - - return url - - def parse_request_args(self, request, object_definition={}): - """ - TODO !!! à refaire avec repo get_list - parse request flask element - - filters - - prefilters - - fields - - field_name - - sort - - page - - page_size - - TODO plusieurs possibilités pour le parametrage - - par exemple au format tabulator ou autre .... 
- """ - - # params = json.loads(params_txt) - params = { - "as_geojson": self.load_param(request.args.get("as_geojson", "false")), - "flat_keys": self.load_param(request.args.get("flat_keys", "false")), - "compress": self.load_param(request.args.get("compress", "false")), - "fields": self.load_array_param(request.args.get("fields")), - "field_name": self.load_param(request.args.get("field_name", "null")), - "filters": self.parse_filters(request.args.get("filters")), - "prefilters": self.parse_filters(request.args.get("prefilters")), - "page": self.load_param(request.args.get("page", "null")), - "page_size": self.load_param(request.args.get("page_size", "null")), - "sort": self.load_array_param(request.args.get("sort")), - "value": self.load_param(request.args.get("value", "null")), - "as_csv": self.load_param(request.args.get("as_csv", "false")), - "cruved_type": self.load_param(request.args.get("cruved_type", "null")), - "sql": "sql" in request.args, - "test": self.load_param(request.args.get("test", "null")), - } - - if "prefilters" in object_definition: - params["prefilters"] = ( - self.parse_filters(object_definition["prefilters"]) + params["prefilters"] - ) - - return params - - def load_array_param(self, param): - """ - pour les cas ou params est une chaine de caractère séparée par des ',' - """ - - if not param: - return [] - - return param.split(",") - - def load_param(self, param): - if param == "undefined": - return None - - # pour traiter les true false - try: - return json.loads(param) - except Exception: - return param - - def schema_api_dict(self, module_code, object_definition): - """ - object_definition : dict - - prefilters - """ - - def get_rest(self_mv, value=None): - if value: - try: - return get_one_rest(value) - except self.errors.SchemaUnsufficientCruvedRigth: - return f"Vous n'avez pas les droits suffisants pour accéder à cette requête (schema_code: {self.schema_code()}, module_code: {module_code})" - - else: - return get_list_rest() - - def 
get_one_rest(value): - params = self.parse_request_args(request, object_definition) - - try: - m = self.get_row( - value, - field_name=params.get("field_name"), - module_code=module_code, - cruved_type="R", - params=params, - ).one() - - except self.errors.SchemaUnsufficientCruvedRigth as e: - return "Erreur Cruved : {}".format(str(e)), 403 - - return self.serialize( - m, fields=params.get("fields"), as_geojson=params.get("as_geojson") - ) - - def get_list_rest(additional_params={}): - params = { - **self.parse_request_args(request, object_definition), - **additional_params, - } - - cruved_type = params.get("cruved_type") or "R" - query_infos = self.get_query_infos( - module_code=module_code, - cruved_type=cruved_type, - params=params, - url=request.url, - ) - - query_list = self.query_list( - module_code=module_code, cruved_type=cruved_type, params=params - ) - - if params.get("sql"): - sql_txt = self.cls.query_as_txt(query_list) - response = make_response( - self.cls.pprint_sql(sql_txt), - 200, - ) - response.mimetype = "text/plain" - return response - - res_list = query_list.all() - - out = { - **query_infos, - "data": self.serialize_list( - res_list, - fields=params.get("fields"), - as_geojson=params.get("as_geojson"), - flat_keys=params.get("flat_keys"), - ), - } - - return out - - def post_rest(self_mv): - data = request.get_json() - params = self.parse_request_args(request, object_definition) - - try: - m = self.insert_row(data) - - except self.errors.SchemaUnsufficientCruvedRigth as e: - return "Erreur Cruved : {}".format(str(e)), 403 - - return self.serialize( - m, fields=params.get("fields"), as_geojson=params.get("as_geojson") - ) - - def patch_rest(self_mv, value): - data = request.get_json() - params = self.parse_request_args(request, object_definition) - - try: - m, _ = self.update_row( - value, - data, - field_name=params.get("field_name"), - module_code=module_code, - params=params, - ) - - except self.errors.SchemaUnsufficientCruvedRigth as e: - 
return "Erreur Cruved : {}".format(str(e)), 403 - - return self.serialize( - m, fields=params.get("fields"), as_geojson=params.get("as_geojson") - ) - - def delete_rest(self_mv, value): - params = self.parse_request_args(request, object_definition) - - m = self.get_row( - value, - field_name=params.get("field_name"), - module_code=module_code, - cruved_type="D", - params=params, - ).one() - dict_out = self.serialize( - m, fields=params.get("fields"), as_geojson=params.get("as_geojson") - ) - - try: - self.delete_row(value, field_name=params.get("field_name")) - - except self.errors.SchemaUnsufficientCruvedRigth as e: - return "Erreur Cruved : {}".format(str(e)), 403 - - return dict_out - - def get_page_number(self_mv, value): - """ """ - - params = self.parse_request_args(request, object_definition) - page_number = self.get_page_number( - value, module_code, params.get("cruved_type") or "R", params - ) - - return get_list_rest(additional_params={"page": page_number}) - # return { - # "page": self.get_page_number( - # value, module_code, params.get("cruved_type") or "R", params - # ) - # } - - def get_export(self_mv, export_code): - """ - methode pour gérer la route d'export - - récupération de la configuration de l'export - """ - - # récupération de la configuration de l'export - export_definition = DefinitionMethods.get_definition("export", export_code) - - # renvoie une erreur si l'export n'est pas trouvé - if export_definition is None: - return "L'export correspondant au code {export_code} n'existe pas", 403 - - # definitions des paramètres - - # - query params + object_definition - params = self.parse_request_args(request, object_definition) - - # - export_definition - # - on force fields a être - # - TODO faire l'intersection de params['fields'] et export_definition['fields'] (si params['fields'] est défini) - params["fields"] = export_definition["fields"] - # - TODO autres paramètres ???? 
- - cruved_type = params.get("cruved_type") or "R" - - # recupération de la liste - query_list = self.query_list( - module_code=module_code, cruved_type=cruved_type, params=params - ) - - # on assume qu'il n'y que des export csv - # TODO ajouter query param export_type (csv, shape, geosjon, etc) et traiter les différents cas - return self.process_export_csv(module_code, query_list, params) - - return { - "rest": { - "get": permissions.check_cruved_scope("R", module_code=module_code)(get_rest), - "post": permissions.check_cruved_scope("C", module_code=module_code)(post_rest), - "patch": permissions.check_cruved_scope("U", module_code=module_code)(patch_rest), - "delete": permissions.check_cruved_scope("D", module_code=module_code)( - delete_rest - ), - }, - "export": { - "get": permissions.check_cruved_scope("E", module_code=module_code)(get_export) - }, - "page_number": { - "get": permissions.check_cruved_scope("R", module_code=module_code)( - get_page_number - ) - }, - } - - def schema_view_func(self, view_type, module_code, object_definition): - """ - c'est ici que ce gère le CRUVED pour l'accès aux routes - """ - - schema_api_dict = self.schema_api_dict(module_code, object_definition)[view_type] - - MV = type( - self.method_view_name(module_code, object_definition["object_code"], view_type), - (MethodView,), - schema_api_dict, - ) - return MV.as_view(self.view_name(module_code, object_definition["object_code"], view_type)) - - def register_api(self, bp, module_code, object_code, object_definition={}): - """ - Fonction qui enregistre une api pour un schema - - TODO s - -comment gérer la config pour limiter les routes selon le cruved - """ - - cruved = object_definition.get("cruved", "") - - # rest api - view_func_rest = self.schema_view_func("rest", module_code, object_definition) - view_func_page_number = self.schema_view_func( - "page_number", module_code, object_definition - ) - view_func_export = self.schema_view_func("export", module_code, object_definition) 
- - # read: GET (liste et one_row) - if "R" in cruved: - bp.add_url_rule( - f"/{object_code}/", - defaults={"value": None}, - view_func=view_func_rest, - methods=["GET"], - ) - bp.add_url_rule(f"/{object_code}/", view_func=view_func_rest, methods=["GET"]) - bp.add_url_rule( - f"/{object_code}/page_number/", - view_func=view_func_page_number, - methods=["GET"], - ) - - # create : POST - if "C" in cruved: - bp.add_url_rule(f"/{object_code}/", view_func=view_func_rest, methods=["POST"]) - - # update : PATCH - if "U" in cruved: - bp.add_url_rule(f"/{object_code}/", view_func=view_func_rest, methods=["PATCH"]) - - # delete : DELETE - if "D" in cruved: - bp.add_url_rule( - f"/{object_code}/", view_func=view_func_rest, methods=["DELETE"] - ) - - # export - if "E" in cruved: # and object_definition.get("exports"): - bp.add_url_rule( - f"/{object_code}/exports/", - view_func=view_func_export, - methods=["GET"], - ) diff --git a/backend/gn_modulator/schema/base.py b/backend/gn_modulator/schema/base.py index da7408eb..b67560a4 100644 --- a/backend/gn_modulator/schema/base.py +++ b/backend/gn_modulator/schema/base.py @@ -8,6 +8,7 @@ import copy import json +from gn_modulator import MODULE_CODE from gn_modulator.utils.cache import get_global_cache column_types = [ @@ -338,3 +339,27 @@ def process_csv_data(self, key, data, options={}): return labels[1] if len(labels) > 2 else None return data + + @classmethod + def base_url(cls): + """ + base url (may differ with apps (GN, UH, TH, ...)) + TODO process apps ? + """ + from geonature.utils.config import config + + return "{}/{}".format(config["API_ENDPOINT"], MODULE_CODE.lower()) + + def url(self, post_url, full_url=False): + """ + /{schema_code}{post_url} + - full/url renvoie l'url complet + TODO gérer par type d'url ? 
+ """ + + url = self.attr("meta.url", "/{}{}".format(self.schema_code(), post_url)) + + if full_url: + url = "{}{}".format(self.cls.base_url(), url) + + return url diff --git a/backend/gn_modulator/schema/models/column_properties.py b/backend/gn_modulator/schema/models/column_properties.py index f3e73fa6..9a5470a9 100644 --- a/backend/gn_modulator/schema/models/column_properties.py +++ b/backend/gn_modulator/schema/models/column_properties.py @@ -9,6 +9,7 @@ cast, ) from geonature.utils.env import db +from gn_modulator.utils.filters import parse_filters from .. import errors @@ -98,7 +99,7 @@ def column_property_util_relation_where_conditions(self, key, column_property_de if column_property_def.get("filters") is not None: condition_filters, conditions = rel.process_filter_array( relation.mapper.entity, - self.parse_filters(column_property_def.get("filters")), + parse_filters(column_property_def.get("filters")), query=conditions, condition=True, ) diff --git a/backend/gn_modulator/schema/repositories/filters.py b/backend/gn_modulator/schema/repositories/filters.py index bfdcdb40..9143e89a 100644 --- a/backend/gn_modulator/schema/repositories/filters.py +++ b/backend/gn_modulator/schema/repositories/filters.py @@ -113,106 +113,6 @@ def process_filter_array(self, Model, filter_array, query=None, condition=None): cur_filter = loop_filter return cur_filter, query - def find_index_close(self, index_open, filters): - """ - pour trouver l'index de la parenthèse fermante ] correspondante - """ - cpt_open = 0 - for index in range(index_open + 1, len(filters)): - if filters[index] == "[": - cpt_open += 1 - if filters[index] == "]": - if cpt_open == 0: - return index - else: - cpt_open -= 1 - filters[index_open] = f" {filters[index_open]} " - raise Exception(f"Pas de parenthèse fermante trouvée {','.join(filters[index_open:])}") - - def parse_filters(self, filters): - """ - traite une liste de chaine de caractères représentant des filtres - """ - - if not filters: - return [] 
- - if isinstance(filters, str): - return self.parse_filters(filters.split(",")) - - filters_out = [] - - nb_filters = len(filters) - index = 0 - while index < nb_filters: - # calcul du filtre {field, type, value} - filter = self.parse_filter(filters[index]) - - # si on tombe sur une parenthèse ouvrante - if filter == "[": - # on cherche l'index de la parenthèse fermante ] correspondante - index_close = self.find_index_close(index, filters) - - # on calcule les filtres entre les deux [...] - filters_out.append(self.parse_filters(filters[index + 1 : index_close])) - - # on passe à l'index qui suit index_close - index = index_close + 1 - # de l'indice du ']' correspondant - - # si on tombe sur une parenthère fermante => pb - elif filter == "]": - filters[index] = f" {filters[index]} " - raise SchemaRepositoryFilterError( - f"Parenthese fermante non appariée trouvée dans {','.join(filters)}" - ) - - # sinon on ajoute le filtre à la liste et on passe à l'index suivant - else: - filters_out.append(filter) - index += 1 - - return filters_out - - def parse_filter(self, str_filter): - """ - renvoie un filtre a partir d'une chaine de caractère - id_truc=5 => { field: id_truc type: = value: 5 } etc... 
- """ - - if str_filter in "*|![]": - return str_filter - - index_min = None - filter_type_min = None - for filter_type in ["=", "<", ">", ">=", "<=", "like", "ilike", "in", "~"]: - try: - index = str_filter.index(f" {filter_type} ") - except ValueError: - continue - - if ( - (index_min is None) - or (index < index_min) - or (index_min == index and len(filter_type) > len(filter_type_min)) - ): - index_min = index - filter_type_min = filter_type - - if not filter_type_min: - return None - - filter = { - "field": str_filter[:index_min], - "type": filter_type_min, - "value": str_filter[index_min + len(filter_type_min) + 2 :], - } - - if filter_type_min == "in": - filter["value"] = filter["value"].split(";") - - return filter - def get_filter(self, Model, filter, query=None, condition=None): """ get filter diff --git a/backend/gn_modulator/tests/test_rest_api.py b/backend/gn_modulator/tests/test_rest_api.py new file mode 100644 index 00000000..93069881 --- /dev/null +++ b/backend/gn_modulator/tests/test_rest_api.py @@ -0,0 +1,25 @@ +""" + Test pour valider les fonctionalité repository + - get_one + - insert + - update + - delete + - list ?? 
+""" + +import pytest +from .utils.rest import test_schema_rest +from .data import commons as data_commons + + +@pytest.mark.usefixtures("client_class", "temporary_transaction") +class TestRest: + def test_gn_commons_module(self, client, users): + test_schema_rest( + client, + users["admin_user"], + "MODULATOR", + "commons.module", + data_commons.module(), + data_commons.module_update(), + ) diff --git a/backend/gn_modulator/tests/utils/rest.py b/backend/gn_modulator/tests/utils/rest.py new file mode 100644 index 00000000..f2120ec4 --- /dev/null +++ b/backend/gn_modulator/tests/utils/rest.py @@ -0,0 +1,119 @@ +import pytest + +from flask import url_for +from gn_modulator import SchemaMethods, ModuleMethods +from geonature.tests.utils import set_logged_user_cookie, unset_logged_user_cookie + + +@pytest.mark.skip() +def test_schema_rest(client, user, module_code, object_code, data_post, data_update): + """ + Test chainage sur les api rest + - get (vide) + - post + - get + - patch + - delete + - get(vide) + """ + + # patch cruved for tests + object_config = ModuleMethods.object_config(module_code, object_code) + assert object_config is not None + + object_config["cruved"] = "CRUVED" + + # INIT + set_logged_user_cookie(client, user) + schema_code = ModuleMethods.schema_code(module_code, object_code) + assert schema_code is not None + sm = SchemaMethods(schema_code) + field_name = sm.attr("meta.unique") + data_unique = ",".join(list(map(lambda x: data_post[x], field_name))) + + # GET VIDE + r = client.get( + url_for( + "modulator.api_rest_get_one", + value=data_unique, + module_code=module_code, + object_code=object_code, + field_name=field_name, + ) + ) + assert r.status_code == 404, "La donnée ne devrait pas exister" + + # POST + fields = list(data_post.keys()) + fields.append(sm.pk_field_name()) + + r = client.post( + url_for( + "modulator.api_rest_post", + module_code=module_code, + object_code=object_code, + fields=",".join(fields), + ), + data=data_post, + ) + + 
assert r.status_code == 200, "Erreur avec POST" + + data_from_post = r.json + assert all(data_post[k] == data_from_post[k] for k in list(data_post.keys())) + + assert sm.pk_field_name() in data_from_post + id = data_from_post[sm.pk_field_name()] + + # GET OK + r = client.get( + url_for( + "modulator.api_rest_get_one", + value=id, + module_code=module_code, + object_code=object_code, + ) + ) + assert r.status_code == 200, "Erreur avec GET" + + # PATCH + r = client.patch( + url_for( + "modulator.api_rest_patch", + value=id, + module_code=module_code, + object_code=object_code, + fields=",".join(list(data_update.keys())), + ), + data=data_update, + ) + + assert r.status_code == 200, "Erreur avec PATCH" + data_from_patch = r.json + assert all(data_update[k] == data_from_patch[k] for k in list(data_update.keys())) + + # DELETE + r = client.delete( + url_for( + "modulator.api_rest_delete", + value=id, + module_code=module_code, + object_code=object_code, + ) + ) + + assert r.status_code == 200, "Erreur avec DELETE" + # GET VIDE + r = client.get( + url_for( + "modulator.api_rest_get_one", + value=data_unique, + module_code=module_code, + object_code=object_code, + field_name=field_name, + ) + ) + assert r.status_code == 404, "La donnée n'a pas été effacée" + + # FINALIZE + unset_logged_user_cookie(client) diff --git a/backend/gn_modulator/utils/filters.py b/backend/gn_modulator/utils/filters.py new file mode 100644 index 00000000..eb60a8a3 --- /dev/null +++ b/backend/gn_modulator/utils/filters.py @@ -0,0 +1,104 @@ +from gn_modulator.schema.errors import SchemaRepositoryFilterError, SchemaRepositoryFilterTypeError + + +def parse_filters(filters): + """ + traite une liste de chaine de caractères représentant des filtres + """ + + if not filters: + return [] + + if isinstance(filters, str): + return parse_filters(filters.split(",")) + + filters_out = [] + + nb_filters = len(filters) + index = 0 + while index < nb_filters: + # calcul du filtre {field, type, value} + filter = 
parse_filter(filters[index]) + + # si on tombe sur une parenthèse ouvrante + if filter == "[": + # on cherche l'index de la parenthèse fermante ] correspondante + index_close = find_index_close(index, filters) + + # on calcule les filtres entre les deux [...] + filters_out.append(parse_filters(filters[index + 1 : index_close])) + + # on passe à l'index qui suit index_close + index = index_close + 1 + # de l'indice du ']' correspondant + + # si on tombe sur une parenthère fermante => pb + elif filter == "]": + filters[index] = f" {filters[index]} " + raise SchemaRepositoryFilterError( + f"Parenthese fermante non appariée trouvée dans {','.join(filters)}" + ) + + # sinon on ajoute le filtre à la liste et on passe à l'index suivant + else: + filters_out.append(filter) + index += 1 + + return filters_out + + +def parse_filter(str_filter): + """ + renvoie un filtre a partir d'une chaine de caractère + id_truc=5 => { field: id_truc type: = value: 5 } etc... + """ + + if str_filter in "*|![]": + return str_filter + + index_min = None + filter_type_min = None + for filter_type in ["=", "<", ">", ">=", "<=", "like", "ilike", "in", "~"]: + try: + index = str_filter.index(f" {filter_type} ") + except ValueError: + continue + + if ( + (index_min is None) + or (index < index_min) + or (index_min == index and len(filter_type) > len(filter_type_min)) + ): + index_min = index + filter_type_min = filter_type + + if not filter_type_min: + return None + + filter = { + "field": str_filter[:index_min], + "type": filter_type_min, + "value": str_filter[index_min + len(filter_type_min) + 2 :], + } + + if filter_type_min == "in": + filter["value"] = filter["value"].split(";") + + return filter + + +def find_index_close(index_open, filters): + """ + pour trouver l'index de la parenthèse fermante ] correspondante + """ + cpt_open = 0 + for index in range(index_open + 1, len(filters)): + if filters[index] == "[": + cpt_open += 1 + if filters[index] == "]": + if cpt_open == 0: + return index + 
else: + cpt_open -= 1 + filters[index_open] = f" {filters[index_open]} " + raise Exception(f"Pas de parenthèse fermante trouvée {','.join(filters[index_open:])}") diff --git a/doc/changelog.md b/doc/changelog.md index bf6459cc..6e20e311 100644 --- a/doc/changelog.md +++ b/doc/changelog.md @@ -1,6 +1,10 @@ # Changelog +## 1.0.6 (unreleased) +**✨ Améliorations** + +- Clarification dans la gestion des routes REST ## 1.0.5 (13-03-2023) **✨ Améliorations** diff --git a/frontend/app/services/config.service.ts b/frontend/app/services/config.service.ts index 3a332597..7337d49f 100644 --- a/frontend/app/services/config.service.ts +++ b/frontend/app/services/config.service.ts @@ -141,7 +141,7 @@ export class ModulesConfigService { exportUrl(moduleCode, objectCode, exportCode, options: any = {}) { const url = this._mRequest.url( - `${this.backendUrl()}/${moduleCode.toLowerCase()}/${objectCode}/exports/${exportCode}`, + `${this.backendUrl()}/modulator/exports/${moduleCode.toLowerCase()}/${objectCode}/${exportCode}`, { prefilters: options.prefilters, filters: options.filters, @@ -151,7 +151,7 @@ export class ModulesConfigService { } objectUrl(moduleCode, objectCode, value = '', urlSuffix = '') { - return `${this.backendUrl()}/${moduleCode.toLowerCase()}/${objectCode}/${urlSuffix}${ + return `${this.backendUrl()}/modulator/${urlSuffix || 'rest'}/${moduleCode}/${objectCode}/${ value || '' }`; } diff --git a/frontend/app/services/data.service.ts b/frontend/app/services/data.service.ts index 875f820f..36d4e84e 100644 --- a/frontend/app/services/data.service.ts +++ b/frontend/app/services/data.service.ts @@ -63,7 +63,7 @@ export class ModulesDataService { return this.dataRequest('get', moduleCode, objectCode, { value, params, - urlSuffix: 'page_number/', + urlSuffix: 'page_number_and_list', }); } From 690b05b16f2518bdf1d413e5349597a9b422ce43 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 13 Mar 2023 12:02:30 +0100 Subject: [PATCH 002/142] refactor button & color --- 
config/layouts/utils/utils.button_edit.layout.yml | 2 +- .../layouts/utils/utils.buttons_filter.layout.yml | 15 +++++++-------- .../layouts/utils/utils.buttons_form.layout.yml | 2 +- frontend/app/components/base/base.scss | 12 ++++++++++++ 4 files changed, 21 insertions(+), 10 deletions(-) diff --git a/config/layouts/utils/utils.button_edit.layout.yml b/config/layouts/utils/utils.button_edit.layout.yml index 6a6bc528..fe65cf55 100644 --- a/config/layouts/utils/utils.button_edit.layout.yml +++ b/config/layouts/utils/utils.button_edit.layout.yml @@ -4,7 +4,7 @@ title: boutton edit description: Bouton 'editer', destiné aux page de detail de monitoring layout: type: button - color: primary + color: success title: Éditer description: __f__o.label_edit(x) action: edit diff --git a/config/layouts/utils/utils.buttons_filter.layout.yml b/config/layouts/utils/utils.buttons_filter.layout.yml index 11c4b5d1..0ed7d704 100644 --- a/config/layouts/utils/utils.buttons_filter.layout.yml +++ b/config/layouts/utils/utils.buttons_filter.layout.yml @@ -9,15 +9,14 @@ layout: items: - flex: "0" type: button - color: primary + color: error + icon: refresh + description: Réinitialiser filtres + action: clear-filters + - flex: "0" + type: button + color: success title: Rechercher icon: done description: Effectuer une recherche avec les filtre définis ci-dessus action: filter - - flex: "0" - type: button - color: primary - title: Réinitialiser - icon: refresh - description: RAZ des filtres - action: clear-filters diff --git a/config/layouts/utils/utils.buttons_form.layout.yml b/config/layouts/utils/utils.buttons_form.layout.yml index d7d79942..15168131 100644 --- a/config/layouts/utils/utils.buttons_form.layout.yml +++ b/config/layouts/utils/utils.buttons_form.layout.yml @@ -8,7 +8,7 @@ layout: items: - flex: "0" type: button - color: primary + color: success title: Valider icon: done description: Enregistrer le contenu du formulaire diff --git a/frontend/app/components/base/base.scss 
b/frontend/app/components/base/base.scss index f9bf991a..c722c1e4 100644 --- a/frontend/app/components/base/base.scss +++ b/frontend/app/components/base/base.scss @@ -29,6 +29,18 @@ width: 50%; } +// couleur custom des bouttons +.layout-buttons .mat-success { + background-color: green; + color: #fff; +} + +// couleur custom des bouttons +.layout-buttons .mat-error { + background-color: red; + color: #fff; +} + .content-container { min-width: 50%; } From 1233af69643dc669394b0c6b14024a84f9f4311f Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 20 Mar 2023 22:23:54 +0100 Subject: [PATCH 003/142] fix #32 overflow-y and scrolls --- .../layouts/utils/utils.object_details.layout.yml | 3 +-- .../utils/utils.object_form_map.layout.yml | 3 +-- config/modules/contrib/m_sipaf/config.yml | 15 +++++++++------ doc/changelog.md | 1 + .../layout/base/layout-section.component.html | 8 ++++++-- .../components/layout/base/layout.component.ts | 14 +++++++++----- .../object/layout-object-filters.component.ts | 1 - 7 files changed, 27 insertions(+), 18 deletions(-) diff --git a/config/layouts/utils/utils.object_details.layout.yml b/config/layouts/utils/utils.object_details.layout.yml index ab613413..6c01c415 100644 --- a/config/layouts/utils/utils.object_details.layout.yml +++ b/config/layouts/utils/utils.object_details.layout.yml @@ -10,7 +10,6 @@ layout: items: - title: __f__o.title_details(x) flex: "0" - - overflow: true - items: __LAYOUT__ + - items: __LAYOUT__ - code: utils.button_edit flex: "0" diff --git a/config/layouts/utils/utils.object_form_map.layout.yml b/config/layouts/utils/utils.object_form_map.layout.yml index 52a21cb3..e4f3449c 100644 --- a/config/layouts/utils/utils.object_form_map.layout.yml +++ b/config/layouts/utils/utils.object_form_map.layout.yml @@ -28,7 +28,6 @@ layout: flex: "0" - title: __f__o.title_create_edit(x) flex : "0" - - overflow: true - items: __LAYOUT__ + - items: __LAYOUT__ - code: utils.buttons_form flex: '0' diff --git 
a/config/modules/contrib/m_sipaf/config.yml b/config/modules/contrib/m_sipaf/config.yml index cebd7977..3e1f2ac4 100644 --- a/config/modules/contrib/m_sipaf/config.yml +++ b/config/modules/contrib/m_sipaf/config.yml @@ -1,5 +1,6 @@ site_filters_fields: display: tabs + overflow: true items: - label: Infos items: @@ -102,6 +103,7 @@ site_map_popup_fields: site_details_fields: display: tabs + overflow: true items: - label: Propriétés items: @@ -165,7 +167,7 @@ site_details_fields: - key: nomenclatures_ouvrage_type.cd_nomenclature hidden: true - key: ouvrage_type_autre - hidden: __f__!data.nomenclatures_ouvrage_type?.some(n => n.cd_nomenclature == 'AUT') + hidden: __f__!data?.nomenclatures_ouvrage_type?.some(n => n.cd_nomenclature == 'AUT') - direction: row items: - pi_ou_ps @@ -185,7 +187,7 @@ site_details_fields: - diametre - title: Banquette (Ouvrages hydrauliques) display: fieldset - hidden: "__f__!data.ouvrage_hydrau" + hidden: "__f__!data?.ouvrage_hydrau" items: - direction: row items: @@ -202,6 +204,7 @@ site_details_fields: site_form_fields: display: tabs + overflow: true items: - label: Propriétés items: @@ -280,10 +283,10 @@ site_form_fields: return_object: true additional_fields: ["cd_nomenclature"] - key: ouvrage_type_autre - description: __f__data.nomenclatures_ouvrage_type?.some(n => n.cd_nomenclature == 'AUT') + description: __f__data?.nomenclatures_ouvrage_type?.some(n => n.cd_nomenclature == 'AUT') type: string - hidden: __f__!data.nomenclatures_ouvrage_type?.some(n => n.cd_nomenclature == 'AUT') - required: __f__data.nomenclatures_ouvrage_type?.some(n => n.cd_nomenclature == 'AUT') + hidden: __f__!data?.nomenclatures_ouvrage_type?.some(n => n.cd_nomenclature == 'AUT') + required: __f__data?.nomenclatures_ouvrage_type?.some(n => n.cd_nomenclature == 'AUT') - title: Dimensions display: fieldset @@ -299,7 +302,7 @@ site_form_fields: - hauteur_dispo_faune - diametre - title: Banquette - hidden: "__f__!data.ouvrage_hydrau" + hidden: 
"__f__!data?.ouvrage_hydrau" display: fieldset items: - direction: row diff --git a/doc/changelog.md b/doc/changelog.md index 6e20e311..564645fe 100644 --- a/doc/changelog.md +++ b/doc/changelog.md @@ -5,6 +5,7 @@ **✨ Améliorations** - Clarification dans la gestion des routes REST +- Meilleure gestion des `tabs` et des `scrolls` ## 1.0.5 (13-03-2023) **✨ Améliorations** diff --git a/frontend/app/components/layout/base/layout-section.component.html b/frontend/app/components/layout/base/layout-section.component.html index 5fc7e8ba..f873c273 100644 --- a/frontend/app/components/layout/base/layout-section.component.html +++ b/frontend/app/components/layout/base/layout-section.component.html @@ -7,7 +7,7 @@
- + on remet à 0 + const overflowStyle = {}; + if (this.computedLayout.display != 'tabs') { + overflowStyle['overflow-y'] = 'scroll'; + } if (this.docHeightSave > docHeight || !this.docHeightSave) { this.computedLayout.style = { ...(this.computedLayout.style || {}), height: '200px', - 'overflow-y': 'scroll', + ...overflowStyle, }; this.layout.style = { ...(this.layout.style || {}), height: `200px`, - 'overflow-y': 'scroll', + ...overflowStyle, }; } @@ -447,13 +451,13 @@ export class ModulesLayoutComponent implements OnInit { this.layout.style = { ...(this.layout.style || {}), height: `${height}px`, - 'overflow-y': 'scroll', + ...overflowStyle, }; this.computedLayout.style = { ...(this.computedLayout.style || {}), height: `${height}px`, - 'overflow-y': 'scroll', + ...overflowStyle, }; }, 200); } @@ -508,7 +512,7 @@ export class ModulesLayoutComponent implements OnInit { } const elementHeight = elem && `${elem.clientHeight}px`; - const bodyHeight = `${document.body.clientHeight - elem.offsetTop}px`; + const bodyHeight = `${document.body.clientHeight - elem.offsetTop - 4}px`; // si la taille de l'élément correspond à la taille de la page // -> on ne fait rien diff --git a/frontend/app/components/layout/object/layout-object-filters.component.ts b/frontend/app/components/layout/object/layout-object-filters.component.ts index b6a67316..5b8afb69 100644 --- a/frontend/app/components/layout/object/layout-object-filters.component.ts +++ b/frontend/app/components/layout/object/layout-object-filters.component.ts @@ -38,7 +38,6 @@ export class ModulesLayoutObjectFiltersComponent }, { items: this.layout.items, - overflow: true, }, { flex: '0', From be5fa6c9e9b3d77e268a38d43eb2c0f0f369e20e Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 21 Mar 2023 22:23:10 +0100 Subject: [PATCH 004/142] add pr_sipaf models --- backend/gn_modulator/schema/auto.py | 1 - .../gn_modulator/schema/repositories/base.py | 4 + backend/gn_modulator/schema/sql/base.py | 32 ++-- 
config/modules/contrib/m_sipaf/config.yml | 2 +- .../definitions/m_sipaf.actor.schema.yml | 6 +- .../m_sipaf/definitions/m_sipaf.pf.schema.yml | 141 +----------------- .../m_sipaf/exports/m_sipaf.pf.export.yml | 2 +- 7 files changed, 37 insertions(+), 151 deletions(-) diff --git a/backend/gn_modulator/schema/auto.py b/backend/gn_modulator/schema/auto.py index 83956bae..e512ac37 100644 --- a/backend/gn_modulator/schema/auto.py +++ b/backend/gn_modulator/schema/auto.py @@ -48,7 +48,6 @@ def get_autoschema(self): return schema_definition Model = get_class_from_path(self.attr("meta.model")) - if Model is None: raise SchemaAutoError( "Pas de modèles trouvé pour la table {}".format(schema_dot_table) diff --git a/backend/gn_modulator/schema/repositories/base.py b/backend/gn_modulator/schema/repositories/base.py index 2eca617e..acff8233 100644 --- a/backend/gn_modulator/schema/repositories/base.py +++ b/backend/gn_modulator/schema/repositories/base.py @@ -255,6 +255,10 @@ def query_list(self, module_code=MODULE_CODE, cruved_type="R", params={}, query_ """ Model = self.Model() + + if Model is None: + raise Exception(f"Model not found for {self.schema_code()}") + model_pk_fields = [ getattr(Model, pk_field_name) for pk_field_name in self.pk_field_names() ] diff --git a/backend/gn_modulator/schema/sql/base.py b/backend/gn_modulator/schema/sql/base.py index ebf02674..50c1202e 100644 --- a/backend/gn_modulator/schema/sql/base.py +++ b/backend/gn_modulator/schema/sql/base.py @@ -30,6 +30,25 @@ def auto_sql_schemas_dot_tables(cls): return auto_sql_schemas_dot_tables + @classmethod + def get_tables(cls): + if tables := get_global_cache(["schema_dot_tables"]): + return tables + + sql_txt_tables = f""" + SELECT + concat(t.table_schema, '.', t.table_name) + FROM + information_schema.tables t + WHERE + CONCAT(t.table_schema, '.', t.table_name) IN ('{"', '".join(cls.auto_sql_schemas_dot_tables())}') + + """ + res = cls.c_sql_exec_txt(sql_txt_tables) + tables = [r[0] for r in res] + 
set_global_cache(["schema_dot_tables"], tables) + return tables + @classmethod def get_table_columns(cls, schema_dot_table): table_schema = schema_dot_table.split(".")[0] @@ -181,20 +200,9 @@ def c_sql_schema_dot_table_exists(cls, sql_schema_dot_table): sql_table_name = sql_schema_dot_table.split(".")[1] return cls.c_sql_table_exists(sql_schema_name, sql_table_name) - @classmethod - def table_names(cls, sql_schema_name): - table_names = get_global_cache(["table_names", sql_schema_name]) - if table_names is None: - inspector = inspect(db.engine) - table_names = inspector.get_table_names(sql_schema_name) + inspector.get_view_names( - sql_schema_name - ) - set_global_cache(["table_names", sql_schema_name], table_names) - return table_names - @classmethod def c_sql_table_exists(cls, sql_schema_name, sql_table_name): - return sql_table_name.lower() in cls.table_names(sql_schema_name) + return f"{sql_schema_name}.{sql_table_name}".lower() in cls.get_tables() @classmethod def c_sql_schema_exists(cls, sql_schema_name): diff --git a/config/modules/contrib/m_sipaf/config.yml b/config/modules/contrib/m_sipaf/config.yml index 3e1f2ac4..23366262 100644 --- a/config/modules/contrib/m_sipaf/config.yml +++ b/config/modules/contrib/m_sipaf/config.yml @@ -193,7 +193,7 @@ site_details_fields: items: - nomenclature_ouvrage_hydrau_position.label_fr - nomenclature_ouvrage_hydrau_banq_caract.label_fr - - nomenclature_ouvrage_hydro_banq_type.label_fr + - nomenclature_ouvrage_hydrau_banq_type.label_fr - ouvrag_hydrau_tirant_air - label: __f__`Médias (${data?.medias?.length || 0})` items: diff --git a/config/modules/contrib/m_sipaf/definitions/m_sipaf.actor.schema.yml b/config/modules/contrib/m_sipaf/definitions/m_sipaf.actor.schema.yml index 70b97997..a364b2fb 100644 --- a/config/modules/contrib/m_sipaf/definitions/m_sipaf.actor.schema.yml +++ b/config/modules/contrib/m_sipaf/definitions/m_sipaf.actor.schema.yml @@ -7,9 +7,11 @@ required: - id_passage_faune - id_nomenclature_type_actor 
meta: + autoschema: true schema_code: m_sipaf.actor - sql_processing: true - sql_schema_dot_table: pr_sipaf.cor_actor_pf + # sql_processing: true + # sql_schema_dot_table: pr_sipaf.cor_actor_pf + model: m_sipaf.models.Actor genre: M label: Acteur label_field_name: id_nomenclature_type_actor diff --git a/config/modules/contrib/m_sipaf/definitions/m_sipaf.pf.schema.yml b/config/modules/contrib/m_sipaf/definitions/m_sipaf.pf.schema.yml index 3aa26b54..e6fe3d0e 100644 --- a/config/modules/contrib/m_sipaf/definitions/m_sipaf.pf.schema.yml +++ b/config/modules/contrib/m_sipaf/definitions/m_sipaf.pf.schema.yml @@ -4,8 +4,11 @@ title: schema passage faune description: schema pour les passages à faune meta: - sql_processing: true - sql_schema_dot_table: pr_sipaf.t_passages_faune + # sql_processing: true + # sql_schema_dot_table: pr_sipaf.t_passages_faune + # unique_in_db: true + autoschema: true + model: m_sipaf.models.PassageFaune genre: M label: passage à faune labels: passages à faune @@ -13,300 +16,170 @@ meta: geometry_field_name: geom unique: - code_passage_faune - unique_in_db: true check_cruved: true properties: id_passage_faune: - type: integer title: ID - primary_key: true code_passage_faune: - type: string title: Code passage faune - required: true description: "Code permettant d'identifier le passage à faune de manière unique (texte)" uuid_passage_faune: - type: uuid title: UUID - default: uuid description: Identifiant universel unique au format UUID (uuid_pf) id_digitiser: - type: integer - foreign_key: true - schema_code: user.role title: Numérisateur description: Personne qui a saisi la donnée digitiser: - type: relation - relation_type: n-1 - schema_code: user.role title: Numérisateur description: Personne qui a saisi la donnée - local_key: id_digitiser pi_ou_ps: - type: boolean title: Positionnement description: Positionnement du passage vis-à vis de l’infrastructure (inférieur (False) ou supérieur (True)) labels: - Supérieur - Inférieur geom: - type: 
geometry - geometry_type: geometry - srid: 4326 title: Geometrie (4326) description: Géometrie du passage à faune (SRID=4326) - index: true - required: true geom_local: - type: geometry - geometry_type: geometry - srid: __LOCAL_SRID__ title: Geometrie locale (__LOCAL_SRID__) description: Géométrie locale du passage à faune (SRID=__LOCAL_SRID__) - index: true - trigger: - name: copy_geom - key: geom pk: - type: number title: Point kilométrique description: Point kilométrique min: 0 pr: - type: integer title: Point Repère description: Point repère min: 0 pr_abs: - type: integer title: Point repère abscisse (m) description: Distance en abscisse curviligne depuis le dernier PR min: 0 code_ouvrage_gestionnaire: - type: string title: Code ouvrage gestionnaire description: Code de l’ouvrage (pour le gestionnaire) nom_usuel_passage_faune: - type: string title: Nom Passage Faune description: "Nom usuel utilisé pour dénommer l'ouvrage (nom_usuel_pf)" issu_requalification: - type: boolean title: Requalification description: "L'ouvrage est issu d'une opération de requalification ?" 
labels: - Oui - Non date_creation_ouvrage: - type: date title: Date de réalisation description: "Date de la réalisation de l'ouvrage" date_requalification_ouvrage: - type: date title: Date de requalification description: "Date de la requalification de l'ouvrage" largeur_ouvrage: - type: number title: Largeur ouvrage (m) description: "Largeur de l'ouvrage en mètre" min: 0 hauteur_ouvrage: - type: number title: Hauteur ouvrage (m) description: "Hauteur de l'ouvrage en mètre" min: 0 longueur_franchissement: - type: number title: Longueur de franchissement (m) description: "Longueur de franchissement de l'ouvrage en mètres (ne prend pas en compte l'épaisseur des matériaux et éventuels obstacles)" min: 0 diametre: - type: number title: Diamètre (m) description: Diamètre de la buse en mètre min: 0 largeur_dispo_faune: - type: number title: Largeur disponible (m) description: "Largeur de l'ouvrage effectivement disponible pour la faune en mètre" min: 0 hauteur_dispo_faune: - type: number title: Hauteur disponible (m) description: "Hauteur de l'ouvrage effectivement disponible pour la faune en mètre" min: 0 id_nomenclature_ouvrage_specificite: - type: integer title: Spécificité du passage faune description: Exclusivité pour le passage faune (specificite) - foreign_key: true - schema_code: ref_nom.nomenclature nomenclature_type: PF_OUVRAGE_SPECIFICITE nomenclatures_ouvrage_type: - type: relation - relation_type: n-n title: "Type d'ouvrage" description: "Type d'ouvrage d'art (lb_type_ouvrage)" - schema_dot_table: pr_sipaf.cor_pf_nomenclature_ouvrage_type - schema_code: ref_nom.nomenclature nomenclature_type: PF_OUVRAGE_TYPE ouvrage_type_autre: type: string title: Autre type d'ouvrage nomenclatures_ouvrage_materiaux: - type: relation - relation_type: n-n - schema_code: ref_nom.nomenclature - schema_dot_table: pr_sipaf.cor_pf_nomenclature_ouvrage_materiaux title: Matériaux description: "Matériaux composants l'ouvrage d'art (lb_materiaux)" nomenclature_type: 
PF_OUVRAGE_MATERIAUX ouvrage_hydrau: - type: boolean title: Ouvrage hydraulique description: Ouvrage hydraulique ou non labels: - Oui - Non id_nomenclature_ouvrage_hydrau_position: - type: integer title: Ouvrage hydraulique Position description: Ouvrage hydraulique Position (ouvrage_hydrau_position) - foreign_key: true - schema_code: ref_nom.nomenclature nomenclature_type: PF_OUVRAGE_HYDRAULIQUE_POSITION id_nomenclature_ouvrage_hydrau_banq_caract: - type: integer title: Caractérisation banquette description: "Caractérisation de la banquette dans le cas d'un ouvrage hydraulique (ouvrage_hydrau_caract_banquette)" nomenclature_type: PF_OUVRAGE_HYDRAULIQUE_BANQ_CARACT - foreign_key: true schema_code: ref_nom.nomenclature id_nomenclature_ouvrage_hydrau_banq_type: - type: integer title: Type de banquette description: "Type de la banquette dans le cas d'un ouvrage hydraulique (ouvrage_hydrau_type_banquette)" - foreign_key: true - schema_code: ref_nom.nomenclature nomenclature_type: PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE ouvrag_hydrau_tirant_air: - type: number title: "Tirant d'air banquette (m)" description: " Tirant d'air existant entre la banquette et le plafond de l'ouvrage, en mètre" source: - type: string title: Source description: Source de la donnée nomenclature_ouvrage_specificite: - type: relation - relation_type: n-1 title: Spécificité du passage faune description: Exclusivité pour le passage faune - local_key: id_nomenclature_ouvrage_specificite - schema_code: ref_nom.nomenclature nomenclature_ouvrage_hydrau_position: - type: relation - relation_type: n-1 - schema_code: ref_nom.nomenclature title: Position banquette - local_key: id_nomenclature_ouvrage_hydrau_position nomenclature_ouvrage_hydrau_banq_caract: - type: relation - relation_type: n-1 - schema_code: ref_nom.nomenclature title: OH Caractérisation banquette - local_key: id_nomenclature_ouvrage_hydrau_banq_caract - nomenclature_ouvrage_hydro_banq_type: - type: relation - relation_type: n-1 - schema_code: 
ref_nom.nomenclature + nomenclature_ouvrage_hydrau_banq_type: title: OH type de banquette - local_key: id_nomenclature_ouvrage_hydrau_banq_type meta_create_date: - type: datetime title: Date de création (en base) meta_update_date: - type: datetime title: Date de modification (en base) areas: - type: relation title: Areas - relation_type: n-n - schema_code: ref_geo.area - schema_dot_table: pr_sipaf.cor_area_pf - trigger: - name: intersect_ref_geo - key: geom_local - 'on': geom linears: - type: relation title: Linéaires - relation_type: n-n - schema_code: ref_geo.linear - schema_dot_table: pr_sipaf.cor_linear_pf - trigger: - name: d_within - distance: 100 - key: geom_local - 'on': geom - partition: - - id_type actors: - type: relation - relation_type: 1-n - foreign_key: id_passage_faune - schema_code: m_sipaf.actor title: Acteurs description: Acteurs du passage à faune medias: - type: relation title: medias - relation_type: 1-n - foreign_key: uuid_attached_row - local_key: uuid_passage_faune - schema_code: commons.media + geom_x: type: number title: Latitude - column_property: st_x - key: geom geom_y: type: number title: Longitude - column_property: st_y - key: geom geom_text: type: string title: Geométrie (text) - column_property: st_astext - key: geom label_infrastructures: type: string title: Infrastructure traversée - column_property: label - relation_key: linears - label_key: linear_name - filters: type.type_code = RTE label_communes: type: string title: Commune(s) - column_property: label - relation_key: areas - label_key: area_name - filters: area_type.type_code in COM label_departements: type: string title: Département(s) - column_property: label - relation_key: areas - label_key: area_name - filters: area_type.type_code in DEP label_regions: type: string title: Région(s) - column_property: label - relation_key: areas - label_key: area_name - filters: area_type.type_code in REG diff --git a/config/modules/contrib/m_sipaf/exports/m_sipaf.pf.export.yml 
b/config/modules/contrib/m_sipaf/exports/m_sipaf.pf.export.yml index e7f590ab..f665db0e 100644 --- a/config/modules/contrib/m_sipaf/exports/m_sipaf.pf.export.yml +++ b/config/modules/contrib/m_sipaf/exports/m_sipaf.pf.export.yml @@ -30,7 +30,7 @@ fields: - ouvrag_hydrau_tirant_air - nomenclature_ouvrage_hydrau_position.label_fr - nomenclature_ouvrage_hydrau_banq_caract.label_fr - - nomenclature_ouvrage_hydro_banq_type.label_fr + - nomenclature_ouvrage_hydrau_banq_type.label_fr - label_communes - label_departements - label_infrastructures From d2ee2dce640104567e6f9b4bda4d8d931ed3cbc1 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Fri, 3 Mar 2023 12:38:08 +0100 Subject: [PATCH 005/142] test api import --- backend/gn_modulator/tests/test_import_api.py | 17 ++++++++++++++++- config/modules/MODULATOR.module.yml | 2 ++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/backend/gn_modulator/tests/test_import_api.py b/backend/gn_modulator/tests/test_import_api.py index 6d37fd8e..15d79dac 100644 --- a/backend/gn_modulator/tests/test_import_api.py +++ b/backend/gn_modulator/tests/test_import_api.py @@ -1,7 +1,22 @@ import pytest # noqa +from flask import url_for +from werkzeug.datastructures import Headers + +from geonature.tests.utils import set_logged_user_cookie + from gn_modulator.utils.env import import_test_dir @pytest.mark.usefixtures("client_class", "temporary_transaction", scope="session") class TestImportApi: - pass + def test_import_synthese(self, users): + set_logged_user_cookie(self.client, users["admin_user"]) + with open(import_test_dir / "synthese_1.csv", "rb") as f: + data = {"file": (f, "synthese.csv"), "object_code": "syn.synthese"} + r = self.client.post( + url_for("modulator.api_import", module_code="MODULATOR"), + data=data, + headers=Headers({"Content-Type": "multipart/form-data"}), + ) + print(r.data) + assert r.status_code == 200, r.data diff --git a/config/modules/MODULATOR.module.yml b/config/modules/MODULATOR.module.yml index 
2fef08b1..8d59ed4e 100644 --- a/config/modules/MODULATOR.module.yml +++ b/config/modules/MODULATOR.module.yml @@ -27,4 +27,6 @@ objects: tax.taxref: cruved: R tax.taxsearch: + cruved: R + syn.synthese: cruved: R \ No newline at end of file From 81a38c5adb8171ba370d3f200b085e24e84495ad Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Fri, 3 Mar 2023 14:10:35 +0100 Subject: [PATCH 006/142] import securize preprocess --- .../gn_modulator/schema/imports/preprocess.py | 62 +++++- .../tests/import_test/route/pp_linear.sql | 21 +-- .../import_test/route/pp_linear_group.sql | 9 +- .../imports/m_sipaf.pf_exemples.import.yml | 16 -- .../m_sipaf/imports/scripts/ppi_actor.sql | 3 - .../m_sipaf/imports/scripts/ppi_actor_V1.sql | 3 - .../imports/scripts/ppi_groupe_route_na.sql | 1 - .../m_sipaf/imports/scripts/ppi_organism.sql | 3 - .../imports/scripts/ppi_organism_V1.sql | 3 - .../m_sipaf/imports/scripts/ppi_pf.sql | 178 ------------------ .../m_sipaf/imports/scripts/ppi_pf_V1.sql | 61 +++--- .../imports/scripts/ppi_srce_reservoir.sql | 6 +- .../imports/scripts/ppi_troncon_route_na.sql | 21 +-- doc/import.md | 5 +- 14 files changed, 106 insertions(+), 286 deletions(-) delete mode 100644 config/modules/contrib/m_sipaf/imports/m_sipaf.pf_exemples.import.yml delete mode 100644 config/modules/contrib/m_sipaf/imports/scripts/ppi_pf.sql diff --git a/backend/gn_modulator/schema/imports/preprocess.py b/backend/gn_modulator/schema/imports/preprocess.py index 2d34db98..d566079f 100644 --- a/backend/gn_modulator/schema/imports/preprocess.py +++ b/backend/gn_modulator/schema/imports/preprocess.py @@ -1,3 +1,6 @@ +import re + + class SchemaPreProcessImports: @classmethod def import_raw(cls, import_number, schema_code, from_table, dest_table): @@ -33,19 +36,60 @@ def import_preprocess( msg=f"Le fichier de preprocess {pre_process_file_path} n'existe pas", ) return - with open(pre_process_file_path, "r") as f: - txt_pre_process_raw_import_view = ( - f.read() - .replace(":raw_import_table", 
from_table) - .replace(":pre_processed_import_view", dest_table) - .replace("%", "%%") - ) + with open(pre_process_file_path, "r") as f: + preprocess_select = f.read().upper().replace(";", "").replace("%", "%%") + + forbidden_words = [] + for forbidden_word in [ + "INSERT ", + "DROP ", + "DELETE ", + "UPDATE ", + "EXECUTE", + "TRUNCATE", + ]: + if forbidden_word in preprocess_select: + forbidden_words.append(forbidden_word.strip()) + + if forbidden_words: + cls.import_add_error( + import_number, + schema_code, + code="ERR_IMPORT_PRE_PROCESS_FORBIDEN_WORD", + msg=f"Le fichier de preprocess {pre_process_file_path} contient le ou les mots interdits {', '.join(forbidden_word)}", + ) + return + + for word in ["WHERE", "ORDER BY", "LIMIT"]: + if word in preprocess_select: + preprocess_select = preprocess_select.replace( + f"{word}", "\nFROM {from_table}\n{word}" + ) + break + + if "FROM" not in preprocess_select: + preprocess_select += f"\nFROM {from_table}" + + txt_pre_process_raw_import_view = f""" +DROP VIEW IF EXISTS {dest_table}; +CREATE VIEW {dest_table} AS +{preprocess_select} +; + """ cls.import_set_infos( import_number, schema_code, "sql.preprocess", txt_pre_process_raw_import_view ) - cls.c_sql_exec_txt(txt_pre_process_raw_import_view) - + try: + cls.c_sql_exec_txt(txt_pre_process_raw_import_view) + except Exception as e: + cls.import_add_error( + import_number, + schema_code, + code="ERR_IMPORT_PRE_PROCESS_CREATE_VIEW", + msg=f"La vue de preprocess n'a pas être crée : {str(e)}", + ) + return cls.count_and_check_table(import_number, schema_code, dest_table, "preprocess") @classmethod diff --git a/backend/gn_modulator/tests/import_test/route/pp_linear.sql b/backend/gn_modulator/tests/import_test/route/pp_linear.sql index 221c6d67..e5b66b9f 100644 --- a/backend/gn_modulator/tests/import_test/route/pp_linear.sql +++ b/backend/gn_modulator/tests/import_test/route/pp_linear.sql @@ -1,13 +1,8 @@ -DROP VIEW IF EXISTS :pre_processed_import_view CASCADE; -CREATE VIEW 
:pre_processed_import_view AS - SELECT - 'RTE' AS id_type, - id AS linear_code, - numero || '_' || substring(id, 9)::bigint AS linear_name, - wkt as geom, - true as enable, - 'https://geoservices.ign.fr/bdtopo#telechargementshpreg' AS source, - numero as groups -- n-n ++ - FROM :raw_import_table -; - +SELECT + 'RTE' AS id_type, + id AS linear_code, + numero || '_' || substring(id, 9) :: bigint AS linear_name, + wkt as geom, + true as enable, + 'https://geoservices.ign.fr/bdtopo#telechargementshpreg' AS source, + numero as groups -- n-n ++ \ No newline at end of file diff --git a/backend/gn_modulator/tests/import_test/route/pp_linear_group.sql b/backend/gn_modulator/tests/import_test/route/pp_linear_group.sql index afb5e4ca..8ef52ba8 100644 --- a/backend/gn_modulator/tests/import_test/route/pp_linear_group.sql +++ b/backend/gn_modulator/tests/import_test/route/pp_linear_group.sql @@ -1,7 +1,4 @@ -DROP VIEW IF EXISTS :pre_processed_import_view CASCADE; -CREATE VIEW :pre_processed_import_view AS -SELECT DISTINCT - 'RTE' AS id_type, +SELECT + DISTINCT 'RTE' AS id_type, numero AS code, - cl_admin || ' ' || numero AS name - FROM :raw_import_table tis + cl_admin || ' ' || numero AS name \ No newline at end of file diff --git a/config/modules/contrib/m_sipaf/imports/m_sipaf.pf_exemples.import.yml b/config/modules/contrib/m_sipaf/imports/m_sipaf.pf_exemples.import.yml deleted file mode 100644 index ae0ce67d..00000000 --- a/config/modules/contrib/m_sipaf/imports/m_sipaf.pf_exemples.import.yml +++ /dev/null @@ -1,16 +0,0 @@ -type: import -code: m_sipaf.pf_exemples -title: Données d'exemple m_sipaf -description: import données d'exemple de passage à faune pour SIPAF -items: - - schema_code: user.organisme - data: pf.csv - pre_process: scripts/ppi_organism.sql - - schema_code: m_sipaf.pf - data: pf.csv - pre_process: scripts/ppi_pf.sql - keep_raw: true - - schema_code: m_sipaf.actor - data: pf.csv - pre_process: scripts/ppi_actor.sql - keep_raw: true diff --git 
a/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor.sql index 2924e66d..8d0826d4 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor.sql @@ -1,5 +1,3 @@ -DROP VIEW IF EXISTS :pre_processed_import_view; -CREATE VIEW :pre_processed_import_view AS SELECT uuid_pf AS id_passage_faune, CASE @@ -11,7 +9,6 @@ SELECT END AS id_nomenclature_type_actor, nom_organism AS id_organism, NULL AS id_role - FROM :raw_import_table t WHERE nom_organism IS NOT NULL AND nom_organism != '' ; diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor_V1.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor_V1.sql index e4976651..0a52176f 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor_V1.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor_V1.sql @@ -1,11 +1,8 @@ -DROP VIEW IF EXISTS :pre_processed_import_view; -CREATE VIEW :pre_processed_import_view AS SELECT uuid_pf AS id_passage_faune, 'CON' AS id_nomenclature_type_actor, concess AS id_organism, NULL AS id_role - FROM :raw_import_table t WHERE concess IS NOT NULL AND concess != '' ; diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_groupe_route_na.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_groupe_route_na.sql index afb5e4ca..1eed5d7c 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_groupe_route_na.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_groupe_route_na.sql @@ -4,4 +4,3 @@ SELECT DISTINCT 'RTE' AS id_type, numero AS code, cl_admin || ' ' || numero AS name - FROM :raw_import_table tis diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism.sql index c1c36010..112ad275 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism.sql +++ 
b/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism.sql @@ -1,8 +1,5 @@ -DROP VIEW IF EXISTS :pre_processed_import_view; -CREATE VIEW :pre_processed_import_view AS SELECT DISTINCT nom_organism AS nom_organisme, 'SIPAF' AS adresse_organisme - FROM :raw_import_table t WHERE nom_organism IS NOT NULL AND nom_organism != '' ORDER BY nom_organism diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism_V1.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism_V1.sql index 77c7aede..a6267ec9 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism_V1.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism_V1.sql @@ -1,8 +1,5 @@ -DROP VIEW IF EXISTS :pre_processed_import_view; -CREATE VIEW :pre_processed_import_view AS SELECT DISTINCT concess AS nom_organisme, 'SIPAF' AS adresse_organisme - FROM :raw_import_table t WHERE concess IS NOT NULL AND concess != '' ORDER BY concess diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_pf.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_pf.sql deleted file mode 100644 index 491a2530..00000000 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_pf.sql +++ /dev/null @@ -1,178 +0,0 @@ - - ---DROP FUNCTION IF EXISTS process_number; -CREATE OR REPLACE FUNCTION process_number(n_in text) RETURNS NUMERIC AS $$ -DECLARE x NUMERIC; -DECLARE inter TEXT; -BEGIN - inter := n_in; - SELECT INTO inter REPLACE(inter, ',', '.'); - x = inter::NUMERIC; - RETURN x; -EXCEPTION - WHEN others THEN - RETURN CASE - WHEN n_in IS NULL OR n_in = '' THEN NULL - ELSE NULL - END; -END; -$$ -STRICT -LANGUAGE plpgsql IMMUTABLE; - ---DROP FUNCTION IF EXISTS process_integer; -CREATE OR REPLACE FUNCTION process_integer(n_in text) RETURNS INTEGER AS $$ -DECLARE x INTEGER; -DECLARE inter TEXT; -BEGIN - inter := n_in; - x = inter::INTEGER; - RETURN x; -EXCEPTION - WHEN others THEN - RETURN CASE - WHEN n_in IS NULL OR n_in = '' THEN NULL - ELSE NULL - END; -END; -$$ -STRICT 
-LANGUAGE plpgsql IMMUTABLE; - - ---DROP FUNCTION IF EXISTS check_number(TEXT, TEXT, TEXT) ; -CREATE OR REPLACE FUNCTION check_number( - col_num_in TEXT, - col_id_in TEXT, - table_in TEXT -) -RETURNS TABLE(id text, value text) AS -$$ - BEGIN - RETURN QUERY EXECUTE FORMAT(' - SELECT %I::text as id, %I::text as value - FROM %s - WHERE process_number(%I) = double precision ''NaN'' - ', col_id_in, col_num_in, table_in, col_num_in); - END; -$$ -STRICT -LANGUAGE plpgsql IMMUTABLE; - -DROP TABLE IF EXISTS corr_type_ouv CASCADE; -CREATE TABLE corr_type_ouv( -cd_nomenclature TEXT, -mot TEXT, -UNIQUE(cd_nomenclature, mot) -); - - -INSERT INTO corr_type_ouv -VALUES ('BUS', 'buse'), -('ARC', 'Arc'), -('CAD', 'cadre'), -('DAL', 'dalle'), -('VIA', 'Viaduc'), -('VOU', 'Voute'), -('VOU', 'Vo?t?'), -('PON', 'Pont'), -('CAN', 'Canalisation'), -('DIAB', 'Diabolo'), -('DALO', 'Dalot'), -('TRA', 'Tranch'), -('TUN', 'Tunnel'), -('POR', 'portique') -ON CONFLICT DO NOTHING; - -DROP VIEW IF EXISTS :pre_processed_import_view; -CREATE VIEW :pre_processed_import_view AS -WITH - doublons AS ( - SELECT MIN(id_import) AS id_import, uuid_pf - FROM :raw_import_table - WHERE uuid_pf != '' OR uuid_pf IS NOT NULL - GROUP BY uuid_pf - ORDER BY uuid_pf - ), - type_ouv AS ( - SELECT - uuid_pf, - string_agg(DISTINCT cd_nomenclature, ',') AS nomenclatures_ouvrage_type - FROM :raw_import_table tis - JOIN corr_type_ouv cto - ON UNACCENT(tis.lb_typ_ouv) ILIKE '%' || cto.mot || '%' - WHERE lb_typ_ouv != '' - GROUP BY uuid_pf -) -SELECT - -- tis.uuid_pf AS id_passage_faune, - tis.uuid_pf AS code_passage_faune, - CASE - WHEN pi_ou_ps LIKE 'PI%' THEN FALSE - WHEN pi_ou_ps LIKE 'PS%' THEN TRUE - WHEN pi_ou_ps = '' THEN NULL - ELSE NULL - END AS pi_ou_ps, - st_asewkt(st_makepoint(process_number(x), process_number(y), 4326)) AS GEOM, - process_number(pk) AS pk, - process_integer(pr) AS pr, - process_integer(pr_abs) AS pr_abs, - id_op AS code_ouvrage_gestionnaire, - nom_pf AS nom_usuel_passage_faune, - CASE - WHEN 
process_number(date_creat) != double PRECISION 'NaN' - THEN TO_DATE(process_number(date_creat)::text, 'yyyy') - ELSE NULL - END AS date_creation_ouvrage, - CASE - WHEN process_number(date_requal) != double PRECISION 'NaN' - THEN TO_DATE(process_number(date_requal)::text, 'yyyy') - ELSE NULL - END AS date_requalification_ouvrage, - CASE - WHEN process_number(date_requal) != double PRECISION 'NaN' - THEN date_requal != '' - ELSE NULL - END AS issu_requalification, - process_number(larg_ouvrag) AS largeur_ouvrage, - process_number(haut_ouvrag) AS hauteur_ouvrage, - process_number(long_franch) AS longueur_franchissement, - process_number(diam) AS diametre, - process_number(larg_disp) AS largeur_dispo_faune, - process_number(haut_disp) AS hauteur_dispo_faune, - CASE - WHEN specificit ILIKE '%mixte%' THEN 'MIX' - WHEN - specificit ILIKE '%sp%cifique%' - OR specificit IN ('Amphibiens', 'Batraciens', 'Boviduc', 'Crapauduc', 'Faune') - OR specificit ILIKE '%GF%' - OR specificit ILIKE '%PPF%' - OR specificit ILIKE '%PF%' - OR specificit ILIKE '%BESTIAUX%' - OR specificit ILIKE '%gibier%' - OR specificit ILIKE '%pas%' - OR specificit IN ('PF', 'PGF', 'PPF', 'PB', 'PP', 'S') - THEN 'SPE' - WHEN - specificit ILIKE '%non dedie%' - OR specificit IN ('H', 'M', 'Non') - THEN 'ND' - WHEN specificit IN ('', '?', 'coudée', 'immergée') THEN NULL - ELSE '???' 
- END AS id_nomenclature_ouvrage_specificite, - tou.nomenclatures_ouvrage_type, - CASE - WHEN UNACCENT(TRIM(lb_materiaux)) ILIKE '%Beton%' THEN 'BET' - WHEN UNACCENT(TRIM(lb_materiaux)) ILIKE '%Metal%' THEN 'MET' - ELSE NULL - END AS nomenclatures_ouvrage_materiaux, - CASE - WHEN oh = 'Oui' THEN TRUE - WHEN oh = 'Non' THEN FALSE - ELSE NULL - END AS ouvrage_hydrau - FROM :raw_import_table tis - JOIN doublons dbl ON dbl.id_import = tis.id_import - LEFT JOIN type_ouv tou ON tou.uuid_pf = tis.uuid_pf - ORDER BY tis.uuid_pf -; \ No newline at end of file diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_pf_V1.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_pf_V1.sql index 594b1ce3..3ab0ac68 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_pf_V1.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_pf_V1.sql @@ -1,31 +1,34 @@ -- import V1 -- (sans les données spécificité, matériaux, et ouvrage_type) -DROP VIEW IF EXISTS :pre_processed_import_view; -CREATE VIEW :pre_processed_import_view AS - select - uuid_pf as code_passage_faune, - CASE - WHEN pi_ou_ps = 'PI' THEN FALSE - WHEN pi_ou_ps = 'PS' THEN TRUE - WHEN pi_ou_ps = '' THEN NULL - ELSE NULL - END AS pi_ou_ps, - pr, - pr_abs, - st_asewkt(st_makepoint(replace(X, ',', '.')::numeric, replace(y, ',', '.')::numeric, 4326)) AS GEOM, - ID_PF_GEST AS code_ouvrage_gestionnaire, - NOM_PF AS nom_usuel_passage_faune, - CASE - WHEN ISSU_REQA = 'oui' THEN TRUE - ELSE NULL - END AS issu_requalification, - replace(larg_ouvra, ',', '.')::numeric AS largeur_ouvrage, - replace(haut_ouvra, ',', '.')::NUMERIC AS hauteur_ouvrage, - replace(long_franc, ',', '.')::NUMERIC AS longueur_franchissement, - replace(diam, ',', '.')::NUMERIC AS diametre, - replace(larg_disp, ',', '.')::NUMERIC AS largeur_dispo_faune, - replace(haut_disp, ',', '.')::NUMERIC AS hauteur_dispo_faune, - source - FROM :raw_import_table tis - ORDER BY tis.uuid_pf -; \ No newline at end of file +select + uuid_pf as 
code_passage_faune, + CASE + WHEN pi_ou_ps = 'PI' THEN FALSE + WHEN pi_ou_ps = 'PS' THEN TRUE + WHEN pi_ou_ps = '' THEN NULL + ELSE NULL + END AS pi_ou_ps, + pr, + pr_abs, + st_asewkt( + st_makepoint( + replace(X, ',', '.') :: numeric, + replace(y, ',', '.') :: numeric, + 4326 + ) + ) AS GEOM, + ID_PF_GEST AS code_ouvrage_gestionnaire, + NOM_PF AS nom_usuel_passage_faune, + CASE + WHEN ISSU_REQA = 'oui' THEN TRUE + ELSE NULL + END AS issu_requalification, + replace(larg_ouvra, ',', '.') :: numeric AS largeur_ouvrage, + replace(haut_ouvra, ',', '.') :: NUMERIC AS hauteur_ouvrage, + replace(long_franc, ',', '.') :: NUMERIC AS longueur_franchissement, + replace(diam, ',', '.') :: NUMERIC AS diametre, + replace(larg_disp, ',', '.') :: NUMERIC AS largeur_dispo_faune, + replace(haut_disp, ',', '.') :: NUMERIC AS hauteur_dispo_faune, + source +ORDER BY + tis.uuid_pf; \ No newline at end of file diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_srce_reservoir.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_srce_reservoir.sql index daf3c810..20f13523 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_srce_reservoir.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_srce_reservoir.sql @@ -1,5 +1,3 @@ -DROP VIEW IF EXISTS :pre_processed_import_view; -CREATE VIEW :pre_processed_import_view AS SELECT 'RESV_SRCE' AS id_type, id_resv AS area_code, @@ -9,6 +7,4 @@ SELECT END AS area_name, wkt AS geom, TRUE AS enable, - 'https://inpn.mnhn.fr/docs/TVB/N_SRCE_RESERVOIR_S_000.zip' AS source - FROM :raw_import_table -; \ No newline at end of file + 'https://inpn.mnhn.fr/docs/TVB/N_SRCE_RESERVOIR_S_000.zip' AS source \ No newline at end of file diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_troncon_route_na.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_troncon_route_na.sql index 221c6d67..e5b66b9f 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_troncon_route_na.sql +++ 
b/config/modules/contrib/m_sipaf/imports/scripts/ppi_troncon_route_na.sql @@ -1,13 +1,8 @@ -DROP VIEW IF EXISTS :pre_processed_import_view CASCADE; -CREATE VIEW :pre_processed_import_view AS - SELECT - 'RTE' AS id_type, - id AS linear_code, - numero || '_' || substring(id, 9)::bigint AS linear_name, - wkt as geom, - true as enable, - 'https://geoservices.ign.fr/bdtopo#telechargementshpreg' AS source, - numero as groups -- n-n ++ - FROM :raw_import_table -; - +SELECT + 'RTE' AS id_type, + id AS linear_code, + numero || '_' || substring(id, 9) :: bigint AS linear_name, + wkt as geom, + true as enable, + 'https://geoservices.ign.fr/bdtopo#telechargementshpreg' AS source, + numero as groups -- n-n ++ \ No newline at end of file diff --git a/doc/import.md b/doc/import.md index 10b51478..b1739ac1 100644 --- a/doc/import.md +++ b/doc/import.md @@ -26,11 +26,9 @@ Cette commande va intégrer (s'il y a correspondance des champs), pour chaque li geonature modules import -s m_sipaf.pf -d -p ``` -où `pre_process` est une vue qui va transformer les colonnes du fichier csv en colonnes assimilables par la table destinataire. (il est très important de garder les noms `:pre_processed_import_view` et `:raw_import_table`) +où `pre_process` est une vue qui va transformer les colonnes du fichier csv en colonnes assimilables par la table destinataire. 
``` -DROP VIEW IF EXISTS :pre_processed_import_view; -CREATE VIEW :pre_processed_import_view AS SELECT uuid_pf AS id_passage_faune, CASE @@ -42,7 +40,6 @@ SELECT END AS id_nomenclature_type_actor, nom_organism AS id_organism, NULL AS id_role - FROM :raw_import_table t WHERE nom_organism IS NOT NULL AND nom_organism != '' ; ``` From 5749cf833293cff619827fb15cc56967db6cffd1 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 6 Mar 2023 15:42:17 +0100 Subject: [PATCH 007/142] add model TImport and methods --- backend/gn_modulator/blueprint.py | 12 +- backend/gn_modulator/imports/__init__.py | 5 - backend/gn_modulator/imports/api.py | 64 ----- .../gn_modulator/imports/mixins/__init__.py | 56 +++++ backend/gn_modulator/imports/mixins/data.py | 126 ++++++++++ backend/gn_modulator/imports/mixins/insert.py | 65 ++++++ .../gn_modulator/imports/mixins/mapping.py | 80 +++++++ .../gn_modulator/imports/mixins/process.py | 220 ++++++++++++++++++ backend/gn_modulator/imports/mixins/raw.py | 119 ++++++++++ .../gn_modulator/imports/mixins/relation.py | 127 ++++++++++ backend/gn_modulator/imports/mixins/update.py | 113 +++++++++ backend/gn_modulator/imports/mixins/utils.py | 56 +++++ backend/gn_modulator/imports/models.py | 46 ++++ backend/gn_modulator/imports/routes.py | 53 +++++ backend/gn_modulator/imports/utils/files.py | 10 + backend/gn_modulator/tests/test_import.py | 36 +-- backend/gn_modulator/tests/utils/imports.py | 34 +-- .../layouts/m_sipaf.site_list.layout.yml | 72 +++--- doc/import.md | 2 +- 19 files changed, 1143 insertions(+), 153 deletions(-) delete mode 100644 backend/gn_modulator/imports/api.py create mode 100644 backend/gn_modulator/imports/mixins/__init__.py create mode 100644 backend/gn_modulator/imports/mixins/data.py create mode 100644 backend/gn_modulator/imports/mixins/insert.py create mode 100644 backend/gn_modulator/imports/mixins/mapping.py create mode 100644 backend/gn_modulator/imports/mixins/process.py create mode 100644 
backend/gn_modulator/imports/mixins/raw.py create mode 100644 backend/gn_modulator/imports/mixins/relation.py create mode 100644 backend/gn_modulator/imports/mixins/update.py create mode 100644 backend/gn_modulator/imports/mixins/utils.py create mode 100644 backend/gn_modulator/imports/models.py create mode 100644 backend/gn_modulator/imports/routes.py create mode 100644 backend/gn_modulator/imports/utils/files.py diff --git a/backend/gn_modulator/blueprint.py b/backend/gn_modulator/blueprint.py index ade1e1e6..484b674d 100644 --- a/backend/gn_modulator/blueprint.py +++ b/backend/gn_modulator/blueprint.py @@ -2,11 +2,9 @@ from flask import Blueprint, request, g, current_app from .commands import commands from .schema import SchemaMethods -from .definition import DefinitionMethods from sqlalchemy.exc import NoForeignKeysError from gn_modulator.module import ModuleMethods from gn_modulator.layout import LayoutMethods -from gn_modulator.imports import ImportMethods from gn_modulator import init_gn_modulator from gn_modulator.utils.api import process_dict_path from gn_modulator.utils.errors import get_errors, errors_txt @@ -16,8 +14,12 @@ blueprint = Blueprint(MODULE_CODE.lower(), __name__) +<<<<<<< HEAD from gn_modulator.routes.rest import * # noqa from gn_modulator.routes.exports import * # noqa +======= +from gn_modulator.imports.routes import * # noqa +>>>>>>> add model TImport and methods # Creation des commandes pour modules blueprint.cli.short_help = "Commandes pour l' administration du module MODULES" @@ -89,12 +91,6 @@ def api_breadcrumbs(module_code, page_code): return ModuleMethods.breadcrumbs(module_code, page_code, request.args.to_dict()) -@check_cruved_scope("R") # object import ?? 
-@blueprint.route("import/", methods=["POST"]) -def api_import(module_code): - return ImportMethods.process_api_import(module_code) - - @blueprint.route("/layouts/", methods=["GET"]) @blueprint.route("/layouts/", methods=["GET"], defaults={"config_path": None}) def api_layout(config_path): diff --git a/backend/gn_modulator/imports/__init__.py b/backend/gn_modulator/imports/__init__.py index ee131d2e..e69de29b 100644 --- a/backend/gn_modulator/imports/__init__.py +++ b/backend/gn_modulator/imports/__init__.py @@ -1,5 +0,0 @@ -from .api import ImportApi - - -class ImportMethods(ImportApi): - pass diff --git a/backend/gn_modulator/imports/api.py b/backend/gn_modulator/imports/api.py deleted file mode 100644 index 8b4a0e7b..00000000 --- a/backend/gn_modulator/imports/api.py +++ /dev/null @@ -1,64 +0,0 @@ -import pathlib -from flask import request, jsonify -from gn_modulator.schema import SchemaMethods -from gn_modulator.module import ModuleMethods -from gn_modulator.utils.env import IMPORT_DIR -from geonature.core.gn_commons.file_manager import upload_file, remove_file, rename_file - - -class ImportApi: - @classmethod - def upload_file(cls, module_code, object_code, import_number, file): - IMPORT_DIR.mkdir(parents=True, exist_ok=True) - - file_name = f"{import_number}_{file.name}" - return pathlib.Path(upload_file(file, IMPORT_DIR, file_name)) - - @classmethod - def process_api_import(cls, module_code): - import_number = SchemaMethods.generate_import_number() - - object_code = None - if request.form: - object_code = request.form.get("object_code") - - schema_code = ModuleMethods.schema_code(module_code, object_code) - - if not schema_code: - return { - "errors": [ - { - "msg": f"Il n'y pas de schema pour module_code={module_code} et object_code={object_code}", - "code": "ERR_IMPORT_SCHEMA_CODE", - } - ] - } - - files_path = {} - if request.files: - for file_key in request.files: - file = request.files.get(file_key) - files_path[file_key] = cls.upload_file( - 
module_code, object_code, import_number, file - ) - data_file_path = files_path.get("data_file") - - if not (data_file_path): - return { - "errors": [ - { - "msg": "Il n'y a pas de fichier de données", - "code": "ERR_IMPORT_NO_DATA_FILE", - } - ] - } - - import_number = SchemaMethods.process_import_schema( - schema_code, data_file_path, import_number=import_number, commit=True - ) - import_infos = SchemaMethods.import_get_infos(import_number, schema_code) - print(SchemaMethods.import_pretty_infos(import_number, schema_code)) # __DEBUG - - import_infos.pop("data_file_path", None) - - return jsonify(import_infos) diff --git a/backend/gn_modulator/imports/mixins/__init__.py b/backend/gn_modulator/imports/mixins/__init__.py new file mode 100644 index 00000000..6c88be91 --- /dev/null +++ b/backend/gn_modulator/imports/mixins/__init__.py @@ -0,0 +1,56 @@ +from .data import ImportMixinData +from .insert import ImportMixinInsert +from .mapping import ImportMixinMapping +from .process import ImportMixinProcess +from .raw import ImportMixinRaw +from .relation import ImportMixinRelation +from .update import ImportMixinUpdate +from .utils import ImportMixinUtils + + +class ImportMixin( + ImportMixinRelation, + ImportMixinData, + ImportMixinInsert, + ImportMixinMapping, + ImportMixinProcess, + ImportMixinRaw, + ImportMixinUpdate, + ImportMixinUtils, +): + def process_import_schema(self, _insert_data=False): + self._insert_data = _insert_data + + self.process_data_table() + if self.errors: + return self + + self.process_mapping_view() + if self.errors: + return self + + self.process_raw_view() + if self.errors: + return self + + self.process_view() + if self.errors: + return self + + self.process_insert() + if self.errors: + return self + + self.process_update() + if self.errors: + return self + + self.process_relations() + if self.errors: + return self + + self.res["nb_unchanged"] = ( + self.res["nb_process"] - self.res["nb_insert"] - self.res["nb_update"] + ) + + return self 
diff --git a/backend/gn_modulator/imports/mixins/data.py b/backend/gn_modulator/imports/mixins/data.py new file mode 100644 index 00000000..15c0c4bf --- /dev/null +++ b/backend/gn_modulator/imports/mixins/data.py @@ -0,0 +1,126 @@ +from pathlib import Path +from .utils import ImportMixinUtils +from gn_modulator.schema import SchemaMethods +from geonature.utils.env import db + + +class ImportMixinData(ImportMixinUtils): + def process_data_table(self): + if self.tables.get("data"): + return + + self.tables["data"] = self.table_name("data") + + if Path(self.data_file_path).suffix == ".csv": + self.data_type = "csv" + self.import_csv_file(self.tables["data"]) + + # TODO traiter autres types de fichier + + else: + self.add_error( + code="ERR_IMPORT_DATA_FILE_TYPE_NOT_FOUND", + msg=f"Le type du fichier d'import {self.data_file_path} n'est pas traité", + ) + return + + self.count_and_check_table("data", self.tables["data"]) + + def import_csv_file(self, dest_table): + if not Path(self.data_file_path).exists(): + self.add_error( + code="ERR_IMPORT_DATA_FILE_NOT_FOUND", + msg=f"Le fichier d'import {self.data_file_path} n'existe pas", + ) + return + + with open(self.data_file_path, "r") as f: + # on récupère la premiere ligne du csv pour avoir le nom des colonnes + first_line = f.readline() + + self.csv_delimiter = ";" if ";" in first_line else "," if "," in first_line else None + + if self.csv_delimiter is None: + self.add_error( + code="ERR_IMPORT_CSV_FILE_DELIMITER_NOT_FOUND", + msg=f"Pas de séparateur trouvé pour le fichier csv {self.data_file_path}", + ) + return + + import_table_columns = first_line.replace("\n", "").split(self.csv_delimiter) + + # creation de la table temporaire + self.sql["data_table"] = self.sql_create_data_table( + self.tables["data"], import_table_columns + ) + try: + SchemaMethods.c_sql_exec_txt(self.sql["data_table"]) + except Exception as e: + self.add_error( + code="ERR_IMPORT_DATA_CREATE_TABLE", + msg=f"Erreur durant la création de la table 
des données: {str(e)}", + ) + return + # on copie les données dans la table temporaire + + # pour faire marcher les tests pytest on passe par un insert + # TODO faire marche copy_expert avec pytest + # manière de récupérer cursor ? + if self._insert_data: + self.insert_csv_data(f, dest_table, import_table_columns) + else: + self.copy_csv_data(f, dest_table, import_table_columns) + + def copy_csv_data(self, f, dest_table, table_columns): + columns_fields = ", ".join(table_columns) + self.sql[ + "data_copy_csv" + ] = f"""COPY {dest_table}({columns_fields}) FROM STDIN DELIMITER '{self.csv_delimiter}' QUOTE '"' CSV""" + try: + cursor = db.session.connection().connection.cursor() + cursor.copy_expert(sql=self.sql["data_copy_csv"], file=f) + except Exception as e: + self.add_error( + code="ERR_IMPORT_DATA_COPY", + msg=f"Erreur lors de la copie des données csv : {str(e)}", + ) + return + + def insert_csv_data(self, f, dest_table, table_columns): + sql_columns_fields = ", ".join(table_columns) + + values = "" + for line in f: + data = "', '".join((line.replace('"', "").replace("\n", "").split(self.csv_delimiter))) + values += f"('{data}')," + if not values: + return + + values = values[:-1] + self.sql[ + "data_insert" + ] = f"INSERT INTO {dest_table} ({sql_columns_fields}) VALUES {values}" + try: + SchemaMethods.c_sql_exec_txt(self.sql["data_insert"]) + except Exception as e: + self.add_error( + code="ERR_IMPORT_DATA_INSERT", + msg=f"Erreur lors de l'insertion des données csv : {str(e)}", + ) + return + + def sql_create_data_table(self, dest_table, table_columns): + """ + requete de creation d'une table temporaire pour import csv + tout les champs sont en varchar + """ + + columns_sql = "\n ".join(map(lambda x: f"{x} VARCHAR,", table_columns)) + pk_constraint_name = f"pk_{'_'.join(dest_table.split('.'))}_id_import" + + txt = f"""CREATE TABLE IF NOT EXISTS {dest_table} ( + id_import SERIAL NOT NULL, + {columns_sql} + CONSTRAINT {pk_constraint_name} PRIMARY KEY (id_import) 
+);""" + return txt diff --git a/backend/gn_modulator/imports/mixins/insert.py b/backend/gn_modulator/imports/mixins/insert.py new file mode 100644 index 00000000..07c05e93 --- /dev/null +++ b/backend/gn_modulator/imports/mixins/insert.py @@ -0,0 +1,65 @@ +from .utils import ImportMixinUtils +from gn_modulator import SchemaMethods + + +class ImportMixinInsert(ImportMixinUtils): + def process_insert(self): + from_table = self.tables["process"] + + sm = SchemaMethods(self.schema_code) + self.sql[ + "nb_insert" + ] = f"SELECT COUNT(*) FROM {from_table} WHERE {sm.pk_field_name()} IS NULL" + + try: + self.res["nb_insert"] = SchemaMethods.c_sql_exec_txt(self.sql["nb_insert"]).scalar() + except Exception as e: + self.add_error( + code="ERR_IMPORT_INSERT_COUNT", + msg=f"Erreur lors du comptage du nombre d'insert: {str(e)}", + ) + return + + if self.res["nb_insert"] == 0: + return + + self.sql["insert"] = self.sql_insert(from_table) + + try: + SchemaMethods.c_sql_exec_txt(self.sql["insert"]) + except Exception as e: + if isinstance(e, AttributeError): + raise e + self.add_error( + code="ERR_IMPORT_INSERT", + msg=f"Erreur durant l'insert de {from_table} vers {self.schema_code} : {str(e)}", + ) + + def sql_insert(self, from_table, dest_table=None, keys=None): + sm = SchemaMethods(self.schema_code) + + table_name = dest_table or sm.sql_schema_dot_table() + + columns_select = filter( + lambda x: ( + x in keys + if keys is not None + else not (sm.is_column(x) and sm.property(x).get("primary_key")) + ), + SchemaMethods.get_table_columns(from_table), + ) + + v_column_select_keys = map(lambda x: x, columns_select) + + txt_columns_select_keys = ",\n ".join(v_column_select_keys) + + txt_where = f" WHERE {sm.pk_field_name()} IS NULL" if keys is None else "" + + return f""" +INSERT INTO {table_name} ( + {txt_columns_select_keys} +) +SELECT + {txt_columns_select_keys} +FROM {from_table}{txt_where}; +""" diff --git a/backend/gn_modulator/imports/mixins/mapping.py 
b/backend/gn_modulator/imports/mixins/mapping.py new file mode 100644 index 00000000..9abd26a7 --- /dev/null +++ b/backend/gn_modulator/imports/mixins/mapping.py @@ -0,0 +1,80 @@ +from pathlib import Path +from .utils import ImportMixinUtils +from gn_modulator.schema import SchemaMethods + + +class ImportMixinMapping(ImportMixinUtils): + def process_mapping_view(self): + """ + Application de la vue de mappage à la la table d'import + """ + + if self.mapping_file_path is None: + return + + self.tables["mapping"] = self.table_name("mapping") + + if not Path(self.mapping_file_path).exists(): + self.add_error( + code="ERR_IMPORT_MAPPING_FILE_MISSING", + msg=f"Le fichier de preprocess {self.mapping_file_path} n'existe pas", + ) + return + + self.sql["mapping_view"] = self.sql_mapping() + + try: + SchemaMethods.c_sql_exec_txt(self.sql["mapping_view"]) + except Exception as e: + self.add_error( + code="ERR_IMPORT_PRE_PROCESS_CREATE_VIEW", + msg=f"La vue de preprocess n'a pas être crée : {str(e)}", + ) + return + + self.count_and_check_table("mapping", self.tables["mapping"]) + + def sql_mapping(self): + from_table = self.tables["data"] + dest_table = self.tables["mapping"] + + with open(self.mapping_file_path, "r") as f: + mapping_select = f.read().upper().replace(";", "").replace("%", "%%") + + forbidden_words = [] + for forbidden_word in [ + "INSERT ", + "DROP ", + "DELETE ", + "UPDATE ", + "EXECUTE", + "TRUNCATE", + ]: + if forbidden_word in mapping_select: + forbidden_words.append(forbidden_word.strip()) + + if forbidden_words: + self.add_error( + code="ERR_IMPORT_PRE_PROCESS_FORBIDEN_WORD", + msg=f"Le fichier de preprocess {self.mapping_file_path} contient le ou les mots interdits {', '.join(forbidden_word)}", + ) + return + + for word in ["WHERE", "ORDER BY", "LIMIT"]: + if word in mapping_select: + mapping_select = mapping_select.replace( + f"{word}", "\nFROM {from_table}\n{word}" + ) + break + + if "FROM" not in mapping_select: + mapping_select += f"\nFROM 
{from_table}" + + sql_mapping = f""" +DROP VIEW IF EXISTS {dest_table}; +CREATE VIEW {dest_table} AS +{mapping_select} +; + """ + + return sql_mapping diff --git a/backend/gn_modulator/imports/mixins/process.py b/backend/gn_modulator/imports/mixins/process.py new file mode 100644 index 00000000..e8da6db7 --- /dev/null +++ b/backend/gn_modulator/imports/mixins/process.py @@ -0,0 +1,220 @@ +from .utils import ImportMixinUtils +from gn_modulator import SchemaMethods + + +class ImportMixinProcess(ImportMixinUtils): + def process_view(self, keys=None): + from_table = self.tables["raw"] + dest_table = self.tables["process"] = self.table_name("process") + + self.sql["process_view"] = self.sql_process_view(from_table, dest_table, keys) + + try: + SchemaMethods.c_sql_exec_txt(self.sql["process_view"]) + except Exception as e: + self.add_error( + code="ERR_IMPORT_PROCESS_CREATE_VIEW", + msg=f"La vue de process n'a pas être crée : {str(e)}", + ) + return + + self.count_and_check_table("process", dest_table) + + def sql_process_view(self, from_table, dest_table, keys=None): + """ + requete pour créer une vue qui résoud les clé + """ + + sm = SchemaMethods(self.schema_code) + + v_columns = [] + v_joins = [] + + from_table_columns = SchemaMethods.get_table_columns(from_table) + + columns = list( + filter( + lambda x: ( + x in keys + if keys is not None + else sm.is_column(x) and not sm.property(x).get("primary_key") + ), + from_table_columns, + ) + ) + + solved_keys = {} + + for index, key in enumerate(columns): + txt_column, v_join = self.process_column_import_view(index, key) + if txt_column: + # TODO n-n ici ???? 
+ if sm.has_property(key) and sm.property(key).get("relation_type") == "n-n": + rel = SchemaMethods(sm.property(key)["schema_code"]) + v_columns.append(f"{txt_column.split('.')[0]}.{rel.pk_field_name()}") + else: + v_columns.append(f"{txt_column} AS {key}") + solved_keys[key] = txt_column + v_joins += v_join + + txt_pk_column, v_join = self.resolve_key( + self.schema_code, sm.pk_field_name(), alias_join_base="j_pk", solved_keys=solved_keys + ) + v_columns.append(txt_pk_column) + v_joins += v_join + + txt_columns = ",\n ".join(v_columns) + txt_joins = "\n".join(v_joins) + + return f"""DROP VIEW IF EXISTS {dest_table} CASCADE; +CREATE VIEW {dest_table} AS +SELECT + {txt_columns} +FROM {from_table} t +{txt_joins}; +""" + + def process_raw_import_column(self, key): + """ """ + + sm = SchemaMethods(self.schema_code) + + if not sm.has_property(key): + return f"{key}" + + property = sm.property(key) + + # pour les nomenclature (on rajoute le type) + if nomenclature_type := property.get("nomenclature_type"): + return f"""CASE + WHEN {key} IS NOT NULL AND {key} NOT LIKE '%%|%%' THEN CONCAT('{nomenclature_type}|', {key}) + ELSE {key} + END AS {key}""" + + if property["type"] == "boolean": + return f"""CASE + WHEN {key}::text IN ('t', 'true') THEN TRUE + WHEN {key}::text IN ('f', 'false') THEN FALSE + ELSE NULL + END AS {key}""" + + if property["type"] == "geometry": + geometry_type = "ST_MULTI" if property["geometry_type"] == "multipolygon" else "" + return f"""{geometry_type}( + ST_SETSRID( + ST_FORCE2D( + ST_GEOMFROMEWKT({key}) + ), {sm.property(key).get('srid')} + ) + ) + AS {key}""" + + return f"{key}" + + def resolve_key( + self, schema_code, key, index=None, alias_main="t", alias_join_base="j", solved_keys={} + ): + """ + compliqué + crée le txt pour + le champs de la colonne qui doit contenir la clé + la ou les jointures nécessaire pour résoudre la clé + """ + + sm = SchemaMethods(schema_code) + + alias_join = alias_join_base if index is None else 
f"{alias_join_base}_{index}" + + txt_column = f"{alias_join}.{sm.pk_field_name()}" + + unique = sm.attr("meta.unique") + v_join = [] + + # resolution des cles si besoins + + # couf pour permttre de faire les liens entre les join quand il y en a plusieurs + link_joins = {} + for index_unique, k_unique in enumerate(unique): + var_key = self.var_key( + schema_code, key, k_unique, index_unique, link_joins, alias_main + ) + if sm.property(k_unique).get("foreign_key"): + if k_unique in solved_keys: + link_joins[k_unique] = solved_keys[k_unique] + else: + rel = SchemaMethods(sm.property(k_unique)["schema_code"]) + txt_column_join, v_join_inter = self.resolve_key( + rel.schema_code(), + var_key, + index=index_unique, + alias_main=alias_join, + alias_join_base=alias_join, + ) + v_join += v_join_inter + + link_joins[k_unique] = f"{alias_join}_{index_unique}.{rel.pk_field_name()}" + + # creation des joins avec les conditions + v_join_on = [] + + for index_unique, k_unique in enumerate(unique): + var_key = self.var_key( + schema_code, key, k_unique, index_unique, link_joins, alias_main + ) + # !!!(SELECT (NULL = NULL) => NULL) + cast = "::TEXT" # if var_type != main_type else '' + txt_join_on = ( + f"{alias_join}.{k_unique}{cast} = {var_key}{cast}" + if not sm.is_nullable(k_unique) or sm.is_required(k_unique) + else f"({alias_join}.{k_unique}{cast} = {var_key}{cast} OR ({alias_join}.{k_unique} IS NULL AND {var_key} IS NULL))" + # else f"({var_key} IS NOT NULL) AND ({alias_join}.{k_unique} = {var_key})" + ) + v_join_on.append(txt_join_on) + + txt_join_on = "\n AND ".join(v_join_on) + txt_join = f"LEFT JOIN {sm.sql_schema_dot_table()} {alias_join} ON\n {txt_join_on}" + + v_join.append(txt_join) + + return txt_column, v_join + + def var_key(self, schema_code, key, k_unique, index_unique, link_joins, alias_main): + """ + TODO à clarifier + """ + sm = SchemaMethods(schema_code) + + if key is None: + return f"{alias_main}.{k_unique}" + + if link_joins.get(k_unique): + return 
link_joins[k_unique] + + if "." in key: + return key + + if len(sm.attr("meta.unique", [])) <= 1: + return f"{alias_main}.{key}" + + return f"SPLIT_PART({alias_main}.{key}, '|', { index_unique + 1})" + + def process_column_import_view(self, index, key): + """ + process column for processed view + """ + sm = SchemaMethods(self.schema_code) + if not sm.has_property(key): + return key, [] + + property = sm.property(key) + + if property.get("foreign_key"): + return self.resolve_key(property["schema_code"], key, index) + + if property.get("relation_type") == "n-n": + return self.resolve_key(property["schema_code"], key, index) + + # txt_column, v_join = rel.resolve_key(key, index) + # return f"{txt_column.split('.')[0]}.{rel.pk_field_name()}", v_join + + return f"t.{key}", [] diff --git a/backend/gn_modulator/imports/mixins/raw.py b/backend/gn_modulator/imports/mixins/raw.py new file mode 100644 index 00000000..02684913 --- /dev/null +++ b/backend/gn_modulator/imports/mixins/raw.py @@ -0,0 +1,119 @@ +from gn_modulator import SchemaMethods +from .utils import ImportMixinUtils + + +class ImportMixinRaw(ImportMixinUtils): + def process_raw_view(self): + """ + creation de la vue d'import à partir de la table d'import + correction des null et association du bon typage + """ + + from_table = self.tables.get("mapping") or self.tables["data"] + dest_table = self.tables["raw"] = self.table_name("raw") + + self.sql["raw_view"] = self.sql_raw_view(from_table, dest_table) + + try: + SchemaMethods.c_sql_exec_txt(self.sql["raw_view"]) + except Exception as e: + self.add_error( + code="ERR_IMPORT_CREATE_RAW_VIEW", + msg=f"Erreur dans la creation de la vue 'raw': {str(e)}", + ) + + self.count_and_check_table("raw", dest_table) + + def sql_raw_view( + self, + from_table, + dest_table, + keys=None, + key_unnest=None, + limit=None, + ): + """ + - temporary_table : table ou sont stockées les données d'un csv + - raw_import_view : vue qui corrige les '' en NULL + Creation d'une vue d'import 
brute à partir d'une table accueillant des données d'un fichier csv + on passe les champs valant '' à NULL + """ + + sm = SchemaMethods(self.schema_code) + + from_table_columns = SchemaMethods.get_table_columns(from_table) + + columns = filter( + lambda x: ( + x in keys + if keys is not None + else not (sm.is_column(x) and sm.property(x).get("primary_key")) + ), + from_table_columns, + ) + + # on preprocess ttes les colonnes + v_txt_pre_process_columns = list( + map( + lambda x: self.pre_process_raw_import_columns(x, key_unnest=key_unnest), + from_table_columns, + ) + ) + + v_txt_columns = list(map(lambda x: self.process_raw_import_column(x), columns)) + + txt_primary_column = ( + f"""CONCAT({", '|', ".join(sm.attr('meta.unique'))}) AS {sm.pk_field_name()}""" + ) + v_txt_columns.insert(0, txt_primary_column) + + txt_columns = ",\n ".join(v_txt_columns) + txt_pre_process_columns = ",\n ".join(v_txt_pre_process_columns) + txt_limit = f"LIMIT {limit}" if limit else "" + + return f"""DROP VIEW IF EXISTS {dest_table} CASCADE; +CREATE VIEW {dest_table} AS +WITH pre_process AS ( +SELECT + {txt_pre_process_columns} +FROM {from_table} +{txt_limit} +) +SELECT + {txt_columns} +FROM pre_process; +""" + + def pre_process_raw_import_columns(self, key, key_unnest=None): + """ + TODO gérer les null dans l'import csv (ou dans l'insert) + """ + + sm = SchemaMethods(self.schema_code) + + if key == "id_import": + return key + + if key_unnest == key: + return f"UNNEST(STRING_TO_ARRAY({key}, ',')) AS {key}" + + if not sm.has_property(key): + return f"{key}" + + property = sm.property(key) + if property.get("foreign_key"): + return key + + if property["type"] == "number": + return f"CASE WHEN {key}::TEXT = '' THEN NULL ELSE {key}::FLOAT END AS {key}" + + if property["type"] == "date": + return f"CASE WHEN {key}::TEXT = '' THEN NULL ELSE {key}::DATE END AS {key}" + + if property["type"] == "datetime": + return f"CASE WHEN {key}::TEXT = '' THEN NULL ELSE {key}::TIMESTAMP END AS {key}" + + 
if property["type"] == "integer" and "schema_code" not in property: + return f"CASE WHEN {key}::TEXT = '' THEN NULL ELSE {key}::INTEGER END AS {key}" + + return f"CASE WHEN {key}::TEXT = '' THEN NULL ELSE {key} END AS {key}" diff --git a/backend/gn_modulator/imports/mixins/relation.py b/backend/gn_modulator/imports/mixins/relation.py new file mode 100644 index 00000000..2d340b34 --- /dev/null +++ b/backend/gn_modulator/imports/mixins/relation.py @@ -0,0 +1,127 @@ +from .utils import ImportMixinUtils +from .raw import ImportMixinRaw +from .insert import ImportMixinInsert +from .process import ImportMixinProcess +from gn_modulator import SchemaMethods + + +class ImportMixinRelation(ImportMixinInsert, ImportMixinProcess, ImportMixinRaw, ImportMixinUtils): + def process_relations(self): + from_table = self.tables.get("mapping") or self.tables["data"] + sm = SchemaMethods(self.schema_code) + + columns = SchemaMethods.get_table_columns(from_table) + + for index, key in enumerate(columns): + if not sm.is_relationship(key): + continue + property = sm.property(key) + + # on commence par les n-n + if property.get("relation_type") in ("n-n"): + self.import_relation_n_n(from_table, key) + + def import_relation_n_n(self, from_table, key): + sm = SchemaMethods(self.schema_code) + + self.sql[key] = {} + + property = sm.property(key) + cor_table = property["schema_dot_table"] + rel = SchemaMethods(property["schema_code"]) + + raw_delete_view = self.table_name("raw_delete", key) + process_delete_view = self.table_name("process_delete", key) + raw_import_view = self.table_name("raw", key) + process_import_view = self.table_name("process", key) + + # 0) clean + + SchemaMethods.c_sql_exec_txt(f"DROP VIEW IF EXISTS {process_delete_view}") + SchemaMethods.c_sql_exec_txt(f"DROP VIEW IF EXISTS {raw_delete_view}") + SchemaMethods.c_sql_exec_txt(f"DROP VIEW IF EXISTS {process_import_view}") + SchemaMethods.c_sql_exec_txt(f"DROP VIEW IF EXISTS {raw_import_view}") + + # 1) create 
raw_temp_table for n-n + self.sql[key]["raw_view"] = self.sql_raw_view( + from_table, raw_import_view, keys=[key], key_unnest=key + ) + try: + SchemaMethods.c_sql_exec_txt(self.sql[key]["raw_view"]) + except Exception as e: + self.add_error( + code="ERR_IMPORT_RELATION_CREATE_RAW_VIEW", + msg=f"Erreur dans la creation de la vue 'raw' pour {key}: {str(e)}", + ) + return + + self.sql[key]["process_view"] = self.sql_process_view( + raw_import_view, process_import_view, keys=[key] + ) + try: + SchemaMethods.c_sql_exec_txt(self.sql[key]["process_view"]) + except Exception as e: + self.add_error( + code="ERR_IMPORT_RELATION_CREATE_PROCESS_VIEW", + msg=f"Erreur dans la creation de la vue 'process' pour {key}: {str(e)}", + ) + return + + # 3) insert / update / delete ?? + + # - delete : tout depuis import_table + # create_view for delete + self.sql[key]["raw_delete_view"] = self.sql_raw_view( + from_table, raw_delete_view, keys=[key], key_unnest=key + ) + try: + SchemaMethods.c_sql_exec_txt(self.sql[key]["raw_delete_view"]) + except Exception as e: + self.add_error( + code="ERR_IMPORT_RELATION_CREATE_RAW_VIEW", + msg=f"Erreur dans la creation de la vue 'delete_raw' pour {key}: {str(e)}", + ) + return + + self.sql[key]["process_delete_view"] = self.sql_process_view( + raw_delete_view, process_delete_view, keys=[key] + ) + try: + SchemaMethods.c_sql_exec_txt(self.sql[key]["process_delete_view"]) + except Exception as e: + self.add_error( + code="ERR_IMPORT_RELATION_CREATE_PROCESS_DELETE_VIEW", + msg=f"Erreur dans la creation de la vue 'delete_process' pour {key}: {str(e)}", + ) + return + + self.sql[key][ + "delete" + ] = f""" +DELETE FROM {cor_table} t + USING {process_delete_view} j + WHERE t.{sm.pk_field_name()} = j.{sm.pk_field_name()}; +""" + try: + SchemaMethods.c_sql_exec_txt(self.sql[key]["delete"]) + except Exception as e: + self.add_error( + code="ERR_IMPORT_RELATION_DELETE", + msg=f"Erreur dans la suppression pour la relation {key}: {str(e)}", + ) + return + + # - 
insert + self.sql[key]["insert"] = self.sql_insert( + process_import_view, + keys=[sm.pk_field_name(), rel.pk_field_name()], + dest_table=cor_table, + ) + try: + SchemaMethods.c_sql_exec_txt(self.sql[key]["insert"]) + except Exception as e: + self.add_error( + code="ERR_IMPORT_RELATION_INSERT", + msg=f"Erreur dans l'insertion pour la relation {key}: {str(e)}", + ) + return diff --git a/backend/gn_modulator/imports/mixins/update.py b/backend/gn_modulator/imports/mixins/update.py new file mode 100644 index 00000000..3c52076c --- /dev/null +++ b/backend/gn_modulator/imports/mixins/update.py @@ -0,0 +1,113 @@ +from .utils import ImportMixinUtils +from gn_modulator import SchemaMethods + + +class ImportMixinUpdate(ImportMixinUtils): + def process_update(self): + from_table = self.tables["process"] + + self.sql["nb_update"] = self.sql_nb_update(from_table) + + try: + self.res["nb_update"] = SchemaMethods.c_sql_exec_txt(self.sql["nb_update"]).scalar() + except Exception as e: + self.add_error( + code="ERR_IMPORT_UPDATE_COUNT", + msg=f"Erreur lors du comptage du nombre d'update: {str(e)}", + ) + return + + if self.res["nb_update"] == 0: + return + + self.sql["update"] = self.sql_update(from_table) + + try: + SchemaMethods.c_sql_exec_txt(self.sql["update"]) + except Exception as e: + if isinstance(e, AttributeError): + raise e + self.add_error( + code="ERR_IMPORT_UPDATE", + msg=f"Erreur durant l'update de {from_table} vers {self.schema_code} : {str(e)}", + ) + + def sql_update(self, from_table): + sm = SchemaMethods(self.schema_code) + + columns = SchemaMethods.get_table_columns(from_table) + + v_column_keys = map( + lambda x: x, + filter(lambda x: sm.has_property(x) and sm.is_column(x), columns), + ) + + v_set_keys = list( + map( + lambda x: f"{x}=a.{x}", + filter( + lambda x: sm.has_property(x) + and sm.is_column(x) + and not sm.property(x).get("primary_key"), + columns, + ), + ) + ) + + v_update_condition = list( + map( + lambda x: f"(t.{x}::TEXT IS DISTINCT FROM 
a.{x}::TEXT)", + filter( + lambda x: sm.has_property(x) + and sm.is_column(x) + and not sm.property(x).get("primary_key"), + columns, + ), + ) + ) + + txt_set_keys = ",\n ".join(v_set_keys) + txt_columns_keys = ",\n ".join(v_column_keys) + txt_update_conditions = "NOT (" + "\n AND ".join(v_update_condition) + ")" + + return f""" +UPDATE {sm.sql_schema_dot_table()} t SET + {txt_set_keys} +FROM ( + SELECT + {txt_columns_keys} + FROM {from_table} +)a +WHERE a.{sm.pk_field_name()} = t.{sm.pk_field_name()} +AND {txt_update_conditions} +; +""" + + def sql_nb_update(self, from_table): + sm = SchemaMethods(self.schema_code) + + columns = SchemaMethods.get_table_columns(from_table) + + v_update_conditions = list( + map( + lambda x: f"(t.{x}::TEXT IS DISTINCT FROM a.{x}::TEXT)", + filter( + lambda x: sm.has_property(x) + and sm.is_column(x) + and not sm.property(x).get("primary_key"), + columns, + ), + ) + ) + + txt_update_conditions = "" + "\n OR ".join(v_update_conditions) + "" + + return f""" + SELECT + COUNT(*) + FROM {sm.sql_schema_dot_table()} t + JOIN {from_table} a + ON a.{sm.pk_field_name()} = t.{sm.pk_field_name()} + WHERE {txt_update_conditions} +; +""" diff --git a/backend/gn_modulator/imports/mixins/utils.py b/backend/gn_modulator/imports/mixins/utils.py new file mode 100644 index 00000000..e2562b91 --- /dev/null +++ b/backend/gn_modulator/imports/mixins/utils.py @@ -0,0 +1,56 @@ +from pathlib import Path + +from gn_modulator.schema import SchemaMethods +from gn_modulator.utils.env import schema_import + + +class ImportMixinUtils: + def pretty_infos(self): + print(self.res) + txt = "" + txt += f"\n-- import csv file {Path(self.data_file_path).name}" + txt += f" {self.res.get('nb_data')} lignes" + txt += f" - {self.schema_code}\n" + if self.res.get("nb_raw") != self.res.get("nb_process"): + txt += f" raw : {self.res.get('nb_raw'):10d}\n" + if self.res.get("nb_insert"): + txt += f" insert : {self.res['nb_insert']:10d}\n" + if self.res.get("nb_update"): + txt += f" 
update : {self.res['nb_update']:10d}\n" + if self.res.get("nb_unchanged"): + txt += f" unchanged : {self.res['nb_unchanged']:10d}\n" + + return txt + + def count_and_check_table(self, table_type, table_name): + try: + self.res[f"nb_{table_type}"] = SchemaMethods.c_sql_exec_txt( + f"SELECT COUNT(*) FROM {table_name}" + ).scalar() + + except Exception as e: + self.add_error( + code="ERR_IMPORT_COUNT_VIEW", + msg=f"Erreur avec la table/vue '{table_type}' {table_name}: {str(e)}", + ) + return + + if self.res[f"nb_{table_type}"] == 0: + self.add_error( + code="ERR_IMPORT_COUNT_VIEW", + msg=f"Erreur avec la table/vue '{table_type}' {table_name}: il n'y a n'a pas de données", + ) + + def table_name(self, type, key=None): + """ + nom de la table + """ + + if type == "data": + return f"{schema_import}.t_{self.id_import}_{type}" + else: + rel = f"_{key}" if key is not None else "" + return f"{schema_import}.v_{self.id_import}_{type}_{self.schema_code.replace('.', '_')}{rel}" + + def add_error(self, code=None, msg=None): + self.errors.append({"code": code, "msg": msg}) diff --git a/backend/gn_modulator/imports/models.py b/backend/gn_modulator/imports/models.py new file mode 100644 index 00000000..98fa4e4a --- /dev/null +++ b/backend/gn_modulator/imports/models.py @@ -0,0 +1,46 @@ +# modeles d'import +from geonature.utils.env import db +from sqlalchemy.dialects.postgresql import JSONB +from .mixins import ImportMixin + + +class TImport(db.Model, ImportMixin): + __tablename__ = "t_imports" + __table_args__ = {"schema": "gn_modulator"} + + def __init__(self, schema_code=None, data_file_path=None, mapping_file_path=None): + self.schema_code = schema_code + self.data_file_path = data_file_path and str(data_file_path) + self.mapping_file_path = mapping_file_path and str(mapping_file_path) + + self.res = {} + self.errors = [] + self.sql = {} + self.tables = {} + + _insert = False + _keep_raw = False + + id_import = db.Column(db.Integer, primary_key=True) + + schema_code = 
db.Column(db.Unicode) + + data_file_path = db.Column(db.Unicode) + mapping_file_path = db.Column(db.Unicode) + + csv_delimiter = db.Column(db.Unicode) + data_type = db.Column(db.Unicode) + + res = db.column(JSONB) + tables = db.column(JSONB) + sql = db.column(JSONB) + errors = db.Column(JSONB) + + def as_dict(self): + return { + "id_import": self.id_import, + "data_type": self.data_type, + "csv_delimiter": self.csv_delimiter, + "res": self.res, + "errors": self.errors, + } diff --git a/backend/gn_modulator/imports/routes.py b/backend/gn_modulator/imports/routes.py new file mode 100644 index 00000000..8f059a17 --- /dev/null +++ b/backend/gn_modulator/imports/routes.py @@ -0,0 +1,53 @@ +from flask import request, jsonify + +from geonature.core.gn_permissions.decorators import check_cruved_scope +from geonature.utils.env import db + +from gn_modulator.blueprint import blueprint +from gn_modulator.schema import SchemaMethods +from gn_modulator.module import ModuleMethods + + +from .utils.files import upload_import_file +from .models import TImport + + +@check_cruved_scope("R") # object import ?? 
+@blueprint.route("import/", methods=["POST"]) +def api_import(module_code): + object_code = None + if request.form: + object_code = request.form.get("object_code") + + schema_code = ModuleMethods.schema_code(module_code, object_code) + + if not schema_code: + return { + "errors": [ + { + "msg": f"Il n'y pas de schema pour module_code={module_code} et object_code={object_code}", + "code": "ERR_IMPORT_SCHEMA_CODE", + } + ] + } + + impt = TImport(schema_code=schema_code) + db.session.add(impt) + + files_path = {} + if request.files: + for file_key in request.files: + file = request.files.get(file_key) + files_path[file_key] = upload_import_file( + module_code, object_code, impt.id_import, file + ) + + impt.data_file_path = str(files_path.get("data_file")) + db.session.flush() + + impt.process_import_schema() + print(impt.pretty_infos()) + + db.session.commit() + + return impt.as_dict() diff --git a/backend/gn_modulator/imports/utils/files.py b/backend/gn_modulator/imports/utils/files.py new file mode 100644 index 00000000..13d7aba9 --- /dev/null +++ b/backend/gn_modulator/imports/utils/files.py @@ -0,0 +1,10 @@ +import pathlib +from geonature.core.gn_commons.file_manager import upload_file +from gn_modulator.utils.env import IMPORT_DIR + + +def upload_import_file(module_code, object_code, import_number, file): + IMPORT_DIR.mkdir(parents=True, exist_ok=True) + + file_name = f"{import_number}_{module_code}_{object_code}_{file.name}" + return pathlib.Path(upload_file(file, IMPORT_DIR, file_name)) diff --git a/backend/gn_modulator/tests/test_import.py b/backend/gn_modulator/tests/test_import.py index 2eefdf00..5fe6187a 100644 --- a/backend/gn_modulator/tests/test_import.py +++ b/backend/gn_modulator/tests/test_import.py @@ -13,10 +13,12 @@ def test_synthese(self): schema_code = "syn.synthese" data_file_path = import_test_dir / "synthese_1.csv" expected_infos = { - "nb_data": 2, - "nb_insert": 2, - "nb_update": 0, - "nb_unchanged": 0, + "res.nb_data": 2, + 
"res.nb_insert": 2, + "res.nb_update": 0, + "res.nb_unchanged": 0, + "data_type": "csv", + "csv_delimiter": ",", } test_data_file(schema_code, data_file_path, expected_infos=expected_infos) @@ -28,10 +30,10 @@ def test_synthese2(self): schema_code = "syn.synthese" data_file_path = import_test_dir / "synthese_1.csv" expected_infos = { - "nb_data": 2, - "nb_insert": 2, - "nb_update": 0, - "nb_unchanged": 0, + "res.nb_data": 2, + "res.nb_insert": 2, + "res.nb_update": 0, + "res.nb_unchanged": 0, } test_data_file(schema_code, data_file_path, expected_infos=expected_infos) @@ -42,17 +44,17 @@ def test_ref_geo_linear(self): schema_code = "ref_geo.linear_type" data_file_path = import_test_dir / "route/linear_type.csv" - expected_infos = {"nb_data": 1} + expected_infos = {"res.nb_data": 1} test_data_file(schema_code, data_file_path, expected_infos=expected_infos) schema_code = "ref_geo.linear_group" data_file_path = import_test_dir / "route/route.csv" pre_process_file_path = import_test_dir / "route/pp_linear_group.sql" expected_infos = { - "nb_data": 1, - "nb_insert": 1, - "nb_update": 0, - "nb_unchanged": 0, + "res.nb_data": 1, + "res.nb_insert": 1, + "res.nb_update": 0, + "res.nb_unchanged": 0, } test_data_file( schema_code, data_file_path, pre_process_file_path, expected_infos=expected_infos @@ -62,10 +64,10 @@ def test_ref_geo_linear(self): data_file_path = import_test_dir / "route/route.csv" pre_process_file_path = import_test_dir / "route/pp_linear.sql" expected_infos = { - "nb_data": 1, - "nb_insert": 1, - "nb_update": 0, - "nb_unchanged": 0, + "res.nb_data": 1, + "res.nb_insert": 1, + "res.nb_update": 0, + "res.nb_unchanged": 0, } test_data_file( schema_code, data_file_path, pre_process_file_path, expected_infos=expected_infos diff --git a/backend/gn_modulator/tests/utils/imports.py b/backend/gn_modulator/tests/utils/imports.py index 7858ecf8..1e5fd0b5 100644 --- a/backend/gn_modulator/tests/utils/imports.py +++ b/backend/gn_modulator/tests/utils/imports.py @@ 
-1,36 +1,27 @@ -from gn_modulator.schema import SchemaMethods +from gn_modulator.imports.models import TImport from gn_modulator.utils.commons import getAttr +from geonature.utils.env import db def test_data_file( - schema_code=None, data_file_path=None, pre_process_file_path=None, expected_infos={} + schema_code=None, data_file_path=None, mapping_file_path=None, expected_infos={} ): if not (schema_code and data_file_path): return - import_number = SchemaMethods.process_import_schema( - schema_code, - data_file_path, - pre_process_file_path=pre_process_file_path, - verbose=1, - insert=True, - ) - - import_infos = SchemaMethods.import_get_infos(import_number, schema_code) - - print( - { - "nb_data": import_infos.get("nb_data"), - "nb_insert": import_infos.get("nb_insert"), - "nb_update": import_infos.get("nb_update"), - "errors:": import_infos.get("errors"), - } - ) + with db.session.begin_nested(): + impt = TImport(schema_code, data_file_path, mapping_file_path) + db.session.add(impt) + assert impt.id_import is not None + + impt.process_import_schema() + + import_infos = impt.as_dict() errors = expected_infos.pop("errors", []) if len(errors) == 0: - assert len(import_infos["errors"]) == 0 + assert len(import_infos["errors"]) == 0, import_infos["errors"] else: assert len(errors) == len(import_infos("error")) for error in errors: @@ -38,7 +29,6 @@ def test_data_file( for key in expected_infos: txt_err = f"schema_code: {schema_code}, key: {key}, expected: {expected_infos.get(key)}, import: {getAttr(import_infos, key)}" - print(txt_err) assert getAttr(import_infos, key) == expected_infos.get(key), txt_err return import_infos diff --git a/config/modules/contrib/m_sipaf/layouts/m_sipaf.site_list.layout.yml b/config/modules/contrib/m_sipaf/layouts/m_sipaf.site_list.layout.yml index c6e58bc7..64558e2e 100644 --- a/config/modules/contrib/m_sipaf/layouts/m_sipaf.site_list.layout.yml +++ b/config/modules/contrib/m_sipaf/layouts/m_sipaf.site_list.layout.yml @@ -35,42 +35,42 @@ 
layout: description: Télécharger les passages à faune (les filtres sont appliqués) href: __f__o.url_export(x, 'm_sipaf.pf') hidden: __f__!o.is_action_allowed(x, 'E') - # - type: button - # flex: "0" - # icon: upload - # color: primary - # description: Importer des passage à faune - # action: - # type: modal - # modal_name: import - # hidden: __f__!o.is_action_allowed(x, 'C') - # - type: modal - # modal_name: import - # items: - # title: Importer des passage à faune - # type: form - # items: - # - items: - # - key: data_file - # type: file - # title: Fichier d'import - # description: Choisir un fichier à importer - # - key: object_code - # type: string - # default: __f__context.object_code - # - direction: row - # items: - # - type: button - # color: primary - # title: Valider - # description: Valider - # action: import - # disabled: __f__!(formGroup.valid ) - # - type: button - # color: primary - # title: Annuler - # description: Annuler - # action: close + - type: button + flex: "0" + icon: upload + color: primary + description: Importer des passage à faune + action: + type: modal + modal_name: import + hidden: __f__!o.is_action_allowed(x, 'C') + - type: modal + modal_name: import + items: + title: Importer des passage à faune + type: form + items: + - items: + - key: data_file + type: file + title: Fichier d'import + description: Choisir un fichier à importer + - key: object_code + type: string + default: __f__context.object_code + - direction: row + items: + - type: button + color: primary + title: Valider + description: Valider + action: import + disabled: __f__!(formGroup.valid ) + - type: button + color: primary + title: Annuler + description: Annuler + action: close flex: "0" - type: object diff --git a/doc/import.md b/doc/import.md index b1739ac1..30131bb5 100644 --- a/doc/import.md +++ b/doc/import.md @@ -40,7 +40,7 @@ SELECT END AS id_nomenclature_type_actor, nom_organism AS id_organism, NULL AS id_role - WHERE nom_organism IS NOT NULL AND nom_organism != 
'' +m WHERE nom_organism IS NOT NULL AND nom_organism != '' ; ``` From 6064ca7866218d28d790f9a026dc4cd3834266bf Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Wed, 8 Mar 2023 16:09:37 +0100 Subject: [PATCH 008/142] import errors on lines REQUIRED and UNRESOLVED --- .../gn_modulator/imports/mixins/__init__.py | 21 ++++ backend/gn_modulator/imports/mixins/check.py | 88 +++++++++++++++++ backend/gn_modulator/imports/mixins/data.py | 2 +- backend/gn_modulator/imports/mixins/insert.py | 16 +-- .../gn_modulator/imports/mixins/mapping.py | 15 ++- .../gn_modulator/imports/mixins/process.py | 1 + backend/gn_modulator/imports/mixins/raw.py | 6 ++ .../gn_modulator/imports/mixins/relation.py | 1 + backend/gn_modulator/imports/mixins/utils.py | 12 ++- backend/gn_modulator/imports/models.py | 6 +- backend/gn_modulator/imports/routes.py | 15 ++- .../tests/import_test/route/pf.csv | 2 +- .../tests/import_test/route/pp_linear.sql | 1 + .../import_test/route/pp_linear_group.sql | 7 +- .../tests/pf_test_erreurs copy.csv | 4 + .../tests/import_test/tests/pf_test_ok.csv | 4 + backend/gn_modulator/tests/test_import_api.py | 26 ++++- .../layouts/m_sipaf.site_list.layout.yml | 10 ++ frontend/app/components/base/base.scss | 6 ++ .../layout/base/layout.component.html | 7 +- .../layout/form/form-element.component.ts | 1 + frontend/app/services/action.service.ts | 97 +++++++++++++++++-- frontend/app/services/form.service.ts | 12 ++- 23 files changed, 319 insertions(+), 41 deletions(-) create mode 100644 backend/gn_modulator/imports/mixins/check.py create mode 100644 backend/gn_modulator/tests/import_test/tests/pf_test_erreurs copy.csv create mode 100644 backend/gn_modulator/tests/import_test/tests/pf_test_ok.csv diff --git a/backend/gn_modulator/imports/mixins/__init__.py b/backend/gn_modulator/imports/mixins/__init__.py index 6c88be91..4d55b4e4 100644 --- a/backend/gn_modulator/imports/mixins/__init__.py +++ b/backend/gn_modulator/imports/mixins/__init__.py @@ -1,3 +1,6 @@ +from 
geonature.utils.env import db + +from .check import ImportMixinCheck from .data import ImportMixinData from .insert import ImportMixinInsert from .mapping import ImportMixinMapping @@ -10,6 +13,7 @@ class ImportMixin( ImportMixinRelation, + ImportMixinCheck, ImportMixinData, ImportMixinInsert, ImportMixinMapping, @@ -21,33 +25,50 @@ class ImportMixin( def process_import_schema(self, _insert_data=False): self._insert_data = _insert_data + self.init_import() + if self.errors: + return self + db.session.flush() + self.process_data_table() if self.errors: return self + db.session.flush() self.process_mapping_view() if self.errors: return self + db.session.flush() self.process_raw_view() if self.errors: return self + db.session.flush() self.process_view() if self.errors: return self + db.session.flush() + + self.process_check() + if self.errors: + return self + db.session.flush() self.process_insert() if self.errors: return self + db.session.flush() self.process_update() if self.errors: return self + db.session.flush() self.process_relations() if self.errors: return self + db.session.flush() self.res["nb_unchanged"] = ( self.res["nb_process"] - self.res["nb_insert"] - self.res["nb_update"] diff --git a/backend/gn_modulator/imports/mixins/check.py b/backend/gn_modulator/imports/mixins/check.py new file mode 100644 index 00000000..6375c06d --- /dev/null +++ b/backend/gn_modulator/imports/mixins/check.py @@ -0,0 +1,88 @@ +from gn_modulator import SchemaMethods +from pypnnomenclature.repository import get_nomenclature_list + +from .utils import ImportMixinUtils + + +class ImportMixinCheck(ImportMixinUtils): + def process_check(self): + self.check_required() + self.check_resolve_keys() + + def check_required(self): + # pour toutes les colonnes de raw + # si une colonne est requise + # et que la valeur dans raw est nulle + # erreur + raw_table = self.tables["raw"] + sm = SchemaMethods(self.schema_code) + + for key in SchemaMethods.get_table_columns(raw_table): + if not 
sm.is_required(key): + continue + + txt_check_required = f""" +SELECT + COUNT(*), ARRAY_AGG(id_import) + FROM {raw_table} + WHERE {key} is NULL +""" + + res = SchemaMethods.c_sql_exec_txt(txt_check_required).fetchone() + nb_lines = res[0] + lines = res[1] + str_lines = lines and ", ".join(map(lambda x: str(x), lines)) or "" + if nb_lines == 0: + continue + self.add_error( + code="ERR_IMPORT_REQUIRED", + key=key, + lines=lines, + msg=f"La colonne {key} est obligatoire. {nb_lines} ligne(s) concernée(s) : [{str_lines}]", + ) + + return + + def check_resolve_keys(self): + raw_table = self.tables["raw"] + process_table = self.tables["process"] + sm = SchemaMethods(self.schema_code) + + for key in SchemaMethods.get_table_columns(raw_table): + if not (sm.has_property(key) and sm.property(key).get("foreign_key")): + continue + + txt_check_resolve_keys = f""" +SELECT COUNT(*), ARRAY_AGG(r.id_import) +FROM {raw_table} r +JOIN {process_table} p + ON r.id_import = p.id_import +WHERE + p.{key} is NULL and r.{key} is NOT NULL + """ + + res = SchemaMethods.c_sql_exec_txt(txt_check_resolve_keys).fetchone() + nb_lines = res[0] + lines = res[1] + str_lines = lines and ", ".join(map(lambda x: str(x), lines)) or "" + if nb_lines == 0: + continue + + values = None + if code_type := sm.property(key).get("nomenclature_type"): + values = list( + map( + lambda x: { + "cd_nomenclature": x["cd_nomenclature"], + "label_fr": x["label_fr"], + }, + get_nomenclature_list(code_type=code_type)["values"], + ) + ) + self.add_error( + code="ERR_IMPORT_UNRESOLVED", + key=key, + lines=lines, + msg=f"La colonne {key} est non nulle et n'a pas de correspondance. 
{nb_lines} ligne(s) concernée(s) : [{str_lines}]", + values=values, + ) diff --git a/backend/gn_modulator/imports/mixins/data.py b/backend/gn_modulator/imports/mixins/data.py index 15c0c4bf..9f3b46c9 100644 --- a/backend/gn_modulator/imports/mixins/data.py +++ b/backend/gn_modulator/imports/mixins/data.py @@ -75,7 +75,7 @@ def copy_csv_data(self, f, dest_table, table_columns): columns_fields = ", ".join(table_columns) self.sql[ "data_copy_csv" - ] = f"""COPY {dest_table}({columns_fields}) FROM STDIN DELIMITER '{self.csv_delimiter}' QUOTE '"' CSV""" + ] = f"""COPY {dest_table}({columns_fields}) FROM STDIN DELIMITER '{self.csv_delimiter}' QUOTE '"' CSV """ try: cursor = db.session.connection().connection.cursor() cursor.copy_expert(sql=self.sql["data_copy_csv"], file=f) diff --git a/backend/gn_modulator/imports/mixins/insert.py b/backend/gn_modulator/imports/mixins/insert.py index 07c05e93..dc50ef6a 100644 --- a/backend/gn_modulator/imports/mixins/insert.py +++ b/backend/gn_modulator/imports/mixins/insert.py @@ -40,13 +40,15 @@ def sql_insert(self, from_table, dest_table=None, keys=None): table_name = dest_table or sm.sql_schema_dot_table() - columns_select = filter( - lambda x: ( - x in keys - if keys is not None - else not (sm.is_column(x) and sm.property(x).get("primary_key")) - ), - SchemaMethods.get_table_columns(from_table), + columns_select = list( + filter( + lambda x: ( + x in keys + if keys is not None + else sm.is_column(x) and not (sm.property(x).get("primary_key")) + ), + SchemaMethods.get_table_columns(from_table), + ) ) v_column_select_keys = map(lambda x: x, columns_select) diff --git a/backend/gn_modulator/imports/mixins/mapping.py b/backend/gn_modulator/imports/mixins/mapping.py index 9abd26a7..48083392 100644 --- a/backend/gn_modulator/imports/mixins/mapping.py +++ b/backend/gn_modulator/imports/mixins/mapping.py @@ -22,12 +22,14 @@ def process_mapping_view(self): return self.sql["mapping_view"] = self.sql_mapping() + if self.errors: + return try: 
SchemaMethods.c_sql_exec_txt(self.sql["mapping_view"]) except Exception as e: self.add_error( - code="ERR_IMPORT_PRE_PROCESS_CREATE_VIEW", + code="ERR_IMPORT_MAPPING_CREATE_VIEW", msg=f"La vue de preprocess n'a pas être crée : {str(e)}", ) return @@ -55,7 +57,7 @@ def sql_mapping(self): if forbidden_words: self.add_error( - code="ERR_IMPORT_PRE_PROCESS_FORBIDEN_WORD", + code="ERR_IMPORT_MAPPING_FORBIDEN_WORD", msg=f"Le fichier de preprocess {self.mapping_file_path} contient le ou les mots interdits {', '.join(forbidden_word)}", ) return @@ -63,13 +65,20 @@ def sql_mapping(self): for word in ["WHERE", "ORDER BY", "LIMIT"]: if word in mapping_select: mapping_select = mapping_select.replace( - f"{word}", "\nFROM {from_table}\n{word}" + f"{word}", f"\nFROM {from_table}\n{word}" ) break if "FROM" not in mapping_select: mapping_select += f"\nFROM {from_table}" + if "ID_IMPORT" not in mapping_select: + self.add_error( + code="ERR_IMPORT_MAPPING_MISSING_IMPORT", + msg=f"La selection de mapping doit contenir le champs id_import dans {self.mapping_file_path}", + ) + return + sql_mapping = f""" DROP VIEW IF EXISTS {dest_table}; CREATE VIEW {dest_table} AS diff --git a/backend/gn_modulator/imports/mixins/process.py b/backend/gn_modulator/imports/mixins/process.py index e8da6db7..a0af32a0 100644 --- a/backend/gn_modulator/imports/mixins/process.py +++ b/backend/gn_modulator/imports/mixins/process.py @@ -69,6 +69,7 @@ def sql_process_view(self, from_table, dest_table, keys=None): return f"""DROP VIEW IF EXISTS {dest_table} CASCADE; CREATE VIEW {dest_table} AS SELECT + id_import, {txt_columns} FROM {from_table} t {txt_joins}; diff --git a/backend/gn_modulator/imports/mixins/raw.py b/backend/gn_modulator/imports/mixins/raw.py index 02684913..4b4a5f1c 100644 --- a/backend/gn_modulator/imports/mixins/raw.py +++ b/backend/gn_modulator/imports/mixins/raw.py @@ -71,6 +71,12 @@ def sql_raw_view( txt_pre_process_columns = ",\n ".join(v_txt_pre_process_columns) txt_limit = f"LIMIT 
{limit}" if limit else "" + if "id_import" not in txt_pre_process_columns: + txt_pre_process_columns = f"id_import, {txt_pre_process_columns}" + + if "id_import" not in txt_columns: + txt_columns = f"id_import, {txt_columns}" + return f"""DROP VIEW IF EXISTS {dest_table} CASCADE; CREATE VIEW {dest_table} AS WITH pre_process AS ( diff --git a/backend/gn_modulator/imports/mixins/relation.py b/backend/gn_modulator/imports/mixins/relation.py index 2d340b34..0eeee907 100644 --- a/backend/gn_modulator/imports/mixins/relation.py +++ b/backend/gn_modulator/imports/mixins/relation.py @@ -64,6 +64,7 @@ def import_relation_n_n(self, from_table, key): self.add_error( code="ERR_IMPORT_RELATION_CREATE_PROCESS_VIEW", msg=f"Erreur dans la creation de la vue 'process' pour {key}: {str(e)}", + key=key, ) return diff --git a/backend/gn_modulator/imports/mixins/utils.py b/backend/gn_modulator/imports/mixins/utils.py index e2562b91..8350dba1 100644 --- a/backend/gn_modulator/imports/mixins/utils.py +++ b/backend/gn_modulator/imports/mixins/utils.py @@ -5,6 +5,9 @@ class ImportMixinUtils: + def init_import(self): + SchemaMethods.c_sql_exec_txt(f"CREATE SCHEMA IF NOT EXISTS {schema_import}") + def pretty_infos(self): print(self.res) txt = "" @@ -23,6 +26,9 @@ def pretty_infos(self): return txt def count_and_check_table(self, table_type, table_name): + if self.errors: + return + try: self.res[f"nb_{table_type}"] = SchemaMethods.c_sql_exec_txt( f"SELECT COUNT(*) FROM {table_name}" @@ -52,5 +58,7 @@ def table_name(self, type, key=None): rel = f"_{key}" if key is not None else "" return f"{schema_import}.v_{self.id_import}_{type}_{self.schema_code.replace('.', '_')}{rel}" - def add_error(self, code=None, msg=None): - self.errors.append({"code": code, "msg": msg}) + def add_error(self, code=None, msg=None, key=None, lines=None, values=None): + self.errors.append( + {"code": code, "msg": msg, "key": key, "lines": lines, "values": values} + ) diff --git a/backend/gn_modulator/imports/models.py 
b/backend/gn_modulator/imports/models.py index 98fa4e4a..9387989d 100644 --- a/backend/gn_modulator/imports/models.py +++ b/backend/gn_modulator/imports/models.py @@ -31,9 +31,9 @@ def __init__(self, schema_code=None, data_file_path=None, mapping_file_path=None csv_delimiter = db.Column(db.Unicode) data_type = db.Column(db.Unicode) - res = db.column(JSONB) - tables = db.column(JSONB) - sql = db.column(JSONB) + res = db.Column(JSONB) + tables = db.Column(JSONB) + sql = db.Column(JSONB) errors = db.Column(JSONB) def as_dict(self): diff --git a/backend/gn_modulator/imports/routes.py b/backend/gn_modulator/imports/routes.py index 8f059a17..abb41052 100644 --- a/backend/gn_modulator/imports/routes.py +++ b/backend/gn_modulator/imports/routes.py @@ -33,6 +33,7 @@ def api_import(module_code): impt = TImport(schema_code=schema_code) db.session.add(impt) + db.session.flush() files_path = {} if request.files: @@ -42,12 +43,16 @@ def api_import(module_code): module_code, object_code, impt.id_import, file ) - impt.data_file_path = str(files_path.get("data_file")) - db.session.flush() + impt.data_file_path = files_path.get("data_file") and str(files_path.get("data_file")) impt.process_import_schema() - print(impt.pretty_infos()) - db.session.commit() + if impt.errors: + out = {"errors": impt.errors} + db.session.commit() + return out - return impt.as_dict() + out = impt.as_dict() + + db.session.commit() + return out diff --git a/backend/gn_modulator/tests/import_test/route/pf.csv b/backend/gn_modulator/tests/import_test/route/pf.csv index 4438291e..c3281591 100644 --- a/backend/gn_modulator/tests/import_test/route/pf.csv +++ b/backend/gn_modulator/tests/import_test/route/pf.csv @@ -1,3 +1,3 @@ code_passage_faune, geom TEST04, POINT (43.676265 4.028108) -TEST05, POINT (43.676265 4.028108) \ No newline at end of file +TEST05, POINT (43.676262 4.028108) \ No newline at end of file diff --git a/backend/gn_modulator/tests/import_test/route/pp_linear.sql 
b/backend/gn_modulator/tests/import_test/route/pp_linear.sql index e5b66b9f..d4eaedf6 100644 --- a/backend/gn_modulator/tests/import_test/route/pp_linear.sql +++ b/backend/gn_modulator/tests/import_test/route/pp_linear.sql @@ -1,4 +1,5 @@ SELECT + id_import, 'RTE' AS id_type, id AS linear_code, numero || '_' || substring(id, 9) :: bigint AS linear_name, diff --git a/backend/gn_modulator/tests/import_test/route/pp_linear_group.sql b/backend/gn_modulator/tests/import_test/route/pp_linear_group.sql index 8ef52ba8..fe117403 100644 --- a/backend/gn_modulator/tests/import_test/route/pp_linear_group.sql +++ b/backend/gn_modulator/tests/import_test/route/pp_linear_group.sql @@ -1,4 +1,7 @@ SELECT - DISTINCT 'RTE' AS id_type, + DISTINCT ON (id_import) + id_import, + 'RTE' AS id_type, numero AS code, - cl_admin || ' ' || numero AS name \ No newline at end of file + cl_admin || ' ' || numero AS name + ORDER BY id_import \ No newline at end of file diff --git a/backend/gn_modulator/tests/import_test/tests/pf_test_erreurs copy.csv b/backend/gn_modulator/tests/import_test/tests/pf_test_erreurs copy.csv new file mode 100644 index 00000000..ff809071 --- /dev/null +++ b/backend/gn_modulator/tests/import_test/tests/pf_test_erreurs copy.csv @@ -0,0 +1,4 @@ +code_passage_faune, geom, id_nomenclature_ouvrage_hydrau_position +TEST07, POINT (43.676265 4.028108), RD +, POINT (43.676262 4.028108), +TEST08,, \ No newline at end of file diff --git a/backend/gn_modulator/tests/import_test/tests/pf_test_ok.csv b/backend/gn_modulator/tests/import_test/tests/pf_test_ok.csv new file mode 100644 index 00000000..816973e6 --- /dev/null +++ b/backend/gn_modulator/tests/import_test/tests/pf_test_ok.csv @@ -0,0 +1,4 @@ +code_passage_faune, geom, id_nomenclature_ouvrage_hydrau_position +_TEST01, POINT (43.676265 4.028108),RD +_TEST02, POINT (43.676262 4.028108), +_TEST023, POINT (43.676262 4.028108),RG \ No newline at end of file diff --git a/backend/gn_modulator/tests/test_import_api.py 
b/backend/gn_modulator/tests/test_import_api.py index 15d79dac..f382b920 100644 --- a/backend/gn_modulator/tests/test_import_api.py +++ b/backend/gn_modulator/tests/test_import_api.py @@ -12,11 +12,33 @@ class TestImportApi: def test_import_synthese(self, users): set_logged_user_cookie(self.client, users["admin_user"]) with open(import_test_dir / "synthese_1.csv", "rb") as f: - data = {"file": (f, "synthese.csv"), "object_code": "syn.synthese"} + data = {"data_file": (f, "synthese.csv"), "object_code": "syn.synthese"} r = self.client.post( url_for("modulator.api_import", module_code="MODULATOR"), data=data, headers=Headers({"Content-Type": "multipart/form-data"}), ) - print(r.data) + assert r.status_code == 200, r.data + + assert len(r.json["errors"]) == 0, r.json["errors"] + + assert r.json["res"]["nb_data"] == 2 + assert r.json["res"]["nb_insert"] == 2 + + def test_import_synthese2(self, users): + set_logged_user_cookie(self.client, users["admin_user"]) + with open(import_test_dir / "synthese_1.csv", "rb") as f: + data = {"data_file": (f, "synthese.csv"), "object_code": "syn.synthese"} + r = self.client.post( + url_for("modulator.api_import", module_code="MODULATOR"), + data=data, + headers=Headers({"Content-Type": "multipart/form-data"}), + ) + + assert r.status_code == 200, r.data + + assert len(r.json["errors"]) == 0, r.json["errors"] + + assert r.json["res"]["nb_data"] == 2 + assert r.json["res"]["nb_insert"] == 2 diff --git a/config/modules/contrib/m_sipaf/layouts/m_sipaf.site_list.layout.yml b/config/modules/contrib/m_sipaf/layouts/m_sipaf.site_list.layout.yml index 64558e2e..4912b796 100644 --- a/config/modules/contrib/m_sipaf/layouts/m_sipaf.site_list.layout.yml +++ b/config/modules/contrib/m_sipaf/layouts/m_sipaf.site_list.layout.yml @@ -51,13 +51,23 @@ layout: type: form items: - items: + - type: message + html: __f__data.importMsg?.html + class: __f__data.importMsg?.class + hidden: __f__!data.importMsg + + - key: importMsg + hidden: true - key: 
data_file type: file title: Fichier d'import + required: true description: Choisir un fichier à importer - key: object_code type: string + hidden: true default: __f__context.object_code + - direction: row items: - type: button diff --git a/frontend/app/components/base/base.scss b/frontend/app/components/base/base.scss index c722c1e4..a7b12019 100644 --- a/frontend/app/components/base/base.scss +++ b/frontend/app/components/base/base.scss @@ -178,6 +178,12 @@ div.layout-items > div { background-color: lightgreen; } +.layout-message.success { + color: darkgreen; + background-color: lightgreen; +} + + .layout-message.error { color: darkred; background-color: lightcoral; diff --git a/frontend/app/components/layout/base/layout.component.html b/frontend/app/components/layout/base/layout.component.html index 2cb8561d..10d846d4 100644 --- a/frontend/app/components/layout/base/layout.component.html +++ b/frontend/app/components/layout/base/layout.component.html @@ -142,9 +142,10 @@
this._mLayout.reComputeLayout(), 100); + // this._commonService.regularToaster('success', txtImport); } }, (error: HttpErrorResponse) => { @@ -166,4 +175,72 @@ export class ModulesActionService { } ); } + + importHTMLMsgSuccess(impt) { + let txtImport = `
Import réussi
`; + let res = impt.res; + + if (res.nb_data) { + txtImport += `data: ${res.nb_data}
`; + } + + if (res.nb_raw != res.nb_data) { + txtImport += `raw: ${res.nb_raw}
`; + } + + if (res.nb_insert) { + txtImport += `insert: ${res.nb_insert}
`; + } + + if (res.nb_update) { + txtImport += `update: ${res.nb_update}
`; + } + + if (res.nb_unchanged) { + txtImport += `unchanged: ${res.nb_unchanged}
`; + } + return txtImport; + } + + importHTMLMsgError(impt) { + let txtImport = `

${impt.errors.length} erreurs

`; + + let txtErrorRequired; + for (let error of impt.errors.filter((e) => e.code == 'ERR_IMPORT_REQUIRED')) { + if (!txtErrorRequired) { + txtErrorRequired = `
Champs requis manquants
`; + } + txtErrorRequired += `${error.key} ${error.lines.length} ligne(s): [${error.lines}]
`; + } + if (txtErrorRequired) { + txtImport += '
'; + txtImport += txtErrorRequired; + } + + let txtErrorUnresolved; + for (let error of impt.errors.filter((e) => e.code == 'ERR_IMPORT_UNRESOLVED')) { + if (!txtErrorUnresolved) { + txtErrorUnresolved = `
Champs non résolus
`; + } + txtErrorUnresolved += `${error.key} ${error.lines.length} ligne(s): [${error.lines}]
`; + if (error.values) { + txtErrorUnresolved += `Valeurs parmi : ${error.values + .map((v) => v.cd_nomenclature) + .join(', ')}
`; + } + } + if (txtErrorUnresolved) { + txtImport += '
'; + txtImport += txtErrorUnresolved; + } + + for (let error of impt.errors.filter( + (e) => !['ERR_IMPORT_REQUIRED', 'ERR_IMPORT_UNRESOLVED'].includes(e.code) + )) { + txtImport += '
'; + txtImport += `${error.code}: ${error.msg}`; + } + + return txtImport; + } } diff --git a/frontend/app/services/form.service.ts b/frontend/app/services/form.service.ts index 386d7f51..ca786086 100644 --- a/frontend/app/services/form.service.ts +++ b/frontend/app/services/form.service.ts @@ -280,10 +280,14 @@ export class ModulesFormService { /** pour mettre à jour les données sans casser les références */ updateData(data, formValue) { - if (utils.fastDeepEqual(data, formValue)) { + if (this.isEqual(formValue, data)) { return data; } + if (utils.isFile(formValue)) { + return formValue; + } + if (utils.isObject(formValue)) { if (data == null) { return formValue; @@ -318,7 +322,11 @@ export class ModulesFormService { } isEqual(formValue, data) { - return utils.isObject(formValue) + return utils.isFile(formValue) + ? utils.isFile(data) + ? ['name', 'lastModified', 'size', 'type'].every((k) => data[k] == formValue[k]) + : false + : utils.isObject(formValue) ? !utils.isObject(data) ? false : Object.entries(formValue) From eed69e2a70e9d109db35f737d7c776dc71587457 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Wed, 8 Mar 2023 16:21:46 +0100 Subject: [PATCH 009/142] migration import --- .gitignore | 2 +- .../3920371728d8_gn_modulator_import_init.py | 48 +++++++++++++++++++ 2 files changed, 49 insertions(+), 1 deletion(-) create mode 100644 backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py diff --git a/.gitignore b/.gitignore index df5d7890..c4101487 100644 --- a/.gitignore +++ b/.gitignore @@ -11,7 +11,7 @@ frontend/app/module.config.ts #!migrations/data/*.sql backend/gn_modulator/migrations/versions/* -!backend/gn_modulator/migrations/versions/*_gn_modulator.py +!backend/gn_modulator/migrations/versions/*gn_modulator*.py !.gitkeep diff --git a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py new file mode 100644 index 
00000000..9a920659 --- /dev/null +++ b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py @@ -0,0 +1,48 @@ +"""gn_modulator import init + +Revision ID: 3920371728d8 +Revises: d3f266c7b1b6 +Create Date: 2023-03-03 14:31:35.339631 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "3920371728d8" +down_revision = "d3f266c7b1b6" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ +CREATE TABLE gn_modulator.t_imports( + id_import SERIAL NOT NULL, + schema_code VARCHAR, + data_file_path VARCHAR, + mapping_file_path VARCHAR, + csv_delimiter VARCHAR, + data_type VARCHAR, + res JSONB, + tables JSONB, + errors JSONB, + sql JSONB +); + +ALTER TABLE gn_modulator.t_imports + ADD CONSTRAINT pk_gn_modulator_t_imports_id_import PRIMARY KEY (id_import); + """ + ) + pass + + +def downgrade(): + op.execute( + """ + DROP TABLE gn_modulator.t_imports; + """ + ) + pass From d1b7fb401679f3119beb93ac5b9fd2839a329873 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 9 Mar 2023 09:18:18 +0100 Subject: [PATCH 010/142] mapping :table_data --- backend/gn_modulator/imports/mixins/mapping.py | 16 ++++++---------- .../tests/import_test/route/pp_linear.sql | 3 ++- .../tests/import_test/route/pp_linear_group.sql | 3 ++- .../m_sipaf/imports/scripts/ppi_actor.sql | 1 + .../m_sipaf/imports/scripts/ppi_actor_V1.sql | 1 + .../imports/scripts/ppi_groupe_route_na.sql | 4 +++- .../m_sipaf/imports/scripts/ppi_organism.sql | 5 +++-- .../m_sipaf/imports/scripts/ppi_organism_V1.sql | 5 +++-- .../m_sipaf/imports/scripts/ppi_pf_V1.sql | 1 + .../imports/scripts/ppi_troncon_route_na.sql | 4 +++- 10 files changed, 25 insertions(+), 18 deletions(-) diff --git a/backend/gn_modulator/imports/mixins/mapping.py b/backend/gn_modulator/imports/mixins/mapping.py index 48083392..39c56ffe 100644 --- a/backend/gn_modulator/imports/mixins/mapping.py +++ 
b/backend/gn_modulator/imports/mixins/mapping.py @@ -60,24 +60,20 @@ def sql_mapping(self): code="ERR_IMPORT_MAPPING_FORBIDEN_WORD", msg=f"Le fichier de preprocess {self.mapping_file_path} contient le ou les mots interdits {', '.join(forbidden_word)}", ) - return - for word in ["WHERE", "ORDER BY", "LIMIT"]: - if word in mapping_select: - mapping_select = mapping_select.replace( - f"{word}", f"\nFROM {from_table}\n{word}" - ) - break + if ":TABLE_DATA" not in mapping_select: + self.add_error( + code="ERR_IMPORT_MAPPING_MISSING_TABLE", + msg="La selection de mapping doit contenir 'FROM :table_data", + ) - if "FROM" not in mapping_select: - mapping_select += f"\nFROM {from_table}" + mapping_select = mapping_select.replace(":TABLE_DATA", from_table) if "ID_IMPORT" not in mapping_select: self.add_error( code="ERR_IMPORT_MAPPING_MISSING_IMPORT", msg=f"La selection de mapping doit contenir le champs id_import dans {self.mapping_file_path}", ) - return sql_mapping = f""" DROP VIEW IF EXISTS {dest_table}; diff --git a/backend/gn_modulator/tests/import_test/route/pp_linear.sql b/backend/gn_modulator/tests/import_test/route/pp_linear.sql index d4eaedf6..0daaf31c 100644 --- a/backend/gn_modulator/tests/import_test/route/pp_linear.sql +++ b/backend/gn_modulator/tests/import_test/route/pp_linear.sql @@ -6,4 +6,5 @@ SELECT wkt as geom, true as enable, 'https://geoservices.ign.fr/bdtopo#telechargementshpreg' AS source, - numero as groups -- n-n ++ \ No newline at end of file + numero as groups -- n-n ++ + FROM :table_data \ No newline at end of file diff --git a/backend/gn_modulator/tests/import_test/route/pp_linear_group.sql b/backend/gn_modulator/tests/import_test/route/pp_linear_group.sql index fe117403..2ca16629 100644 --- a/backend/gn_modulator/tests/import_test/route/pp_linear_group.sql +++ b/backend/gn_modulator/tests/import_test/route/pp_linear_group.sql @@ -4,4 +4,5 @@ SELECT 'RTE' AS id_type, numero AS code, cl_admin || ' ' || numero AS name - ORDER BY id_import \ No 
newline at end of file + FROM :table_data + ORDER BY id_import diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor.sql index 8d0826d4..ca0a2dc4 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor.sql @@ -9,6 +9,7 @@ SELECT END AS id_nomenclature_type_actor, nom_organism AS id_organism, NULL AS id_role + FROM :table_data WHERE nom_organism IS NOT NULL AND nom_organism != '' ; diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor_V1.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor_V1.sql index 0a52176f..713fed0c 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor_V1.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor_V1.sql @@ -3,6 +3,7 @@ SELECT 'CON' AS id_nomenclature_type_actor, concess AS id_organism, NULL AS id_role + FROM :table_data WHERE concess IS NOT NULL AND concess != '' ; diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_groupe_route_na.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_groupe_route_na.sql index 1eed5d7c..2a25e3fc 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_groupe_route_na.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_groupe_route_na.sql @@ -1,6 +1,8 @@ DROP VIEW IF EXISTS :pre_processed_import_view CASCADE; CREATE VIEW :pre_processed_import_view AS -SELECT DISTINCT +SELECT DISTINCT ON(id_import) 'RTE' AS id_type, numero AS code, cl_admin || ' ' || numero AS name + FROM :table_data + ORDER BY id_import \ No newline at end of file diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism.sql index 112ad275..57d5ce2f 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism.sql @@ -1,5 +1,6 @@ 
-SELECT DISTINCT +SELECT DISTINCT ON (id_import) nom_organism AS nom_organisme, 'SIPAF' AS adresse_organisme WHERE nom_organism IS NOT NULL AND nom_organism != '' - ORDER BY nom_organism + FROM :table_data + ORDER BY id_import, nom_organism diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism_V1.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism_V1.sql index a6267ec9..e0423a4b 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism_V1.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism_V1.sql @@ -1,5 +1,6 @@ -SELECT DISTINCT +SELECT DISTINCT ON (id_import) concess AS nom_organisme, 'SIPAF' AS adresse_organisme WHERE concess IS NOT NULL AND concess != '' - ORDER BY concess + FROM :table_data + ORDER BY id_import, concess diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_pf_V1.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_pf_V1.sql index 3ab0ac68..2d373ed4 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_pf_V1.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_pf_V1.sql @@ -1,6 +1,7 @@ -- import V1 -- (sans les données spécificité, matériaux, et ouvrage_type) select + id_import, uuid_pf as code_passage_faune, CASE WHEN pi_ou_ps = 'PI' THEN FALSE diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_troncon_route_na.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_troncon_route_na.sql index e5b66b9f..0daaf31c 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_troncon_route_na.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_troncon_route_na.sql @@ -1,8 +1,10 @@ SELECT + id_import, 'RTE' AS id_type, id AS linear_code, numero || '_' || substring(id, 9) :: bigint AS linear_name, wkt as geom, true as enable, 'https://geoservices.ign.fr/bdtopo#telechargementshpreg' AS source, - numero as groups -- n-n ++ \ No newline at end of file + numero as groups -- n-n ++ + FROM :table_data \ No newline at end of file From 
4753b693a27f909267fd6ab69aeb08b2a2e8126a Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 9 Mar 2023 13:18:48 +0100 Subject: [PATCH 011/142] process import code --- .../gn_modulator/imports/mixins/__init__.py | 50 ++++++++++++++++++- backend/gn_modulator/imports/models.py | 5 +- backend/gn_modulator/schema/imports/bulk.py | 4 +- backend/gn_modulator/tests/utils/imports.py | 2 +- 4 files changed, 54 insertions(+), 7 deletions(-) diff --git a/backend/gn_modulator/imports/mixins/__init__.py b/backend/gn_modulator/imports/mixins/__init__.py index 4d55b4e4..c6fb9d44 100644 --- a/backend/gn_modulator/imports/mixins/__init__.py +++ b/backend/gn_modulator/imports/mixins/__init__.py @@ -1,4 +1,6 @@ +from pathlib import Path from geonature.utils.env import db +from gn_modulator.definition import DefinitionMethods from .check import ImportMixinCheck from .data import ImportMixinData @@ -22,8 +24,7 @@ class ImportMixin( ImportMixinUpdate, ImportMixinUtils, ): - def process_import_schema(self, _insert_data=False): - self._insert_data = _insert_data + def process_import_schema(self): self.init_import() if self.errors: @@ -75,3 +76,48 @@ def process_import_schema(self, _insert_data=False): ) return self + + @classmethod + def process_import_code(cls, import_code, data_dir_path): + + print(f"\nProcess scenaria d'import {import_code}") + + # get import definition + import_definitions = DefinitionMethods.get_definition("import", import_code) + import_definitions_file_path = DefinitionMethods.get_file_path("import", import_code) + + # for all definition items + imports = [] + last_impt = None + for import_definition in import_definitions["items"]: + # récupération du fichier de données + data_file_path = Path(data_dir_path) / import_definition["data"] if d.get("data") else Path(data_dir_path) + + # récupération du fichier pre-process, s'il est défini + pre_process_file_path = ( + Path(import_definitions_file_path).parent / import_definition["pre_process"] + if 
import_definition.get("pre_process") + else None + ) + + impt = cls(schema_code=import_definition["schema_code"], data_dir_path=data_file_path, pre_process_file_path=pre_process_file_path) + + # pour éviter d'avoir à recharger + if import_definition['keep_raw'] and last_impt: + impt.tables['data'] = last_impt.tables['data'] + + db.session.add(impt) + # flush ?? + + impt.process_import_schema() + import_infos = impt.import_infos() + if errors := import_infos["errors"]: + print(f"Il y a des erreurs dans l'import {import_definition['schema_code']}") + for error in errors: + print(f"- {error['code']} : {error['msg']}") + return imports + print(impt.pretty_infos()) + last_impt = impt + + print(f"\nImport {import_code} terminé\n") + return imports diff --git a/backend/gn_modulator/imports/models.py b/backend/gn_modulator/imports/models.py index 9387989d..50060f42 100644 --- a/backend/gn_modulator/imports/models.py +++ b/backend/gn_modulator/imports/models.py @@ -8,18 +8,17 @@ class TImport(db.Model, ImportMixin): __tablename__ = "t_imports" __table_args__ = {"schema": "gn_modulator"} - def __init__(self, schema_code=None, data_file_path=None, mapping_file_path=None): + def __init__(self, schema_code=None, data_file_path=None, mapping_file_path=None, _insert=False): self.schema_code = schema_code self.data_file_path = data_file_path and str(data_file_path) self.mapping_file_path = mapping_file_path and str(mapping_file_path) - + self._insert = _insert self.res = {} self.errors = [] self.sql = {} self.tables = {} _insert = False - _keep_raw = False id_import = db.Column(db.Integer, primary_key=True) diff --git a/backend/gn_modulator/schema/imports/bulk.py b/backend/gn_modulator/schema/imports/bulk.py index dc510360..e669e414 100644 --- a/backend/gn_modulator/schema/imports/bulk.py +++ b/backend/gn_modulator/schema/imports/bulk.py @@ -1,8 +1,10 @@ from pathlib import Path + +from geonature.utils.env import db + from gn_modulator.definition import DefinitionMethods from 
gn_modulator.utils.env import schema_import from gn_modulator.utils.cache import set_global_cache, get_global_cache -from geonature.utils.env import db class SchemaBulkImports: diff --git a/backend/gn_modulator/tests/utils/imports.py b/backend/gn_modulator/tests/utils/imports.py index 1e5fd0b5..587b3e67 100644 --- a/backend/gn_modulator/tests/utils/imports.py +++ b/backend/gn_modulator/tests/utils/imports.py @@ -10,7 +10,7 @@ def test_data_file( return with db.session.begin_nested(): - impt = TImport(schema_code, data_file_path, mapping_file_path) + impt = TImport(schema_code=schema_code, data_file_path=data_file_path, mapping_file_path=mapping_file_path, _insert=True) db.session.add(impt) assert impt.id_import is not None From 1ed81fa5dc52124ae446c8b23fee409125430a01 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 9 Mar 2023 15:14:41 +0100 Subject: [PATCH 012/142] TImport import code + test --- backend/gn_modulator/commands.py | 35 +++++---------- .../gn_modulator/imports/mixins/__init__.py | 44 +++++++++++-------- .../gn_modulator/imports/mixins/mapping.py | 9 ++-- backend/gn_modulator/imports/mixins/raw.py | 1 - backend/gn_modulator/imports/mixins/utils.py | 8 ++-- backend/gn_modulator/imports/models.py | 8 ++-- .../TRONCON_DE_ROUTE_NATIONALE_AUTOROUTE.csv | 10 +++++ .../import_test/import_code/linear_type.csv | 2 + .../tests/import_test/import_code/pf_V1.csv | 12 +++++ .../gn_modulator/tests/test_import_code.py | 24 ++++++++++ backend/gn_modulator/tests/utils/imports.py | 27 +++++++++++- .../m_sipaf/imports/m_sipaf.pf_V1.import.yml | 6 +-- .../m_sipaf/imports/ref_geo.route.import.yml | 4 +- .../m_sipaf/imports/scripts/ppi_actor.sql | 1 + .../m_sipaf/imports/scripts/ppi_actor_V1.sql | 1 + .../imports/scripts/ppi_groupe_route_na.sql | 7 ++- .../m_sipaf/imports/scripts/ppi_organism.sql | 1 + .../imports/scripts/ppi_organism_V1.sql | 8 ++-- .../m_sipaf/imports/scripts/ppi_pf_V1.sql | 5 ++- .../imports/scripts/ppi_srce_reservoir.sql | 4 +- 20 files changed, 
147 insertions(+), 70 deletions(-) create mode 100644 backend/gn_modulator/tests/import_test/import_code/TRONCON_DE_ROUTE_NATIONALE_AUTOROUTE.csv create mode 100644 backend/gn_modulator/tests/import_test/import_code/linear_type.csv create mode 100644 backend/gn_modulator/tests/import_test/import_code/pf_V1.csv create mode 100644 backend/gn_modulator/tests/test_import_code.py diff --git a/backend/gn_modulator/commands.py b/backend/gn_modulator/commands.py index f8c32d30..d1576752 100644 --- a/backend/gn_modulator/commands.py +++ b/backend/gn_modulator/commands.py @@ -7,9 +7,8 @@ import click from flask.cli import with_appcontext -from gn_modulator.schema import SchemaMethods -from gn_modulator.module import ModuleMethods -from gn_modulator.definition import DefinitionMethods +from gn_modulator import SchemaMethods, ModuleMethods, DefinitionMethods +from gn_modulator.imports.models import TImport from gn_modulator.utils.errors import errors_txt from gn_modulator import init_gn_modulator from geonature.utils.env import db @@ -127,9 +126,9 @@ def cmd_doc_schema(schema_code, force=False): @click.option("-s", "schema_code") @click.option("-d", "data_path", type=click.Path(exists=True)) @click.option( - "-p", - "--pre-process", - "pre_process_file_path", + "-m", + "--mapping", + "mapping_file_path", type=click.Path(exists=True), help="chemin vers le script sql de pre-process", ) @@ -139,18 +138,12 @@ def cmd_doc_schema(schema_code, force=False): "import_code", help="code de l'import de ficher", ) -@click.option("-k", "--keep-raw", is_flag=True, help="garde le csv en base") @click.option( "-v", "--verbose", type=int, default=1, help="1 : affiche les sortie, 2: les commandes sql " ) @with_appcontext def cmd_import_bulk_data( - schema_code=None, - import_code=None, - data_path=None, - pre_process_file_path=None, - verbose=1, - keep_raw=False, + schema_code=None, import_code=None, data_path=None, mapping_file_path=None, verbose=None ): """ importe des données pour un schema 
@@ -159,20 +152,14 @@ def cmd_import_bulk_data( init_gn_modulator() if schema_code and data_path: - Timport() - import_number = SchemaMethods.process_import_schema( - schema_code, - data_path, - pre_process_file_path=pre_process_file_path, - verbose=verbose, - keep_raw=keep_raw, - commit=True, + impt = TImport( + schema_code=schema_code, data_file_path=data_path, mapping_file_path=mapping_file_path ) + impt.process_import_schema() + print(impt.pretty_infos()) if import_code: - import_number = SchemaMethods.process_import_code( - import_code, data_path, verbose=verbose, commit=True - ) + TImport.process_import_code(import_code, data_path) return True diff --git a/backend/gn_modulator/imports/mixins/__init__.py b/backend/gn_modulator/imports/mixins/__init__.py index c6fb9d44..5d3a5a46 100644 --- a/backend/gn_modulator/imports/mixins/__init__.py +++ b/backend/gn_modulator/imports/mixins/__init__.py @@ -25,7 +25,6 @@ class ImportMixin( ImportMixinUtils, ): def process_import_schema(self): - self.init_import() if self.errors: return self @@ -78,9 +77,8 @@ def process_import_schema(self): return self @classmethod - def process_import_code(cls, import_code, data_dir_path): - - print(f"\nProcess scenaria d'import {import_code}") + def process_import_code(cls, import_code, data_dir_path, commit=True): + print(f"\nProcess scenario d'import {import_code}") # get import definition import_definitions = DefinitionMethods.get_definition("import", import_code) @@ -88,36 +86,46 @@ def process_import_code(cls, import_code, data_dir_path): # for all definition items imports = [] - last_impt = None for import_definition in import_definitions["items"]: # récupération du fichier de données - data_file_path = Path(data_dir_path) / import_definition["data"] if d.get("data") else Path(data_dir_path) + data_file_path = ( + Path(data_dir_path) / import_definition["data"] + if import_definition.get("data") + else Path(data_dir_path) + ) # récupération du fichier pre-process, s'il est défini - 
pre_process_file_path = ( - Path(import_definitions_file_path).parent / import_definition["pre_process"] - if import_definition.get("pre_process") + mapping_file_path = ( + Path(import_definitions_file_path).parent / import_definition["mapping"] + if import_definition.get("mapping") else None ) - impt = cls(schema_code=import_definition["schema_code"], data_dir_path=data_file_path, pre_process_file_path=pre_process_file_path) + impt = cls( + schema_code=import_definition["schema_code"], + data_file_path=data_file_path, + mapping_file_path=mapping_file_path, + ) - # pour éviter d'avoir à recharger - if import_definition['keep_raw'] and last_impt: - impt.tables['data'] = last_impt.tables['data'] + # pour éviter d'avoir à recharger les données + if import_definition.get("keep_raw") and len(imports): + last_import = imports[-1] + impt.tables["data"] = imports[-1].tables["data"] db.session.add(impt) # flush ?? impt.process_import_schema() - import_infos = impt.import_infos() - if errors := import_infos["errors"]: + imports.append(impt) + + if impt.errors: print(f"Il y a des erreurs dans l'import {import_definition['schema_code']}") - for error in errors: + for error in impt.errors: print(f"- {error['code']} : {error['msg']}") return imports print(impt.pretty_infos()) - last_impt = impt - print(f"\nImport {import_code} terminé\n") + if commit: + db.session.commit() + print(f"Import {import_code} terminé") return imports diff --git a/backend/gn_modulator/imports/mixins/mapping.py b/backend/gn_modulator/imports/mixins/mapping.py index 39c56ffe..97b36ff1 100644 --- a/backend/gn_modulator/imports/mixins/mapping.py +++ b/backend/gn_modulator/imports/mixins/mapping.py @@ -8,7 +8,6 @@ def process_mapping_view(self): """ Application de la vue de mappage à la la table d'import """ - if self.mapping_file_path is None: return @@ -49,8 +48,8 @@ def sql_mapping(self): "DROP ", "DELETE ", "UPDATE ", - "EXECUTE", - "TRUNCATE", + "EXECUTE ", + "TRUNCATE ", ]: if forbidden_word in 
mapping_select: forbidden_words.append(forbidden_word.strip()) @@ -58,13 +57,13 @@ def sql_mapping(self): if forbidden_words: self.add_error( code="ERR_IMPORT_MAPPING_FORBIDEN_WORD", - msg=f"Le fichier de preprocess {self.mapping_file_path} contient le ou les mots interdits {', '.join(forbidden_word)}", + msg=f"Le fichier de preprocess {self.mapping_file_path} contient le ou les mots interdits {', '.join(forbidden_words)}:\n {mapping_select}", ) if ":TABLE_DATA" not in mapping_select: self.add_error( code="ERR_IMPORT_MAPPING_MISSING_TABLE", - msg="La selection de mapping doit contenir 'FROM :table_data", + msg=f"La selection de mapping doit contenir 'FROM :table_data {mapping_select}", ) mapping_select = mapping_select.replace(":TABLE_DATA", from_table) diff --git a/backend/gn_modulator/imports/mixins/raw.py b/backend/gn_modulator/imports/mixins/raw.py index 4b4a5f1c..a8877182 100644 --- a/backend/gn_modulator/imports/mixins/raw.py +++ b/backend/gn_modulator/imports/mixins/raw.py @@ -11,7 +11,6 @@ def process_raw_view(self): from_table = self.tables.get("mapping") or self.tables["data"] dest_table = self.tables["raw"] = self.table_name("raw") - self.sql["raw_view"] = self.sql_raw_view(from_table, dest_table) try: diff --git a/backend/gn_modulator/imports/mixins/utils.py b/backend/gn_modulator/imports/mixins/utils.py index 8350dba1..64857511 100644 --- a/backend/gn_modulator/imports/mixins/utils.py +++ b/backend/gn_modulator/imports/mixins/utils.py @@ -9,13 +9,15 @@ def init_import(self): SchemaMethods.c_sql_exec_txt(f"CREATE SCHEMA IF NOT EXISTS {schema_import}") def pretty_infos(self): - print(self.res) txt = "" - txt += f"\n-- import csv file {Path(self.data_file_path).name}" - txt += f" {self.res.get('nb_data')} lignes" + if self.res.get("nb_data") is not None: + txt += f"\n-- import csv file {Path(self.data_file_path).name}" + txt += f" {self.res.get('nb_data')} lignes\n\n" txt += f" - {self.schema_code}\n" if self.res.get("nb_raw") != 
self.res.get("nb_process"): txt += f" raw : {self.res.get('nb_raw'):10d}\n" + if self.res.get("nb_process"): + txt += f" process : {self.res.get('nb_process'):10d}\n" if self.res.get("nb_insert"): txt += f" insert : {self.res['nb_insert']:10d}\n" if self.res.get("nb_update"): diff --git a/backend/gn_modulator/imports/models.py b/backend/gn_modulator/imports/models.py index 50060f42..cf7c2c33 100644 --- a/backend/gn_modulator/imports/models.py +++ b/backend/gn_modulator/imports/models.py @@ -8,17 +8,19 @@ class TImport(db.Model, ImportMixin): __tablename__ = "t_imports" __table_args__ = {"schema": "gn_modulator"} - def __init__(self, schema_code=None, data_file_path=None, mapping_file_path=None, _insert=False): + def __init__( + self, schema_code=None, data_file_path=None, mapping_file_path=None, _insert_data=False + ): self.schema_code = schema_code self.data_file_path = data_file_path and str(data_file_path) self.mapping_file_path = mapping_file_path and str(mapping_file_path) - self._insert = _insert + self._insert = _insert_data self.res = {} self.errors = [] self.sql = {} self.tables = {} - _insert = False + _insert_data = False id_import = db.Column(db.Integer, primary_key=True) diff --git a/backend/gn_modulator/tests/import_test/import_code/TRONCON_DE_ROUTE_NATIONALE_AUTOROUTE.csv b/backend/gn_modulator/tests/import_test/import_code/TRONCON_DE_ROUTE_NATIONALE_AUTOROUTE.csv new file mode 100644 index 00000000..a2506a96 --- /dev/null +++ b/backend/gn_modulator/tests/import_test/import_code/TRONCON_DE_ROUTE_NATIONALE_AUTOROUTE.csv @@ -0,0 +1,10 @@ 
+WKT;ID;NATURE;NOM_1_G;NOM_1_D;NOM_2_G;NOM_2_D;IMPORTANCE;FICTIF;POS_SOL;ETAT;DATE_CREAT;DATE_MAJ;DATE_APP;DATE_CONF;SOURCE;ID_SOURCE;PREC_PLANI;PREC_ALTI;NB_VOIES;LARGEUR;IT_VERT;PRIVE;SENS;CYCLABLE;BUS;URBAIN;VIT_MOY_VL;ACCES_VL;ACCES_PED;FERMETURE;NAT_RESTR;RESTR_H;RESTR_P;RESTR_PPE;RESTR_LAR;RESTR_LON;RESTR_MAT;BORNEDEB_G;BORNEDEB_D;BORNEFIN_G;BORNEFIN_D;INSEECOM_G;INSEECOM_D;TYP_ADRES;ALIAS_G;ALIAS_D;C_POSTAL_G;C_POSTAL_D;DATE_SERV;ID_VOIE_G;ID_VOIE_D;ID_RN;ID_ITI;NUMERO;NUM_EUROP;CL_ADMIN;GESTION;TOPONYME;ITI_CYCL;VOIE_VERTE;NATURE_ITI;NOM_ITI +"LINESTRING (653061.6 6867012.6,653058.8 6867014.4,653051.2 6867019.4,653043.3 6867026.7,653031.4 6867040.3)";TRONROUT0000006660176847;Bretelle;;;;;"1";Non;"1";En service;2006-05-22 13:18:11;2020-12-18 10:36:12;;;;;2.5;1.5;"1";3.0;Oui;Non;Sens direct;;;Non;"45";Libre;;;;;;;;;Non;;;;;"75118";"75118";;;;"75018";"75018";;;;ROUTNOMM0000000000208330/ROUTNOMM0000000002780793;;A30001;;Autoroute;DIR Île-de-France;Autoroute du Nord;;;; +"LINESTRING (651026.3 6857814.4,651007.6 6857824.3)";TRONROUT0000006660208122;Type autoroutier;;;;;"1";Non;"1";En service;2006-05-22 13:18:11;2019-07-05 21:05:08;;;;;2.5;1.5;"3";9.0;Oui;Non;Sens direct;;;Oui;"100";Libre;;;;;;;;;Non;;;;;"75114";"75114";;;;"75014";"75014";;;;ROUTNOMM0000000000208363/ROUTNOMM0000000004450783;;A30006A;;Autoroute;DIR Île-de-France;Autoroute du Soleil;;;; +"LINESTRING (645449.7 6870697.8,645391.5 6870680.3,645260.0 6870641.7,645243.2 6870636.5,645128.7 6870600.5,645098.4 6870589.7)";TRONROUT0000006660510795;Type autoroutier;;;;;"1";Non;"0";En service;2006-05-22 13:39:36;2019-07-05 21:05:08;;;;;2.5;1.5;"2";7.0;Oui;Non;Sens inverse;;;Oui;"100";Libre;;;;;;;;;Non;;;;;"92025";"92025";;;;"92700";"92700";;;;ROUTNOMM0000000004450785;;A30086;;Autoroute;DIR Île-de-France;;;;; +"LINESTRING (649396.2 6868616.0,649387.5 6868623.8,649377.8 6868634.4,649366.7 6868648.4,649360.4 6868656.4,649347.1 6868673.2)";TRONROUT0000006660514231;Bretelle;;;;;"2";Non;"0";En service;2006-05-22 
13:39:36;2021-01-26 16:59:51;;;;;2.5;1.5;"2";5.0;Oui;Non;Sens direct;;;Oui;"45";Libre;;;;;;;;;Non;;;;;"92004";"92004";;;;"92600";"92600";;;;ROUTNOMM0000000000552015;;N30315;;Nationale;DIR Île-de-France;;;;; +"LINESTRING (641257.4 6867324.0,641247.1 6867331.5,641229.6 6867344.7,641208.4 6867361.3,641195.9 6867371.1,641171.3 6867391.5,641122.0 6867437.3,641094.1 6867464.6,641086.3 6867468.5)";TRONROUT0000006660517964;Type autoroutier;;;;;"1";Non;"1";En service;2006-05-22 13:39:36;2018-05-23 18:49:45;;;;;20.0;2.5;"2";7.0;Oui;Non;Sens inverse;;;Non;"105";A péage;;;;;;;;;Non;;;;;"92050";"92050";;;;"92000";"92000";;;;ROUTNOMM0000000000552037;;A30014;;Autoroute;DIR Île-de-France;;;;; +"LINESTRING (639087.7 6865829.7,639094.5 6865837.4,639102.5 6865843.9,639112.6 6865851.0,639126.4 6865860.3,639142.0 6865869.2,639159.8 6865878.0,639180.4 6865886.7,639202.2 6865894.5,639222.5 6865900.6,639239.4 6865904.9,639254.6 6865908.8,639274.3 6865913.2,639318.8 6865923.0,639344.0 6865928.6)";TRONROUT0000006660522791;Type autoroutier;;;;;"1";Non;"0";En service;2006-05-22 13:39:36;2019-07-05 21:05:08;;;;;20.0;2.5;"4";14.0;Oui;Non;Sens direct;;;Non;"125";Libre;;;;;;;;;Non;;;;;"92063";"92063";;;;"92500";"92500";;;;ROUTNOMM0000000004450785;;A30086;;Autoroute;DIR Île-de-France;;;;; +"LINESTRING (647066.9 6864590.6,646982.3 6864630.8,646959.2 6864642.8,646952.7 6864648.9)";TRONROUT0000006660523771;Bretelle;;;;;"1";Non;"0";En service;2006-05-22 13:39:36;2019-07-05 21:05:08;;;;;20.0;2.5;"2";6.0;Oui;Non;Sens direct;;;Oui;"45";Libre;;;;;;;;;Non;;;;;"92051";"92051";;;;"92200";"92200";;;;ROUTNOMM0000000005210177;;N30013;;Nationale;DIR Île-de-France;;;;; +"LINESTRING (646622.9 6864812.2,646557.4 6864844.9)";TRONROUT0000006660523813;Route à 2 chaussées;AV CHARLES DE GAULLE;AV CHARLES DE GAULLE;;;"1";Non;"0";En service;2006-05-22 13:39:36;2019-07-05 21:05:08;;;;;20.0;2.5;"5";14.0;Oui;Non;Sens 
direct;;;Oui;"50";Libre;;;;;;;;;Non;;;;;"92051";"92051";Classique;;;"92200";"92200";;"920511436";"920511436";ROUTNOMM0000000005210177;;N30013;;Nationale;DIR Île-de-France;;;;; +"LINESTRING (643325.9 6861228.1,643299.0 6861223.8,643285.4 6861221.0,643267.7 6861216.6,643247.9 6861210.5,643225.1 6861201.9,643205.5 6861193.2,643187.0 6861183.7,643165.0 6861170.1)";TRONROUT0000006660528942;Type autoroutier;;;;;"1";Non;"1";En service;2006-05-22 13:39:36;2019-07-05 21:05:08;;;;;20.0;2.5;"2";7.0;Oui;Non;Sens direct;;;Oui;"100";Libre;;;;;;;;;Non;;;;;"92012";"92012";;;;"92100";"92100";;;;ROUTNOMM0000000000208343/ROUTNOMM0000000216939416;;A30013;;Autoroute;DIR Île-de-France;Autoroute de Normandie;;;; diff --git a/backend/gn_modulator/tests/import_test/import_code/linear_type.csv b/backend/gn_modulator/tests/import_test/import_code/linear_type.csv new file mode 100644 index 00000000..752a2650 --- /dev/null +++ b/backend/gn_modulator/tests/import_test/import_code/linear_type.csv @@ -0,0 +1,2 @@ +type_code;type_name;type_desc +RTE;Tronçons de route;Tronçons de route (Autoroute, Nationales, Départementales ??) 
diff --git a/backend/gn_modulator/tests/import_test/import_code/pf_V1.csv b/backend/gn_modulator/tests/import_test/import_code/pf_V1.csv new file mode 100644 index 00000000..1d3d8317 --- /dev/null +++ b/backend/gn_modulator/tests/import_test/import_code/pf_V1.csv @@ -0,0 +1,12 @@ +id_pf;uuid_pf;pi_ou_ps;geom_wtk;pk;pr;pr_abs;Y;X;id_pf_gest;nom_pf;cd_com;anRefCom;issu_reqa;date_creat;date_requa;date_supp;larg_ouvra;haut_ouvra;long_franc;diam;haut_disp;larg_disp;specifit;lb_typ_ouv;lb_materia;oh;oh_positio;oh_caract;oh_banqu;oh_tirant;id_cer;id_corr;nom_corr;id_resv;nom_resv;id_obst;nom_obst;comment;infra;concess;source +0;TESTAU0;PI;;0,000000;0;0;43,676265;4,028107;;iPloums buse 1;34244;03/01/-1;;03/01/-1;03/01/-1;03/01/-1;2,270000;2,350000;48,000000;0,000000;2,350000;2,270000;Ouvrage hydraulique non dedie;OH non dedie;;0;;;;0,000000;;;;;;;;34;A9;TESTASF;table ouvrages cerema +1;TESTAU1;PI;;0,000000;0;0;43,676254;4,028049;;Ploums buse 2;34244;03/01/-1;;03/01/-1;03/01/-1;03/01/-1;2,270000;2,350000;48,000000;0,000000;2,350000;2,270000;Ouvrage hydraulique non dedie;OH non dedie;;0;;;;0,000000;;;;;;;;34;A9;TESTASF;table ouvrages cerema +2;TESTAU2;PI;;0,000000;0;0;43,674725;4,022257;;Berange;34244;03/01/-1;;03/01/-1;03/01/-1;03/01/-1;31,200000;3,650000;53,000000;0,000000;3,650000;7,000000;Viaduc;OH non dedie;;0;;;;0,000000;;;;;;;;34;A9;TESTASF;table ouvrages cerema +3;TESTAU3;PI;;0,000000;0;0;43,671934;4,015574;;OH877;34022;03/01/-1;;03/01/-1;03/01/-1;03/01/-1;3,000000;1,500000;100,000000;0,000000;1,500000;3,000000;PIGF;PI specifique faune;;0;;;;0,000000;;;;;;;;34;A9-A709;TESTASF;table ouvrages cerema +4;TESTAU4;PI;;0,000000;0;0;43,668212;4,008340;;OH884;34022;03/01/-1;;03/01/-1;03/01/-1;03/01/-1;1,000000;0,800000;75,000000;0,000000;0,800000;1,000000;Ouvrage hydraulique non dedie;OH non dedie;;0;;;;0,000000;;;;;;;;34;A9-A709;TESTASF;table ouvrages cerema 
+5;TESTAU5;PI;;0,000000;0;0;43,662404;4,002029;;OH892;34022;03/01/-1;;03/01/-1;03/01/-1;03/01/-1;0,000000;0,000000;88,000000;1,000000;1,000000;0,700000;Ouvrage hydraulique non dedie;OH non dedie;;0;;;;0,000000;;;;;;;;34;A9-A709;TESTASF;table ouvrages cerema +6;TESTAU6;PI;;0,000000;0;0;43,651819;3,988755;;Cadoule_rg;34022;03/01/-1;;03/01/-1;03/01/-1;03/01/-1;20,000000;4,000000;95,000000;0,000000;3,500000;6,600000;Ouvrage hydraulique non dedie;OH non dedie;;0;;;;0,000000;;;;;;;;34;A9-A709;TESTASF;table ouvrages cerema +7;TESTAU7;PI;;0,000000;0;0;43,651819;3,988755;;Cadoule_rd;34022;03/01/-1;;03/01/-1;03/01/-1;03/01/-1;20,000000;4,000000;95,000000;0,000000;3,500000;6,600000;Ouvrage hydraulique non dedie;OH non dedie;;0;;;;0,000000;;;;;;;;34;A9-A709;TESTASF;table ouvrages cerema +8;TESTAU8;PI;;0,000000;0;0;43,624308;3,960569;;Salaison;34240;03/01/-1;;03/01/-1;03/01/-1;03/01/-1;32,000000;3,000000;73,000000;0,000000;2,700000;19,200000;Viaduc;OH non dedie;;0;;;;0,000000;;;;;;;;34;A9-A709;TESTASF;table ouvrages cerema +9;TESTAU9;PI;;0,000000;0;0;43,615854;3,950371;;Jasse;34154;03/01/-1;;03/01/-1;03/01/-1;03/01/-1;7,580000;5,280000;72,000000;0,000000;4,000000;5,000000;Ouvrage hydraulique non dedie;OH non dedie;;0;;;;0,000000;;;;;;;;34;DDA9;TESTASF;table ouvrages cerema +10;TESTAU10;PI;;0,000000;0;0;43,560064;3,827260;;PI1079;34270;03/01/-1;;03/01/-1;03/01/-1;03/01/-1;7,500000;4,500000;71,000000;0,000000;4,500000;7,500000;PI non dedie;PI non dedie;;0;;;;0,000000;;;;;;;;34;A9-A709;TESTASF;table ouvrages cerema diff --git a/backend/gn_modulator/tests/test_import_code.py b/backend/gn_modulator/tests/test_import_code.py new file mode 100644 index 00000000..c9aa03eb --- /dev/null +++ b/backend/gn_modulator/tests/test_import_code.py @@ -0,0 +1,24 @@ +import pytest # noqa +from gn_modulator.utils.env import import_test_dir +from .utils.imports import test_import_code + + +@pytest.mark.usefixtures("temporary_transaction", scope="session") +class TestImportCode: + def 
test_import_code_pfV1(self): + expected = [ + {"res.nb_process": 1, "res.nb_insert": 1}, + {"res.nb_process": 11, "res.nb_insert": 11}, + {"res.nb_process": 11, "res.nb_insert": 11}, + ] + + test_import_code("m_sipaf.pf_V1", import_test_dir / "import_code/", expected) + + def test_import_code_route(self): + expected = [ + {"res.nb_process": 1}, + {"res.nb_process": 7, "res.nb_insert": 7}, + {"res.nb_process": 9, "res.nb_insert": 9}, + ] + + test_import_code("ref_geo.route", import_test_dir / "import_code/", expected) diff --git a/backend/gn_modulator/tests/utils/imports.py b/backend/gn_modulator/tests/utils/imports.py index 587b3e67..d3ff9cac 100644 --- a/backend/gn_modulator/tests/utils/imports.py +++ b/backend/gn_modulator/tests/utils/imports.py @@ -10,7 +10,14 @@ def test_data_file( return with db.session.begin_nested(): - impt = TImport(schema_code=schema_code, data_file_path=data_file_path, mapping_file_path=mapping_file_path, _insert=True) + # ici _insert_data est à true pour intégrer les avec un insert + # et non un copy qui ne marche pas en test + impt = TImport( + schema_code=schema_code, + data_file_path=data_file_path, + mapping_file_path=mapping_file_path, + _insert_data=True, + ) db.session.add(impt) assert impt.id_import is not None @@ -32,3 +39,21 @@ def test_data_file( assert getAttr(import_infos, key) == expected_infos.get(key), txt_err return import_infos + + +def test_import_code(import_code=None, data_dir_path=None, expected_infos=[]): + if not (import_code and data_dir_path): + return + + imports = TImport.process_import_code(import_code, data_dir_path, commit=False) + assert len(imports) > 0 + + for impt in imports: + assert len(impt.errors) == 0 + + for index, expected_info in enumerate(expected_infos): + impt = imports[index] + import_infos = impt.as_dict() + for key in expected_info: + txt_err = f"schema_code: {impt.schema_code}, key: {key}, expected: {expected_info.get(key)}, import: {getAttr(import_infos, key)}" + assert 
getAttr(import_infos, key) == expected_info.get(key), txt_err diff --git a/config/modules/contrib/m_sipaf/imports/m_sipaf.pf_V1.import.yml b/config/modules/contrib/m_sipaf/imports/m_sipaf.pf_V1.import.yml index 65c7401a..02d37324 100644 --- a/config/modules/contrib/m_sipaf/imports/m_sipaf.pf_V1.import.yml +++ b/config/modules/contrib/m_sipaf/imports/m_sipaf.pf_V1.import.yml @@ -5,12 +5,12 @@ description: import données d'exemple de passage à faune pour SIPAF items: - schema_code: user.organisme data: pf_V1.csv - pre_process: scripts/ppi_organism_V1.sql + mapping: scripts/ppi_organism_V1.sql - schema_code: m_sipaf.pf data: pf_V1.csv - pre_process: scripts/ppi_pf_V1.sql + mapping: scripts/ppi_pf_V1.sql keep_raw: true - schema_code: m_sipaf.actor data: pf_V1.csv - pre_process: scripts/ppi_actor_V1.sql + mapping: scripts/ppi_actor_V1.sql keep_raw: true diff --git a/config/modules/contrib/m_sipaf/imports/ref_geo.route.import.yml b/config/modules/contrib/m_sipaf/imports/ref_geo.route.import.yml index 6d4648c6..9f98942a 100644 --- a/config/modules/contrib/m_sipaf/imports/ref_geo.route.import.yml +++ b/config/modules/contrib/m_sipaf/imports/ref_geo.route.import.yml @@ -7,9 +7,9 @@ items: data: linear_type.csv - schema_code: ref_geo.linear_group data: TRONCON_DE_ROUTE_NATIONALE_AUTOROUTE.csv - pre_process: scripts/ppi_groupe_route_na.sql + mapping: scripts/ppi_groupe_route_na.sql - schema_code: ref_geo.linear ref_geo.linear: ref_geo.linear data: TRONCON_DE_ROUTE_NATIONALE_AUTOROUTE.csv - pre_process: scripts/ppi_troncon_route_na.sql + mapping: scripts/ppi_troncon_route_na.sql keep_raw: true diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor.sql index ca0a2dc4..4d41535f 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor.sql @@ -1,4 +1,5 @@ SELECT + id_import, uuid_pf AS id_passage_faune, CASE WHEN 
type_role_org = 'Concessionaire' THEN 'CON' diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor_V1.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor_V1.sql index 713fed0c..a1c370cd 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor_V1.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_actor_V1.sql @@ -1,4 +1,5 @@ SELECT + id_import, uuid_pf AS id_passage_faune, 'CON' AS id_nomenclature_type_actor, concess AS id_organism, diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_groupe_route_na.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_groupe_route_na.sql index 2a25e3fc..e6ea8a5a 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_groupe_route_na.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_groupe_route_na.sql @@ -1,8 +1,7 @@ -DROP VIEW IF EXISTS :pre_processed_import_view CASCADE; -CREATE VIEW :pre_processed_import_view AS -SELECT DISTINCT ON(id_import) +SELECT + MIN(id_import) AS id_import, 'RTE' AS id_type, numero AS code, cl_admin || ' ' || numero AS name FROM :table_data - ORDER BY id_import \ No newline at end of file + GROUP BY cl_admin, numero \ No newline at end of file diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism.sql index 57d5ce2f..2ae10c52 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism.sql @@ -1,4 +1,5 @@ SELECT DISTINCT ON (id_import) + id_import, nom_organism AS nom_organisme, 'SIPAF' AS adresse_organisme WHERE nom_organism IS NOT NULL AND nom_organism != '' diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism_V1.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism_V1.sql index e0423a4b..66032ce4 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism_V1.sql +++ 
b/config/modules/contrib/m_sipaf/imports/scripts/ppi_organism_V1.sql @@ -1,6 +1,8 @@ -SELECT DISTINCT ON (id_import) +SELECT + MIN(id_import) AS id_import, concess AS nom_organisme, 'SIPAF' AS adresse_organisme - WHERE concess IS NOT NULL AND concess != '' FROM :table_data - ORDER BY id_import, concess + WHERE concess IS NOT NULL AND concess != '' + GROUP BY concess + ORDER BY concess diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_pf_V1.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_pf_V1.sql index 2d373ed4..cf0ae6a4 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_pf_V1.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_pf_V1.sql @@ -31,5 +31,6 @@ select replace(larg_disp, ',', '.') :: NUMERIC AS largeur_dispo_faune, replace(haut_disp, ',', '.') :: NUMERIC AS hauteur_dispo_faune, source -ORDER BY - tis.uuid_pf; \ No newline at end of file + FROM :table_data + ORDER BY + uuid_pf; \ No newline at end of file diff --git a/config/modules/contrib/m_sipaf/imports/scripts/ppi_srce_reservoir.sql b/config/modules/contrib/m_sipaf/imports/scripts/ppi_srce_reservoir.sql index 20f13523..b2066702 100644 --- a/config/modules/contrib/m_sipaf/imports/scripts/ppi_srce_reservoir.sql +++ b/config/modules/contrib/m_sipaf/imports/scripts/ppi_srce_reservoir.sql @@ -1,4 +1,5 @@ SELECT + id_import 'RESV_SRCE' AS id_type, id_resv AS area_code, CASE @@ -7,4 +8,5 @@ SELECT END AS area_name, wkt AS geom, TRUE AS enable, - 'https://inpn.mnhn.fr/docs/TVB/N_SRCE_RESERVOIR_S_000.zip' AS source \ No newline at end of file + 'https://inpn.mnhn.fr/docs/TVB/N_SRCE_RESERVOIR_S_000.zip' AS source + FROM :table_data \ No newline at end of file From 7dd6c2fee22a536da7d2136c2f50fbc5172b2d38 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 9 Mar 2023 15:23:09 +0100 Subject: [PATCH 013/142] remove schema bulk import --- .../gn_modulator/schema/imports/__init__.py | 16 -- backend/gn_modulator/schema/imports/api.py | 0 
backend/gn_modulator/schema/imports/bulk.py | 187 ---------------- backend/gn_modulator/schema/imports/data.py | 123 ----------- backend/gn_modulator/schema/imports/insert.py | 56 ----- .../gn_modulator/schema/imports/preprocess.py | 188 ---------------- .../gn_modulator/schema/imports/process.py | 202 ------------------ .../gn_modulator/schema/imports/relation.py | 82 ------- backend/gn_modulator/schema/imports/update.py | 103 --------- backend/gn_modulator/schema/imports/utils.py | 138 ------------ 10 files changed, 1095 deletions(-) delete mode 100644 backend/gn_modulator/schema/imports/api.py delete mode 100644 backend/gn_modulator/schema/imports/bulk.py delete mode 100644 backend/gn_modulator/schema/imports/data.py delete mode 100644 backend/gn_modulator/schema/imports/insert.py delete mode 100644 backend/gn_modulator/schema/imports/preprocess.py delete mode 100644 backend/gn_modulator/schema/imports/process.py delete mode 100644 backend/gn_modulator/schema/imports/relation.py delete mode 100644 backend/gn_modulator/schema/imports/update.py delete mode 100644 backend/gn_modulator/schema/imports/utils.py diff --git a/backend/gn_modulator/schema/imports/__init__.py b/backend/gn_modulator/schema/imports/__init__.py index da313661..2a4c99f6 100644 --- a/backend/gn_modulator/schema/imports/__init__.py +++ b/backend/gn_modulator/schema/imports/__init__.py @@ -1,24 +1,8 @@ from .base import SchemaBaseImports -from .bulk import SchemaBulkImports -from .data import SchemaDataImports -from .insert import SchemaInsertImports -from .update import SchemaUpdateImports -from .relation import SchemaRelationImports -from .preprocess import SchemaPreProcessImports -from .process import SchemaProcessImports -from .utils import SchemaUtilsImports class SchemaImports( SchemaBaseImports, - SchemaBulkImports, - SchemaDataImports, - SchemaInsertImports, - SchemaUpdateImports, - SchemaPreProcessImports, - SchemaProcessImports, - SchemaRelationImports, - SchemaUtilsImports, ): """ 
methodes d'import de données diff --git a/backend/gn_modulator/schema/imports/api.py b/backend/gn_modulator/schema/imports/api.py deleted file mode 100644 index e69de29b..00000000 diff --git a/backend/gn_modulator/schema/imports/bulk.py b/backend/gn_modulator/schema/imports/bulk.py deleted file mode 100644 index e669e414..00000000 --- a/backend/gn_modulator/schema/imports/bulk.py +++ /dev/null @@ -1,187 +0,0 @@ -from pathlib import Path - -from geonature.utils.env import db - -from gn_modulator.definition import DefinitionMethods -from gn_modulator.utils.env import schema_import -from gn_modulator.utils.cache import set_global_cache, get_global_cache - - -class SchemaBulkImports: - @classmethod - def process_import_code( - cls, import_code, data_path, import_number=None, verbose=0, insert=False, commit=False - ): - """ - import_code est la référence du scenario d'import - """ - - if not import_number: - import_number = cls.generate_import_number() - - print(f"\nProcess import {import_code} {import_code}") - - # get import definition - import_definition = DefinitionMethods.get_definition("import", import_code) - import_definition_file_path = DefinitionMethods.get_file_path("import", import_code) - - # for all definition items - for d in import_definition["items"]: - # récupération du fichier de données - data_file_path = Path(data_path) / d["data"] if d.get("data") else Path(data_path) - - # récupération du fichier pre-process, s'il est défini - pre_process_file_path = ( - Path(import_definition_file_path).parent / d["pre_process"] - if d.get("pre_process") - else None - ) - - # process import schema - cls.process_import_schema( - d["schema_code"], - data_file_path, - import_number=import_number, - pre_process_file_path=pre_process_file_path, - keep_raw=d.get("keep_raw"), - verbose=verbose, - insert=insert, - commit=commit, - ) - - import_infos = cls.import_get_infos(import_number, d["schema_code"]) - - if import_infos["errors"]: - print(f"Il y a des erreurs dans 
l'import {d['schema_code']}") - for error in errors: - print(f"- {error['code']} : {error['msg']}") - return import_number - - print(f"\nImport {import_code} terminé\n") - return import_number - - @classmethod - def process_import_schema( - cls, - schema_code, - data_file_path, - import_number=None, - pre_process_file_path=None, - verbose=0, - insert=False, - keep_raw=False, - commit=False, - ): - """ - import de données - - todo tout types de données - """ - - # 0) init - # suppression des table d'import précedentes ? - # si keep_raw on garde la table qui contient les données csv - - if not import_number: - import_number = cls.generate_import_number() - - cls.import_init(import_number, schema_code, data_file_path, pre_process_file_path) - cls.import_clean_tables(import_number, schema_code, keep_raw) - - # 1) csv -> table temporaire - # - cls.import_process_data( - import_number, - schema_code, - data_file_path, - cls.import_get_infos(import_number, schema_code, "tables.import"), - insert=insert, - keep_raw=keep_raw, - ) - if verbose and not keep_raw: - print(f"\n-- import csv file {data_file_path.name}") - print(f" {cls.import_get_infos(import_number, schema_code, 'nb_data')} lignes") - - if cls.import_get_infos(import_number, schema_code, "errors"): - return import_number - # 2.1) pre-process - # - cls.import_preprocess( - import_number, - schema_code, - cls.import_get_infos(import_number, schema_code, "tables.import"), - cls.import_get_infos(import_number, schema_code, "tables.preprocess"), - pre_process_file_path, - ) - if cls.import_get_infos(import_number, schema_code, "errors"): - return import_number - - # 2.2) table import (ou preprocess) -> vue brute - cls.import_raw( - import_number, - schema_code, - cls.import_get_infos(import_number, schema_code, "tables.preprocess"), - cls.import_get_infos(import_number, schema_code, "tables.raw"), - ) - if cls.import_get_infos(import_number, schema_code, "errors"): - return import_number - - # 3) vue brute -> vue 
prête pour l'import avec les clés étrangéres et primaires résolues - cls.import_process( - import_number, - schema_code, - cls.import_get_infos(import_number, schema_code, "tables.raw"), - cls.import_get_infos(import_number, schema_code, "tables.process"), - ) - if cls.import_get_infos(import_number, schema_code, "errors"): - return import_number - - # 4) INSERT / UPDATE - # 4-1) INSERT - cls.import_insert( - import_number, - schema_code, - cls.import_get_infos(import_number, schema_code, "tables.process"), - ) - if cls.import_get_infos(import_number, schema_code, "errors"): - return import_number - - # 4-2) UPDATE - cls.import_update( - import_number, - schema_code, - cls.import_get_infos(import_number, schema_code, "tables.process"), - ) - if cls.import_get_infos(import_number, schema_code, "errors"): - return import_number - ## HERE !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - # 4-2) UNCHANGED - - nb_unchanged = ( - cls.import_get_infos(import_number, schema_code, "nb_process") - - cls.import_get_infos(import_number, schema_code, "nb_insert") - - cls.import_get_infos(import_number, schema_code, "nb_update") - ) - cls.import_set_infos(import_number, schema_code, "nb_unchanged", nb_unchanged) - txt_pretty_info = cls.import_pretty_infos(import_number, schema_code) - - verbose and print(f"\n{txt_pretty_info}") - - # 5) process relations ??? - # ?? 
au moins n-n - cls.import_relations( - import_number, - schema_code, - cls.import_get_infos(import_number, schema_code, "tables.preprocess", data_file_path), - data_file_path, - verbose, - ) - - if cls.import_get_infos(import_number, schema_code, "errors"): - return import_number - - if commit: - db.session.commit() - - return import_number diff --git a/backend/gn_modulator/schema/imports/data.py b/backend/gn_modulator/schema/imports/data.py deleted file mode 100644 index 7d19588c..00000000 --- a/backend/gn_modulator/schema/imports/data.py +++ /dev/null @@ -1,123 +0,0 @@ -from geonature.utils.env import db - - -class SchemaDataImports: - @classmethod - def import_process_data( - cls, import_number, schema_code, data_file_path, dest_table, insert=False, keep_raw=False - ): - """ - cree une vue a partir d'un fichier csv pour pouvoir traiter les données ensuite - - le fichier csv - separateur : ';' - créé - - une table temporaire pour avoir les données du csv en varchar - - une vue pour passer les champs en '' à NULL - """ - - # cas où la table d'import à été générée lors d'un import d'un import précédent - - if not keep_raw: - cls.import_csv_file( - import_number, schema_code, data_file_path, dest_table, insert=insert - ) - - cls.count_and_check_table(import_number, schema_code, dest_table, "data") - - return - - @classmethod - def import_csv_file(cls, import_number, schema_code, data_file_path, dest_table, insert=False): - if not data_file_path.exists(): - cls.import_add_error( - import_number, - schema_code, - code="ERR_IMPORT_DATA_FILE_NOT_FOUND", - msg=f"Le fichier d'import {data_file_path} n'existe pas", - ) - return - - with open(data_file_path, "r") as f: - # on récupère la premiere ligne du csv pour avoir le nom des colonnes - first_line = f.readline() - - delimiter = ";" if ";" in first_line else "," if "," in first_line else None - - import_table_columns = first_line.replace("\n", "").split(delimiter) - - cls.import_set_infos(import_number, schema_code, 
"delimiter", delimiter) - cls.import_set_infos( - import_number, schema_code, "import_table_columns", import_table_columns - ) - - if delimiter is None: - cls.import_add_error( - import_number, - schema_code, - code="ERR_IMPORT_CSV_FILE_DELIMITER_NOT_FOUND", - msg=f"Pas de séparateur trouvé pour le fichier csv {data_file_path}", - ) - return - # creation de la table temporaire - import_txt_create_import_table = cls.import_txt_create_import_table( - import_number, schema_code, dest_table, import_table_columns - ) - cls.import_set_infos( - import_number, - schema_code, - "sql.import", - import_txt_create_import_table, - ) - cls.c_sql_exec_txt(import_txt_create_import_table) - - # on copie les données dans la table temporaire - - # pour faire marcher les tests pytest on passe par un insert - # TODO faire marche copy_expert avec pytest - # manière de récupérer cursor ? - if insert: - cls.import_csv_insert( - import_number, schema_code, f, dest_table, import_table_columns, delimiter - ) - else: - columns_fields = ", ".join(import_table_columns) - txt_copy_from_csv = f"""COPY {dest_table}({columns_fields}) FROM STDIN DELIMITER '{delimiter}' QUOTE '"' CSV""" - cls.import_set_infos(import_number, schema_code, "sql.csv_copy", txt_copy_from_csv) - cursor = db.session.connection().connection.cursor() - cursor.copy_expert(sql=txt_copy_from_csv, file=f) - - @classmethod - def import_csv_insert( - cls, import_number, schema_code, f, dest_table, table_columns, delimiter - ): - sql_columns_fields = ", ".join(table_columns) - - values = "" - for line in f: - data = "', '".join((line.replace('"', "").replace("\n", "").split(delimiter))) - values += f"('{data}')," - if not values: - return - - values = values[:-1] - txt_insert_csv = f"INSERT INTO {dest_table} ({sql_columns_fields}) VALUES {values}" - cls.import_set_infos(import_number, schema_code, "sql.csv_insert", txt_insert_csv) - cls.c_sql_exec_txt(txt_insert_csv) - - @classmethod - def import_txt_create_import_table(cls, 
import_number, schema_code, dest_table, table_columns): - """ - requete de creation d'une table temporaire pour import csv - tout les champs sont en varchar - """ - - columns_sql = "\n ".join(map(lambda x: f"{x} VARCHAR,", table_columns)) - pk_constraint_name = f"pk_{'_'.join(dest_table.split('.'))}_id_import" - - txt = f"""CREATE TABLE IF NOT EXISTS {dest_table} ( - id_import SERIAL NOT NULL, - {columns_sql} - CONSTRAINT {pk_constraint_name} PRIMARY KEY (id_import) -);""" - return txt diff --git a/backend/gn_modulator/schema/imports/insert.py b/backend/gn_modulator/schema/imports/insert.py deleted file mode 100644 index 49ce1870..00000000 --- a/backend/gn_modulator/schema/imports/insert.py +++ /dev/null @@ -1,56 +0,0 @@ -class SchemaInsertImports: - @classmethod - def import_insert(cls, import_number, schema_code, from_table): - sm = cls(schema_code) - nb_insert = cls.c_sql_exec_txt( - f"SELECT COUNT(*) FROM {from_table} WHERE {sm.pk_field_name()} IS NULL" - ).scalar() - - cls.import_set_infos(import_number, schema_code, "nb_insert", nb_insert) - - if not nb_insert: - return - - try: - import_txt_insert = cls.import_txt_insert(schema_code, from_table) - cls.import_set_infos(import_number, schema_code, "sql.insert", nb_insert) - cls.c_sql_exec_txt(import_txt_insert) - except Exception as e: - if isinstance(e, AttributeError): - raise e - cls.import_add_error( - import_number, - schema_code, - code="ERR_IMPORT_INSERT", - msg=f"Erreur durant l'insert de {from_table} vers {schema_code} : {str(e)}", - ) - - @classmethod - def import_txt_insert(cls, schema_code, from_table, dest_table=None, keys=None): - sm = cls(schema_code) - - table_name = dest_table or sm.sql_schema_dot_table() - - columns_select = filter( - lambda x: ( - x in keys - if keys is not None - else not (sm.is_column(x) and sm.property(x).get("primary_key")) - ), - cls.get_table_columns(from_table), - ) - - v_column_select_keys = map(lambda x: x, columns_select) - - txt_columns_select_keys = ",\n 
".join(v_column_select_keys) - - txt_where = f" WHERE {sm.pk_field_name()} IS NULL" if keys is None else "" - - return f""" -INSERT INTO {table_name} ( - {txt_columns_select_keys} -) -SELECT - {txt_columns_select_keys} -FROM {from_table}{txt_where}; -""" diff --git a/backend/gn_modulator/schema/imports/preprocess.py b/backend/gn_modulator/schema/imports/preprocess.py deleted file mode 100644 index d566079f..00000000 --- a/backend/gn_modulator/schema/imports/preprocess.py +++ /dev/null @@ -1,188 +0,0 @@ -import re - - -class SchemaPreProcessImports: - @classmethod - def import_raw(cls, import_number, schema_code, from_table, dest_table): - """ - creation de la vue d'import à partir de la table d'import - correction des null et association du bon typage - """ - import_txt_create_raw_view = cls.import_txt_create_raw_view( - import_number, schema_code, from_table, dest_table - ) - - cls.import_set_infos(import_number, schema_code, "sql.raw", import_txt_create_raw_view) - cls.c_sql_exec_txt(import_txt_create_raw_view) - - cls.count_and_check_table(import_number, schema_code, dest_table, "raw") - - @classmethod - def import_preprocess( - cls, import_number, schema_code, from_table, dest_table, pre_process_file_path - ): - """ - Application de la vue de mappage à la la table d'import - """ - - if pre_process_file_path is None: - return - - if not pre_process_file_path.exists(): - cls.import_add_error( - import_number, - schema_code, - code="ERR_IMPORT_PRE_PROCESS_FILE_MISSING", - msg=f"Le fichier de preprocess {pre_process_file_path} n'existe pas", - ) - return - - with open(pre_process_file_path, "r") as f: - preprocess_select = f.read().upper().replace(";", "").replace("%", "%%") - - forbidden_words = [] - for forbidden_word in [ - "INSERT ", - "DROP ", - "DELETE ", - "UPDATE ", - "EXECUTE", - "TRUNCATE", - ]: - if forbidden_word in preprocess_select: - forbidden_words.append(forbidden_word.strip()) - - if forbidden_words: - cls.import_add_error( - import_number, - 
schema_code, - code="ERR_IMPORT_PRE_PROCESS_FORBIDEN_WORD", - msg=f"Le fichier de preprocess {pre_process_file_path} contient le ou les mots interdits {', '.join(forbidden_word)}", - ) - return - - for word in ["WHERE", "ORDER BY", "LIMIT"]: - if word in preprocess_select: - preprocess_select = preprocess_select.replace( - f"{word}", "\nFROM {from_table}\n{word}" - ) - break - - if "FROM" not in preprocess_select: - preprocess_select += f"\nFROM {from_table}" - - txt_pre_process_raw_import_view = f""" -DROP VIEW IF EXISTS {dest_table}; -CREATE VIEW {dest_table} AS -{preprocess_select} -; - """ - cls.import_set_infos( - import_number, schema_code, "sql.preprocess", txt_pre_process_raw_import_view - ) - try: - cls.c_sql_exec_txt(txt_pre_process_raw_import_view) - except Exception as e: - cls.import_add_error( - import_number, - schema_code, - code="ERR_IMPORT_PRE_PROCESS_CREATE_VIEW", - msg=f"La vue de preprocess n'a pas être crée : {str(e)}", - ) - return - cls.count_and_check_table(import_number, schema_code, dest_table, "preprocess") - - @classmethod - def import_txt_create_raw_view( - cls, - import_number, - schema_code, - from_table, - dest_table, - keys=None, - key_unnest=None, - limit=None, - ): - """ - - temporary_table : table ou sont stockées les données d'un csv - - raw_import_view : vue qui corrige les '' en NULL - Creation d'une vue d'import brute à partir d'une table accueillant des données d'un fichier csv - on passe les champs valant '' à NULL - """ - - sm = cls(schema_code) - - from_table_columns = cls.get_table_columns(from_table) - - columns = filter( - lambda x: ( - x in keys - if keys is not None - else not (sm.is_column(x) and sm.property(x).get("primary_key")) - ), - from_table_columns, - ) - - # on preprocess ttes les colonnes - v_txt_pre_process_columns = list( - map( - lambda x: cls(schema_code).pre_process_raw_import_columns( - x, key_unnest=key_unnest - ), - from_table_columns, - ) - ) - - v_txt_columns = list(map(lambda x: 
cls(schema_code).process_raw_import_column(x), columns)) - - txt_primary_column = ( - f"""CONCAT({", '|', ".join(sm.attr('meta.unique'))}) AS {sm.pk_field_name()}""" - ) - v_txt_columns.insert(0, txt_primary_column) - - txt_columns = ",\n ".join(v_txt_columns) - txt_pre_process_columns = ",\n ".join(v_txt_pre_process_columns) - txt_limit = f"LIMIT {limit}" if limit else "" - - return f"""DROP VIEW IF EXISTS {dest_table} CASCADE; -CREATE VIEW {dest_table} AS -WITH pre_process AS ( -SELECT - {txt_pre_process_columns} -FROM {from_table} -{txt_limit} -) -SELECT - {txt_columns} -FROM pre_process; -""" - - def pre_process_raw_import_columns(self, key, key_unnest=None): - """ """ - - if key == "id_import": - return key - - if key_unnest == key: - return f"UNNEST(STRING_TO_ARRAY({key}, ',')) AS {key}" - - if not self.has_property(key): - return f"{key}" - - property = self.property(key) - if property.get("foreign_key"): - return key - - if property["type"] == "number": - return f"CASE WHEN {key}::TEXT = '' THEN NULL ELSE {key}::FLOAT END AS {key}" - - if property["type"] == "date": - return f"CASE WHEN {key}::TEXT = '' THEN NULL ELSE {key}::DATE END AS {key}" - - if property["type"] == "datetime": - return f"CASE WHEN {key}::TEXT = '' THEN NULL ELSE {key}::TIMESTAMP END AS {key}" - - if property["type"] == "integer" and "schema_code" not in property: - return f"CASE WHEN {key}::TEXT = '' THEN NULL ELSE {key}::INTEGER END AS {key}" - - return f"CASE WHEN {key}::TEXT = '' THEN NULL ELSE {key} END AS {key}" diff --git a/backend/gn_modulator/schema/imports/process.py b/backend/gn_modulator/schema/imports/process.py deleted file mode 100644 index 83fa3b4b..00000000 --- a/backend/gn_modulator/schema/imports/process.py +++ /dev/null @@ -1,202 +0,0 @@ -class SchemaProcessImports: - @classmethod - def import_process(cls, import_number, schema_code, from_table, dest_table, keys=None): - import_txt_processed_view = cls.import_txt_processed_view( - import_number, schema_code, 
from_table, dest_table, keys - ) - - cls.import_set_infos(import_number, schema_code, "sql.process", import_txt_processed_view) - - cls.c_sql_exec_txt(import_txt_processed_view) - - cls.count_and_check_table(import_number, schema_code, dest_table, "process") - - @classmethod - def import_txt_processed_view( - cls, import_number, schema_code, from_table, dest_table, keys=None - ): - """ - requete pour créer une vue qui résoud les clé - """ - - sm = cls(schema_code) - - v_columns = [] - v_joins = [] - - from_table_columns = cls.get_table_columns(from_table) - - columns = list( - filter( - lambda x: ( - x in keys - if keys is not None - else sm.is_column(x) and not sm.property(x).get("primary_key") - ), - from_table_columns, - ) - ) - - solved_keys = {} - - for index, key in enumerate(columns): - txt_column, v_join = sm.process_column_import_view(index, key) - if txt_column: - # TODO n-n ici ???? - if sm.has_property(key) and sm.property(key).get("relation_type") == "n-n": - rel = cls(sm.property(key)["schema_code"]) - v_columns.append(f"{txt_column.split('.')[0]}.{rel.pk_field_name()}") - else: - v_columns.append(f"{txt_column} AS {key}") - solved_keys[key] = txt_column - v_joins += v_join - - txt_pk_column, v_join = sm.resolve_key( - sm.pk_field_name(), alias_join_base="j_pk", solved_keys=solved_keys - ) - v_columns.append(txt_pk_column) - v_joins += v_join - - txt_columns = ",\n ".join(v_columns) - txt_joins = "\n".join(v_joins) - - return f"""DROP VIEW IF EXISTS {dest_table} CASCADE; -CREATE VIEW {dest_table} AS -SELECT - {txt_columns} -FROM {from_table} t -{txt_joins}; -""" - - def process_raw_import_column(self, key): - """ """ - if not self.has_property(key): - return f"{key}" - - property = self.property(key) - - # pour les nomenclature (on rajoute le type) - if nomenclature_type := property.get("nomenclature_type"): - return f"""CASE - WHEN {key} IS NOT NULL AND {key} NOT LIKE '%%|%%' THEN CONCAT('{nomenclature_type}|', {key}) - ELSE {key} - END AS {key}""" - 
- if property["type"] == "boolean": - return f"""CASE - WHEN {key}::text IN ('t', 'true') THEN TRUE - WHEN {key}::text IN ('f', 'false') THEN FALSE - ELSE NULL - END AS {key}""" - - if property["type"] == "geometry": - geometry_type = "ST_MULTI" if property["geometry_type"] == "multipolygon" else "" - return f"""{geometry_type}( - ST_SETSRID( - ST_FORCE2D( - ST_GEOMFROMEWKT({key}) - ), {self.property(key).get('srid')} - ) - ) - AS {key}""" - - return f"{key}" - - def resolve_key(self, key, index=None, alias_main="t", alias_join_base="j", solved_keys={}): - """ - compliqué - crée le txt pour - le champs de la colonne qui doit contenir la clé - la ou les jointures nécessaire pour résoudre la clé - """ - - alias_join = alias_join_base if index is None else f"{alias_join_base}_{index}" - - txt_column = f"{alias_join}.{self.pk_field_name()}" - - unique = self.attr("meta.unique") - v_join = [] - - # resolution des cles si besoins - - # couf pour permttre de faire les liens entre les join quand il y en a plusieurs - link_joins = {} - for index_unique, k_unique in enumerate(unique): - var_key = self.var_key(key, k_unique, index_unique, link_joins, alias_main) - if self.property(k_unique).get("foreign_key"): - if k_unique in solved_keys: - link_joins[k_unique] = solved_keys[k_unique] - else: - rel = self.cls(self.property(k_unique)["schema_code"]) - txt_column_join, v_join_inter = rel.resolve_key( - var_key, - index=index_unique, - alias_main=alias_join, - alias_join_base=alias_join, - ) - v_join += v_join_inter - - link_joins[k_unique] = f"{alias_join}_{index_unique}.{rel.pk_field_name()}" - - # creation des joins avec les conditions - v_join_on = [] - - for index_unique, k_unique in enumerate(unique): - var_key = self.var_key(key, k_unique, index_unique, link_joins, alias_main) - # !!!(SELECT (NULL = NULL) => NULL) - cast = "::TEXT" # if var_type != main_type else '' - txt_join_on = ( - f"{alias_join}.{k_unique}{cast} = {var_key}{cast}" - if not self.is_nullable(k_unique) 
or self.is_required(k_unique) - else f"({alias_join}.{k_unique}{cast} = {var_key}{cast} OR ({alias_join}.{k_unique} IS NULL AND {var_key} IS NULL))" - # else f"({var_key} IS NOT NULL) AND ({alias_join}.{k_unique} = {var_key})" - ) - v_join_on.append(txt_join_on) - - txt_join_on = "\n AND ".join(v_join_on) - txt_join = f"LEFT JOIN {self.sql_schema_dot_table()} {alias_join} ON\n {txt_join_on}" - - v_join.append(txt_join) - - return txt_column, v_join - - def var_key(self, key, k_unique, index_unique, link_joins, alias_main): - """ - TODO à clarifier - """ - - if key is None: - return f"{alias_main}.{k_unique}" - - if link_joins.get(k_unique): - return link_joins[k_unique] - - if "." in key: - return key - - if len(self.attr("meta.unique", [])) <= 1: - return f"{alias_main}.{key}" - - return f"SPLIT_PART({alias_main}.{key}, '|', { index_unique + 1})" - - def process_column_import_view(self, index, key): - """ - process column for processed view - """ - if not self.has_property(key): - return key, [] - - property = self.property(key) - - if property.get("foreign_key"): - rel = self.cls(property["schema_code"]) - return rel.resolve_key(key, index) - - if property.get("relation_type") == "n-n": - rel = self.cls(property["schema_code"]) - return rel.resolve_key(key, index) - - # txt_column, v_join = rel.resolve_key(key, index) - # return f"{txt_column.split('.')[0]}.{rel.pk_field_name()}", v_join - - return f"t.{key}", [] diff --git a/backend/gn_modulator/schema/imports/relation.py b/backend/gn_modulator/schema/imports/relation.py deleted file mode 100644 index aa79f20b..00000000 --- a/backend/gn_modulator/schema/imports/relation.py +++ /dev/null @@ -1,82 +0,0 @@ -class SchemaRelationImports: - @classmethod - def import_relations( - cls, import_number, schema_code, from_table, data_file_path, verbose=None - ): - sm = cls(schema_code) - - columns = cls.get_table_columns(from_table) - - for index, key in enumerate(columns): - if not sm.is_relationship(key): - continue - 
property = sm.property(key) - - # on commence par les n-n - if property.get("relation_type") in ("n-n"): - print(f" process relation n-n {key}") - cls.import_relation_n_n(import_number, schema_code, from_table, key, verbose) - - @classmethod - def import_relation_n_n(cls, import_number, schema_code, from_table, key, verbose=None): - sm = cls(schema_code) - - property = sm.property(key) - cor_table = property["schema_dot_table"] - rel = cls(property["schema_code"]) - - raw_delete_view = cls.import_table_name(import_number, schema_code, "raw_delete", key) - process_delete_view = cls.import_table_name( - import_number, schema_code, "process_delete", key - ) - raw_import_view = cls.import_table_name(import_number, schema_code, "raw", key) - process_import_view = cls.import_table_name(import_number, schema_code, "process", key) - - # 0) clean - - cls.c_sql_exec_txt(f"DROP VIEW IF EXISTS {process_delete_view}") - cls.c_sql_exec_txt(f"DROP VIEW IF EXISTS {raw_delete_view}") - cls.c_sql_exec_txt(f"DROP VIEW IF EXISTS {process_import_view}") - cls.c_sql_exec_txt(f"DROP VIEW IF EXISTS {raw_import_view}") - - # 1) create raw_temp_table for n-n - txt_raw_unnest_table = cls.import_txt_create_raw_view( - import_number, schema_code, from_table, raw_import_view, keys=[key], key_unnest=key - ) - cls.c_sql_exec_txt(txt_raw_unnest_table) - txt_process_table = cls.import_txt_processed_view( - import_number, schema_code, raw_import_view, process_import_view, keys=[key] - ) - - cls.c_sql_exec_txt(txt_process_table) - - # 3) insert / update / delete ?? 
- - # - delete : tout depuis import_table - # create_view for delete - txt_raw_delete_table = cls.import_txt_create_raw_view( - import_number, schema_code, from_table, raw_delete_view, keys=[] - ) - cls.c_sql_exec_txt(txt_raw_delete_table) - - txt_processed_delete_table = cls.import_txt_processed_view( - import_number, schema_code, raw_delete_view, process_delete_view, keys=[] - ) - cls.c_sql_exec_txt(txt_processed_delete_table) - - txt_delete = f""" -DELETE FROM {cor_table} t - USING {process_delete_view} j - WHERE t.{sm.pk_field_name()} = j.{sm.pk_field_name()}; - """ - - cls.c_sql_exec_txt(txt_delete) - - # - insert - txt_insert = cls.import_txt_insert( - schema_code, - process_import_view, - keys=[sm.pk_field_name(), rel.pk_field_name()], - dest_table=cor_table, - ) - cls.c_sql_exec_txt(txt_insert) diff --git a/backend/gn_modulator/schema/imports/update.py b/backend/gn_modulator/schema/imports/update.py deleted file mode 100644 index d210851f..00000000 --- a/backend/gn_modulator/schema/imports/update.py +++ /dev/null @@ -1,103 +0,0 @@ -class SchemaUpdateImports: - @classmethod - def import_update(cls, import_number, schema_code, from_table): - nb_update = cls.c_sql_exec_txt(cls.import_txt_nb_update(schema_code, from_table)).scalar() - - cls.import_set_infos(import_number, schema_code, "nb_update", nb_update) - - if nb_update == 0: - return - - try: - import_txt_update = cls.import_txt_update(schema_code, from_table) - cls.import_set_infos(import_number, schema_code, "sql.update", nb_update) - cls.c_sql_exec_txt(import_txt_update) - except Exception as e: - cls.import_add_error( - import_number, - schema_code, - code="ERR_IMPORT_UPDATE", - msg=f"Erreur durant l'insert de {from_table} vers {schema_code} : {str(e)}", - ) - - @classmethod - def import_txt_update(cls, schema_code, processed_import_view): - sm = cls(schema_code) - - columns = cls.get_table_columns(processed_import_view) - - v_column_keys = map( - lambda x: x, - filter(lambda x: sm.has_property(x) and 
sm.is_column(x), columns), - ) - - v_set_keys = list( - map( - lambda x: f"{x}=a.{x}", - filter( - lambda x: sm.has_property(x) - and sm.is_column(x) - and not sm.property(x).get("primary_key"), - columns, - ), - ) - ) - - v_update_condition = list( - map( - lambda x: f"(t.{x}::TEXT IS DISTINCT FROM a.{x}::TEXT)", - filter( - lambda x: sm.has_property(x) - and sm.is_column(x) - and not sm.property(x).get("primary_key"), - columns, - ), - ) - ) - - txt_set_keys = ",\n ".join(v_set_keys) - txt_columns_keys = ",\n ".join(v_column_keys) - txt_update_conditions = "NOT (" + "\n AND ".join(v_update_condition) + ")" - - return f""" -UPDATE {sm.sql_schema_dot_table()} t SET - {txt_set_keys} -FROM ( - SELECT - {txt_columns_keys} - FROM {processed_import_view} -)a -WHERE a.{sm.pk_field_name()} = t.{sm.pk_field_name()} -AND {txt_update_conditions} -; -""" - - @classmethod - def import_txt_nb_update(cls, schema_code, processed_import_view): - sm = cls(schema_code) - - columns = cls.get_table_columns(processed_import_view) - - v_update_conditions = list( - map( - lambda x: f"(t.{x}::TEXT IS DISTINCT FROM a.{x}::TEXT)", - filter( - lambda x: sm.has_property(x) - and sm.is_column(x) - and not sm.property(x).get("primary_key"), - columns, - ), - ) - ) - - txt_update_conditions = "" + "\n OR ".join(v_update_conditions) + "" - - return f""" - SELECT - COUNT(*) - FROM {sm.sql_schema_dot_table()} t - JOIN {processed_import_view} a - ON a.{sm.pk_field_name()} = t.{sm.pk_field_name()} - WHERE {txt_update_conditions} -; -""" diff --git a/backend/gn_modulator/schema/imports/utils.py b/backend/gn_modulator/schema/imports/utils.py deleted file mode 100644 index 0471e0af..00000000 --- a/backend/gn_modulator/schema/imports/utils.py +++ /dev/null @@ -1,138 +0,0 @@ -from pathlib import Path -import math, random -from geonature.utils.env import db -from utils_flask_sqla.generic import GenericTable -from gn_modulator.utils.env import schema_import -from gn_modulator.utils.cache import 
set_global_cache, get_global_cache - - -class SchemaUtilsImports: - """ - methodes pour aider aux imports - """ - - @classmethod - def count_and_check_table(cls, import_number, schema_code, dest_table, table_type): - try: - nb_lines = cls.c_sql_exec_txt(f"SELECT COUNT(*) FROM {dest_table}").scalar() - cls.import_set_infos(import_number, schema_code, f"nb_{table_type}", nb_lines) - except Exception as e: - cls.import_add_error( - import_number, - schema_code, - code="ERR_IMPORT_COUNT_VIEW", - msg=f"Erreur avec la table/vue '{table_type}' {dest_table}: {str(e)}", - ) - return - - if nb_lines == 0: - cls.import_add_error( - import_number, - schema_code, - code="ERR_IMPORT_COUNT_VIEW", - msg=f"Erreur avec la table/vue '{table_type}' {dest_table}: il n'y a n'a pas de données", - ) - - @classmethod - def generate_import_number(cls): - """ - genere un nombre aleatoire pour différer tous les imports - TODO utiliser un serial ? - """ - return math.floor(random.random() * 1e6) - - @classmethod - def import_pretty_infos(cls, import_number, schema_code): - """ - met en forme les resultats de l'import - """ - - import_infos = cls.import_get_infos(import_number, schema_code) - txt = "" - txt += f" - {schema_code}\n" - txt += f" raw : {import_infos['nb_raw']:10d}\n" - if import_infos.get("nb_raw") != import_infos["nb_process"]: - txt += f" processed : {import_infos['nb_process']:10d}\n" - if import_infos.get("nb_insert"): - txt += f" insert : {import_infos['nb_insert']:10d}\n" - if import_infos.get("nb_update"): - txt += f" update : {import_infos['nb_update']:10d}\n" - if import_infos.get("nb_unchanged"): - txt += f" unchanged : {import_infos['nb_unchanged']:10d}\n" - - return txt - - @classmethod - def import_clean_tables(cls, import_number, schema_code, keep_raw): - """ - Drop import tables - """ - tables = cls.import_get_infos(import_number, schema_code, "tables", required=True) - - if not keep_raw: - cls.c_sql_exec_txt(f"DROP TABLE IF EXISTS {tables['import']} CASCADE") - 
else: - cls.c_sql_exec_txt(f"DROP VIEW IF EXISTS {tables['process']} CASCADE") - cls.c_sql_exec_txt(f"DROP VIEW IF EXISTS {tables['preprocess']} CASCADE") - cls.c_sql_exec_txt(f"DROP VIEW IF EXISTS {tables['raw']} CASCADE") - - @classmethod - def import_table_name(cls, import_number, schema_code, type, key=None): - """ - table dans laquelle on importe le fichier csv - """ - - if type == "import": - return f"{schema_import}.t_{import_number}_{type}" - else: - rel = f"_{key}" if key is not None else "" - return f"{schema_import}.v_{import_number}_{type}_{schema_code.replace('.', '_')}{rel}" - - @classmethod - def import_init(cls, import_number, schema_code, data_file_path, pre_process_file_path): - """ - create schema if not exists - drop previous tables ?? - """ - cls.import_set_infos(import_number, schema_code, "data_file_path", data_file_path) - cls.import_set_infos( - import_number, schema_code, "pre_process_file_path", pre_process_file_path - ) - cls.import_set_infos(import_number, schema_code, "errors", []) - - for table_type in ["import", "raw", "preprocess", "process"]: - table_type2 = ( - "import" - if (table_type == "preprocess" and not pre_process_file_path) - else table_type - ) - cls.import_set_infos( - import_number, - schema_code, - f"tables.{table_type}", - cls.import_table_name(import_number, schema_code, table_type2), - ) - - cls.c_sql_exec_txt(f"CREATE SCHEMA IF NOT EXISTS {schema_import}") - - @classmethod - def import_get_infos(cls, import_number, schema_code, key=None, required=False): - cache_keys = ["import_info", import_number, schema_code] - if key is not None: - cache_keys += key.split(".") - res = get_global_cache(cache_keys) - if required and res is None: - raise cls.SchemaImportRequiredInfoNotFoundError( - f"Required import_info not found for {{import_number: {import_number}, schema_code: {schema_code}, key: {key}}}" - ) - return res - - @classmethod - def import_set_infos(cls, import_number, schema_code, key, value): - cache_keys = 
["import_info", import_number, schema_code] + key.split(".") - set_global_cache(cache_keys, value) - - @classmethod - def import_add_error(cls, import_number, schema_code, code=None, msg=None): - errors = cls.import_get_infos(import_number, schema_code, "errors") - errors.append({"code": code, "msg": msg}) From 465ad303e421526b86ed59fd0cf0e0479665673e Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 9 Mar 2023 15:49:33 +0100 Subject: [PATCH 014/142] import add id_digitiser && test --- backend/gn_modulator/imports/mixins/process.py | 11 ++++++++++- backend/gn_modulator/imports/models.py | 9 ++++++++- .../versions/3920371728d8_gn_modulator_import_init.py | 7 +++++++ backend/gn_modulator/tests/test_import_api.py | 2 ++ 4 files changed, 27 insertions(+), 2 deletions(-) diff --git a/backend/gn_modulator/imports/mixins/process.py b/backend/gn_modulator/imports/mixins/process.py index a0af32a0..2ca0583a 100644 --- a/backend/gn_modulator/imports/mixins/process.py +++ b/backend/gn_modulator/imports/mixins/process.py @@ -1,5 +1,5 @@ -from .utils import ImportMixinUtils from gn_modulator import SchemaMethods +from .utils import ImportMixinUtils class ImportMixinProcess(ImportMixinUtils): @@ -66,10 +66,19 @@ def sql_process_view(self, from_table, dest_table, keys=None): txt_columns = ",\n ".join(v_columns) txt_joins = "\n".join(v_joins) + # TODO rendre id_digitiser parametrable ? 
+ txt_id_digitiser = "" + if self.id_digitiser: + for key in ["id_digitiser", "id_digitizer"]: + if SchemaMethods(self.schema_code).has_property(key): + txt_id_digitiser = f"{self.id_digitiser} AS {key}," + break + return f"""DROP VIEW IF EXISTS {dest_table} CASCADE; CREATE VIEW {dest_table} AS SELECT id_import, + {txt_id_digitiser} {txt_columns} FROM {from_table} t {txt_joins}; diff --git a/backend/gn_modulator/imports/models.py b/backend/gn_modulator/imports/models.py index cf7c2c33..b34ae7f9 100644 --- a/backend/gn_modulator/imports/models.py +++ b/backend/gn_modulator/imports/models.py @@ -1,6 +1,7 @@ # modeles d'import -from geonature.utils.env import db +from flask import g from sqlalchemy.dialects.postgresql import JSONB +from geonature.utils.env import db from .mixins import ImportMixin @@ -11,10 +12,14 @@ class TImport(db.Model, ImportMixin): def __init__( self, schema_code=None, data_file_path=None, mapping_file_path=None, _insert_data=False ): + self.id_digitiser = g.current_user.id_role if hasattr(g, "current_user") else None + self.schema_code = schema_code self.data_file_path = data_file_path and str(data_file_path) self.mapping_file_path = mapping_file_path and str(mapping_file_path) + self._insert = _insert_data + self.res = {} self.errors = [] self.sql = {} @@ -24,6 +29,7 @@ def __init__( id_import = db.Column(db.Integer, primary_key=True) + id_digitiser = db.Column(db.Integer, db.ForeignKey("utilisateurs.t_roles.id_role")) schema_code = db.Column(db.Unicode) data_file_path = db.Column(db.Unicode) @@ -40,6 +46,7 @@ def __init__( def as_dict(self): return { "id_import": self.id_import, + "id_digitiser": self.id_digitiser, "data_type": self.data_type, "csv_delimiter": self.csv_delimiter, "res": self.res, diff --git a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py index 9a920659..0b6de7df 100644 --- 
a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py +++ b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py @@ -21,6 +21,7 @@ def upgrade(): """ CREATE TABLE gn_modulator.t_imports( id_import SERIAL NOT NULL, + id_digitiser INTEGER, schema_code VARCHAR, data_file_path VARCHAR, mapping_file_path VARCHAR, @@ -34,6 +35,12 @@ def upgrade(): ALTER TABLE gn_modulator.t_imports ADD CONSTRAINT pk_gn_modulator_t_imports_id_import PRIMARY KEY (id_import); + +ALTER TABLE pr_sipaf.t_passages_faune + ADD CONSTRAINT fk_modulator_t_impt_t_role_id_digitiser FOREIGN KEY (id_digitiser) + REFERENCES utilisateurs.t_roles(id_role) + ON UPDATE CASCADE ON DELETE SET NULL; + """ ) pass diff --git a/backend/gn_modulator/tests/test_import_api.py b/backend/gn_modulator/tests/test_import_api.py index f382b920..d46f8c54 100644 --- a/backend/gn_modulator/tests/test_import_api.py +++ b/backend/gn_modulator/tests/test_import_api.py @@ -26,6 +26,8 @@ def test_import_synthese(self, users): assert r.json["res"]["nb_data"] == 2 assert r.json["res"]["nb_insert"] == 2 + assert r.json["id_digitiser"] == users["admin_user"].id_role + def test_import_synthese2(self, users): set_logged_user_cookie(self.client, users["admin_user"]) with open(import_test_dir / "synthese_1.csv", "rb") as f: From 6fc8041c8cb5deca4456e7e3ee887ba6b2bdc933 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 9 Mar 2023 15:56:27 +0100 Subject: [PATCH 015/142] --no-edit --- backend/gn_modulator/imports/mixins/check.py | 4 ++-- backend/gn_modulator/imports/mixins/insert.py | 2 +- backend/gn_modulator/imports/mixins/process.py | 2 +- backend/gn_modulator/imports/mixins/raw.py | 2 +- backend/gn_modulator/imports/mixins/relation.py | 2 +- backend/gn_modulator/imports/mixins/update.py | 4 ++-- backend/gn_modulator/imports/mixins/utils.py | 5 +++++ backend/gn_modulator/imports/models.py | 1 + backend/gn_modulator/schema/sql/base.py | 2 +- 9 files changed, 15 
insertions(+), 9 deletions(-) diff --git a/backend/gn_modulator/imports/mixins/check.py b/backend/gn_modulator/imports/mixins/check.py index 6375c06d..b70cf3f2 100644 --- a/backend/gn_modulator/imports/mixins/check.py +++ b/backend/gn_modulator/imports/mixins/check.py @@ -17,7 +17,7 @@ def check_required(self): raw_table = self.tables["raw"] sm = SchemaMethods(self.schema_code) - for key in SchemaMethods.get_table_columns(raw_table): + for key in self.get_table_columns(raw_table): if not sm.is_required(key): continue @@ -48,7 +48,7 @@ def check_resolve_keys(self): process_table = self.tables["process"] sm = SchemaMethods(self.schema_code) - for key in SchemaMethods.get_table_columns(raw_table): + for key in self.get_table_columns(raw_table): if not (sm.has_property(key) and sm.property(key).get("foreign_key")): continue diff --git a/backend/gn_modulator/imports/mixins/insert.py b/backend/gn_modulator/imports/mixins/insert.py index dc50ef6a..48f677b4 100644 --- a/backend/gn_modulator/imports/mixins/insert.py +++ b/backend/gn_modulator/imports/mixins/insert.py @@ -47,7 +47,7 @@ def sql_insert(self, from_table, dest_table=None, keys=None): if keys is not None else sm.is_column(x) and not (sm.property(x).get("primary_key")) ), - SchemaMethods.get_table_columns(from_table), + self.get_table_columns(from_table), ) ) diff --git a/backend/gn_modulator/imports/mixins/process.py b/backend/gn_modulator/imports/mixins/process.py index 2ca0583a..6c597372 100644 --- a/backend/gn_modulator/imports/mixins/process.py +++ b/backend/gn_modulator/imports/mixins/process.py @@ -30,7 +30,7 @@ def sql_process_view(self, from_table, dest_table, keys=None): v_columns = [] v_joins = [] - from_table_columns = SchemaMethods.get_table_columns(from_table) + from_table_columns = self.get_table_columns(from_table) columns = list( filter( diff --git a/backend/gn_modulator/imports/mixins/raw.py b/backend/gn_modulator/imports/mixins/raw.py index a8877182..bfd7bf52 100644 --- 
a/backend/gn_modulator/imports/mixins/raw.py +++ b/backend/gn_modulator/imports/mixins/raw.py @@ -40,7 +40,7 @@ def sql_raw_view( sm = SchemaMethods(self.schema_code) - from_table_columns = SchemaMethods.get_table_columns(from_table) + from_table_columns = self.get_table_columns(from_table) columns = filter( lambda x: ( diff --git a/backend/gn_modulator/imports/mixins/relation.py b/backend/gn_modulator/imports/mixins/relation.py index 0eeee907..73c36a97 100644 --- a/backend/gn_modulator/imports/mixins/relation.py +++ b/backend/gn_modulator/imports/mixins/relation.py @@ -10,7 +10,7 @@ def process_relations(self): from_table = self.tables.get("mapping") or self.tables["data"] sm = SchemaMethods(self.schema_code) - columns = SchemaMethods.get_table_columns(from_table) + columns = self.get_table_columns(from_table) for index, key in enumerate(columns): if not sm.is_relationship(key): diff --git a/backend/gn_modulator/imports/mixins/update.py b/backend/gn_modulator/imports/mixins/update.py index 3c52076c..e691ab82 100644 --- a/backend/gn_modulator/imports/mixins/update.py +++ b/backend/gn_modulator/imports/mixins/update.py @@ -35,7 +35,7 @@ def process_update(self): def sql_update(self, from_table): sm = SchemaMethods(self.schema_code) - columns = SchemaMethods.get_table_columns(from_table) + columns = self.get_table_columns(from_table) v_column_keys = map( lambda x: x, @@ -86,7 +86,7 @@ def sql_update(self, from_table): def sql_nb_update(self, from_table): sm = SchemaMethods(self.schema_code) - columns = SchemaMethods.get_table_columns(from_table) + columns = self.get_table_columns(from_table) v_update_conditions = list( map( diff --git a/backend/gn_modulator/imports/mixins/utils.py b/backend/gn_modulator/imports/mixins/utils.py index 64857511..0eeaa466 100644 --- a/backend/gn_modulator/imports/mixins/utils.py +++ b/backend/gn_modulator/imports/mixins/utils.py @@ -64,3 +64,8 @@ def add_error(self, code=None, msg=None, key=None, lines=None, values=None): 
self.errors.append( {"code": code, "msg": msg, "key": key, "lines": lines, "values": values} ) + + def get_table_columns(self, table_name): + if not self._columns.get(table_name): + self._columns[table_name] = SchemaMethods.get_table_columns(table_name) + return self._columns[table_name] diff --git a/backend/gn_modulator/imports/models.py b/backend/gn_modulator/imports/models.py index b34ae7f9..c2d87408 100644 --- a/backend/gn_modulator/imports/models.py +++ b/backend/gn_modulator/imports/models.py @@ -26,6 +26,7 @@ def __init__( self.tables = {} _insert_data = False + _columns = {} id_import = db.Column(db.Integer, primary_key=True) diff --git a/backend/gn_modulator/schema/sql/base.py b/backend/gn_modulator/schema/sql/base.py index 50c1202e..f7d273b3 100644 --- a/backend/gn_modulator/schema/sql/base.py +++ b/backend/gn_modulator/schema/sql/base.py @@ -58,7 +58,7 @@ def get_table_columns(cls, schema_dot_table): column_name FROM information_schema.columns c - WHERE + WHERE c.table_schema = '{table_schema}' AND c.table_name = '{table_name}' """ From 94ba5892a851ceefc8d9536e2909c0b6fb52a555 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Fri, 10 Mar 2023 12:25:22 +0100 Subject: [PATCH 016/142] fix id_digitiser_fk_constraint --- .../versions/3920371728d8_gn_modulator_import_init.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py index 0b6de7df..f0b01446 100644 --- a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py +++ b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py @@ -36,7 +36,7 @@ def upgrade(): ALTER TABLE gn_modulator.t_imports ADD CONSTRAINT pk_gn_modulator_t_imports_id_import PRIMARY KEY (id_import); -ALTER TABLE pr_sipaf.t_passages_faune +ALTER TABLE gn_modulator.t_imports ADD CONSTRAINT 
fk_modulator_t_impt_t_role_id_digitiser FOREIGN KEY (id_digitiser) REFERENCES utilisateurs.t_roles(id_role) ON UPDATE CASCADE ON DELETE SET NULL; From 4b56b5192ebef7a10c67d90af0c813c9600c4629 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Fri, 10 Mar 2023 17:31:23 +0100 Subject: [PATCH 017/142] doc technique init --- doc/technique/import.md | 45 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 doc/technique/import.md diff --git a/doc/technique/import.md b/doc/technique/import.md new file mode 100644 index 00000000..6f569db8 --- /dev/null +++ b/doc/technique/import.md @@ -0,0 +1,45 @@ +# Import + +# TODO + donner un exemple pour chaque étape + +## Les étapes + +### 1) Données () + +Chargement du fichier de données dans une table `gn_modulator_import.t_` + +### 2) Mapping (optionnel) + +Création de la vue de mapping pour réorganiser les colonnes de la table de données à partir d'une instruction select `SELECT` + +### 3) Vue brute (`raw`) + +En partant de la vue de mapping si elle existe, ou de la table de données, on cherche à donner le bon type aux colonnes. + +### 4) Vue process + +En partant de la vue brute, on cherche à résoudre les clés étrangères +La vue process est prête à être intégrée telle quelle dans la table destinataire : elle possède les bons id + + +### 5) Vérification des données + +On collecte des erreurs si +- dans la table `raw`, une colonne requise possède une valeur NULL. +- dans la table `process` il y a une clé étrangère NULL alors qu'elle est non nulle dans la table `raw`. + +### 5) Insertion des données + +On insère dans la table destinataire les lignes de la table process pour lesquelles la colonne correspondant à la clé primaire est nulle. + +### 6) Mise à jour des données (optionnel) + +On met à jour les lignes de la table destinataire qui correspondent aux lignes de la table process +- pour lesquelles la colonne correspondant à la clé primaire n'est pas nulle.
+- et où une au moins des colonnes est différente de celle de la ligne destinataire + +### 7) Traitement des relations n-n + +- Effacement de toutes les lignes concernées +- Insert des lignes selon les données \ No newline at end of file From 63ffa6363e9d7028c4971175c2ff6b68acb43e5b Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 13 Mar 2023 12:08:48 +0100 Subject: [PATCH 018/142] NULLIF --- backend/gn_modulator/blueprint.py | 4 ---- backend/gn_modulator/imports/mixins/raw.py | 10 +++++----- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/backend/gn_modulator/blueprint.py b/backend/gn_modulator/blueprint.py index 484b674d..599bd1a9 100644 --- a/backend/gn_modulator/blueprint.py +++ b/backend/gn_modulator/blueprint.py @@ -14,12 +14,8 @@ blueprint = Blueprint(MODULE_CODE.lower(), __name__) -<<<<<<< HEAD from gn_modulator.routes.rest import * # noqa from gn_modulator.routes.exports import * # noqa -======= -from gn_modulator.imports.routes import * # noqa ->>>>>>> add model TImport and methods # Creation des commandes pour modules blueprint.cli.short_help = "Commandes pour l' administration du module MODULES" diff --git a/backend/gn_modulator/imports/mixins/raw.py b/backend/gn_modulator/imports/mixins/raw.py index bfd7bf52..0e6c3049 100644 --- a/backend/gn_modulator/imports/mixins/raw.py +++ b/backend/gn_modulator/imports/mixins/raw.py @@ -110,15 +110,15 @@ def pre_process_raw_import_columns(self, key, key_unnest=None): return key if property["type"] == "number": - return f"CASE WHEN {key}::TEXT = '' THEN NULL ELSE {key}::FLOAT END AS {key}" + return f"NULLIF({key}, '')::FLOAT END AS {key}" if property["type"] == "date": - return f"CASE WHEN {key}::TEXT = '' THEN NULL ELSE {key}::DATE END AS {key}" + return f"NULLIF({key}, '')::DATE END AS {key}" if property["type"] == "datetime": - return f"CASE WHEN {key}::TEXT = '' THEN NULL ELSE {key}::TIMESTAMP END AS {key}" + return f"NULLIF({key}, '')::TIMESTAMP END AS {key}" if property["type"] == 
"integer" and "schema_code" not in property: - return f"CASE WHEN {key}::TEXT = '' THEN NULL ELSE {key}::INTEGER END AS {key}" + return f"NULLIF({key}, '')::INTEGER END AS {key}" - return f"CASE WHEN {key}::TEXT = '' THEN NULL ELSE {key} END AS {key}" + return f"NULLIF({key}, '') END AS {key}" From 0fed0379170d28ab3a023294f5631021178477e1 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 13 Mar 2023 16:43:10 +0100 Subject: [PATCH 019/142] test wrong value for type and test --- backend/gn_modulator/blueprint.py | 1 + .../gn_modulator/imports/mixins/__init__.py | 13 +++-- backend/gn_modulator/imports/mixins/check.py | 53 +++++++++++++++++-- backend/gn_modulator/imports/mixins/data.py | 9 +++- .../gn_modulator/imports/mixins/process.py | 37 ------------- backend/gn_modulator/imports/mixins/raw.py | 44 ++++++++++++--- backend/gn_modulator/imports/models.py | 2 +- .../3920371728d8_gn_modulator_import_init.py | 24 ++++++++- .../{imports/routes.py => routes/imports.py} | 8 ++- backend/gn_modulator/schema/auto.py | 1 - backend/gn_modulator/schema/sql/base.py | 5 ++ ...hese_ERR_IMPORT_INVALID_VALUE_FOR_TYPE.csv | 3 ++ backend/gn_modulator/tests/test_import.py | 16 ++++-- backend/gn_modulator/tests/utils/imports.py | 27 +++++----- .../utils/synthese/syn.synthese.schema.yml | 6 +-- doc/technique/import.md | 6 ++- 16 files changed, 175 insertions(+), 80 deletions(-) rename backend/gn_modulator/{imports/routes.py => routes/imports.py} (90%) create mode 100644 backend/gn_modulator/tests/import_test/synthese_ERR_IMPORT_INVALID_VALUE_FOR_TYPE.csv diff --git a/backend/gn_modulator/blueprint.py b/backend/gn_modulator/blueprint.py index 599bd1a9..2a55514d 100644 --- a/backend/gn_modulator/blueprint.py +++ b/backend/gn_modulator/blueprint.py @@ -16,6 +16,7 @@ from gn_modulator.routes.rest import * # noqa from gn_modulator.routes.exports import * # noqa +from gn_modulator.routes.imports import * # noqa # Creation des commandes pour modules blueprint.cli.short_help = "Commandes 
pour l' administration du module MODULES" diff --git a/backend/gn_modulator/imports/mixins/__init__.py b/backend/gn_modulator/imports/mixins/__init__.py index 5d3a5a46..70cb105a 100644 --- a/backend/gn_modulator/imports/mixins/__init__.py +++ b/backend/gn_modulator/imports/mixins/__init__.py @@ -40,6 +40,11 @@ def process_import_schema(self): return self db.session.flush() + self.process_check_types() + if self.errors: + return self + db.session.flush() + self.process_raw_view() if self.errors: return self @@ -50,7 +55,9 @@ def process_import_schema(self): return self db.session.flush() - self.process_check() + self.process_check_required() + self.process_check_resolve_keys() + if self.errors: return self db.session.flush() @@ -77,7 +84,7 @@ def process_import_schema(self): return self @classmethod - def process_import_code(cls, import_code, data_dir_path, commit=True): + def process_import_code(cls, import_code, data_dir_path, insert_data=True, commit=True): print(f"\nProcess scenario d'import {import_code}") # get import definition @@ -105,11 +112,11 @@ def process_import_code(cls, import_code, data_dir_path, commit=True): schema_code=import_definition["schema_code"], data_file_path=data_file_path, mapping_file_path=mapping_file_path, + _insert_data=False, ) # pour éviter d'avoir à recharger les données if import_definition.get("keep_raw") and len(imports): - last_import = imports[-1] impt.tables["data"] = imports[-1].tables["data"] db.session.add(impt) diff --git a/backend/gn_modulator/imports/mixins/check.py b/backend/gn_modulator/imports/mixins/check.py index b70cf3f2..156b0b60 100644 --- a/backend/gn_modulator/imports/mixins/check.py +++ b/backend/gn_modulator/imports/mixins/check.py @@ -5,11 +5,54 @@ class ImportMixinCheck(ImportMixinUtils): - def process_check(self): - self.check_required() - self.check_resolve_keys() + def process_check_types(self): + # avant raw + # verifie si les champs des colonnes correspondent bien aux types + # avec la fonction + # 
gn_modulator.check_type(TYPE_IN, val) - def check_required(self): + table_test = self.tables.get("mapping") or self.tables["data"] + sm = SchemaMethods(self.schema_code) + for key in filter( + lambda x: sm.is_column(x) and not sm.property(x).get("foreign_key"), + self.get_table_columns(table_test), + ): + sql_type = sm.sql_type(key)["type"] + if sql_type == "DATETIME": + sql_type = "TIMESTAMP" + sql_check_type_for_column = f""" + SELECT + COUNT(*), + ARRAY_AGG(id_import), + ARRAY_AGG({key}) + FROM {table_test} + WHERE NOT ( + {key} is NULL + OR + gn_modulator.check_value_for_type('{sql_type}', {key}) + ) + GROUP BY id_import + ORDER BY id_import + """ + res = SchemaMethods.c_sql_exec_txt(sql_check_type_for_column).fetchone() + if res is None: + continue + nb_lines = res[0] + lines = res[1] + values = res[2] + str_lines = lines and ", ".join(map(lambda x: str(x), lines)) or "" + if nb_lines == 0: + continue + self.add_error( + code="ERR_IMPORT_INVALID_VALUE_FOR_TYPE", + key=key, + lines=lines, + values=values, + msg=f"Il y a des valeurs invalides pour la colonne {key} de type {sql_type}. 
{nb_lines} ligne(s) concernée(s) : [{str_lines}]", + ) + + def process_check_required(self): + # apres raw # pour toutes les colonnes de raw # si une colonne est requise # et que la valeur dans raw est nulle @@ -43,7 +86,7 @@ def check_required(self): return - def check_resolve_keys(self): + def process_check_resolve_keys(self): raw_table = self.tables["raw"] process_table = self.tables["process"] sm = SchemaMethods(self.schema_code) diff --git a/backend/gn_modulator/imports/mixins/data.py b/backend/gn_modulator/imports/mixins/data.py index 9f3b46c9..6e94e2e8 100644 --- a/backend/gn_modulator/imports/mixins/data.py +++ b/backend/gn_modulator/imports/mixins/data.py @@ -48,7 +48,6 @@ def import_csv_file(self, dest_table): return import_table_columns = first_line.replace("\n", "").split(self.csv_delimiter) - # creation de la table temporaire self.sql["data_table"] = self.sql_create_data_table( self.tables["data"], import_table_columns @@ -71,6 +70,14 @@ def import_csv_file(self, dest_table): else: self.copy_csv_data(f, dest_table, import_table_columns) + set_columns_txt = ", ".join("NULLIF({key}, '') AS {key}") + + self.sql[ + "post_data" + ] = f""" + UPDATE {dest_table} SET {set_columns_txt}; + """ + def copy_csv_data(self, f, dest_table, table_columns): columns_fields = ", ".join(table_columns) self.sql[ diff --git a/backend/gn_modulator/imports/mixins/process.py b/backend/gn_modulator/imports/mixins/process.py index 6c597372..ca7dbacb 100644 --- a/backend/gn_modulator/imports/mixins/process.py +++ b/backend/gn_modulator/imports/mixins/process.py @@ -84,43 +84,6 @@ def sql_process_view(self, from_table, dest_table, keys=None): {txt_joins}; """ - def process_raw_import_column(self, key): - """ """ - - sm = SchemaMethods(self.schema_code) - - if not sm.has_property(key): - return f"{key}" - - property = sm.property(key) - - # pour les nomenclature (on rajoute le type) - if nomenclature_type := property.get("nomenclature_type"): - return f"""CASE - WHEN {key} IS NOT 
NULL AND {key} NOT LIKE '%%|%%' THEN CONCAT('{nomenclature_type}|', {key}) - ELSE {key} - END AS {key}""" - - if property["type"] == "boolean": - return f"""CASE - WHEN {key}::text IN ('t', 'true') THEN TRUE - WHEN {key}::text IN ('f', 'false') THEN FALSE - ELSE NULL - END AS {key}""" - - if property["type"] == "geometry": - geometry_type = "ST_MULTI" if property["geometry_type"] == "multipolygon" else "" - return f"""{geometry_type}( - ST_SETSRID( - ST_FORCE2D( - ST_GEOMFROMEWKT({key}) - ), {sm.property(key).get('srid')} - ) - ) - AS {key}""" - - return f"{key}" - def resolve_key( self, schema_code, key, index=None, alias_main="t", alias_join_base="j", solved_keys={} ): diff --git a/backend/gn_modulator/imports/mixins/raw.py b/backend/gn_modulator/imports/mixins/raw.py index 0e6c3049..6bad6d87 100644 --- a/backend/gn_modulator/imports/mixins/raw.py +++ b/backend/gn_modulator/imports/mixins/raw.py @@ -12,7 +12,6 @@ def process_raw_view(self): from_table = self.tables.get("mapping") or self.tables["data"] dest_table = self.tables["raw"] = self.table_name("raw") self.sql["raw_view"] = self.sql_raw_view(from_table, dest_table) - try: SchemaMethods.c_sql_exec_txt(self.sql["raw_view"]) except Exception as e: @@ -109,16 +108,49 @@ def pre_process_raw_import_columns(self, key, key_unnest=None): if property.get("foreign_key"): return key + if property["type"] == "geometry": + geometry_type = "ST_MULTI" if property["geometry_type"] == "multipolygon" else "" + return f"""{geometry_type}( + ST_SETSRID( + ST_FORCE2D( + ST_GEOMFROMEWKT({key}) + ), {sm.property(key).get('srid')} + ) + ) + AS {key}""" + if property["type"] == "number": - return f"NULLIF({key}, '')::FLOAT END AS {key}" + return f"({key})::FLOAT" + + if property["type"] == "boolean": + f"({key})::BOOLEAN" if property["type"] == "date": - return f"NULLIF({key}, '')::DATE END AS {key}" + return f"({key})::DATE" if property["type"] == "datetime": - return f"NULLIF({key}, '')::TIMESTAMP END AS {key}" + return 
f"({key})::TIMESTAMP" if property["type"] == "integer" and "schema_code" not in property: - return f"NULLIF({key}, '')::INTEGER END AS {key}" + return f"({key})::INTEGER" + + return f"{key}" + + def process_raw_import_column(self, key): + """ """ + + sm = SchemaMethods(self.schema_code) + + if not sm.has_property(key): + return f"{key}" + + property = sm.property(key) + + # pour les nomenclature (on rajoute le type) + if nomenclature_type := property.get("nomenclature_type"): + return f"""CASE + WHEN {key} IS NOT NULL AND {key} NOT LIKE '%%|%%' THEN CONCAT('{nomenclature_type}|', {key}) + ELSE {key} + END AS {key}""" - return f"NULLIF({key}, '') END AS {key}" + return f"{key}" diff --git a/backend/gn_modulator/imports/models.py b/backend/gn_modulator/imports/models.py index c2d87408..c9bd7eca 100644 --- a/backend/gn_modulator/imports/models.py +++ b/backend/gn_modulator/imports/models.py @@ -18,7 +18,7 @@ def __init__( self.data_file_path = data_file_path and str(data_file_path) self.mapping_file_path = mapping_file_path and str(mapping_file_path) - self._insert = _insert_data + self._insert_data = _insert_data self.res = {} self.errors = [] diff --git a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py index f0b01446..a1c27b60 100644 --- a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py +++ b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py @@ -19,6 +19,7 @@ def upgrade(): op.execute( """ + CREATE TABLE gn_modulator.t_imports( id_import SERIAL NOT NULL, id_digitiser INTEGER, @@ -30,7 +31,9 @@ def upgrade(): res JSONB, tables JSONB, errors JSONB, - sql JSONB + sql JSONB, + meta_create_date timestamp without time zone DEFAULT now(), + meta_update_date timestamp without time zone DEFAULT now() ); ALTER TABLE gn_modulator.t_imports @@ -41,6 +44,25 @@ def upgrade(): REFERENCES 
utilisateurs.t_roles(id_role) ON UPDATE CASCADE ON DELETE SET NULL; +CREATE TRIGGER tri_meta_dates_change_gnm_t_import + BEFORE INSERT OR UPDATE + ON gn_modulator.t_imports + FOR EACH ROW + EXECUTE PROCEDURE public.fct_trg_meta_dates_change(); + +CREATE OR REPLACE FUNCTION gn_modulator.check_value_for_type(type_in VARCHAR, value_in ANYELEMENT) +RETURNS BOOLEAN AS +$$ +BEGIN + EXECUTE FORMAT('SELECT (''%s'')::%s', value_in, type_in); + RETURN TRUE; +EXCEPTION WHEN OTHERS THEN + RETURN FALSE; +END; +$$ +LANGUAGE 'plpgsql' COST 100 +; + """ ) pass diff --git a/backend/gn_modulator/imports/routes.py b/backend/gn_modulator/routes/imports.py similarity index 90% rename from backend/gn_modulator/imports/routes.py rename to backend/gn_modulator/routes/imports.py index abb41052..124d2d7d 100644 --- a/backend/gn_modulator/imports/routes.py +++ b/backend/gn_modulator/routes/imports.py @@ -1,15 +1,13 @@ -from flask import request, jsonify +from flask import request from geonature.core.gn_permissions.decorators import check_cruved_scope from geonature.utils.env import db from gn_modulator.blueprint import blueprint -from gn_modulator.schema import SchemaMethods from gn_modulator.module import ModuleMethods - -from .utils.files import upload_import_file -from .models import TImport +from gn_modulator.imports.utils.files import upload_import_file +from gn_modulator.imports.models import TImport @check_cruved_scope("R") # object import ?? 
diff --git a/backend/gn_modulator/schema/auto.py b/backend/gn_modulator/schema/auto.py index e512ac37..81a640d2 100644 --- a/backend/gn_modulator/schema/auto.py +++ b/backend/gn_modulator/schema/auto.py @@ -153,7 +153,6 @@ def process_relation_auto(self, relation_key, relation): def process_column_auto(self, column, sql_schema_name, sql_table_name): type = str(column.type) - if "VARCHAR(" in type: type = "VARCHAR" diff --git a/backend/gn_modulator/schema/sql/base.py b/backend/gn_modulator/schema/sql/base.py index f7d273b3..3274d163 100644 --- a/backend/gn_modulator/schema/sql/base.py +++ b/backend/gn_modulator/schema/sql/base.py @@ -15,6 +15,11 @@ class SchemaSqlBase: + def sql_type(self, key): + if not self.is_column(key): + return None + return self.cls.c_get_type(self.property(key)["type"], "definition", "sql") + @classmethod def auto_sql_schemas_dot_tables(cls): auto_sql_schemas_dot_tables = [] diff --git a/backend/gn_modulator/tests/import_test/synthese_ERR_IMPORT_INVALID_VALUE_FOR_TYPE.csv b/backend/gn_modulator/tests/import_test/synthese_ERR_IMPORT_INVALID_VALUE_FOR_TYPE.csv new file mode 100644 index 00000000..162c32c0 --- /dev/null +++ b/backend/gn_modulator/tests/import_test/synthese_ERR_IMPORT_INVALID_VALUE_FOR_TYPE.csv @@ -0,0 +1,3 @@ +cd_nom,id_source,entity_source_pk_value,nom_cite,date_min,date_max,cor_observers +67111,Occtax,21,Ablette,"xxx","2017-01-08 23:00:00.000","admin" +67111,Occtax,22,Ablette,"2017-01-08 20:00:00.000","2017-01-08 23:00:00.000","admin" \ No newline at end of file diff --git a/backend/gn_modulator/tests/test_import.py b/backend/gn_modulator/tests/test_import.py index 5fe6187a..c040bc46 100644 --- a/backend/gn_modulator/tests/test_import.py +++ b/backend/gn_modulator/tests/test_import.py @@ -49,7 +49,7 @@ def test_ref_geo_linear(self): schema_code = "ref_geo.linear_group" data_file_path = import_test_dir / "route/route.csv" - pre_process_file_path = import_test_dir / "route/pp_linear_group.sql" + mapping_file_path = 
import_test_dir / "route/pp_linear_group.sql" expected_infos = { "res.nb_data": 1, "res.nb_insert": 1, @@ -57,12 +57,12 @@ def test_ref_geo_linear(self): "res.nb_unchanged": 0, } test_data_file( - schema_code, data_file_path, pre_process_file_path, expected_infos=expected_infos + schema_code, data_file_path, mapping_file_path, expected_infos=expected_infos ) schema_code = "ref_geo.linear" data_file_path = import_test_dir / "route/route.csv" - pre_process_file_path = import_test_dir / "route/pp_linear.sql" + mapping_file_path = import_test_dir / "route/pp_linear.sql" expected_infos = { "res.nb_data": 1, "res.nb_insert": 1, @@ -70,5 +70,13 @@ def test_ref_geo_linear(self): "res.nb_unchanged": 0, } test_data_file( - schema_code, data_file_path, pre_process_file_path, expected_infos=expected_infos + schema_code, data_file_path, mapping_file_path, expected_infos=expected_infos + ) + + def test_error_ERR_IMPORT_INVALID_VALUE_FOR_TYPE(self): + schema_code = "syn.synthese" + data_file_path = import_test_dir / "synthese_ERR_IMPORT_INVALID_VALUE_FOR_TYPE.csv" + expected_infos = {"errors": [{"code": "ERR_IMPORT_INVALID_VALUE_FOR_TYPE"}]} + test_data_file( + schema_code, data_file_path, mapping_file_path=None, expected_infos=expected_infos ) diff --git a/backend/gn_modulator/tests/utils/imports.py b/backend/gn_modulator/tests/utils/imports.py index d3ff9cac..e5669501 100644 --- a/backend/gn_modulator/tests/utils/imports.py +++ b/backend/gn_modulator/tests/utils/imports.py @@ -1,14 +1,13 @@ +import pytest from gn_modulator.imports.models import TImport from gn_modulator.utils.commons import getAttr from geonature.utils.env import db +@pytest.mark.skip() def test_data_file( schema_code=None, data_file_path=None, mapping_file_path=None, expected_infos={} ): - if not (schema_code and data_file_path): - return - with db.session.begin_nested(): # ici _insert_data est à true pour intégrer les avec un insert # et non un copy qui ne marche pas en test @@ -25,14 +24,18 @@ def 
test_data_file( import_infos = impt.as_dict() - errors = expected_infos.pop("errors", []) + expected_errors = expected_infos.pop("errors", []) - if len(errors) == 0: + if len(expected_errors) == 0: + # on teste si le nombre d'erreur est bien nul assert len(import_infos["errors"]) == 0, import_infos["errors"] else: - assert len(errors) == len(import_infos("error")) - for error in errors: - assert len([e for e in import_infos["errors"] if error["code"] == e["code"]]) > 0 + # on teste si on rencontre bien les erreurs attendues parmi les erreurs rencontrées + assert len(expected_errors) == len(import_infos["errors"]) + for expected_error in expected_errors: + assert ( + len([e for e in import_infos["errors"] if expected_error["code"] == e["code"]]) > 0 + ), f"L'erreur de code {expected_error['code']} n'a pas été trouvée" for key in expected_infos: txt_err = f"schema_code: {schema_code}, key: {key}, expected: {expected_infos.get(key)}, import: {getAttr(import_infos, key)}" @@ -41,11 +44,11 @@ def test_data_file( return import_infos +@pytest.mark.skip() def test_import_code(import_code=None, data_dir_path=None, expected_infos=[]): - if not (import_code and data_dir_path): - return - - imports = TImport.process_import_code(import_code, data_dir_path, commit=False) + imports = TImport.process_import_code( + import_code, data_dir_path, insert_data=True, commit=False + ) assert len(imports) > 0 for impt in imports: diff --git a/config/definitions/utils/synthese/syn.synthese.schema.yml b/config/definitions/utils/synthese/syn.synthese.schema.yml index b25b51d7..1d113921 100644 --- a/config/definitions/utils/synthese/syn.synthese.schema.yml +++ b/config/definitions/utils/synthese/syn.synthese.schema.yml @@ -15,6 +15,6 @@ meta: unique: - id_source - entity_source_pk_value - properties: - entity_source_pk_value: - type: string \ No newline at end of file +properties: + entity_source_pk_value: + type: string \ No newline at end of file diff --git a/doc/technique/import.md 
b/doc/technique/import.md index 6f569db8..3a002230 100644 --- a/doc/technique/import.md +++ b/doc/technique/import.md @@ -5,9 +5,13 @@ ## Les étapes -### 1) Données () +### 1) Données Chargement du fichier de données dans une table `gn_modulator_import.t_` + - Toutes les colonnes sont de type `VARCHAR` + - On passe tous les champs de valeur `''` à `NULL` + - La première ligne donne le nom des colonnes + - seulement pour les fichiers csv, à voir si l'on prévoit d'autres formats ### 2) Mapping (optionnel) From 460d28231158acae6668e156681c81e8c2f137f9 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 14 Mar 2023 14:32:56 +0100 Subject: [PATCH 020/142] doc and test exemple --- .github/workflows/pytest.yml | 6 ++ backend/gn_modulator/imports/mixins/data.py | 14 +-- .../gn_modulator/imports/mixins/mapping.py | 11 ++- backend/gn_modulator/imports/mixins/raw.py | 25 ++--- backend/gn_modulator/schema/auto.py | 3 + backend/gn_modulator/schema/sql/base.py | 25 +++-- .../tests/import_test/ref_geo.area.csv | 3 + backend/gn_modulator/tests/test_import.py | 16 ++++ backend/gn_modulator/tests/utils/imports.py | 2 +- doc/technique/import.md | 53 ++++++++-- .../import_exemple_avance_synthese.md | 3 + .../import_exemple_simple_ref_geo_area.md | 96 +++++++++++++++++++ frontend/app/services/map/base.ts | 1 + 13 files changed, 224 insertions(+), 34 deletions(-) create mode 100644 backend/gn_modulator/tests/import_test/ref_geo.area.csv create mode 100644 doc/technique/import_exemple_avance_synthese.md create mode 100644 doc/technique/import_exemple_simple_ref_geo_area.md diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 1a5ad2c8..e75fbe53 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -156,6 +156,12 @@ jobs: env: GEONATURE_CONFIG_FILE: ./dependencies/GeoNature/config/test_config.toml + - name: install m_sipaf + run: geonature modulator install -f m_sipaf + env: + GEONATURE_CONFIG_FILE: 
./dependencies/GeoNature/config/test_config.toml + + - name: Pytest gn_modulator run: pytest -v --cov --cov-report xml env: diff --git a/backend/gn_modulator/imports/mixins/data.py b/backend/gn_modulator/imports/mixins/data.py index 6e94e2e8..f0d8a9cb 100644 --- a/backend/gn_modulator/imports/mixins/data.py +++ b/backend/gn_modulator/imports/mixins/data.py @@ -2,6 +2,7 @@ from .utils import ImportMixinUtils from gn_modulator.schema import SchemaMethods from geonature.utils.env import db +import csv class ImportMixinData(ImportMixinUtils): @@ -96,17 +97,18 @@ def copy_csv_data(self, f, dest_table, table_columns): def insert_csv_data(self, f, dest_table, table_columns): sql_columns_fields = ", ".join(table_columns) - values = "" - for line in f: - data = "', '".join((line.replace('"', "").replace("\n", "").split(self.csv_delimiter))) - values += f"('{data}')," + values = [] + csvreader = csv.reader(f, delimiter=self.csv_delimiter, quotechar='"') + for row in csvreader: + data = ",".join(map(lambda x: f"'{x}'", row)) + values.append(f"({data})") if not values: return - values = values[:-1] + values_txt = ",\n".join(values) self.sql[ "data_insert" - ] = f"INSERT INTO {dest_table} ({sql_columns_fields}) VALUES {values}" + ] = f"INSERT INTO {dest_table} ({sql_columns_fields}) VALUES {values_txt}" try: SchemaMethods.c_sql_exec_txt(self.sql["data_insert"]) except Exception as e: diff --git a/backend/gn_modulator/imports/mixins/mapping.py b/backend/gn_modulator/imports/mixins/mapping.py index 97b36ff1..753f352f 100644 --- a/backend/gn_modulator/imports/mixins/mapping.py +++ b/backend/gn_modulator/imports/mixins/mapping.py @@ -50,6 +50,9 @@ def sql_mapping(self): "UPDATE ", "EXECUTE ", "TRUNCATE ", + "ALTER ", + "CREATE ", + "GRANT ", ]: if forbidden_word in mapping_select: forbidden_words.append(forbidden_word.strip()) @@ -66,12 +69,18 @@ def sql_mapping(self): msg=f"La selection de mapping doit contenir 'FROM :table_data {mapping_select}", ) + if "SELECT" not in 
mapping_select: + self.add_error( + code="ERR_IMPORT_MAPPING_MISSING_SELECT", + msg=f"La selection de mapping doit contenir 'SELECT {mapping_select}", + ) + mapping_select = mapping_select.replace(":TABLE_DATA", from_table) if "ID_IMPORT" not in mapping_select: self.add_error( code="ERR_IMPORT_MAPPING_MISSING_IMPORT", - msg=f"La selection de mapping doit contenir le champs id_import dans {self.mapping_file_path}", + msg=f"La selection de mapping doit contenir le champs 'id_import' dans {self.mapping_file_path}", ) sql_mapping = f""" diff --git a/backend/gn_modulator/imports/mixins/raw.py b/backend/gn_modulator/imports/mixins/raw.py index 6bad6d87..a579d17c 100644 --- a/backend/gn_modulator/imports/mixins/raw.py +++ b/backend/gn_modulator/imports/mixins/raw.py @@ -14,6 +14,7 @@ def process_raw_view(self): self.sql["raw_view"] = self.sql_raw_view(from_table, dest_table) try: SchemaMethods.c_sql_exec_txt(self.sql["raw_view"]) + except Exception as e: self.add_error( code="ERR_IMPORT_CREATE_RAW_VIEW", @@ -60,14 +61,15 @@ def sql_raw_view( v_txt_columns = list(map(lambda x: self.process_raw_import_column(x), columns)) - txt_primary_column = ( - f"""CONCAT({", '|', ".join(sm.attr('meta.unique'))}) AS {sm.pk_field_name()}""" - ) + txt_primary_column = f"""CONCAT({", '|', ".join( + map( + lambda x: f"pp.{x}", + sm.attr('meta.unique')))}) AS {sm.pk_field_name()}""" v_txt_columns.insert(0, txt_primary_column) txt_columns = ",\n ".join(v_txt_columns) txt_pre_process_columns = ",\n ".join(v_txt_pre_process_columns) - txt_limit = f"LIMIT {limit}" if limit else "" + txt_limit = f"\nLIMIT {limit}" if limit else "" if "id_import" not in txt_pre_process_columns: txt_pre_process_columns = f"id_import, {txt_pre_process_columns}" @@ -80,12 +82,11 @@ def sql_raw_view( WITH pre_process AS ( SELECT {txt_pre_process_columns} -FROM {from_table} -{txt_limit} +FROM {from_table}{txt_limit} ) SELECT {txt_columns} -FROM pre_process; +FROM pre_process pp; """ def 
pre_process_raw_import_columns(self, key, key_unnest=None): @@ -113,7 +114,7 @@ def pre_process_raw_import_columns(self, key, key_unnest=None): return f"""{geometry_type}( ST_SETSRID( ST_FORCE2D( - ST_GEOMFROMEWKT({key}) + {key}::GEOMETRY ), {sm.property(key).get('srid')} ) ) @@ -142,15 +143,15 @@ def process_raw_import_column(self, key): sm = SchemaMethods(self.schema_code) if not sm.has_property(key): - return f"{key}" + return f"pp.{key}" property = sm.property(key) # pour les nomenclature (on rajoute le type) if nomenclature_type := property.get("nomenclature_type"): return f"""CASE - WHEN {key} IS NOT NULL AND {key} NOT LIKE '%%|%%' THEN CONCAT('{nomenclature_type}|', {key}) - ELSE {key} + WHEN pp.{key} IS NOT NULL AND pp.{key} NOT LIKE '%%|%%' THEN CONCAT('{nomenclature_type}|', {key}) + ELSE pp.{key} END AS {key}""" - return f"{key}" + return f"pp.{key}" diff --git a/backend/gn_modulator/schema/auto.py b/backend/gn_modulator/schema/auto.py index 81a640d2..b98d2d18 100644 --- a/backend/gn_modulator/schema/auto.py +++ b/backend/gn_modulator/schema/auto.py @@ -220,6 +220,9 @@ def process_column_auto(self, column, sql_schema_name, sql_table_name): and (column.key != "meta_create_date") ) + if column_info.get("geometry_type"): + property["geometry_type"] = column_info["geometry_type"].lower() + if condition_required: property["required"] = True diff --git a/backend/gn_modulator/schema/sql/base.py b/backend/gn_modulator/schema/sql/base.py index 3274d163..041b944b 100644 --- a/backend/gn_modulator/schema/sql/base.py +++ b/backend/gn_modulator/schema/sql/base.py @@ -83,13 +83,19 @@ def get_columns_info(cls): # on récupère les info des colonnes depuis information_schema.columns sql_txt_get_columns_info = f""" SELECT - c.table_schema, - c.table_name, - column_name, - column_default, - is_nullable + c.table_schema, + c.table_name, + column_name, + column_default, + is_nullable, + DATA_TYPE AS TYPE, + gc.TYPE AS geometry_type FROM - information_schema.columns c + 
information_schema.columns c +LEFT JOIN GEOMETRY_COLUMNS GC ON + c.TABLE_SCHEMA = GC.F_TABLE_SCHEMA + AND c.TABLE_NAME= GC.F_TABLE_NAME + AND c.COLUMN_NAME = gc.F_GEOMETRY_COLUMN WHERE CONCAT(c.table_schema, '.', c.table_name) IN ('{"', '".join(cls.auto_sql_schemas_dot_tables())}') """ @@ -104,7 +110,12 @@ def get_columns_info(cls): columns_info[schema_name] = columns_info.get(schema_name) or {} columns_info[schema_name][table_name] = columns_info[schema_name].get(table_name) or {} - column_info = {"default": r[3], "nullable": r[4] == "YES"} + column_info = { + "default": r[3], + "nullable": r[4] == "YES", + "type": r[5], + "geometry_type": r[6], + } columns_info[schema_name][table_name][column_name] = column_info # set_global_cache(["columns", schema_name, table_name, column_name], column_info) set_global_cache(["columns"], columns_info) diff --git a/backend/gn_modulator/tests/import_test/ref_geo.area.csv b/backend/gn_modulator/tests/import_test/ref_geo.area.csv new file mode 100644 index 00000000..3e3897aa --- /dev/null +++ b/backend/gn_modulator/tests/import_test/ref_geo.area.csv @@ -0,0 +1,3 @@ +id_type,area_name, area_code,geom +ZC,Parc National du Triangle,PNTRI,"POLYGON((6.48 48.87, 5.22 47.84, 6.87 47.96, 6.48 48.87))" +ZC,Parc National du Carré,PNCAR,"POLYGON((3.29 45.05, 5.49 44.91, 5.42 43.80, 3.12 44.11, 3.29 45.05))" diff --git a/backend/gn_modulator/tests/test_import.py b/backend/gn_modulator/tests/test_import.py index c040bc46..34281c41 100644 --- a/backend/gn_modulator/tests/test_import.py +++ b/backend/gn_modulator/tests/test_import.py @@ -73,6 +73,22 @@ def test_ref_geo_linear(self): schema_code, data_file_path, mapping_file_path, expected_infos=expected_infos ) + def test_ref_geo_area(self): + schema_code = "ref_geo.area" + data_file_path = import_test_dir / "ref_geo.area.csv" + expected_infos = { + "res.nb_data": 2, + "res.nb_insert": 2, + "res.nb_update": 0, + "res.nb_unchanged": 0, + } + impt = test_data_file(schema_code, data_file_path, 
expected_infos=expected_infos) + + # print(impt.sql["process_view"]) + # assert 1 == 0 + + # Test remontées d'erreurs + def test_error_ERR_IMPORT_INVALID_VALUE_FOR_TYPE(self): schema_code = "syn.synthese" data_file_path = import_test_dir / "synthese_ERR_IMPORT_INVALID_VALUE_FOR_TYPE.csv" diff --git a/backend/gn_modulator/tests/utils/imports.py b/backend/gn_modulator/tests/utils/imports.py index e5669501..e9e15960 100644 --- a/backend/gn_modulator/tests/utils/imports.py +++ b/backend/gn_modulator/tests/utils/imports.py @@ -41,7 +41,7 @@ def test_data_file( txt_err = f"schema_code: {schema_code}, key: {key}, expected: {expected_infos.get(key)}, import: {getAttr(import_infos, key)}" assert getAttr(import_infos, key) == expected_infos.get(key), txt_err - return import_infos + return impt @pytest.mark.skip() diff --git a/doc/technique/import.md b/doc/technique/import.md index 3a002230..607937d8 100644 --- a/doc/technique/import.md +++ b/doc/technique/import.md @@ -1,8 +1,33 @@ # Import -# TODO - donner exemple pour chaque étapes +## Introduction et principes + +Dans le cadre de ce module, nous avons implémenter des fonctionalité d'import à destination (théoriquement) de n'importe quelle table de la base. + +Les étapes de la procédure d'import sont illustrés par des exemple, où l'on détaille le code sql produit pour chaque étape. + +- [Exemle simple (ref_geo.l_areas)](./import_exemple_simple_ref_geo_area.md) +- [Exemle avancé (gn_synthese.synthese)](./import_exemple_avance) + +### L'unicité + +Une condition essentielle au bon déroulement de l'import est de pouvoir définir, pour toutes les tables concernées par l'import, un ou plusieurs champs qui cont permettre d'assurer la continuité d'une ligne. 
Cela peut être +- un code +- un uuid, lorsque celui ci est fourni dans les données et non généré par l'application +- une combinaison de type et de code (pour le ref_geo ou la nomenclature) + +Cela permet + +- de résoudre la clé primaire lorsque les champs d'unicité sont présent dans les données + +- de résoudre les clés étrangère, qui sont renseigner sous forme de code (ou de combinaison de valeurs s'il y a plusieurs champs d'unicité pour la tablé associée à la clé étrangère) + +Pour une ligne du fichier à importer on peux être dans deux cas + +- la clé primaire ne peux pas être résolue, il n'y a pas de ligne correspondante dans la table. Il s'agit d'une nouvelle données et on va faire un `INSERT` pour ces lignes + +- la clé primaire peut être résolue, il existe déjà une ligne correspondant à cette données. On a la possibilité de faire un `UPDATE` pour ces lignes ## Les étapes ### 1) Données @@ -13,30 +38,44 @@ Chargement du fichier de données dans une table `gn_modulator_import.t_ { map.coordinatesTxt = `${event.latlng.lng}, ${event.latlng.lat}`; navigator.clipboard.writeText(`${event.latlng.lng}, ${event.latlng.lat}`); + console.log(`${event.latlng.lng} ${event.latlng.lat}`); }); // init panes From 82931ccccf34c15b640065dc5f2a46f918293bdd Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 14 Mar 2023 14:53:35 +0100 Subject: [PATCH 021/142] check unique & test --- .../gn_modulator/imports/mixins/__init__.py | 7 +++--- backend/gn_modulator/imports/mixins/check.py | 22 ++++++++++++++++--- ...ref_geo.area_ERR_IMPORT_MISSING_UNIQUE.csv | 3 +++ backend/gn_modulator/tests/test_import.py | 8 +++++++ backend/gn_modulator/tests/utils/imports.py | 2 ++ doc/technique/import.md | 10 ++++++++- 6 files changed, 45 insertions(+), 7 deletions(-) create mode 100644 backend/gn_modulator/tests/import_test/ref_geo.area_ERR_IMPORT_MISSING_UNIQUE.csv diff --git a/backend/gn_modulator/imports/mixins/__init__.py b/backend/gn_modulator/imports/mixins/__init__.py index 
70cb105a..97510a92 100644 --- a/backend/gn_modulator/imports/mixins/__init__.py +++ b/backend/gn_modulator/imports/mixins/__init__.py @@ -40,7 +40,8 @@ def process_import_schema(self): return self db.session.flush() - self.process_check_types() + self.check_uniques() + self.check_types() if self.errors: return self db.session.flush() @@ -55,8 +56,8 @@ def process_import_schema(self): return self db.session.flush() - self.process_check_required() - self.process_check_resolve_keys() + self.check_required() + self.check_resolve_keys() if self.errors: return self diff --git a/backend/gn_modulator/imports/mixins/check.py b/backend/gn_modulator/imports/mixins/check.py index 156b0b60..181e5405 100644 --- a/backend/gn_modulator/imports/mixins/check.py +++ b/backend/gn_modulator/imports/mixins/check.py @@ -5,7 +5,23 @@ class ImportMixinCheck(ImportMixinUtils): - def process_check_types(self): + def check_uniques(self): + # avant raw + # on verifie la présence des colonne d'unicité + + table_test = self.tables.get("mapping") or self.tables["data"] + columns = self.get_table_columns(table_test) + sm = SchemaMethods(self.schema_code) + unique = sm.attr("meta.unique") + + missing_unique = [key for key in unique if key not in columns] + if missing_unique: + self.add_error( + code="ERR_IMPORT_MISSING_UNIQUE", + msg=f"Il manque des champs d'unicité : {', '.join(missing_unique) }", + ) + + def check_types(self): # avant raw # verifie si les champs des colonnes correspondent bien aux types # avec la fonction @@ -51,7 +67,7 @@ def process_check_types(self): msg=f"Il y a des valeurs invalides pour la colonne {key} de type {sql_type}. 
{nb_lines} ligne(s) concernée(s) : [{str_lines}]", ) - def process_check_required(self): + def check_required(self): # apres raw # pour toutes les colonnes de raw # si une colonne est requise @@ -86,7 +102,7 @@ def process_check_required(self): return - def process_check_resolve_keys(self): + def check_resolve_keys(self): raw_table = self.tables["raw"] process_table = self.tables["process"] sm = SchemaMethods(self.schema_code) diff --git a/backend/gn_modulator/tests/import_test/ref_geo.area_ERR_IMPORT_MISSING_UNIQUE.csv b/backend/gn_modulator/tests/import_test/ref_geo.area_ERR_IMPORT_MISSING_UNIQUE.csv new file mode 100644 index 00000000..6b8a86c7 --- /dev/null +++ b/backend/gn_modulator/tests/import_test/ref_geo.area_ERR_IMPORT_MISSING_UNIQUE.csv @@ -0,0 +1,3 @@ +id_type,area_name, aera_code,geom +ZC,Parc National du Triangle,PNTRI,"POLYGON((6.48 48.87, 5.22 47.84, 6.87 47.96, 6.48 48.87))" +ZC,Parc National du Carré,PNCAR,"POLYGON((3.29 45.05, 5.49 44.91, 5.42 43.80, 3.12 44.11, 3.29 45.05))" diff --git a/backend/gn_modulator/tests/test_import.py b/backend/gn_modulator/tests/test_import.py index 34281c41..981ba813 100644 --- a/backend/gn_modulator/tests/test_import.py +++ b/backend/gn_modulator/tests/test_import.py @@ -96,3 +96,11 @@ def test_error_ERR_IMPORT_INVALID_VALUE_FOR_TYPE(self): test_data_file( schema_code, data_file_path, mapping_file_path=None, expected_infos=expected_infos ) + + def test_error_ERR_IMPORT_MISSING_UNIQUE(self): + schema_code = "ref_geo.area" + data_file_path = import_test_dir / "ref_geo.area_ERR_IMPORT_MISSING_UNIQUE.csv" + expected_infos = {"errors": [{"code": "ERR_IMPORT_MISSING_UNIQUE"}]} + test_data_file( + schema_code, data_file_path, mapping_file_path=None, expected_infos=expected_infos + ) diff --git a/backend/gn_modulator/tests/utils/imports.py b/backend/gn_modulator/tests/utils/imports.py index e9e15960..b06b0d81 100644 --- a/backend/gn_modulator/tests/utils/imports.py +++ b/backend/gn_modulator/tests/utils/imports.py @@ -31,6 
+31,8 @@ def test_data_file( assert len(import_infos["errors"]) == 0, import_infos["errors"] else: # on teste si on rencontre bien les erreurs attendues parmi les erreurs rencontrées + print(expected_errors) + print(import_infos["errors"]) assert len(expected_errors) == len(import_infos["errors"]) for expected_error in expected_errors: assert ( diff --git a/doc/technique/import.md b/doc/technique/import.md index 607937d8..ad02683f 100644 --- a/doc/technique/import.md +++ b/doc/technique/import.md @@ -44,10 +44,18 @@ On rajoute un champs `id_import` (clé primaire, `SERIAL`) afin de pouvoir numé Creation du vue de mapping pour réorganiser les colonnes de la table de données à partir d'une instruction select `SELECT` -### 3) Vérification du typage des données +### 3) Vérification + +#### Verification du typage des données On verifie pour chaque colonnes (sauf clé étrangères) que les données des colonnes vont bien pouvoir être convertie dans le type de la colonne destinataire. +#### Verification de la présence des champs pour l'unicité + +Les champs d'unicité permette de résoudre la clé primaire et de voir si la ligne est présente en base. +On vérifie que ces clé sont bien présente dans les données à ce stade (pour la vue de mapping si elle existe ou table de données sinon). 
+ + ### 4) Vue brute (`raw`) La table de départ est la de mapping si elle existe, ou de la table de donnée From e1ebeb4c3c7933dceb1a4e9eca0bf4644805797c Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 14 Mar 2023 15:21:33 +0100 Subject: [PATCH 022/142] fix sql_type --- backend/gn_modulator/imports/mixins/check.py | 5 ++--- backend/gn_modulator/imports/mixins/utils.py | 12 +++++++++++ .../3920371728d8_gn_modulator_import_init.py | 20 +++++++++---------- 3 files changed, 24 insertions(+), 13 deletions(-) diff --git a/backend/gn_modulator/imports/mixins/check.py b/backend/gn_modulator/imports/mixins/check.py index 181e5405..6671564c 100644 --- a/backend/gn_modulator/imports/mixins/check.py +++ b/backend/gn_modulator/imports/mixins/check.py @@ -33,9 +33,8 @@ def check_types(self): lambda x: sm.is_column(x) and not sm.property(x).get("foreign_key"), self.get_table_columns(table_test), ): - sql_type = sm.sql_type(key)["type"] - if sql_type == "DATETIME": - sql_type = "TIMESTAMP" + sql_type = self.sql_type_dict[sm.property(key)["type"]] + sql_check_type_for_column = f""" SELECT COUNT(*), diff --git a/backend/gn_modulator/imports/mixins/utils.py b/backend/gn_modulator/imports/mixins/utils.py index 0eeaa466..73317b85 100644 --- a/backend/gn_modulator/imports/mixins/utils.py +++ b/backend/gn_modulator/imports/mixins/utils.py @@ -5,6 +5,18 @@ class ImportMixinUtils: + sql_type_dict = { + "integer": "INTEGER", + "boolean": "BOOLEAN", + "number": "FLOAT", + "string": "VARCHAR", + "date": "DATE", + "datetime": "TIMESTAMP", + "uuid": "UUID", + "geometry": "GEOMETRY", + "json": "JSONB", + } + def init_import(self): SchemaMethods.c_sql_exec_txt(f"CREATE SCHEMA IF NOT EXISTS {schema_import}") diff --git a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py index a1c27b60..7976d09f 100644 --- 
a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py +++ b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py @@ -51,16 +51,16 @@ def upgrade(): EXECUTE PROCEDURE public.fct_trg_meta_dates_change(); CREATE OR REPLACE FUNCTION gn_modulator.check_value_for_type(type_in VARCHAR, value_in ANYELEMENT) -RETURNS BOOLEAN AS -$$ -BEGIN - EXECUTE FORMAT('SELECT (''%s'')::%s', value_in, type_in); - RETURN TRUE; -EXCEPTION WHEN OTHERS THEN - RETURN FALSE; -END; -$$ -LANGUAGE 'plpgsql' COST 100 + RETURNS BOOLEAN AS + $$ + BEGIN + EXECUTE FORMAT('SELECT (''%s'')::%s', value_in, type_in); + RETURN TRUE; + EXCEPTION WHEN OTHERS THEN + RETURN FALSE; + END; + $$ + LANGUAGE 'plpgsql' COST 100 ; """ From e85201ca4d5bc45070f98c786e2ff20d94ba5b63 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 14 Mar 2023 17:00:57 +0100 Subject: [PATCH 023/142] composant layout-import --- backend/gn_modulator/definition/dynamic.py | 1 + .../gn_modulator/imports/mixins/__init__.py | 2 +- backend/gn_modulator/imports/mixins/data.py | 2 +- .../gn_modulator/imports/mixins/mapping.py | 1 + backend/gn_modulator/imports/models.py | 9 +- .../3920371728d8_gn_modulator_import_init.py | 1 + backend/gn_modulator/tests/utils/imports.py | 5 +- config/layouts/tests/test_import.layout.yml | 10 ++ config/layouts/utils/utils.import.layout.yml | 51 +++++++ .../layout/base/layout-import.component.html | 6 + .../layout/base/layout-import.component.scss | 0 .../layout/base/layout-import.component.ts | 41 +++++ .../layout/base/layout.component.html | 11 ++ .../layout/form/generic-form.component.ts | 3 - frontend/app/components/layout/index.ts | 3 +- .../test/test-layout.component.html | 1 + frontend/app/services/action.service.ts | 113 -------------- frontend/app/services/import.service.ts | 142 ++++++++++++++++++ frontend/app/services/index.ts | 2 + frontend/app/services/object.service.ts | 9 +- 20 files changed, 286 insertions(+), 127 deletions(-) create 
mode 100644 config/layouts/tests/test_import.layout.yml create mode 100644 config/layouts/utils/utils.import.layout.yml create mode 100644 frontend/app/components/layout/base/layout-import.component.html create mode 100644 frontend/app/components/layout/base/layout-import.component.scss create mode 100644 frontend/app/components/layout/base/layout-import.component.ts create mode 100644 frontend/app/services/import.service.ts diff --git a/backend/gn_modulator/definition/dynamic.py b/backend/gn_modulator/definition/dynamic.py index 0bc3ce12..f7bc50e1 100644 --- a/backend/gn_modulator/definition/dynamic.py +++ b/backend/gn_modulator/definition/dynamic.py @@ -71,6 +71,7 @@ def str_x_for_test(cls): label: () => {}, labels: () => {}, du_label: () => {}, + des_labels: () => {}, data_label: () => {}, tab_label: () => {}, title_details: () => {}, diff --git a/backend/gn_modulator/imports/mixins/__init__.py b/backend/gn_modulator/imports/mixins/__init__.py index 97510a92..68dbfcfd 100644 --- a/backend/gn_modulator/imports/mixins/__init__.py +++ b/backend/gn_modulator/imports/mixins/__init__.py @@ -113,7 +113,7 @@ def process_import_code(cls, import_code, data_dir_path, insert_data=True, commi schema_code=import_definition["schema_code"], data_file_path=data_file_path, mapping_file_path=mapping_file_path, - _insert_data=False, + options={"insert_data": False}, ) # pour éviter d'avoir à recharger les données diff --git a/backend/gn_modulator/imports/mixins/data.py b/backend/gn_modulator/imports/mixins/data.py index f0d8a9cb..f999bb77 100644 --- a/backend/gn_modulator/imports/mixins/data.py +++ b/backend/gn_modulator/imports/mixins/data.py @@ -66,7 +66,7 @@ def import_csv_file(self, dest_table): # pour faire marcher les tests pytest on passe par un insert # TODO faire marche copy_expert avec pytest # manière de récupérer cursor ? 
- if self._insert_data: + if self.options.get("insert_data"): self.insert_csv_data(f, dest_table, import_table_columns) else: self.copy_csv_data(f, dest_table, import_table_columns) diff --git a/backend/gn_modulator/imports/mixins/mapping.py b/backend/gn_modulator/imports/mixins/mapping.py index 753f352f..81bd36af 100644 --- a/backend/gn_modulator/imports/mixins/mapping.py +++ b/backend/gn_modulator/imports/mixins/mapping.py @@ -53,6 +53,7 @@ def sql_mapping(self): "ALTER ", "CREATE ", "GRANT ", + "COPY ", ]: if forbidden_word in mapping_select: forbidden_words.append(forbidden_word.strip()) diff --git a/backend/gn_modulator/imports/models.py b/backend/gn_modulator/imports/models.py index c9bd7eca..6c59f0a6 100644 --- a/backend/gn_modulator/imports/models.py +++ b/backend/gn_modulator/imports/models.py @@ -9,23 +9,20 @@ class TImport(db.Model, ImportMixin): __tablename__ = "t_imports" __table_args__ = {"schema": "gn_modulator"} - def __init__( - self, schema_code=None, data_file_path=None, mapping_file_path=None, _insert_data=False - ): + def __init__(self, schema_code=None, data_file_path=None, mapping_file_path=None, options={}): self.id_digitiser = g.current_user.id_role if hasattr(g, "current_user") else None self.schema_code = schema_code self.data_file_path = data_file_path and str(data_file_path) self.mapping_file_path = mapping_file_path and str(mapping_file_path) - self._insert_data = _insert_data + self.options = options self.res = {} self.errors = [] self.sql = {} self.tables = {} - _insert_data = False _columns = {} id_import = db.Column(db.Integer, primary_key=True) @@ -44,6 +41,8 @@ def __init__( sql = db.Column(JSONB) errors = db.Column(JSONB) + options = db.Column(JSONB) + def as_dict(self): return { "id_import": self.id_import, diff --git a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py index 7976d09f..55bd35f9 100644 --- 
a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py +++ b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py @@ -32,6 +32,7 @@ def upgrade(): tables JSONB, errors JSONB, sql JSONB, + options JSONB, meta_create_date timestamp without time zone DEFAULT now(), meta_update_date timestamp without time zone DEFAULT now() ); diff --git a/backend/gn_modulator/tests/utils/imports.py b/backend/gn_modulator/tests/utils/imports.py index b06b0d81..746b3b5a 100644 --- a/backend/gn_modulator/tests/utils/imports.py +++ b/backend/gn_modulator/tests/utils/imports.py @@ -9,13 +9,14 @@ def test_data_file( schema_code=None, data_file_path=None, mapping_file_path=None, expected_infos={} ): with db.session.begin_nested(): - # ici _insert_data est à true pour intégrer les avec un insert + # ici options={"insert_data": True} est à true pour intégrer les avec un insert + # et non un copy qui ne marche pas en test impt = TImport( schema_code=schema_code, data_file_path=data_file_path, mapping_file_path=mapping_file_path, - _insert_data=True, + options={"insert_data": True}, ) db.session.add(impt) assert impt.id_import is not None diff --git a/config/layouts/tests/test_import.layout.yml b/config/layouts/tests/test_import.layout.yml new file mode 100644 index 00000000..3ccf2f89 --- /dev/null +++ b/config/layouts/tests/test_import.layout.yml @@ -0,0 +1,10 @@ +type: layout +code: test_import +title: test import +description: test pour le composant d'import + +layout: + type: import + object_code: site + module_code: m_sipaf +data: \ No newline at end of file diff --git a/config/layouts/utils/utils.import.layout.yml b/config/layouts/utils/utils.import.layout.yml new file mode 100644 index 00000000..9cf85e62 --- /dev/null +++ b/config/layouts/utils/utils.import.layout.yml @@ -0,0 +1,51 @@ +type: layout +code: utils.import +title: layout du composant d'import +description: layout du composant d'import +layout: + title: __f__`Importer 
${o.des_labels(x, 'C')}` + # hidden: __f__!(o.is_action_allowed(x)) + type: form + items: + - items: + - type: message + html: __f__data.importMsg?.html + class: __f__data.importMsg?.class + hidden: __f__!data.importMsg + - hidden: true + items: + - key: importMsg + - key: object_code + type: string + hidden: true + default: __f__context.object_code + - title: import + display: fieldset + items: + - key: data_file + type: file + title: Fichier d'import + required: __f__!data.tables?.data + description: Choisir un fichier à importer + - title: Options additionnelles + display: fieldset + type: dict + items: + - key: options.enable_update + title: "Autoriser les mises à jour" + type: boolean + default: __f__false + + - direction: row + items: + - type: button + color: primary + title: Valider + description: Valider + action: import + disabled: __f__!(formGroup.valid ) + - type: button + color: primary + title: Annuler + description: Annuler + action: close diff --git a/frontend/app/components/layout/base/layout-import.component.html b/frontend/app/components/layout/base/layout-import.component.html new file mode 100644 index 00000000..a663b841 --- /dev/null +++ b/frontend/app/components/layout/base/layout-import.component.html @@ -0,0 +1,6 @@ + diff --git a/frontend/app/components/layout/base/layout-import.component.scss b/frontend/app/components/layout/base/layout-import.component.scss new file mode 100644 index 00000000..e69de29b diff --git a/frontend/app/components/layout/base/layout-import.component.ts b/frontend/app/components/layout/base/layout-import.component.ts new file mode 100644 index 00000000..4ccf8130 --- /dev/null +++ b/frontend/app/components/layout/base/layout-import.component.ts @@ -0,0 +1,41 @@ +import { Component, OnInit, Injector, ViewEncapsulation } from '@angular/core'; +import { ModulesLayoutComponent } from './layout.component'; +import { ModulesImportService } from '../../../services/import.service'; + +import utils from '../../../utils'; 
+@Component({ + selector: 'modules-layout-import', + templateUrl: 'layout-import.component.html', + styleUrls: ['layout-import.component.scss', '../../base/base.scss'], + encapsulation: ViewEncapsulation.None, +}) +export class ModulesLayoutImportComponent extends ModulesLayoutComponent implements OnInit { + importData: {}; + importLayout: any; // layout pour l'import + importContext: any; + _mImport: ModulesImportService; + + constructor(_injector: Injector) { + super(_injector); + this._name = 'layout-import'; + this.bPostComputeLayout = true; + this._mImport = this._injector.get(ModulesImportService); + } + + postComputeLayout() { + this.importContext = { + module_code: this.context.module_code, + object_code: this.context.object_code, + }; + this.importLayout = { + code: 'utils.import', + }; + } + + processAction(event: any): void { + const { action, context, value = null, data = null, layout = null } = event; + if (action == 'import') { + return this._mImport.processImport(context, data); + } + } +} diff --git a/frontend/app/components/layout/base/layout.component.html b/frontend/app/components/layout/base/layout.component.html index 10d846d4..bbd97cc7 100644 --- a/frontend/app/components/layout/base/layout.component.html +++ b/frontend/app/components/layout/base/layout.component.html @@ -79,6 +79,17 @@ > + + + + + +
diff --git a/frontend/app/services/action.service.ts b/frontend/app/services/action.service.ts index b80a35ee..2f80aa58 100644 --- a/frontend/app/services/action.service.ts +++ b/frontend/app/services/action.service.ts @@ -130,117 +130,4 @@ export class ModulesActionService { } ); } - - processImport(context, data) { - data.importMsg = { - html: 'Traitement en cours', - class: null, - }; - this._mLayout.reComputeLayout(); - this._mData - .import(context.module_code, data) - .pipe() - .subscribe( - (importEvent) => { - if (importEvent.type === HttpEventType.UploadProgress) { - const uploadPerCentDone = Math.round((100 * importEvent.loaded) / importEvent.total); - } - if (importEvent instanceof HttpResponse) { - this._mLayout.stopActionProcessing(''); - const response = importEvent.body as any; - if (response.errors?.length) { - for (let error of response.errors) { - console.error(`${error.code} : ${error.msg}`); - } - data.importMsg = { - class: 'error', - html: this.importHTMLMsgError(response), - }; - return; - } - - let txtImport = this.importHTMLMsgSuccess(response); - - data.importMsg = { - class: 'success', - html: txtImport, - }; - - setTimeout(() => this._mLayout.reComputeLayout(), 100); - // this._commonService.regularToaster('success', txtImport); - } - }, - (error: HttpErrorResponse) => { - this._commonService.regularToaster('error', `Import : ${error.error.msg}`); - } - ); - } - - importHTMLMsgSuccess(impt) { - let txtImport = `
Import réussi
`; - let res = impt.res; - - if (res.nb_data) { - txtImport += `data: ${res.nb_data}
`; - } - - if (res.nb_raw != res.nb_data) { - txtImport += `raw: ${res.nb_raw}
`; - } - - if (res.nb_insert) { - txtImport += `insert: ${res.nb_insert}
`; - } - - if (res.nb_update) { - txtImport += `update: ${res.nb_update}
`; - } - - if (res.nb_unchanged) { - txtImport += `unchanged: ${res.nb_unchanged}
`; - } - return txtImport; - } - - importHTMLMsgError(impt) { - let txtImport = `

${impt.errors.length} erreurs

`; - - let txtErrorRequired; - for (let error of impt.errors.filter((e) => e.code == 'ERR_IMPORT_REQUIRED')) { - if (!txtErrorRequired) { - txtErrorRequired = `
Champs requis manquants
`; - } - txtErrorRequired += `${error.key} ${error.lines.length} ligne(s): [${error.lines}]
`; - } - if (txtErrorRequired) { - txtImport += '
'; - txtImport += txtErrorRequired; - } - - let txtErrorUnresolved; - for (let error of impt.errors.filter((e) => e.code == 'ERR_IMPORT_UNRESOLVED')) { - if (!txtErrorUnresolved) { - txtErrorUnresolved = `
Champs non résolus
`; - } - txtErrorUnresolved += `${error.key} ${error.lines.length} ligne(s): [${error.lines}]
`; - if (error.values) { - txtErrorUnresolved += `Valeurs parmi : ${error.values - .map((v) => v.cd_nomenclature) - .join(', ')}
`; - } - } - if (txtErrorUnresolved) { - txtImport += '
'; - txtImport += txtErrorUnresolved; - } - - for (let error of impt.errors.filter( - (e) => !['ERR_IMPORT_REQUIRED', 'ERR_IMPORT_UNRESOLVED'].includes(e.code) - )) { - txtImport += '
'; - txtImport += `${error.code}: ${error.msg}`; - } - - return txtImport; - } } diff --git a/frontend/app/services/import.service.ts b/frontend/app/services/import.service.ts new file mode 100644 index 00000000..a0aa8aed --- /dev/null +++ b/frontend/app/services/import.service.ts @@ -0,0 +1,142 @@ +import { Injectable, Injector } from '@angular/core'; +import { ModulesDataService } from './data.service'; +import { ModulesLayoutService } from './layout.service'; +import { ModulesObjectService } from './object.service'; +import { ModulesConfigService } from './config.service'; +import { ModulesRouteService } from './route.service'; +import { CommonService } from '@geonature_common/service/common.service'; +import { HttpEventType, HttpResponse, HttpErrorResponse } from '@angular/common/http'; +import { catchError, map, filter, switchMap } from 'rxjs/operators'; +import { of } from 'rxjs'; + +@Injectable() +export class ModulesImportService { + _mData: ModulesDataService; + _mLayout: ModulesLayoutService; + _mObject: ModulesObjectService; + _commonService: CommonService; + _mConfig: ModulesConfigService; + _mRoute: ModulesRouteService; + + constructor(private _injector: Injector) { + this._mData = this._injector.get(ModulesDataService); + this._mLayout = this._injector.get(ModulesLayoutService); + this._mObject = this._injector.get(ModulesObjectService); + this._commonService = this._injector.get(CommonService); + this._mConfig = this._injector.get(ModulesConfigService); + this._mRoute = this._injector.get(ModulesRouteService); + } + + processImport(context, data) { + data.importMsg = { + html: 'Traitement en cours', + class: null, + }; + this._mLayout.reComputeLayout(); + this._mData + .import(context.module_code, data) + .pipe() + .subscribe( + (importEvent) => { + if (importEvent.type === HttpEventType.UploadProgress) { + const uploadPerCentDone = Math.round((100 * importEvent.loaded) / importEvent.total); + } + if (importEvent instanceof HttpResponse) { + 
this._mLayout.stopActionProcessing(''); + const response = importEvent.body as any; + if (response.errors?.length) { + for (let error of response.errors) { + console.error(`${error.code} : ${error.msg}`); + } + data.importMsg = { + class: 'error', + html: this.importHTMLMsgError(response), + }; + return; + } + + let txtImport = this.importHTMLMsgSuccess(response); + + data.importMsg = { + class: 'success', + html: txtImport, + }; + + setTimeout(() => this._mLayout.reComputeLayout(), 100); + // this._commonService.regularToaster('success', txtImport); + } + }, + (error: HttpErrorResponse) => { + this._commonService.regularToaster('error', `Import : ${error.error.msg}`); + } + ); + } + + importHTMLMsgSuccess(impt) { + let txtImport = `
Import réussi
`; + let res = impt.res; + + if (res.nb_data) { + txtImport += `data: ${res.nb_data}
`; + } + + if (res.nb_raw != res.nb_data) { + txtImport += `raw: ${res.nb_raw}
`; + } + + if (res.nb_insert) { + txtImport += `insert: ${res.nb_insert}
`; + } + + if (res.nb_update) { + txtImport += `update: ${res.nb_update}
`; + } + + if (res.nb_unchanged) { + txtImport += `unchanged: ${res.nb_unchanged}
`; + } + return txtImport; + } + + importHTMLMsgError(impt) { + let txtImport = `

${impt.errors.length} erreurs

`; + + let txtErrorRequired; + for (let error of impt.errors.filter((e) => e.code == 'ERR_IMPORT_REQUIRED')) { + if (!txtErrorRequired) { + txtErrorRequired = `
Champs requis manquants
`; + } + txtErrorRequired += `${error.key} ${error.lines.length} ligne(s): [${error.lines}]
`; + } + if (txtErrorRequired) { + txtImport += '
'; + txtImport += txtErrorRequired; + } + + let txtErrorUnresolved; + for (let error of impt.errors.filter((e) => e.code == 'ERR_IMPORT_UNRESOLVED')) { + if (!txtErrorUnresolved) { + txtErrorUnresolved = `
Champs non résolus
`; + } + txtErrorUnresolved += `${error.key} ${error.lines.length} ligne(s): [${error.lines}]
`; + if (error.values) { + txtErrorUnresolved += `Valeurs parmi : ${error.values + .map((v) => v.cd_nomenclature) + .join(', ')}
`; + } + } + if (txtErrorUnresolved) { + txtImport += '
'; + txtImport += txtErrorUnresolved; + } + + for (let error of impt.errors.filter( + (e) => !['ERR_IMPORT_REQUIRED', 'ERR_IMPORT_UNRESOLVED'].includes(e.code) + )) { + txtImport += '
'; + txtImport += `${error.code}: ${error.msg}`; + } + + return txtImport; + } +} diff --git a/frontend/app/services/index.ts b/frontend/app/services/index.ts index 7df8d296..3d3ef926 100644 --- a/frontend/app/services/index.ts +++ b/frontend/app/services/index.ts @@ -5,6 +5,7 @@ import { ModulesRouteService } from './route.service'; import { ModulesDataService } from './data.service'; import { ModulesLayoutService } from './layout.service'; import { ModulesFormService } from './form.service'; +import { ModulesImportService } from './import.service'; import { ModulesRequestService } from './request.service'; import { ModulesMapService } from './map.service'; import { ModulesTableService } from './table.service'; @@ -18,6 +19,7 @@ export default [ ModulesContextService, ModulesDataService, ModulesLayoutService, + ModulesImportService, ModulesFormService, ModulesRequestService, ListFormService, diff --git a/frontend/app/services/object.service.ts b/frontend/app/services/object.service.ts index bc861626..fee2f3fb 100644 --- a/frontend/app/services/object.service.ts +++ b/frontend/app/services/object.service.ts @@ -42,8 +42,9 @@ export class ModulesObjectService { } const objectModuleConfig = this._mConfig.moduleConfig(moduleCode).objects[objectCode]; + if (!objectModuleConfig) { - // console.error(`L'object ${objectCode} du module ${moduleCode} n'est pas présent`); + console.error(`L'object ${objectCode} du module ${moduleCode} n'est pas présent`); return; } @@ -76,6 +77,7 @@ export class ModulesObjectService { } this._cacheObjectConfig[cacheKey] = utils.copy(objectConfig); + return objectConfig; } @@ -165,6 +167,10 @@ export class ModulesObjectService { return this.objectConfigContext(context)?.display.du_label; } + desLabels({ context }) { + return this.objectConfigContext(context)?.display.des_labels; + } + display({ context }) { return this.objectConfigContext(context).display; } @@ -375,6 +381,7 @@ export class ModulesObjectService { schema_code: 
this.schemaCode.bind(this), label: this.label.bind(this), du_label: this.duLabel.bind(this), + des_labels: this.desLabels.bind(this), data_label: this.dataLabel.bind(this), labels: this.labels.bind(this), tab_label: this.tabLabel.bind(this), From 366a2dfd95f37a0da2acfbaa48e70c6ac81df932 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Wed, 15 Mar 2023 13:29:42 +0100 Subject: [PATCH 024/142] import module_code object_code --- .../gn_modulator/imports/mixins/__init__.py | 17 +++-- backend/gn_modulator/imports/mixins/check.py | 8 +-- backend/gn_modulator/imports/mixins/count.py | 70 +++++++++++++++++++ backend/gn_modulator/imports/mixins/insert.py | 6 +- .../gn_modulator/imports/mixins/process.py | 8 +-- backend/gn_modulator/imports/mixins/raw.py | 6 +- .../gn_modulator/imports/mixins/relation.py | 4 +- backend/gn_modulator/imports/mixins/update.py | 33 +-------- backend/gn_modulator/imports/mixins/utils.py | 4 +- backend/gn_modulator/imports/models.py | 19 ++++- .../3920371728d8_gn_modulator_import_init.py | 12 ++-- backend/gn_modulator/routes/imports.py | 29 +++++--- backend/gn_modulator/tests/test_import.py | 58 ++++++++++----- backend/gn_modulator/tests/test_import_api.py | 12 ++-- backend/gn_modulator/tests/utils/imports.py | 7 +- config/layouts/utils/utils.import.layout.yml | 9 ++- config/modules/MODULATOR.module.yml | 4 ++ .../m_sipaf/imports/m_sipaf.pf_V1.import.yml | 9 ++- .../m_sipaf/imports/ref_geo.route.import.yml | 9 ++- .../contrib/m_sipaf/m_sipaf.module.yml | 4 ++ config/references/import.reference.yml | 7 +- doc/technique/import.md | 33 +++++---- .../import_exemple_simple_ref_geo_area.md | 9 ++- .../components/test/test-layout.component.ts | 15 +--- frontend/app/services/data.service.ts | 10 --- frontend/app/services/form.service.ts | 3 +- frontend/app/services/import.service.ts | 18 ++++- frontend/app/services/request.service.ts | 3 +- 28 files changed, 275 insertions(+), 151 deletions(-) create mode 100644 
backend/gn_modulator/imports/mixins/count.py diff --git a/backend/gn_modulator/imports/mixins/__init__.py b/backend/gn_modulator/imports/mixins/__init__.py index 68dbfcfd..96b88cc0 100644 --- a/backend/gn_modulator/imports/mixins/__init__.py +++ b/backend/gn_modulator/imports/mixins/__init__.py @@ -3,6 +3,7 @@ from gn_modulator.definition import DefinitionMethods from .check import ImportMixinCheck +from .count import ImportMixinCount from .data import ImportMixinData from .insert import ImportMixinInsert from .mapping import ImportMixinMapping @@ -16,6 +17,7 @@ class ImportMixin( ImportMixinRelation, ImportMixinCheck, + ImportMixinCount, ImportMixinData, ImportMixinInsert, ImportMixinMapping, @@ -63,6 +65,14 @@ def process_import_schema(self): return self db.session.flush() + self.process_count() + if self.errors: + return self + db.session.flush() + + if self.options.get("check_only"): + return self + self.process_insert() if self.errors: return self @@ -78,10 +88,6 @@ def process_import_schema(self): return self db.session.flush() - self.res["nb_unchanged"] = ( - self.res["nb_process"] - self.res["nb_insert"] - self.res["nb_update"] - ) - return self @classmethod @@ -110,7 +116,8 @@ def process_import_code(cls, import_code, data_dir_path, insert_data=True, commi ) impt = cls( - schema_code=import_definition["schema_code"], + module_code=import_definition["module_code"], + object_code=import_definition["object_code"], data_file_path=data_file_path, mapping_file_path=mapping_file_path, options={"insert_data": False}, diff --git a/backend/gn_modulator/imports/mixins/check.py b/backend/gn_modulator/imports/mixins/check.py index 6671564c..3d631f92 100644 --- a/backend/gn_modulator/imports/mixins/check.py +++ b/backend/gn_modulator/imports/mixins/check.py @@ -11,7 +11,7 @@ def check_uniques(self): table_test = self.tables.get("mapping") or self.tables["data"] columns = self.get_table_columns(table_test) - sm = SchemaMethods(self.schema_code) + sm = 
SchemaMethods(self.schema_code()) unique = sm.attr("meta.unique") missing_unique = [key for key in unique if key not in columns] @@ -28,7 +28,7 @@ def check_types(self): # gn_modulator.check_type(TYPE_IN, val) table_test = self.tables.get("mapping") or self.tables["data"] - sm = SchemaMethods(self.schema_code) + sm = SchemaMethods(self.schema_code()) for key in filter( lambda x: sm.is_column(x) and not sm.property(x).get("foreign_key"), self.get_table_columns(table_test), @@ -73,7 +73,7 @@ def check_required(self): # et que la valeur dans raw est nulle # erreur raw_table = self.tables["raw"] - sm = SchemaMethods(self.schema_code) + sm = SchemaMethods(self.schema_code()) for key in self.get_table_columns(raw_table): if not sm.is_required(key): @@ -104,7 +104,7 @@ def check_required(self): def check_resolve_keys(self): raw_table = self.tables["raw"] process_table = self.tables["process"] - sm = SchemaMethods(self.schema_code) + sm = SchemaMethods(self.schema_code()) for key in self.get_table_columns(raw_table): if not (sm.has_property(key) and sm.property(key).get("foreign_key")): diff --git a/backend/gn_modulator/imports/mixins/count.py b/backend/gn_modulator/imports/mixins/count.py new file mode 100644 index 00000000..a9720a1d --- /dev/null +++ b/backend/gn_modulator/imports/mixins/count.py @@ -0,0 +1,70 @@ +from gn_modulator import SchemaMethods +from .utils import ImportMixinUtils + + +class ImportMixinCount(ImportMixinUtils): + def process_count(self): + self.count_insert() + self.count_update() + self.res["nb_unchanged"] = ( + self.res["nb_process"] - self.res["nb_insert"] - self.res["nb_update"] + ) + + def sql_nb_update(self, from_table): + sm = SchemaMethods(self.schema_code()) + + columns = self.get_table_columns(from_table) + + v_update_conditions = list( + map( + lambda x: f"(t.{x}::TEXT IS DISTINCT FROM a.{x}::TEXT)", + filter( + lambda x: sm.has_property(x) + and sm.is_column(x) + and not sm.property(x).get("primary_key"), + columns, + ), + ) + ) + + 
txt_update_conditions = "" + "\n OR ".join(v_update_conditions) + "" + + return f""" + SELECT + COUNT(*) + FROM {sm.sql_schema_dot_table()} t + JOIN {from_table} a + ON a.{sm.pk_field_name()} = t.{sm.pk_field_name()} + WHERE {txt_update_conditions} +; +""" + + def count_update(self): + from_table = self.tables["process"] + + self.sql["nb_update"] = self.sql_nb_update(from_table) + + try: + self.res["nb_update"] = SchemaMethods.c_sql_exec_txt(self.sql["nb_update"]).scalar() + except Exception as e: + self.add_error( + code="ERR_IMPORT_UPDATE_COUNT", + msg=f"Erreur lors du comptage du nombre d'update: {str(e)}", + ) + return + + def count_insert(self): + from_table = self.tables["process"] + sm = SchemaMethods(self.schema_code()) + self.sql[ + "nb_insert" + ] = f"SELECT COUNT(*) FROM {from_table} WHERE {sm.pk_field_name()} IS NULL" + + try: + self.res["nb_insert"] = SchemaMethods.c_sql_exec_txt(self.sql["nb_insert"]).scalar() + except Exception as e: + self.add_error( + code="ERR_IMPORT_INSERT_COUNT", + msg=f"Erreur lors du comptage du nombre d'insert: {str(e)}", + ) + return diff --git a/backend/gn_modulator/imports/mixins/insert.py b/backend/gn_modulator/imports/mixins/insert.py index 48f677b4..03c48d34 100644 --- a/backend/gn_modulator/imports/mixins/insert.py +++ b/backend/gn_modulator/imports/mixins/insert.py @@ -6,7 +6,7 @@ class ImportMixinInsert(ImportMixinUtils): def process_insert(self): from_table = self.tables["process"] - sm = SchemaMethods(self.schema_code) + sm = SchemaMethods(self.schema_code()) self.sql[ "nb_insert" ] = f"SELECT COUNT(*) FROM {from_table} WHERE {sm.pk_field_name()} IS NULL" @@ -32,11 +32,11 @@ def process_insert(self): raise e self.add_error( code="ERR_IMPORT_INSERT", - msg=f"Erreur durant l'insert de {from_table} vers {self.schema_code} : {str(e)}", + msg=f"Erreur durant l'insert de {from_table} vers {self.schema_code()} : {str(e)}", ) def sql_insert(self, from_table, dest_table=None, keys=None): - sm = 
SchemaMethods(self.schema_code) + sm = SchemaMethods(self.schema_code()) table_name = dest_table or sm.sql_schema_dot_table() diff --git a/backend/gn_modulator/imports/mixins/process.py b/backend/gn_modulator/imports/mixins/process.py index ca7dbacb..6069b222 100644 --- a/backend/gn_modulator/imports/mixins/process.py +++ b/backend/gn_modulator/imports/mixins/process.py @@ -25,7 +25,7 @@ def sql_process_view(self, from_table, dest_table, keys=None): requete pour créer une vue qui résoud les clé """ - sm = SchemaMethods(self.schema_code) + sm = SchemaMethods(self.schema_code()) v_columns = [] v_joins = [] @@ -58,7 +58,7 @@ def sql_process_view(self, from_table, dest_table, keys=None): v_joins += v_join txt_pk_column, v_join = self.resolve_key( - self.schema_code, sm.pk_field_name(), alias_join_base="j_pk", solved_keys=solved_keys + self.schema_code(), sm.pk_field_name(), alias_join_base="j_pk", solved_keys=solved_keys ) v_columns.append(txt_pk_column) v_joins += v_join @@ -70,7 +70,7 @@ def sql_process_view(self, from_table, dest_table, keys=None): txt_id_digitiser = "" if self.id_digitiser: for key in ["id_digitiser", "id_digitizer"]: - if SchemaMethods(self.schema_code).has_property(key): + if SchemaMethods(self.schema_code()).has_property(key): txt_id_digitiser = f"{self.id_digitiser} AS {key}," break @@ -175,7 +175,7 @@ def process_column_import_view(self, index, key): """ process column for processed view """ - sm = SchemaMethods(self.schema_code) + sm = SchemaMethods(self.schema_code()) if not sm.has_property(key): return key, [] diff --git a/backend/gn_modulator/imports/mixins/raw.py b/backend/gn_modulator/imports/mixins/raw.py index a579d17c..9374a65b 100644 --- a/backend/gn_modulator/imports/mixins/raw.py +++ b/backend/gn_modulator/imports/mixins/raw.py @@ -38,7 +38,7 @@ def sql_raw_view( on passe les champs valant '' à NULL """ - sm = SchemaMethods(self.schema_code) + sm = SchemaMethods(self.schema_code()) from_table_columns = 
self.get_table_columns(from_table) @@ -94,7 +94,7 @@ def pre_process_raw_import_columns(self, key, key_unnest=None): TODO gérer les null dans l'import csv (ou dans l'insert) """ - sm = SchemaMethods(self.schema_code) + sm = SchemaMethods(self.schema_code()) if key == "id_import": return key @@ -140,7 +140,7 @@ def pre_process_raw_import_columns(self, key, key_unnest=None): def process_raw_import_column(self, key): """ """ - sm = SchemaMethods(self.schema_code) + sm = SchemaMethods(self.schema_code()) if not sm.has_property(key): return f"pp.{key}" diff --git a/backend/gn_modulator/imports/mixins/relation.py b/backend/gn_modulator/imports/mixins/relation.py index 73c36a97..14390c4b 100644 --- a/backend/gn_modulator/imports/mixins/relation.py +++ b/backend/gn_modulator/imports/mixins/relation.py @@ -8,7 +8,7 @@ class ImportMixinRelation(ImportMixinInsert, ImportMixinProcess, ImportMixinRaw, ImportMixinUtils): def process_relations(self): from_table = self.tables.get("mapping") or self.tables["data"] - sm = SchemaMethods(self.schema_code) + sm = SchemaMethods(self.schema_code()) columns = self.get_table_columns(from_table) @@ -22,7 +22,7 @@ def process_relations(self): self.import_relation_n_n(from_table, key) def import_relation_n_n(self, from_table, key): - sm = SchemaMethods(self.schema_code) + sm = SchemaMethods(self.schema_code()) self.sql[key] = {} diff --git a/backend/gn_modulator/imports/mixins/update.py b/backend/gn_modulator/imports/mixins/update.py index e691ab82..a5c425ee 100644 --- a/backend/gn_modulator/imports/mixins/update.py +++ b/backend/gn_modulator/imports/mixins/update.py @@ -29,11 +29,11 @@ def process_update(self): raise e self.add_error( code="ERR_IMPORT_UPDATE", - msg=f"Erreur durant l'update de {from_table} vers {self.schema_code} : {str(e)}", + msg=f"Erreur durant l'update de {from_table} vers {self.schema_code()} : {str(e)}", ) def sql_update(self, from_table): - sm = SchemaMethods(self.schema_code) + sm = SchemaMethods(self.schema_code()) 
columns = self.get_table_columns(from_table) @@ -81,33 +81,4 @@ def sql_update(self, from_table): WHERE a.{sm.pk_field_name()} = t.{sm.pk_field_name()} AND {txt_update_conditions} ; -""" - - def sql_nb_update(self, from_table): - sm = SchemaMethods(self.schema_code) - - columns = self.get_table_columns(from_table) - - v_update_conditions = list( - map( - lambda x: f"(t.{x}::TEXT IS DISTINCT FROM a.{x}::TEXT)", - filter( - lambda x: sm.has_property(x) - and sm.is_column(x) - and not sm.property(x).get("primary_key"), - columns, - ), - ) - ) - - txt_update_conditions = "" + "\n OR ".join(v_update_conditions) + "" - - return f""" - SELECT - COUNT(*) - FROM {sm.sql_schema_dot_table()} t - JOIN {from_table} a - ON a.{sm.pk_field_name()} = t.{sm.pk_field_name()} - WHERE {txt_update_conditions} -; """ diff --git a/backend/gn_modulator/imports/mixins/utils.py b/backend/gn_modulator/imports/mixins/utils.py index 73317b85..bc199445 100644 --- a/backend/gn_modulator/imports/mixins/utils.py +++ b/backend/gn_modulator/imports/mixins/utils.py @@ -25,7 +25,7 @@ def pretty_infos(self): if self.res.get("nb_data") is not None: txt += f"\n-- import csv file {Path(self.data_file_path).name}" txt += f" {self.res.get('nb_data')} lignes\n\n" - txt += f" - {self.schema_code}\n" + txt += f" - {self.schema_code()}\n" if self.res.get("nb_raw") != self.res.get("nb_process"): txt += f" raw : {self.res.get('nb_raw'):10d}\n" if self.res.get("nb_process"): @@ -70,7 +70,7 @@ def table_name(self, type, key=None): return f"{schema_import}.t_{self.id_import}_{type}" else: rel = f"_{key}" if key is not None else "" - return f"{schema_import}.v_{self.id_import}_{type}_{self.schema_code.replace('.', '_')}{rel}" + return f"{schema_import}.v_{self.id_import}_{type}_{self.schema_code().replace('.', '_')}{rel}" def add_error(self, code=None, msg=None, key=None, lines=None, values=None): self.errors.append( diff --git a/backend/gn_modulator/imports/models.py b/backend/gn_modulator/imports/models.py index 
6c59f0a6..5981d693 100644 --- a/backend/gn_modulator/imports/models.py +++ b/backend/gn_modulator/imports/models.py @@ -3,16 +3,20 @@ from sqlalchemy.dialects.postgresql import JSONB from geonature.utils.env import db from .mixins import ImportMixin +from gn_modulator import ModuleMethods class TImport(db.Model, ImportMixin): __tablename__ = "t_imports" __table_args__ = {"schema": "gn_modulator"} - def __init__(self, schema_code=None, data_file_path=None, mapping_file_path=None, options={}): + def __init__( + self, module_code, object_code, data_file_path=None, mapping_file_path=None, options={} + ): self.id_digitiser = g.current_user.id_role if hasattr(g, "current_user") else None - self.schema_code = schema_code + self.module_code = module_code + self.object_code = object_code self.data_file_path = data_file_path and str(data_file_path) self.mapping_file_path = mapping_file_path and str(mapping_file_path) @@ -28,7 +32,13 @@ def __init__(self, schema_code=None, data_file_path=None, mapping_file_path=None id_import = db.Column(db.Integer, primary_key=True) id_digitiser = db.Column(db.Integer, db.ForeignKey("utilisateurs.t_roles.id_role")) - schema_code = db.Column(db.Unicode) + module_code = db.Column(db.Unicode) + object_code = db.Column(db.Unicode) + + def schema_code(self): + return ModuleMethods.schema_code(self.module_code, self.object_code) + + status = db.Column(db.Unicode) data_file_path = db.Column(db.Unicode) mapping_file_path = db.Column(db.Unicode) @@ -51,4 +61,7 @@ def as_dict(self): "csv_delimiter": self.csv_delimiter, "res": self.res, "errors": self.errors, + "options": self.options, + "tables": self.tables, + "status": self.status, } diff --git a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py index 55bd35f9..aa387481 100644 --- a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py +++ 
b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py @@ -22,12 +22,14 @@ def upgrade(): CREATE TABLE gn_modulator.t_imports( id_import SERIAL NOT NULL, - id_digitiser INTEGER, - schema_code VARCHAR, - data_file_path VARCHAR, - mapping_file_path VARCHAR, - csv_delimiter VARCHAR, + id_digitiser INTEGER, -- qui a fait l'import + module_code VARCHAR NOT NULL, -- + object_code VARCHAR NOT NULL, -- + data_file_path VARCHAR, -- stocker dans un blob ?? + mapping_file_path VARCHAR, -- varchar ou table mapping + csv_delimiter VARCHAR, -- data_type VARCHAR, + status VARCHAR, res JSONB, tables JSONB, errors JSONB, diff --git a/backend/gn_modulator/routes/imports.py b/backend/gn_modulator/routes/imports.py index 124d2d7d..e1fdcd37 100644 --- a/backend/gn_modulator/routes/imports.py +++ b/backend/gn_modulator/routes/imports.py @@ -1,5 +1,7 @@ -from flask import request +import json +from flask import request, jsonify +from sqlalchemy import orm from geonature.core.gn_permissions.decorators import check_cruved_scope from geonature.utils.env import db @@ -10,12 +12,13 @@ from gn_modulator.imports.models import TImport -@check_cruved_scope("R") # object import ?? -@blueprint.route("import/", methods=["POST"]) -def api_import(module_code): - object_code = None - if request.form: - object_code = request.form.get("object_code") +@check_cruved_scope("C") # object import ?? 
+@blueprint.route("import///", methods=["POST"]) +@blueprint.route( + "import///", methods=["POST"], defaults={"id_import": None} +) +def api_import(module_code, object_code, id_import): + options = json.loads(request.form.get("options")) if request.form.get("options") else {} schema_code = ModuleMethods.schema_code(module_code, object_code) @@ -29,9 +32,15 @@ def api_import(module_code): ] } - impt = TImport(schema_code=schema_code) - db.session.add(impt) - db.session.flush() + if id_import: + try: + impt = TImport.query().filter_by(id_import=id_import).one() + except orm.exc.NoResultFound: + return f"Pas d'import trouvé pour id_import={id_import}", 404 + else: + impt = TImport(module_code, object_code, options=options) + db.session.add(impt) + db.session.flush() files_path = {} if request.files: diff --git a/backend/gn_modulator/tests/test_import.py b/backend/gn_modulator/tests/test_import.py index 981ba813..adbd297f 100644 --- a/backend/gn_modulator/tests/test_import.py +++ b/backend/gn_modulator/tests/test_import.py @@ -10,7 +10,8 @@ def test_synthese(self): premier test ajout d'une ligne dans la synthese """ - schema_code = "syn.synthese" + module_code = "MODULATOR" + object_code = "syn.synthese" data_file_path = import_test_dir / "synthese_1.csv" expected_infos = { "res.nb_data": 2, @@ -20,14 +21,15 @@ def test_synthese(self): "data_type": "csv", "csv_delimiter": ",", } - test_data_file(schema_code, data_file_path, expected_infos=expected_infos) + test_data_file(module_code, object_code, data_file_path, expected_infos=expected_infos) def test_synthese2(self): """ pour être sur que le premier import n'est pas persistant """ - schema_code = "syn.synthese" + module_code = "MODULATOR" + object_code = "syn.synthese" data_file_path = import_test_dir / "synthese_1.csv" expected_infos = { "res.nb_data": 2, @@ -35,19 +37,21 @@ def test_synthese2(self): "res.nb_update": 0, "res.nb_unchanged": 0, } - test_data_file(schema_code, data_file_path, 
expected_infos=expected_infos) + test_data_file(module_code, object_code, data_file_path, expected_infos=expected_infos) def test_ref_geo_linear(self): """ test import_route """ - schema_code = "ref_geo.linear_type" + module_code = "MODULATOR" + object_code = "ref_geo.linear_type" data_file_path = import_test_dir / "route/linear_type.csv" expected_infos = {"res.nb_data": 1} - test_data_file(schema_code, data_file_path, expected_infos=expected_infos) + test_data_file(module_code, object_code, data_file_path, expected_infos=expected_infos) - schema_code = "ref_geo.linear_group" + module_code = "MODULATOR" + object_code = "ref_geo.linear_group" data_file_path = import_test_dir / "route/route.csv" mapping_file_path = import_test_dir / "route/pp_linear_group.sql" expected_infos = { @@ -57,10 +61,15 @@ def test_ref_geo_linear(self): "res.nb_unchanged": 0, } test_data_file( - schema_code, data_file_path, mapping_file_path, expected_infos=expected_infos + module_code, + object_code, + data_file_path, + mapping_file_path, + expected_infos=expected_infos, ) - schema_code = "ref_geo.linear" + module_code = "MODULATOR" + object_code = "ref_geo.linear" data_file_path = import_test_dir / "route/route.csv" mapping_file_path = import_test_dir / "route/pp_linear.sql" expected_infos = { @@ -70,11 +79,16 @@ def test_ref_geo_linear(self): "res.nb_unchanged": 0, } test_data_file( - schema_code, data_file_path, mapping_file_path, expected_infos=expected_infos + module_code, + object_code, + data_file_path, + mapping_file_path, + expected_infos=expected_infos, ) def test_ref_geo_area(self): - schema_code = "ref_geo.area" + module_code = "MODULATOR" + object_code = "ref_geo.area" data_file_path = import_test_dir / "ref_geo.area.csv" expected_infos = { "res.nb_data": 2, @@ -82,7 +96,9 @@ def test_ref_geo_area(self): "res.nb_update": 0, "res.nb_unchanged": 0, } - impt = test_data_file(schema_code, data_file_path, expected_infos=expected_infos) + impt = test_data_file( + module_code, 
object_code, data_file_path, expected_infos=expected_infos + ) # print(impt.sql["process_view"]) # assert 1 == 0 @@ -90,17 +106,27 @@ def test_ref_geo_area(self): # Test remontées d'erreurs def test_error_ERR_IMPORT_INVALID_VALUE_FOR_TYPE(self): - schema_code = "syn.synthese" + module_code = "MODULATOR" + object_code = "syn.synthese" data_file_path = import_test_dir / "synthese_ERR_IMPORT_INVALID_VALUE_FOR_TYPE.csv" expected_infos = {"errors": [{"code": "ERR_IMPORT_INVALID_VALUE_FOR_TYPE"}]} test_data_file( - schema_code, data_file_path, mapping_file_path=None, expected_infos=expected_infos + module_code, + object_code, + data_file_path, + mapping_file_path=None, + expected_infos=expected_infos, ) def test_error_ERR_IMPORT_MISSING_UNIQUE(self): - schema_code = "ref_geo.area" + module_code = "MODULATOR" + object_code = "ref_geo.area" data_file_path = import_test_dir / "ref_geo.area_ERR_IMPORT_MISSING_UNIQUE.csv" expected_infos = {"errors": [{"code": "ERR_IMPORT_MISSING_UNIQUE"}]} test_data_file( - schema_code, data_file_path, mapping_file_path=None, expected_infos=expected_infos + module_code, + object_code, + data_file_path, + mapping_file_path=None, + expected_infos=expected_infos, ) diff --git a/backend/gn_modulator/tests/test_import_api.py b/backend/gn_modulator/tests/test_import_api.py index d46f8c54..edcd7629 100644 --- a/backend/gn_modulator/tests/test_import_api.py +++ b/backend/gn_modulator/tests/test_import_api.py @@ -12,9 +12,11 @@ class TestImportApi: def test_import_synthese(self, users): set_logged_user_cookie(self.client, users["admin_user"]) with open(import_test_dir / "synthese_1.csv", "rb") as f: - data = {"data_file": (f, "synthese.csv"), "object_code": "syn.synthese"} + data = {"data_file": (f, "synthese.csv")} r = self.client.post( - url_for("modulator.api_import", module_code="MODULATOR"), + url_for( + "modulator.api_import", module_code="MODULATOR", object_code="syn.synthese" + ), data=data, headers=Headers({"Content-Type": 
"multipart/form-data"}), ) @@ -31,9 +33,11 @@ def test_import_synthese(self, users): def test_import_synthese2(self, users): set_logged_user_cookie(self.client, users["admin_user"]) with open(import_test_dir / "synthese_1.csv", "rb") as f: - data = {"data_file": (f, "synthese.csv"), "object_code": "syn.synthese"} + data = {"data_file": (f, "synthese.csv")} r = self.client.post( - url_for("modulator.api_import", module_code="MODULATOR"), + url_for( + "modulator.api_import", module_code="MODULATOR", object_code="syn.synthese" + ), data=data, headers=Headers({"Content-Type": "multipart/form-data"}), ) diff --git a/backend/gn_modulator/tests/utils/imports.py b/backend/gn_modulator/tests/utils/imports.py index 746b3b5a..45b3a2ad 100644 --- a/backend/gn_modulator/tests/utils/imports.py +++ b/backend/gn_modulator/tests/utils/imports.py @@ -6,14 +6,15 @@ @pytest.mark.skip() def test_data_file( - schema_code=None, data_file_path=None, mapping_file_path=None, expected_infos={} + module_code, object_code, data_file_path=None, mapping_file_path=None, expected_infos={} ): with db.session.begin_nested(): # ici options={"insert_data": True} est à true pour intégrer les avec un insert # et non un copy qui ne marche pas en test impt = TImport( - schema_code=schema_code, + module_code, + object_code, data_file_path=data_file_path, mapping_file_path=mapping_file_path, options={"insert_data": True}, @@ -41,7 +42,7 @@ def test_data_file( ), f"L'erreur de code {expected_error['code']} n'a pas été trouvée" for key in expected_infos: - txt_err = f"schema_code: {schema_code}, key: {key}, expected: {expected_infos.get(key)}, import: {getAttr(import_infos, key)}" + txt_err = f"module_code: {module_code}, object_code: {object_code}, key: {key}, expected: {expected_infos.get(key)}, import: {getAttr(import_infos, key)}" assert getAttr(import_infos, key) == expected_infos.get(key), txt_err return impt diff --git a/config/layouts/utils/utils.import.layout.yml 
b/config/layouts/utils/utils.import.layout.yml index 9cf85e62..f9b23b30 100644 --- a/config/layouts/utils/utils.import.layout.yml +++ b/config/layouts/utils/utils.import.layout.yml @@ -4,7 +4,6 @@ title: layout du composant d'import description: layout du composant d'import layout: title: __f__`Importer ${o.des_labels(x, 'C')}` - # hidden: __f__!(o.is_action_allowed(x)) type: form items: - items: @@ -35,6 +34,14 @@ layout: title: "Autoriser les mises à jour" type: boolean default: __f__false + - key: options.check_only + title: Verifier + description: | + L'import se fait en deux temps. + Une premiere étape de validation des données. + Et une deuxième étape d'insertion et de mise à jour des données. + type: boolean + default: __f__true - direction: row items: diff --git a/config/modules/MODULATOR.module.yml b/config/modules/MODULATOR.module.yml index 8d59ed4e..34856b06 100644 --- a/config/modules/MODULATOR.module.yml +++ b/config/modules/MODULATOR.module.yml @@ -12,6 +12,10 @@ objects: cruved: R ref_geo.linear_group: cruved: R + ref_geo.linear_type: + cruved: R + ref_geo.linear: + cruved: R user.role: cruved: R user.organisme: diff --git a/config/modules/contrib/m_sipaf/imports/m_sipaf.pf_V1.import.yml b/config/modules/contrib/m_sipaf/imports/m_sipaf.pf_V1.import.yml index 02d37324..3be4b6a0 100644 --- a/config/modules/contrib/m_sipaf/imports/m_sipaf.pf_V1.import.yml +++ b/config/modules/contrib/m_sipaf/imports/m_sipaf.pf_V1.import.yml @@ -3,14 +3,17 @@ code: m_sipaf.pf_V1 title: Données d'exemple m_sipaf description: import données d'exemple de passage à faune pour SIPAF items: - - schema_code: user.organisme + - object_code: user.organisme + module_code: MODULATOR data: pf_V1.csv mapping: scripts/ppi_organism_V1.sql - - schema_code: m_sipaf.pf + - object_code: site + module_code: m_sipaf data: pf_V1.csv mapping: scripts/ppi_pf_V1.sql keep_raw: true - - schema_code: m_sipaf.actor + - object_code: actor + module_code: m_sipaf data: pf_V1.csv mapping: 
scripts/ppi_actor_V1.sql keep_raw: true diff --git a/config/modules/contrib/m_sipaf/imports/ref_geo.route.import.yml b/config/modules/contrib/m_sipaf/imports/ref_geo.route.import.yml index 9f98942a..a84ed43b 100644 --- a/config/modules/contrib/m_sipaf/imports/ref_geo.route.import.yml +++ b/config/modules/contrib/m_sipaf/imports/ref_geo.route.import.yml @@ -3,12 +3,15 @@ code: ref_geo.route title: import ref_geo.route description: scenario d'import de données pour le ref_geo lineaire (route, autoroute) items: - - schema_code: ref_geo.linear_type + - module_code: MODULATOR + object_code: ref_geo.linear_type data: linear_type.csv - - schema_code: ref_geo.linear_group + - module_code: MODULATOR + object_code: ref_geo.linear_group data: TRONCON_DE_ROUTE_NATIONALE_AUTOROUTE.csv mapping: scripts/ppi_groupe_route_na.sql - - schema_code: ref_geo.linear + - module_code: MODULATOR + object_code: ref_geo.linear ref_geo.linear: ref_geo.linear data: TRONCON_DE_ROUTE_NATIONALE_AUTOROUTE.csv mapping: scripts/ppi_troncon_route_na.sql diff --git a/config/modules/contrib/m_sipaf/m_sipaf.module.yml b/config/modules/contrib/m_sipaf/m_sipaf.module.yml index ae394bb7..5a78d743 100644 --- a/config/modules/contrib/m_sipaf/m_sipaf.module.yml +++ b/config/modules/contrib/m_sipaf/m_sipaf.module.yml @@ -19,6 +19,10 @@ objects: site: schema_code: m_sipaf.pf cruved: CRUDE + actor: + schema_code: m_sipaf.actor + cruved: R + tree: pf: diff --git a/config/references/import.reference.yml b/config/references/import.reference.yml index bda7c882..95d2dec3 100644 --- a/config/references/import.reference.yml +++ b/config/references/import.reference.yml @@ -23,11 +23,14 @@ properties: type: array items: required: - - schema_code + - module_code + - object_code - data type: object properties: - schema_code: + module_code: + type: string + object_code: type: string data: type: string diff --git a/doc/technique/import.md b/doc/technique/import.md index ad02683f..febbf9b7 100644 --- a/doc/technique/import.md 
+++ b/doc/technique/import.md @@ -30,7 +30,8 @@ Pour une ligne du fichier à importer on peux être dans deux cas - la clé primaire peut être résolue, il existe déjà une ligne correspondant à cette données. On a la possibilité de faire un `UPDATE` pour ces lignes ## Les étapes -### 1) Données +### 1) Chargement, pré-traitement et verification des données +#### 1.1) Chargement des données Chargement du fichier de données dans une table `gn_modulator_import.t_` - Toutes les colonnes sont de type `VARCHAR` @@ -39,24 +40,24 @@ Chargement du fichier de données dans une table `gn_modulator_import.t_ { - formGroup.patchValue({ - oups: !formGroup.value.oups, - layout_definition: '', - layout_from_list: { code: data.layout_from_list.code }, - }); - }`, - }, { flex: '0', type: 'boolean', @@ -116,7 +103,7 @@ export class TestLayoutComponent implements OnInit { title_field_name: 'description', return_object: true, oup: '__f__data.oups', - // reload_on_search: true, + reload_on_search: true, default_item: this.layoutCode && { code: this.layoutCode }, }, { diff --git a/frontend/app/services/data.service.ts b/frontend/app/services/data.service.ts index 36d4e84e..80cb94dd 100644 --- a/frontend/app/services/data.service.ts +++ b/frontend/app/services/data.service.ts @@ -89,16 +89,6 @@ export class ModulesDataService { }); } - import(moduleCode, data, params = {}) { - return this._mRequest.postRequestWithFormData( - `${this._mConfig.backendModuleUrl()}/import/${moduleCode}`, - { - data, - params, - } - ); - } - getBreadcrumbs(context: any) { return this._mRequest.request( 'get', diff --git a/frontend/app/services/form.service.ts b/frontend/app/services/form.service.ts index ca786086..b15ea434 100644 --- a/frontend/app/services/form.service.ts +++ b/frontend/app/services/form.service.ts @@ -329,7 +329,8 @@ export class ModulesFormService { : utils.isObject(formValue) ? !utils.isObject(data) ? 
false - : Object.entries(formValue) + : // Object.keys(formValue).length == Object.keys(data).length && + Object.entries(formValue) .filter(([k, v]) => k != 'pendingRequest') .every(([k, v]) => this.isEqual(v, data[k])) : Array.isArray(formValue) diff --git a/frontend/app/services/import.service.ts b/frontend/app/services/import.service.ts index a0aa8aed..3eedd371 100644 --- a/frontend/app/services/import.service.ts +++ b/frontend/app/services/import.service.ts @@ -4,6 +4,7 @@ import { ModulesLayoutService } from './layout.service'; import { ModulesObjectService } from './object.service'; import { ModulesConfigService } from './config.service'; import { ModulesRouteService } from './route.service'; +import { ModulesRequestService } from './request.service'; import { CommonService } from '@geonature_common/service/common.service'; import { HttpEventType, HttpResponse, HttpErrorResponse } from '@angular/common/http'; import { catchError, map, filter, switchMap } from 'rxjs/operators'; @@ -17,6 +18,7 @@ export class ModulesImportService { _commonService: CommonService; _mConfig: ModulesConfigService; _mRoute: ModulesRouteService; + _mRequest: ModulesRequestService; constructor(private _injector: Injector) { this._mData = this._injector.get(ModulesDataService); @@ -25,6 +27,19 @@ export class ModulesImportService { this._commonService = this._injector.get(CommonService); this._mConfig = this._injector.get(ModulesConfigService); this._mRoute = this._injector.get(ModulesRouteService); + this._mRequest = this._injector.get(ModulesRequestService); + } + + importRequest(moduleCode, object_code, data, params = {}) { + return this._mRequest.postRequestWithFormData( + `${this._mConfig.backendModuleUrl()}/import/${moduleCode}/${object_code}/${ + data.id_import || '' + }`, + { + data, + params, + } + ); } processImport(context, data) { @@ -33,8 +48,7 @@ export class ModulesImportService { class: null, }; this._mLayout.reComputeLayout(); - this._mData - 
.import(context.module_code, data) + this.importRequest(context.module_code, context.object_code, data) .pipe() .subscribe( (importEvent) => { diff --git a/frontend/app/services/request.service.ts b/frontend/app/services/request.service.ts index e494c559..059caba5 100644 --- a/frontend/app/services/request.service.ts +++ b/frontend/app/services/request.service.ts @@ -29,7 +29,8 @@ export class ModulesRequestService { for (const [key, value] of Object.entries(data).filter( ([key, value]) => !utils.isFile(value) )) { - formData.append(key, value as any); + const processedValue = utils.isObject(value) ? JSON.stringify(value) : value; + formData.append(key, processedValue as any); } for (const [key, value] of Object.entries(data).filter(([key, value]) => utils.isFile(value))) { formData.append(key, value as any); From 2d96ea39295fbd3e6e756b718e8f227d55d04a49 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 16 Mar 2023 10:14:19 +0100 Subject: [PATCH 025/142] import status --- backend/gn_modulator/commands.py | 11 ++- .../gn_modulator/imports/mixins/__init__.py | 99 ++++++++----------- backend/gn_modulator/imports/mixins/check.py | 16 ++- backend/gn_modulator/imports/mixins/count.py | 4 +- backend/gn_modulator/imports/mixins/insert.py | 6 +- .../gn_modulator/imports/mixins/process.py | 9 +- backend/gn_modulator/imports/mixins/raw.py | 6 +- .../gn_modulator/imports/mixins/relation.py | 4 +- backend/gn_modulator/imports/mixins/update.py | 4 +- backend/gn_modulator/imports/mixins/utils.py | 16 ++- backend/gn_modulator/imports/models.py | 19 +++- .../3920371728d8_gn_modulator_import_init.py | 5 +- backend/gn_modulator/routes/imports.py | 37 ++----- backend/gn_modulator/tests/utils/imports.py | 4 +- config/layouts/utils/utils.import.layout.yml | 16 ++- frontend/app/components/base/base.scss | 6 ++ .../layout/base/layout-import.component.ts | 50 +++++++++- frontend/app/services/import.service.ts | 66 +------------ 18 files changed, 189 insertions(+), 189 deletions(-) 
diff --git a/backend/gn_modulator/commands.py b/backend/gn_modulator/commands.py index d1576752..333b941a 100644 --- a/backend/gn_modulator/commands.py +++ b/backend/gn_modulator/commands.py @@ -143,7 +143,12 @@ def cmd_doc_schema(schema_code, force=False): ) @with_appcontext def cmd_import_bulk_data( - schema_code=None, import_code=None, data_path=None, mapping_file_path=None, verbose=None + module_code=None, + object_code=None, + import_code=None, + data_path=None, + mapping_file_path=None, + verbose=None, ): """ importe des données pour un schema @@ -151,9 +156,9 @@ def cmd_import_bulk_data( init_gn_modulator() - if schema_code and data_path: + if module_code and object_code and data_path: impt = TImport( - schema_code=schema_code, data_file_path=data_path, mapping_file_path=mapping_file_path + module_code, object_code, data_file_path=data_path, mapping_file_path=mapping_file_path ) impt.process_import_schema() print(impt.pretty_infos()) diff --git a/backend/gn_modulator/imports/mixins/__init__.py b/backend/gn_modulator/imports/mixins/__init__.py index 96b88cc0..a40b936c 100644 --- a/backend/gn_modulator/imports/mixins/__init__.py +++ b/backend/gn_modulator/imports/mixins/__init__.py @@ -27,68 +27,53 @@ class ImportMixin( ImportMixinUtils, ): def process_import_schema(self): - self.init_import() - if self.errors: - return self - db.session.flush() - - self.process_data_table() - if self.errors: - return self - db.session.flush() - - self.process_mapping_view() - if self.errors: - return self - db.session.flush() - - self.check_uniques() - self.check_types() - if self.errors: - return self - db.session.flush() - - self.process_raw_view() - if self.errors: - return self - db.session.flush() - - self.process_view() - if self.errors: - return self - db.session.flush() - - self.check_required() - self.check_resolve_keys() - - if self.errors: - return self - db.session.flush() - - self.process_count() - if self.errors: - return self - db.session.flush() + if 
self.status in ["DONE", "PROCESSING"]: + return - if self.options.get("check_only"): - return self + if self.status is None: + self.process_load_data_and_check() - self.process_insert() - if self.errors: - return self - db.session.flush() - - self.process_update() - if self.errors: - return self - db.session.flush() + if self.status == "ERROR": + return - self.process_relations() - if self.errors: + if self.options.get("check_only") and not self.status == "READY": + self.status = "READY" return self - db.session.flush() - return self + print(self.as_dict()) + + self.process_insert_and_update() + + def process_load_data_and_check(self): + for action in [ + "init_import", + "process_data_table", + "process_mapping_view", + "process_pre_check", + "process_raw_view", + "process_view", + "process_post_check", + "process_count", + ]: + getattr(self, action)() + if self.status == "ERROR": + return self + db.session.flush() + + def process_insert_and_update(self): + self.status = "PROCESSING" + + for action in [ + "process_insert", + "process_update", + "process_relations", + ]: + getattr(self, action)() + if self.status == "ERROR": + return self + db.session.flush() + + self.status = "DONE" @classmethod def process_import_code(cls, import_code, data_dir_path, insert_data=True, commit=True): diff --git a/backend/gn_modulator/imports/mixins/check.py b/backend/gn_modulator/imports/mixins/check.py index 3d631f92..7900c2ae 100644 --- a/backend/gn_modulator/imports/mixins/check.py +++ b/backend/gn_modulator/imports/mixins/check.py @@ -5,13 +5,21 @@ class ImportMixinCheck(ImportMixinUtils): + def process_pre_check(self): + self.check_types() + self.check_uniques() + + def process_post_check(self): + self.check_required() + self.check_resolve_keys() + def check_uniques(self): # avant raw # on verifie la présence des colonne d'unicité table_test = self.tables.get("mapping") or self.tables["data"] columns = self.get_table_columns(table_test) - sm = SchemaMethods(self.schema_code()) 
+ sm = SchemaMethods(self.schema_code) unique = sm.attr("meta.unique") missing_unique = [key for key in unique if key not in columns] @@ -28,7 +36,7 @@ def check_types(self): # gn_modulator.check_type(TYPE_IN, val) table_test = self.tables.get("mapping") or self.tables["data"] - sm = SchemaMethods(self.schema_code()) + sm = SchemaMethods(self.schema_code) for key in filter( lambda x: sm.is_column(x) and not sm.property(x).get("foreign_key"), self.get_table_columns(table_test), @@ -73,7 +81,7 @@ def check_required(self): # et que la valeur dans raw est nulle # erreur raw_table = self.tables["raw"] - sm = SchemaMethods(self.schema_code()) + sm = SchemaMethods(self.schema_code) for key in self.get_table_columns(raw_table): if not sm.is_required(key): @@ -104,7 +112,7 @@ def check_required(self): def check_resolve_keys(self): raw_table = self.tables["raw"] process_table = self.tables["process"] - sm = SchemaMethods(self.schema_code()) + sm = SchemaMethods(self.schema_code) for key in self.get_table_columns(raw_table): if not (sm.has_property(key) and sm.property(key).get("foreign_key")): diff --git a/backend/gn_modulator/imports/mixins/count.py b/backend/gn_modulator/imports/mixins/count.py index a9720a1d..b389198a 100644 --- a/backend/gn_modulator/imports/mixins/count.py +++ b/backend/gn_modulator/imports/mixins/count.py @@ -11,7 +11,7 @@ def process_count(self): ) def sql_nb_update(self, from_table): - sm = SchemaMethods(self.schema_code()) + sm = SchemaMethods(self.schema_code) columns = self.get_table_columns(from_table) @@ -55,7 +55,7 @@ def count_update(self): def count_insert(self): from_table = self.tables["process"] - sm = SchemaMethods(self.schema_code()) + sm = SchemaMethods(self.schema_code) self.sql[ "nb_insert" ] = f"SELECT COUNT(*) FROM {from_table} WHERE {sm.pk_field_name()} IS NULL" diff --git a/backend/gn_modulator/imports/mixins/insert.py b/backend/gn_modulator/imports/mixins/insert.py index 03c48d34..48f677b4 100644 --- 
a/backend/gn_modulator/imports/mixins/insert.py +++ b/backend/gn_modulator/imports/mixins/insert.py @@ -6,7 +6,7 @@ class ImportMixinInsert(ImportMixinUtils): def process_insert(self): from_table = self.tables["process"] - sm = SchemaMethods(self.schema_code()) + sm = SchemaMethods(self.schema_code) self.sql[ "nb_insert" ] = f"SELECT COUNT(*) FROM {from_table} WHERE {sm.pk_field_name()} IS NULL" @@ -32,11 +32,11 @@ def process_insert(self): raise e self.add_error( code="ERR_IMPORT_INSERT", - msg=f"Erreur durant l'insert de {from_table} vers {self.schema_code()} : {str(e)}", + msg=f"Erreur durant l'insert de {from_table} vers {self.schema_code} : {str(e)}", ) def sql_insert(self, from_table, dest_table=None, keys=None): - sm = SchemaMethods(self.schema_code()) + sm = SchemaMethods(self.schema_code) table_name = dest_table or sm.sql_schema_dot_table() diff --git a/backend/gn_modulator/imports/mixins/process.py b/backend/gn_modulator/imports/mixins/process.py index 6069b222..8e3f7bf6 100644 --- a/backend/gn_modulator/imports/mixins/process.py +++ b/backend/gn_modulator/imports/mixins/process.py @@ -25,7 +25,7 @@ def sql_process_view(self, from_table, dest_table, keys=None): requete pour créer une vue qui résoud les clé """ - sm = SchemaMethods(self.schema_code()) + sm = SchemaMethods(self.schema_code) v_columns = [] v_joins = [] @@ -58,7 +58,7 @@ def sql_process_view(self, from_table, dest_table, keys=None): v_joins += v_join txt_pk_column, v_join = self.resolve_key( - self.schema_code(), sm.pk_field_name(), alias_join_base="j_pk", solved_keys=solved_keys + self.schema_code, sm.pk_field_name(), alias_join_base="j_pk", solved_keys=solved_keys ) v_columns.append(txt_pk_column) v_joins += v_join @@ -70,7 +70,7 @@ def sql_process_view(self, from_table, dest_table, keys=None): txt_id_digitiser = "" if self.id_digitiser: for key in ["id_digitiser", "id_digitizer"]: - if SchemaMethods(self.schema_code()).has_property(key): + if 
SchemaMethods(self.schema_code).has_property(key): txt_id_digitiser = f"{self.id_digitiser} AS {key}," break @@ -93,7 +93,6 @@ def resolve_key( le champs de la colonne qui doit contenir la clé la ou les jointures nécessaire pour résoudre la clé """ - sm = SchemaMethods(schema_code) alias_join = alias_join_base if index is None else f"{alias_join_base}_{index}" @@ -175,7 +174,7 @@ def process_column_import_view(self, index, key): """ process column for processed view """ - sm = SchemaMethods(self.schema_code()) + sm = SchemaMethods(self.schema_code) if not sm.has_property(key): return key, [] diff --git a/backend/gn_modulator/imports/mixins/raw.py b/backend/gn_modulator/imports/mixins/raw.py index 9374a65b..a579d17c 100644 --- a/backend/gn_modulator/imports/mixins/raw.py +++ b/backend/gn_modulator/imports/mixins/raw.py @@ -38,7 +38,7 @@ def sql_raw_view( on passe les champs valant '' à NULL """ - sm = SchemaMethods(self.schema_code()) + sm = SchemaMethods(self.schema_code) from_table_columns = self.get_table_columns(from_table) @@ -94,7 +94,7 @@ def pre_process_raw_import_columns(self, key, key_unnest=None): TODO gérer les null dans l'import csv (ou dans l'insert) """ - sm = SchemaMethods(self.schema_code()) + sm = SchemaMethods(self.schema_code) if key == "id_import": return key @@ -140,7 +140,7 @@ def pre_process_raw_import_columns(self, key, key_unnest=None): def process_raw_import_column(self, key): """ """ - sm = SchemaMethods(self.schema_code()) + sm = SchemaMethods(self.schema_code) if not sm.has_property(key): return f"pp.{key}" diff --git a/backend/gn_modulator/imports/mixins/relation.py b/backend/gn_modulator/imports/mixins/relation.py index 14390c4b..73c36a97 100644 --- a/backend/gn_modulator/imports/mixins/relation.py +++ b/backend/gn_modulator/imports/mixins/relation.py @@ -8,7 +8,7 @@ class ImportMixinRelation(ImportMixinInsert, ImportMixinProcess, ImportMixinRaw, ImportMixinUtils): def process_relations(self): from_table = self.tables.get("mapping") 
or self.tables["data"] - sm = SchemaMethods(self.schema_code()) + sm = SchemaMethods(self.schema_code) columns = self.get_table_columns(from_table) @@ -22,7 +22,7 @@ def process_relations(self): self.import_relation_n_n(from_table, key) def import_relation_n_n(self, from_table, key): - sm = SchemaMethods(self.schema_code()) + sm = SchemaMethods(self.schema_code) self.sql[key] = {} diff --git a/backend/gn_modulator/imports/mixins/update.py b/backend/gn_modulator/imports/mixins/update.py index a5c425ee..68dd0ac7 100644 --- a/backend/gn_modulator/imports/mixins/update.py +++ b/backend/gn_modulator/imports/mixins/update.py @@ -29,11 +29,11 @@ def process_update(self): raise e self.add_error( code="ERR_IMPORT_UPDATE", - msg=f"Erreur durant l'update de {from_table} vers {self.schema_code()} : {str(e)}", + msg=f"Erreur durant l'update de {from_table} vers {self.schema_code} : {str(e)}", ) def sql_update(self, from_table): - sm = SchemaMethods(self.schema_code()) + sm = SchemaMethods(self.schema_code) columns = self.get_table_columns(from_table) diff --git a/backend/gn_modulator/imports/mixins/utils.py b/backend/gn_modulator/imports/mixins/utils.py index bc199445..1a4458b8 100644 --- a/backend/gn_modulator/imports/mixins/utils.py +++ b/backend/gn_modulator/imports/mixins/utils.py @@ -2,6 +2,7 @@ from gn_modulator.schema import SchemaMethods from gn_modulator.utils.env import schema_import +from gn_modulator import ModuleMethods class ImportMixinUtils: @@ -18,6 +19,16 @@ class ImportMixinUtils: } def init_import(self): + self.schema_code = self.schema_code or ModuleMethods.schema_code( + self.module_code, self.object_code + ) + if not self.schema_code: + self.add_error( + { + "code": "ERR_IMPORT_SCHEMA_CODE_NOT_FOND", + "msg": f"Il n'y a pas de schema pour module_code={self.module_code}, object_code={self.object_code}", + } + ) SchemaMethods.c_sql_exec_txt(f"CREATE SCHEMA IF NOT EXISTS {schema_import}") def pretty_infos(self): @@ -25,7 +36,7 @@ def pretty_infos(self): if 
self.res.get("nb_data") is not None: txt += f"\n-- import csv file {Path(self.data_file_path).name}" txt += f" {self.res.get('nb_data')} lignes\n\n" - txt += f" - {self.schema_code()}\n" + txt += f" - {self.schema_code}\n" if self.res.get("nb_raw") != self.res.get("nb_process"): txt += f" raw : {self.res.get('nb_raw'):10d}\n" if self.res.get("nb_process"): @@ -70,12 +81,13 @@ def table_name(self, type, key=None): return f"{schema_import}.t_{self.id_import}_{type}" else: rel = f"_{key}" if key is not None else "" - return f"{schema_import}.v_{self.id_import}_{type}_{self.schema_code().replace('.', '_')}{rel}" + return f"{schema_import}.v_{self.id_import}_{type}_{self.schema_code.replace('.', '_')}{rel}" def add_error(self, code=None, msg=None, key=None, lines=None, values=None): self.errors.append( {"code": code, "msg": msg, "key": key, "lines": lines, "values": values} ) + self.status = "ERROR" def get_table_columns(self, table_name): if not self._columns.get(table_name): diff --git a/backend/gn_modulator/imports/models.py b/backend/gn_modulator/imports/models.py index 5981d693..67f130af 100644 --- a/backend/gn_modulator/imports/models.py +++ b/backend/gn_modulator/imports/models.py @@ -11,10 +11,17 @@ class TImport(db.Model, ImportMixin): __table_args__ = {"schema": "gn_modulator"} def __init__( - self, module_code, object_code, data_file_path=None, mapping_file_path=None, options={} + self, + module_code=None, + object_code=None, + schema_code=None, + data_file_path=None, + mapping_file_path=None, + options={}, ): self.id_digitiser = g.current_user.id_role if hasattr(g, "current_user") else None + self.schema_code = schema_code self.module_code = module_code self.object_code = object_code self.data_file_path = data_file_path and str(data_file_path) @@ -25,7 +32,8 @@ def __init__( self.res = {} self.errors = [] self.sql = {} - self.tables = {} + self.tables = {"prout": "oug"} + print("init import ", self.id_import) _columns = {} @@ -34,9 +42,7 @@ def __init__( 
id_digitiser = db.Column(db.Integer, db.ForeignKey("utilisateurs.t_roles.id_role")) module_code = db.Column(db.Unicode) object_code = db.Column(db.Unicode) - - def schema_code(self): - return ModuleMethods.schema_code(self.module_code, self.object_code) + schema_code = db.Column(db.Unicode) status = db.Column(db.Unicode) @@ -56,6 +62,9 @@ def schema_code(self): def as_dict(self): return { "id_import": self.id_import, + "schema_code": self.schema_code, + "module_code": self.module_code, + "object_code": self.object_code, "id_digitiser": self.id_digitiser, "data_type": self.data_type, "csv_delimiter": self.csv_delimiter, diff --git a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py index aa387481..105eeb8a 100644 --- a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py +++ b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py @@ -23,8 +23,9 @@ def upgrade(): CREATE TABLE gn_modulator.t_imports( id_import SERIAL NOT NULL, id_digitiser INTEGER, -- qui a fait l'import - module_code VARCHAR NOT NULL, -- - object_code VARCHAR NOT NULL, -- + module_code VARCHAR, -- + object_code VARCHAR, -- + schema_code VARCHAR, -- data_file_path VARCHAR, -- stocker dans un blob ?? 
mapping_file_path VARCHAR, -- varchar ou table mapping csv_delimiter VARCHAR, -- diff --git a/backend/gn_modulator/routes/imports.py b/backend/gn_modulator/routes/imports.py index e1fdcd37..557317c2 100644 --- a/backend/gn_modulator/routes/imports.py +++ b/backend/gn_modulator/routes/imports.py @@ -20,21 +20,9 @@ def api_import(module_code, object_code, id_import): options = json.loads(request.form.get("options")) if request.form.get("options") else {} - schema_code = ModuleMethods.schema_code(module_code, object_code) - - if not schema_code: - return { - "errors": [ - { - "msg": f"Il n'y pas de schema pour module_code={module_code} et object_code={object_code}", - "code": "ERR_IMPORT_SCHEMA_CODE", - } - ] - } - if id_import: try: - impt = TImport.query().filter_by(id_import=id_import).one() + impt = TImport.query.filter_by(id_import=id_import).one() except orm.exc.NoResultFound: return f"Pas d'import trouvé pour id_import={id_import}", 404 else: @@ -42,24 +30,19 @@ def api_import(module_code, object_code, id_import): db.session.add(impt) db.session.flush() - files_path = {} - if request.files: - for file_key in request.files: - file = request.files.get(file_key) - files_path[file_key] = upload_import_file( - module_code, object_code, impt.id_import, file - ) + if not impt.status: + files_path = {} + if request.files: + for file_key in request.files: + file = request.files.get(file_key) + files_path[file_key] = upload_import_file( + module_code, object_code, impt.id_import, file + ) - impt.data_file_path = files_path.get("data_file") and str(files_path.get("data_file")) + impt.data_file_path = files_path.get("data_file") and str(files_path.get("data_file")) impt.process_import_schema() - if impt.errors: - out = {"errors": impt.errors} - db.session.commit() - return out - out = impt.as_dict() - db.session.commit() return out diff --git a/backend/gn_modulator/tests/utils/imports.py b/backend/gn_modulator/tests/utils/imports.py index 45b3a2ad..df45b6cb 100644 --- 
a/backend/gn_modulator/tests/utils/imports.py +++ b/backend/gn_modulator/tests/utils/imports.py @@ -13,8 +13,8 @@ def test_data_file( # et non un copy qui ne marche pas en test impt = TImport( - module_code, - object_code, + module_code=module_code, + object_code=object_code, data_file_path=data_file_path, mapping_file_path=mapping_file_path, options={"insert_data": True}, diff --git a/config/layouts/utils/utils.import.layout.yml b/config/layouts/utils/utils.import.layout.yml index f9b23b30..a251dd29 100644 --- a/config/layouts/utils/utils.import.layout.yml +++ b/config/layouts/utils/utils.import.layout.yml @@ -11,13 +11,16 @@ layout: html: __f__data.importMsg?.html class: __f__data.importMsg?.class hidden: __f__!data.importMsg + - type: message + json: __f__data - hidden: true items: - key: importMsg - - key: object_code type: string - hidden: true - default: __f__context.object_code + - title: '__f__`Statut: ${data.status}`' + - key: status + type: string + - title: import display: fieldset items: @@ -46,8 +49,11 @@ layout: - direction: row items: - type: button - color: primary - title: Valider + color: success + title: | + __f__data.status == 'READY' + ? 
'Importer les données' + : 'Charger et valider les données' description: Valider action: import disabled: __f__!(formGroup.valid ) diff --git a/frontend/app/components/base/base.scss b/frontend/app/components/base/base.scss index a7b12019..50ed8af0 100644 --- a/frontend/app/components/base/base.scss +++ b/frontend/app/components/base/base.scss @@ -41,6 +41,12 @@ color: #fff; } +.layout-buttons .mat-button-disabled { + color: rgba(0,0,0,.26); + background-color: rgba(0,0,0,.12); +} + + .content-container { min-width: 50%; } diff --git a/frontend/app/components/layout/base/layout-import.component.ts b/frontend/app/components/layout/base/layout-import.component.ts index 4ccf8130..1ad01bc6 100644 --- a/frontend/app/components/layout/base/layout-import.component.ts +++ b/frontend/app/components/layout/base/layout-import.component.ts @@ -1,6 +1,7 @@ import { Component, OnInit, Injector, ViewEncapsulation } from '@angular/core'; import { ModulesLayoutComponent } from './layout.component'; import { ModulesImportService } from '../../../services/import.service'; +import { HttpEventType, HttpResponse, HttpErrorResponse } from '@angular/common/http'; import utils from '../../../utils'; @Component({ @@ -15,6 +16,8 @@ export class ModulesLayoutImportComponent extends ModulesLayoutComponent impleme importContext: any; _mImport: ModulesImportService; + uploadPercentDone; + constructor(_injector: Injector) { super(_injector); this._name = 'layout-import'; @@ -32,10 +35,55 @@ export class ModulesLayoutImportComponent extends ModulesLayoutComponent impleme }; } + processImport(context, data) { + data.importMsg = { + html: 'Traitement en cours', + class: null, + }; + this._mLayout.reComputeLayout(); + this._mImport + .importRequest(context.module_code, context.object_code, data) + .pipe() + .subscribe((importEvent) => { + if (importEvent.type === HttpEventType.UploadProgress) { + this.uploadPercentDone = Math.round((100 * importEvent.loaded) / importEvent.total); + } + if 
(importEvent instanceof HttpResponse) { + this._mLayout.stopActionProcessing(''); + const response = importEvent.body as any; + this.importData = response; + // if (response.errors?.length) { + // for (let error of response.errors) { + // console.error(`${error.code} : ${error.msg}`); + // } + // data.importMsg = { + // class: 'error', + // html: this._mImport.importHTMLMsgError(response), + // }; + // for (const key of response) { + // console.log(key) + // data[key] = response[key] + // } + // return; + // } + + // let txtImport = this._mImport.importHTMLMsgSuccess(response); + + // data.importMsg = { + // class: 'success', + // html: txtImport, + // }; + + // setTimeout(() => this._mLayout.reComputeLayout(), 100); + // this._commonService.regularToaster('success', txtImport); + } + }); + } + processAction(event: any): void { const { action, context, value = null, data = null, layout = null } = event; if (action == 'import') { - return this._mImport.processImport(context, data); + return this.processImport(context, data); } } } diff --git a/frontend/app/services/import.service.ts b/frontend/app/services/import.service.ts index 3eedd371..bdbe98f8 100644 --- a/frontend/app/services/import.service.ts +++ b/frontend/app/services/import.service.ts @@ -1,33 +1,15 @@ import { Injectable, Injector } from '@angular/core'; -import { ModulesDataService } from './data.service'; -import { ModulesLayoutService } from './layout.service'; -import { ModulesObjectService } from './object.service'; import { ModulesConfigService } from './config.service'; -import { ModulesRouteService } from './route.service'; import { ModulesRequestService } from './request.service'; -import { CommonService } from '@geonature_common/service/common.service'; -import { HttpEventType, HttpResponse, HttpErrorResponse } from '@angular/common/http'; -import { catchError, map, filter, switchMap } from 'rxjs/operators'; -import { of } from 'rxjs'; @Injectable() export class ModulesImportService { - _mData: 
ModulesDataService; - _mLayout: ModulesLayoutService; - _mObject: ModulesObjectService; - _commonService: CommonService; _mConfig: ModulesConfigService; - _mRoute: ModulesRouteService; _mRequest: ModulesRequestService; constructor(private _injector: Injector) { - this._mData = this._injector.get(ModulesDataService); - this._mLayout = this._injector.get(ModulesLayoutService); - this._mObject = this._injector.get(ModulesObjectService); - this._commonService = this._injector.get(CommonService); - this._mConfig = this._injector.get(ModulesConfigService); - this._mRoute = this._injector.get(ModulesRouteService); this._mRequest = this._injector.get(ModulesRequestService); + this._mConfig = this._injector.get(ModulesConfigService); } importRequest(moduleCode, object_code, data, params = {}) { @@ -36,56 +18,12 @@ export class ModulesImportService { data.id_import || '' }`, { - data, + data: data.id_import ? {} : data, params, } ); } - processImport(context, data) { - data.importMsg = { - html: 'Traitement en cours', - class: null, - }; - this._mLayout.reComputeLayout(); - this.importRequest(context.module_code, context.object_code, data) - .pipe() - .subscribe( - (importEvent) => { - if (importEvent.type === HttpEventType.UploadProgress) { - const uploadPerCentDone = Math.round((100 * importEvent.loaded) / importEvent.total); - } - if (importEvent instanceof HttpResponse) { - this._mLayout.stopActionProcessing(''); - const response = importEvent.body as any; - if (response.errors?.length) { - for (let error of response.errors) { - console.error(`${error.code} : ${error.msg}`); - } - data.importMsg = { - class: 'error', - html: this.importHTMLMsgError(response), - }; - return; - } - - let txtImport = this.importHTMLMsgSuccess(response); - - data.importMsg = { - class: 'success', - html: txtImport, - }; - - setTimeout(() => this._mLayout.reComputeLayout(), 100); - // this._commonService.regularToaster('success', txtImport); - } - }, - (error: HttpErrorResponse) => { - 
this._commonService.regularToaster('error', `Import : ${error.error.msg}`); - } - ); - } - importHTMLMsgSuccess(impt) { let txtImport = `
Import réussi
`; let res = impt.res; From 5f695be2678f5035fa10edf6f92b7a015b4687cc Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 16 Mar 2023 10:57:08 +0100 Subject: [PATCH 026/142] fix jsonb update with mutable --- backend/gn_modulator/imports/mixins/data.py | 1 + backend/gn_modulator/imports/mixins/mapping.py | 1 + backend/gn_modulator/imports/mixins/process.py | 1 + backend/gn_modulator/imports/mixins/raw.py | 1 + backend/gn_modulator/imports/mixins/utils.py | 1 + backend/gn_modulator/imports/models.py | 14 +++++++------- 6 files changed, 12 insertions(+), 7 deletions(-) diff --git a/backend/gn_modulator/imports/mixins/data.py b/backend/gn_modulator/imports/mixins/data.py index f999bb77..1a3147ce 100644 --- a/backend/gn_modulator/imports/mixins/data.py +++ b/backend/gn_modulator/imports/mixins/data.py @@ -11,6 +11,7 @@ def process_data_table(self): return self.tables["data"] = self.table_name("data") + self.tables = self.tables if Path(self.data_file_path).suffix == ".csv": self.data_type = "csv" diff --git a/backend/gn_modulator/imports/mixins/mapping.py b/backend/gn_modulator/imports/mixins/mapping.py index 81bd36af..e47d90b8 100644 --- a/backend/gn_modulator/imports/mixins/mapping.py +++ b/backend/gn_modulator/imports/mixins/mapping.py @@ -12,6 +12,7 @@ def process_mapping_view(self): return self.tables["mapping"] = self.table_name("mapping") + self.tables = self.tables if not Path(self.mapping_file_path).exists(): self.add_error( diff --git a/backend/gn_modulator/imports/mixins/process.py b/backend/gn_modulator/imports/mixins/process.py index 8e3f7bf6..c2af9571 100644 --- a/backend/gn_modulator/imports/mixins/process.py +++ b/backend/gn_modulator/imports/mixins/process.py @@ -6,6 +6,7 @@ class ImportMixinProcess(ImportMixinUtils): def process_view(self, keys=None): from_table = self.tables["raw"] dest_table = self.tables["process"] = self.table_name("process") + self.tables = self.tables self.sql["process_view"] = self.sql_process_view(from_table, dest_table, 
keys) diff --git a/backend/gn_modulator/imports/mixins/raw.py b/backend/gn_modulator/imports/mixins/raw.py index a579d17c..c233566b 100644 --- a/backend/gn_modulator/imports/mixins/raw.py +++ b/backend/gn_modulator/imports/mixins/raw.py @@ -11,6 +11,7 @@ def process_raw_view(self): from_table = self.tables.get("mapping") or self.tables["data"] dest_table = self.tables["raw"] = self.table_name("raw") + self.tables = self.tables self.sql["raw_view"] = self.sql_raw_view(from_table, dest_table) try: SchemaMethods.c_sql_exec_txt(self.sql["raw_view"]) diff --git a/backend/gn_modulator/imports/mixins/utils.py b/backend/gn_modulator/imports/mixins/utils.py index 1a4458b8..14ac816f 100644 --- a/backend/gn_modulator/imports/mixins/utils.py +++ b/backend/gn_modulator/imports/mixins/utils.py @@ -1,3 +1,4 @@ +import json from pathlib import Path from gn_modulator.schema import SchemaMethods diff --git a/backend/gn_modulator/imports/models.py b/backend/gn_modulator/imports/models.py index 67f130af..d0b1a310 100644 --- a/backend/gn_modulator/imports/models.py +++ b/backend/gn_modulator/imports/models.py @@ -3,7 +3,7 @@ from sqlalchemy.dialects.postgresql import JSONB from geonature.utils.env import db from .mixins import ImportMixin -from gn_modulator import ModuleMethods +from sqlalchemy.ext.mutable import MutableDict, MutableList class TImport(db.Model, ImportMixin): @@ -32,7 +32,7 @@ def __init__( self.res = {} self.errors = [] self.sql = {} - self.tables = {"prout": "oug"} + self.tables = {} print("init import ", self.id_import) _columns = {} @@ -52,12 +52,12 @@ def __init__( csv_delimiter = db.Column(db.Unicode) data_type = db.Column(db.Unicode) - res = db.Column(JSONB) - tables = db.Column(JSONB) - sql = db.Column(JSONB) - errors = db.Column(JSONB) + res = db.Column(MutableDict.as_mutable(JSONB)) + tables = db.Column(MutableDict.as_mutable(JSONB)) + sql = db.Column(MutableDict.as_mutable(JSONB)) + errors = db.Column(MutableList.as_mutable(JSONB)) - options = 
db.Column(JSONB) + options = db.Column(MutableDict.as_mutable(JSONB)) def as_dict(self): return { From 88552821fb5d9d3d8ec29de3ea7133f8b32fd313 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 16 Mar 2023 11:05:55 +0100 Subject: [PATCH 027/142] fix command import --- backend/gn_modulator/commands.py | 3 ++- backend/gn_modulator/imports/mixins/__init__.py | 2 -- backend/gn_modulator/imports/models.py | 1 - 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/backend/gn_modulator/commands.py b/backend/gn_modulator/commands.py index 333b941a..b6d4d60d 100644 --- a/backend/gn_modulator/commands.py +++ b/backend/gn_modulator/commands.py @@ -123,7 +123,8 @@ def cmd_doc_schema(schema_code, force=False): @click.command("import") -@click.option("-s", "schema_code") +@click.option("-o", "object_code") +@click.option("-m", "module_code") @click.option("-d", "data_path", type=click.Path(exists=True)) @click.option( "-m", diff --git a/backend/gn_modulator/imports/mixins/__init__.py b/backend/gn_modulator/imports/mixins/__init__.py index a40b936c..de9e3eab 100644 --- a/backend/gn_modulator/imports/mixins/__init__.py +++ b/backend/gn_modulator/imports/mixins/__init__.py @@ -40,8 +40,6 @@ def process_import_schema(self): self.status = "READY" return self - print(self.as_dict()) - self.process_insert_and_update() def process_load_data_and_check(self): diff --git a/backend/gn_modulator/imports/models.py b/backend/gn_modulator/imports/models.py index d0b1a310..8f85a193 100644 --- a/backend/gn_modulator/imports/models.py +++ b/backend/gn_modulator/imports/models.py @@ -33,7 +33,6 @@ def __init__( self.errors = [] self.sql = {} self.tables = {} - print("init import ", self.id_import) _columns = {} From 322f685be19c00ebb4007a3ad56eb502f3826ba9 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 16 Mar 2023 11:18:49 +0100 Subject: [PATCH 028/142] cmd import print available import_code --- backend/gn_modulator/commands.py | 10 +++++++++- 
backend/gn_modulator/imports/mixins/__init__.py | 8 ++++++-- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/backend/gn_modulator/commands.py b/backend/gn_modulator/commands.py index b6d4d60d..d98aebe8 100644 --- a/backend/gn_modulator/commands.py +++ b/backend/gn_modulator/commands.py @@ -165,7 +165,15 @@ def cmd_import_bulk_data( print(impt.pretty_infos()) if import_code: - TImport.process_import_code(import_code, data_path) + res = TImport.process_import_code(import_code, data_path) + if res is None: + print(f"L'import de code {import_code} n'existe pas\n") + import_codes = sorted(DefinitionMethods.definition_codes_for_type("import")) + print(f"Veuillez choisir parmi codes suivants\n") + for import_code in import_codes: + print( + f"- {import_code:>15} : {DefinitionMethods.get_definition('import', import_code)['title']}" + ) return True diff --git a/backend/gn_modulator/imports/mixins/__init__.py b/backend/gn_modulator/imports/mixins/__init__.py index de9e3eab..fce9b3d8 100644 --- a/backend/gn_modulator/imports/mixins/__init__.py +++ b/backend/gn_modulator/imports/mixins/__init__.py @@ -75,10 +75,14 @@ def process_insert_and_update(self): @classmethod def process_import_code(cls, import_code, data_dir_path, insert_data=True, commit=True): - print(f"\nProcess scenario d'import {import_code}") - # get import definition import_definitions = DefinitionMethods.get_definition("import", import_code) + + if not import_definitions: + return None + + print(f"\nProcess scenario d'import {import_code}") + import_definitions_file_path = DefinitionMethods.get_file_path("import", import_code) # for all definition items From f30910af8aff02583fa72b15ab037afa2446eaa3 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 16 Mar 2023 12:02:05 +0100 Subject: [PATCH 029/142] fix gn_modulator.check_value_for_type --- .../gn_modulator/imports/mixins/__init__.py | 7 +++--- backend/gn_modulator/imports/mixins/check.py | 2 +- .../gn_modulator/imports/mixins/mapping.py | 
1 + .../3920371728d8_gn_modulator_import_init.py | 24 +++++++++++++++---- 4 files changed, 25 insertions(+), 9 deletions(-) diff --git a/backend/gn_modulator/imports/mixins/__init__.py b/backend/gn_modulator/imports/mixins/__init__.py index fce9b3d8..94cb2a28 100644 --- a/backend/gn_modulator/imports/mixins/__init__.py +++ b/backend/gn_modulator/imports/mixins/__init__.py @@ -120,14 +120,15 @@ def process_import_code(cls, import_code, data_dir_path, insert_data=True, commi impt.process_import_schema() imports.append(impt) + if commit: + db.session.commit() + if impt.errors: - print(f"Il y a des erreurs dans l'import {import_definition['schema_code']}") + print(f"Il y a des erreurs dans l'import {import_definition['object_code']}") for error in impt.errors: print(f"- {error['code']} : {error['msg']}") return imports print(impt.pretty_infos()) - if commit: - db.session.commit() print(f"Import {import_code} terminé") return imports diff --git a/backend/gn_modulator/imports/mixins/check.py b/backend/gn_modulator/imports/mixins/check.py index 7900c2ae..ab773544 100644 --- a/backend/gn_modulator/imports/mixins/check.py +++ b/backend/gn_modulator/imports/mixins/check.py @@ -52,7 +52,7 @@ def check_types(self): WHERE NOT ( {key} is NULL OR - gn_modulator.check_value_for_type('{sql_type}', {key}) + gn_modulator.check_value_for_type('{sql_type}', {key}::VARCHAR) ) GROUP BY id_import ORDER BY id_import diff --git a/backend/gn_modulator/imports/mixins/mapping.py b/backend/gn_modulator/imports/mixins/mapping.py index e47d90b8..ca427d0e 100644 --- a/backend/gn_modulator/imports/mixins/mapping.py +++ b/backend/gn_modulator/imports/mixins/mapping.py @@ -55,6 +55,7 @@ def sql_mapping(self): "CREATE ", "GRANT ", "COPY ", + "PERFORM ", ]: if forbidden_word in mapping_select: forbidden_words.append(forbidden_word.strip()) diff --git a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py 
b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py index 105eeb8a..897c78e2 100644 --- a/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py +++ b/backend/gn_modulator/migrations/versions/3920371728d8_gn_modulator_import_init.py @@ -54,18 +54,28 @@ def upgrade(): FOR EACH ROW EXECUTE PROCEDURE public.fct_trg_meta_dates_change(); -CREATE OR REPLACE FUNCTION gn_modulator.check_value_for_type(type_in VARCHAR, value_in ANYELEMENT) + +DROP FUNCTION IF EXISTS gn_modulator.check_value_for_type(VARCHAR, varchar); +DROP FUNCTION IF EXISTS gn_modulator.check_value_for_type(VARCHAR, anyelement); +CREATE OR REPLACE FUNCTION gn_modulator.check_value_for_type(type_in VARCHAR, value_in varchar) RETURNS BOOLEAN AS $$ BEGIN - EXECUTE FORMAT('SELECT (''%s'')::%s', value_in, type_in); - RETURN TRUE; - EXCEPTION WHEN OTHERS THEN + IF type_in = 'VARCHAR' THEN PERFORM value_in::VARCHAR; RETURN TRUE; END IF; + IF type_in = 'INTEGER' THEN PERFORM value_in::INTEGER; RETURN TRUE; END IF; + IF type_in = 'BOOLEAN' THEN PERFORM value_in::BOOLEAN; RETURN TRUE; END IF; + IF type_in = 'FLOAT' THEN PERFORM value_in::FLOAT; RETURN TRUE; END IF; + IF type_in = 'DATE' THEN PERFORM value_in::DATE; RETURN TRUE; END IF; + IF type_in = 'TIMESTAMP' THEN PERFORM value_in::TIMESTAMP; RETURN TRUE; END IF; + IF type_in = 'UUID' THEN PERFORM value_in::UUID; RETURN TRUE; END IF; + IF type_in = 'GEOMETRY' THEN PERFORM value_in::GEOMETRY; RETURN TRUE; END IF; + IF type_in = 'JSONB' THEN PERFORM value_in::JSONB; RETURN TRUE; END IF; RETURN FALSE; + EXCEPTION WHEN OTHERS THEN + RETURN FALSE; END; $$ LANGUAGE 'plpgsql' COST 100 -; """ ) @@ -76,6 +86,10 @@ def downgrade(): op.execute( """ DROP TABLE gn_modulator.t_imports; + + DROP FUNCTION IF EXISTS gn_modulator.check_value_for_type(VARCHAR, varchar); + DROP FUNCTION IF EXISTS gn_modulator.check_value_for_type(VARCHAR, anyelement); + """ ) pass From ea77cc74156f2b9cfdaa2f989eff4ad5f528680b Mon Sep 17 
00:00:00 2001 From: "joel.clement" Date: Thu, 16 Mar 2023 12:31:22 +0100 Subject: [PATCH 030/142] fix form default with data_keys --- frontend/app/services/form.service.ts | 2 +- frontend/app/utils/commons.ts | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/frontend/app/services/form.service.ts b/frontend/app/services/form.service.ts index b15ea434..b9adbe63 100644 --- a/frontend/app/services/form.service.ts +++ b/frontend/app/services/form.service.ts @@ -242,7 +242,7 @@ export class ModulesFormService { if (computedLayout.default && [null, undefined].includes(control.value)) { control.setValue(computedLayout.default); if (data) { - data[computedLayout.key] = computedLayout.default; + utils.setAttr(data, [...context.data_keys, computedLayout.key], computedLayout.default); } } diff --git a/frontend/app/utils/commons.ts b/frontend/app/utils/commons.ts index ab5634c5..21dd18bb 100644 --- a/frontend/app/utils/commons.ts +++ b/frontend/app/utils/commons.ts @@ -142,7 +142,9 @@ const filterAttr = (obj, paths, value) => { const setAttr = (obj, paths, value) => { var inter = obj; - const v_path = Object.entries(paths.split('.')) as any; + const v_path = Array.isArray(paths) + ? 
Object.entries(paths) + : (Object.entries(paths.split('.')) as any); for (const [index, path] of v_path) { if (index < v_path.length - 1) { inter[path] = inter[path] || {}; From dc43ca1ef96cc3960fbadeb556119830a0639b00 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 16 Mar 2023 17:07:56 +0100 Subject: [PATCH 031/142] import frontend --- config/layouts/utils/utils.import.layout.yml | 35 ++++---- .../utils/utils.import.layout.yml_save | 57 +++++++++++++ frontend/app/components/layout/base/index.ts | 25 ++++++ .../layout/base/layout-import.component.html | 6 -- .../layout/base/layout-import.component.scss | 0 .../layout/form/form-element.component.html | 17 +++- frontend/app/components/layout/form/index.ts | 5 ++ .../app/components/layout/import/index.ts | 3 + .../import/layout-import.component.html | 69 ++++++++++++++++ .../import/layout-import.component.scss | 10 +++ .../layout-import.component.ts | 79 +++++++++++-------- frontend/app/components/layout/index.ts | 46 ++--------- .../app/components/layout/object/index.ts | 11 +++ frontend/app/services/import.service.ts | 37 +++++++++ 14 files changed, 300 insertions(+), 100 deletions(-) create mode 100644 config/layouts/utils/utils.import.layout.yml_save create mode 100644 frontend/app/components/layout/base/index.ts delete mode 100644 frontend/app/components/layout/base/layout-import.component.html delete mode 100644 frontend/app/components/layout/base/layout-import.component.scss create mode 100644 frontend/app/components/layout/form/index.ts create mode 100644 frontend/app/components/layout/import/index.ts create mode 100644 frontend/app/components/layout/import/layout-import.component.html create mode 100644 frontend/app/components/layout/import/layout-import.component.scss rename frontend/app/components/layout/{base => import}/layout-import.component.ts (58%) create mode 100644 frontend/app/components/layout/object/index.ts diff --git a/config/layouts/utils/utils.import.layout.yml 
b/config/layouts/utils/utils.import.layout.yml index a251dd29..8543508d 100644 --- a/config/layouts/utils/utils.import.layout.yml +++ b/config/layouts/utils/utils.import.layout.yml @@ -3,24 +3,11 @@ code: utils.import title: layout du composant d'import description: layout du composant d'import layout: - title: __f__`Importer ${o.des_labels(x, 'C')}` + flex: '0' type: form items: - - items: - - type: message - html: __f__data.importMsg?.html - class: __f__data.importMsg?.class - hidden: __f__!data.importMsg - - type: message - json: __f__data - - hidden: true - items: - - key: importMsg - type: string - - title: '__f__`Statut: ${data.status}`' - - key: status - type: string - + - flex: '0' + items: - title: import display: fieldset items: @@ -29,6 +16,7 @@ layout: title: Fichier d'import required: __f__!data.tables?.data description: Choisir un fichier à importer + disabled: __f__data.id_import - title: Options additionnelles display: fieldset type: dict @@ -47,16 +35,23 @@ layout: default: __f__true - direction: row + flex: '0' items: - type: button color: success - title: | - __f__data.status == 'READY' - ? 
'Importer les données' - : 'Charger et valider les données' + title: Valider description: Valider action: import disabled: __f__!(formGroup.valid ) + hidden: __f__data.status == 'DONE' + - type: button + color: info + icon: refresh + title: nouvel import + description: Faire un nouvel import + action: reset + hidden: __f__!data.status + - type: button color: primary title: Annuler diff --git a/config/layouts/utils/utils.import.layout.yml_save b/config/layouts/utils/utils.import.layout.yml_save new file mode 100644 index 00000000..f213600d --- /dev/null +++ b/config/layouts/utils/utils.import.layout.yml_save @@ -0,0 +1,57 @@ +type: layout +code: utils.import +title: layout du composant d'import +description: layout du composant d'import +layout: + title: __f__`Importer ${o.des_labels(x, 'C')}` + type: form + items: + - items: + - type: message + html: __f__data.importMsg?.html + class: __f__data.importMsg?.class + hidden: __f__!data.importMsg + - hidden: true + items: + - key: importMsg + type: string + + - title: import + display: fieldset + items: + - key: data_file + type: file + title: Fichier d'import + required: __f__!data.tables?.data + description: Choisir un fichier à importer + disabled: __f__data.id_import + - title: Options additionnelles + display: fieldset + type: dict + items: + - key: options.enable_update + title: "Autoriser les mises à jour" + type: boolean + default: __f__false + - key: options.check_only + title: Verifier + description: | + L'import se fait en deux temps. + Une premiere étape de validation des données. + Et une deuxième étape d'insertion et de mise à jour des données. 
+ type: boolean + default: __f__true + + - direction: row + items: + - type: button + color: success + title: Valider + description: Valider + action: import + disabled: __f__!(formGroup.valid ) + - type: button + color: primary + title: Annuler + description: Annuler + action: close diff --git a/frontend/app/components/layout/base/index.ts b/frontend/app/components/layout/base/index.ts new file mode 100644 index 00000000..3975cc54 --- /dev/null +++ b/frontend/app/components/layout/base/index.ts @@ -0,0 +1,25 @@ +import { ModulesLayoutComponent } from './layout.component'; +import { ModulesLayoutDebugComponent } from './layout-debug.component'; +import { ModulesLayoutModalComponent } from './layout-modal.component'; +import { ModulesLayoutSectionComponent } from './layout-section.component'; +import { ModulesLayoutCardComponent } from './layout-card.component'; +import { ModulesLayoutElementComponent } from './layout-element.component'; +import { ModulesLayoutMapComponent } from './layout-map.component'; +import { ModulesLayoutArrayComponent } from './layout-array.component'; +import { ModulesLayoutBreadcrumbsComponent } from './layout-breadcrumbs.component'; +import { ModulesLayoutItemsComponent } from './layout-items.component'; +import { ModulesLayoutMediasComponent } from './layout-medias.component'; + +export default [ + ModulesLayoutMediasComponent, + ModulesLayoutMapComponent, + ModulesLayoutDebugComponent, + ModulesLayoutBreadcrumbsComponent, + ModulesLayoutComponent, + ModulesLayoutModalComponent, + ModulesLayoutSectionComponent, + ModulesLayoutCardComponent, + ModulesLayoutElementComponent, + ModulesLayoutArrayComponent, + ModulesLayoutItemsComponent, +]; diff --git a/frontend/app/components/layout/base/layout-import.component.html b/frontend/app/components/layout/base/layout-import.component.html deleted file mode 100644 index a663b841..00000000 --- a/frontend/app/components/layout/base/layout-import.component.html +++ /dev/null @@ -1,6 +0,0 @@ - diff 
--git a/frontend/app/components/layout/base/layout-import.component.scss b/frontend/app/components/layout/base/layout-import.component.scss deleted file mode 100644 index e69de29b..00000000 diff --git a/frontend/app/components/layout/form/form-element.component.html b/frontend/app/components/layout/form/form-element.component.html index afc87916..64a332c4 100644 --- a/frontend/app/components/layout/form/form-element.component.html +++ b/frontend/app/components/layout/form/form-element.component.html @@ -1,7 +1,7 @@ - @@ -18,9 +18,10 @@ [attr.id]="_id + '_inputfile'" type="file" hidden + [disabled]="computedLayout.disabled" (change)="fileChange($event.target.files)" /> - {{ formControl.value?.name}} + {{ formControl.value?.name }} + {{ computedLayout.title || elementKey }} + + help + + diff --git a/frontend/app/components/layout/form/index.ts b/frontend/app/components/layout/form/index.ts new file mode 100644 index 00000000..7afba9c9 --- /dev/null +++ b/frontend/app/components/layout/form/index.ts @@ -0,0 +1,5 @@ +import { ModulesGenericFormComponent } from './generic-form.component'; +import { ModulesFormElementComponent } from './form-element.component'; +import { ModulesListFormComponent } from './list-form.component'; + +export default [ModulesListFormComponent, ModulesGenericFormComponent, ModulesFormElementComponent]; diff --git a/frontend/app/components/layout/import/index.ts b/frontend/app/components/layout/import/index.ts new file mode 100644 index 00000000..be142bdf --- /dev/null +++ b/frontend/app/components/layout/import/index.ts @@ -0,0 +1,3 @@ +import { ModulesLayoutImportComponent } from './layout-import.component'; + +export default [ModulesLayoutImportComponent]; diff --git a/frontend/app/components/layout/import/layout-import.component.html b/frontend/app/components/layout/import/layout-import.component.html new file mode 100644 index 00000000..465b02b5 --- /dev/null +++ b/frontend/app/components/layout/import/layout-import.component.html @@ -0,0 
+1,69 @@ +

Importer des données

+ + + + + + + + + +
+ +

Veuillez choisir un fichier et appuyer sur Valider

+
+ + +
Chargement du fichier et verification des données terminé
+

- Nombre de lignes {{ importData.res.nb_raw }}

+

- Nombre d'insertion {{ importData.res.nb_insert }}

+

+ - Nombre de mise à jour {{ importData.res.nb_update }} + {{ + importData.options.enable_update + ? '' + : '(MAJ Non + autorisée)' + }} +

+

Veuillez appuyer sur valider pour insérer les données`,

+
+ + +
Import terminé
+

- Nombre de lignes {{ importData.res.nb_raw }}

+

- Nombre d'insertion {{ importData.res.nb_insert }}

+

+ - Nombre de mise à jour {{ importData.res.nb_update }} + {{ + importData.options.enable_update + ? '' + : '(MAJ Non + autorisée)' + }} +

+
+ + + diff --git a/frontend/app/components/layout/import/layout-import.component.scss b/frontend/app/components/layout/import/layout-import.component.scss new file mode 100644 index 00000000..67e68326 --- /dev/null +++ b/frontend/app/components/layout/import/layout-import.component.scss @@ -0,0 +1,10 @@ +.import-msg { + height: 200px; + overflow-y: scroll; +} +:host ::ng-deep .mat-step-header[aria-selected="true"] { + background-color: #c8adf0; + border-radius: 20px; + margin: 10px; + +} \ No newline at end of file diff --git a/frontend/app/components/layout/base/layout-import.component.ts b/frontend/app/components/layout/import/layout-import.component.ts similarity index 58% rename from frontend/app/components/layout/base/layout-import.component.ts rename to frontend/app/components/layout/import/layout-import.component.ts index 1ad01bc6..37461e05 100644 --- a/frontend/app/components/layout/base/layout-import.component.ts +++ b/frontend/app/components/layout/import/layout-import.component.ts @@ -1,23 +1,31 @@ -import { Component, OnInit, Injector, ViewEncapsulation } from '@angular/core'; -import { ModulesLayoutComponent } from './layout.component'; +import { Component, OnInit, Injector, ViewEncapsulation, ViewChild } from '@angular/core'; +import { ModulesLayoutComponent } from '../base/layout.component'; import { ModulesImportService } from '../../../services/import.service'; import { HttpEventType, HttpResponse, HttpErrorResponse } from '@angular/common/http'; +import { MatStepper } from '@angular/material/stepper'; import utils from '../../../utils'; @Component({ selector: 'modules-layout-import', templateUrl: 'layout-import.component.html', styleUrls: ['layout-import.component.scss', '../../base/base.scss'], - encapsulation: ViewEncapsulation.None, }) export class ModulesLayoutImportComponent extends ModulesLayoutComponent implements OnInit { - importData: {}; + importData: any = {}; importLayout: any; // layout pour l'import importContext: any; _mImport: 
ModulesImportService; + step = 0; + uploadPercentDone; + editableStep = false; + + importMsg: any = {}; + + @ViewChild('stepper') stepper: MatStepper; + constructor(_injector: Injector) { super(_injector); this._name = 'layout-import'; @@ -26,6 +34,10 @@ export class ModulesLayoutImportComponent extends ModulesLayoutComponent impleme } postComputeLayout() { + this.initImport(); + } + + initImport() { this.importContext = { module_code: this.context.module_code, object_code: this.context.object_code, @@ -33,14 +45,34 @@ export class ModulesLayoutImportComponent extends ModulesLayoutComponent impleme this.importLayout = { code: 'utils.import', }; + this.importMsg = this._mImport.processMessage(this.importData); + this.importData = {}; + this.setStep(); + } + + setStep() { + // this.editableStep = true; + this.step = this.importData.status == 'READY' ? 1 : this.importData.status == 'DONE' ? 2 : 0; + + if (this.stepper) { + this.editableStep = true; + setTimeout(() => { + const diff = this.step - this.stepper.selectedIndex; + for (let i = 0; i < Math.abs(diff); i++) { + if (diff > 0) { + this.stepper.next(); + } + if (diff < 0) { + this.stepper.previous(); + } + } + this.editableStep = false; + }); + } + this._mImport.processMessage(this.importData); } processImport(context, data) { - data.importMsg = { - html: 'Traitement en cours', - class: null, - }; - this._mLayout.reComputeLayout(); this._mImport .importRequest(context.module_code, context.object_code, data) .pipe() @@ -52,30 +84,7 @@ export class ModulesLayoutImportComponent extends ModulesLayoutComponent impleme this._mLayout.stopActionProcessing(''); const response = importEvent.body as any; this.importData = response; - // if (response.errors?.length) { - // for (let error of response.errors) { - // console.error(`${error.code} : ${error.msg}`); - // } - // data.importMsg = { - // class: 'error', - // html: this._mImport.importHTMLMsgError(response), - // }; - // for (const key of response) { - // 
console.log(key) - // data[key] = response[key] - // } - // return; - // } - - // let txtImport = this._mImport.importHTMLMsgSuccess(response); - - // data.importMsg = { - // class: 'success', - // html: txtImport, - // }; - - // setTimeout(() => this._mLayout.reComputeLayout(), 100); - // this._commonService.regularToaster('success', txtImport); + this.setStep(); } }); } @@ -85,5 +94,9 @@ export class ModulesLayoutImportComponent extends ModulesLayoutComponent impleme if (action == 'import') { return this.processImport(context, data); } + if (action == 'reset') { + this.initImport(); + this._mLayout.stopActionProcessing(''); + } } } diff --git a/frontend/app/components/layout/index.ts b/frontend/app/components/layout/index.ts index 9a736c20..a0667e85 100644 --- a/frontend/app/components/layout/index.ts +++ b/frontend/app/components/layout/index.ts @@ -1,41 +1,11 @@ -import { ModulesLayoutComponent } from './base/layout.component'; -import { ModulesLayoutDebugComponent } from './base/layout-debug.component'; -import { ModulesLayoutModalComponent } from './base/layout-modal.component'; -import { ModulesLayoutSectionComponent } from './base/layout-section.component'; -import { ModulesLayoutCardComponent } from './base/layout-card.component'; -import { ModulesLayoutElementComponent } from './base/layout-element.component'; -import { ModulesLayoutMapComponent } from './base/layout-map.component'; -import { ModulesLayoutArrayComponent } from './base/layout-array.component'; -import { ModulesLayoutBreadcrumbsComponent } from './base/layout-breadcrumbs.component'; -import { ModulesLayoutItemsComponent } from './base/layout-items.component'; -import { ModulesLayoutMediasComponent } from './base/layout-medias.component'; -import { ModulesLayoutObjectComponent } from './object/layout-object.component'; -import { ModulesLayoutObjectTableComponent } from './object/layout-object-table.component'; -import { ModulesLayoutObjectGeoJSONComponent } from 
'./object/layout-object-geojson.component'; -import { ModulesLayoutObjectFiltersComponent } from './object/layout-object-filters.component'; -import { ModulesLayoutImportComponent } from './base/layout-import.component'; -import { ModulesGenericFormComponent } from './form/generic-form.component'; -import { ModulesFormElementComponent } from './form/form-element.component'; -import { ModulesListFormComponent } from './form/list-form.component'; +import ModuleLayoutsBase from './base'; +import ModuleLayoutsObject from './object'; +import ModuleLayoutsImport from './import'; +import ModuleLayoutsForm from './form'; export default [ - ModulesLayoutDebugComponent, - ModulesLayoutBreadcrumbsComponent, - ModulesLayoutComponent, - ModulesLayoutModalComponent, - ModulesLayoutSectionComponent, - ModulesLayoutCardComponent, - ModulesLayoutElementComponent, - ModulesLayoutArrayComponent, - ModulesLayoutItemsComponent, - ModulesLayoutImportComponent, - ModulesLayoutMediasComponent, - ModulesLayoutMapComponent, - ModulesLayoutObjectComponent, - ModulesLayoutObjectGeoJSONComponent, - ModulesLayoutObjectTableComponent, - ModulesLayoutObjectFiltersComponent, - ModulesListFormComponent, - ModulesGenericFormComponent, - ModulesFormElementComponent, + ...ModuleLayoutsBase, + ...ModuleLayoutsObject, + ...ModuleLayoutsImport, + ...ModuleLayoutsForm, ]; diff --git a/frontend/app/components/layout/object/index.ts b/frontend/app/components/layout/object/index.ts new file mode 100644 index 00000000..fd43d039 --- /dev/null +++ b/frontend/app/components/layout/object/index.ts @@ -0,0 +1,11 @@ +import { ModulesLayoutObjectComponent } from './layout-object.component'; +import { ModulesLayoutObjectTableComponent } from './layout-object-table.component'; +import { ModulesLayoutObjectGeoJSONComponent } from './layout-object-geojson.component'; +import { ModulesLayoutObjectFiltersComponent } from './layout-object-filters.component'; + +export default [ + ModulesLayoutObjectComponent, + 
ModulesLayoutObjectGeoJSONComponent, + ModulesLayoutObjectTableComponent, + ModulesLayoutObjectFiltersComponent, +]; diff --git a/frontend/app/services/import.service.ts b/frontend/app/services/import.service.ts index bdbe98f8..cd937e20 100644 --- a/frontend/app/services/import.service.ts +++ b/frontend/app/services/import.service.ts @@ -24,6 +24,43 @@ export class ModulesImportService { ); } + processMessage(data) { + if (!data.id_import) { + return { + html: ` +
Etape 1/3
+
Chargement et validation des données
+ Veuillez choisir un fichier et appuyer sur Valider`, + class: 'info', + }; + } + + if (data.status == 'READY') { + return { + html: ` +
Etape 1/3
+
Insertion des données
+

- Nombre de lignes ${data.res.nb_raw}

+

- Nombre d'insertion ${data.res.nb_insert}

+

- Nombre de mise à jour ${data.res.nb_update} ${ + data.options.enable_update ? '' : '(MAJ Non autorisée)' + }

+ Veuillez appuyer sur valider pour insérer les données`, + class: 'info', + }; + } + + if (data.status == 'DONE') { + return { + html: ` +
Etape 3/3
+
Import Terminé
+ `, + class: 'info', + }; + } + } + importHTMLMsgSuccess(impt) { let txtImport = `
Import réussi
`; let res = impt.res; From 321cc2e8809802b6f1ecbbd4746b54c0f45ea56b Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Fri, 17 Mar 2023 15:57:04 +0100 Subject: [PATCH 032/142] up frontend import --- backend/gn_modulator/imports/mixins/check.py | 6 +- .../tests/import_test/tests/pf_test_ok.csv | 2 +- config/layouts/utils/utils.import.layout.yml | 79 ++++++--- .../layouts/m_sipaf.site_list.layout.yml | 37 +---- frontend/app/components/base/base.scss | 2 +- .../layout/base/layout-map.component.ts | 3 - .../layout/base/layout-modal.component.html | 11 +- .../layout/base/layout-modal.component.scss | 7 +- .../layout/base/layout-modal.component.ts | 3 - .../layout/form/form-element.component.html | 1 - .../layout/form/generic-form.component.ts | 5 +- .../import/layout-import.component.html | 15 +- .../import/layout-import.component.scss | 4 + .../layout/import/layout-import.component.ts | 16 +- .../object/layout-object-table.component.ts | 2 - .../layout/object/layout-object.component.ts | 10 ++ frontend/app/services/import.service.ts | 156 +++++++++++------- frontend/app/services/object.service.ts | 8 + 18 files changed, 217 insertions(+), 150 deletions(-) diff --git a/backend/gn_modulator/imports/mixins/check.py b/backend/gn_modulator/imports/mixins/check.py index ab773544..35da710e 100644 --- a/backend/gn_modulator/imports/mixins/check.py +++ b/backend/gn_modulator/imports/mixins/check.py @@ -97,14 +97,14 @@ def check_required(self): res = SchemaMethods.c_sql_exec_txt(txt_check_required).fetchone() nb_lines = res[0] lines = res[1] - str_lines = lines and ", ".join(map(lambda x: str(x), lines)) or "" + # str_lines = lines and ", ".join(map(lambda x: str(x), lines)) or "" if nb_lines == 0: continue self.add_error( code="ERR_IMPORT_REQUIRED", key=key, lines=lines, - msg=f"La colonne {key} est obligatoire. 
{nb_lines} ligne(s) concernée(s) : [{str_lines}]", + msg="Champs obligatoire à null", ) return @@ -149,6 +149,6 @@ def check_resolve_keys(self): code="ERR_IMPORT_UNRESOLVED", key=key, lines=lines, - msg=f"La colonne {key} est non nulle et n'a pas de correspondance. {nb_lines} ligne(s) concernée(s) : [{str_lines}]", + msg=f"Clé étrangère non résolue", values=values, ) diff --git a/backend/gn_modulator/tests/import_test/tests/pf_test_ok.csv b/backend/gn_modulator/tests/import_test/tests/pf_test_ok.csv index 816973e6..8ce42dfb 100644 --- a/backend/gn_modulator/tests/import_test/tests/pf_test_ok.csv +++ b/backend/gn_modulator/tests/import_test/tests/pf_test_ok.csv @@ -1,4 +1,4 @@ code_passage_faune, geom, id_nomenclature_ouvrage_hydrau_position _TEST01, POINT (43.676265 4.028108),RD _TEST02, POINT (43.676262 4.028108), -_TEST023, POINT (43.676262 4.028108),RG \ No newline at end of file +_TEST02345, POINT (43.676262 4.028108),RG \ No newline at end of file diff --git a/config/layouts/utils/utils.import.layout.yml b/config/layouts/utils/utils.import.layout.yml index 8543508d..9b6a1706 100644 --- a/config/layouts/utils/utils.import.layout.yml +++ b/config/layouts/utils/utils.import.layout.yml @@ -3,39 +3,68 @@ code: utils.import title: layout du composant d'import description: layout du composant d'import layout: - flex: '0' type: form + form_group_id: form_import items: - - flex: '0' + - display: tabs items: - - title: import - display: fieldset + - label: Import items: - - key: data_file - type: file + - hidden: true + items: + - key: id_import + - key: importMsg + - key: errorMsgType + - key: errorMsgLine + - key: errors + - key: status + - type: message + html: __f__data.importMsg.html + class: __f__data.importMsg.class + - display: fieldset title: Fichier d'import - required: __f__!data.tables?.data - description: Choisir un fichier à importer - disabled: __f__data.id_import - - title: Options additionnelles - display: fieldset - type: dict + items: + - key: 
data_file + type: file + title: '__f__data.data_file ? data.data_file.name : "Choisir un fichier"' + required: __f__!data.tables?.data + description: Choisir un fichier à importer + disabled: __f__data.id_import + - display: fieldset + title: Options + items: + - key: options.enable_update + title: "Autoriser les mises à jour" + type: boolean + default: __f__false + - key: options.check_only + title: Verifier + description: | + L'import se fait en deux temps. + Une premiere étape de validation des données. + Et une deuxième étape d'insertion et de mise à jour des données. + type: boolean + default: __f__true + - label: __f__`Erreurs (${data.errors?.length})` + hidden: __f__!data.errors?.length items: - - key: options.enable_update - title: "Autoriser les mises à jour" + - key: error_display_line + title: Afficher les erreurs en ligne type: boolean - default: __f__false - - key: options.check_only - title: Verifier - description: | - L'import se fait en deux temps. - Une premiere étape de validation des données. - Et une deuxième étape d'insertion et de mise à jour des données. 
- type: boolean - default: __f__true + - style: + max-height: 400px + overflow-y: scroll + items: + - type: message + html: __f__data.errorMsgType + class: error + hidden: __f__!!data.error_display_line + - type: message + html: __f__data.errorMsgLine + class: error + hidden: __f__!data.error_display_line - direction: row - flex: '0' items: - type: button color: success @@ -50,7 +79,7 @@ layout: title: nouvel import description: Faire un nouvel import action: reset - hidden: __f__!data.status + hidden: __f__!data.id_import - type: button color: primary diff --git a/config/modules/contrib/m_sipaf/layouts/m_sipaf.site_list.layout.yml b/config/modules/contrib/m_sipaf/layouts/m_sipaf.site_list.layout.yml index 4912b796..423aeb14 100644 --- a/config/modules/contrib/m_sipaf/layouts/m_sipaf.site_list.layout.yml +++ b/config/modules/contrib/m_sipaf/layouts/m_sipaf.site_list.layout.yml @@ -46,41 +46,10 @@ layout: hidden: __f__!o.is_action_allowed(x, 'C') - type: modal modal_name: import + # style: + # width: 1200px items: - title: Importer des passage à faune - type: form - items: - - items: - - type: message - html: __f__data.importMsg?.html - class: __f__data.importMsg?.class - hidden: __f__!data.importMsg - - - key: importMsg - hidden: true - - key: data_file - type: file - title: Fichier d'import - required: true - description: Choisir un fichier à importer - - key: object_code - type: string - hidden: true - default: __f__context.object_code - - - direction: row - items: - - type: button - color: primary - title: Valider - description: Valider - action: import - disabled: __f__!(formGroup.valid ) - - type: button - color: primary - title: Annuler - description: Annuler - action: close + type: import flex: "0" - type: object diff --git a/frontend/app/components/base/base.scss b/frontend/app/components/base/base.scss index 50ed8af0..25c0153c 100644 --- a/frontend/app/components/base/base.scss +++ b/frontend/app/components/base/base.scss @@ -170,7 +170,7 @@ 
div.layout-items > div { background-color: lightgrey; color: grey; border-radius: 15px; - border: solid 1px; + // border: solid 1px; display: inline-block; } diff --git a/frontend/app/components/layout/base/layout-map.component.ts b/frontend/app/components/layout/base/layout-map.component.ts index afd262ba..f40594ac 100644 --- a/frontend/app/components/layout/base/layout-map.component.ts +++ b/frontend/app/components/layout/base/layout-map.component.ts @@ -28,9 +28,6 @@ export class ModulesLayoutMapComponent extends ModulesLayoutComponent implements this.bPostComputeLayout = true; } - /** initialisaiton de la carte */ - postInit(): void {} - /** * action quand un modal (gps, gpx etc... est validé) */ diff --git a/frontend/app/components/layout/base/layout-modal.component.html b/frontend/app/components/layout/base/layout-modal.component.html index 21b9d837..44ed3b39 100644 --- a/frontend/app/components/layout/base/layout-modal.component.html +++ b/frontend/app/components/layout/base/layout-modal.component.html @@ -1,8 +1,13 @@ diff --git a/frontend/app/components/layout/base/layout.component.ts b/frontend/app/components/layout/base/layout.component.ts index 56af53ba..082eb72b 100644 --- a/frontend/app/components/layout/base/layout.component.ts +++ b/frontend/app/components/layout/base/layout.component.ts @@ -225,7 +225,7 @@ export class ModulesLayoutComponent implements OnInit { 'hidden_options', ]) { if (this.parentContext[key] != null || layout[key] != null) { - this.context[key] = this.parentContext[key] || layout[key]; + this.context[key] = layout[key] == null ? 
this.parentContext[key] : layout[key]; } } diff --git a/frontend/app/components/layout/form/form-element.component.html b/frontend/app/components/layout/form/form-element.component.html index f4ce5b58..1ea5a73b 100644 --- a/frontend/app/components/layout/form/form-element.component.html +++ b/frontend/app/components/layout/form/form-element.component.html @@ -44,13 +44,12 @@ [placeholder]="computedLayout.placeholder" [type]="computedLayout.type" [formControl]="formControl" - [required]="computedLayout.required && !context.skip_required" [min]="computedLayout.min" [max]="computedLayout.max" (change)="onInputChange()" [errorStateMatcher]="_mForm.matcher" /> - + - - - {{ computedLayout.hint }} diff --git a/frontend/app/components/layout/form/generic-form.component.ts b/frontend/app/components/layout/form/generic-form.component.ts index d4cbdf94..381ef17a 100644 --- a/frontend/app/components/layout/form/generic-form.component.ts +++ b/frontend/app/components/layout/form/generic-form.component.ts @@ -85,6 +85,7 @@ export class ModulesGenericFormComponent extends ModulesLayoutComponent implemen this.context.appearance = this.layout.appearance; this.context.skip_required = this.layout.skip_required; this._formService.setControls({ context: this.context, layout: this.layout, data: this.data }); + this._formService.updateData(this.data, this.formGroup.value); this.formGroup.valueChanges.subscribe((value) => { this.onFormGroupChange(); }); diff --git a/frontend/app/components/page.component.ts b/frontend/app/components/page.component.ts index 27e3f7f9..a7867246 100644 --- a/frontend/app/components/page.component.ts +++ b/frontend/app/components/page.component.ts @@ -4,6 +4,7 @@ import { ModulesDataService } from '../services/data.service'; import { ModulesLayoutService } from '../services/layout.service'; import { ModulesContextService } from '../services/context.service'; import { ModulesActionService } from '../services/action.service'; +import { ModulesNomenclatureService 
} from '../services/nomenclature.service'; import { ModuleService } from '@geonature/services/module.service'; import { ActivatedRoute } from '@angular/router'; import { mergeMap } from '@librairies/rxjs/operators'; @@ -23,7 +24,7 @@ export class PageComponent implements OnInit { _mAction: ModulesActionService; _mContext: ModulesContextService; _gnModuleService: ModuleService; - + _mNomenclature: ModulesNomenclatureService; debug = false; // pour activer le mode debug (depuis les queryParams) routeParams; // paramètre d'url @@ -51,6 +52,7 @@ export class PageComponent implements OnInit { this._mLayout = this._injector.get(ModulesLayoutService); this._mContext = this._injector.get(ModulesContextService); this._mAction = this._injector.get(ModulesActionService); + this._mNomenclature = this._injector.get(ModulesNomenclatureService); this._gnModuleService = this._injector.get(ModuleService); } @@ -69,6 +71,11 @@ export class PageComponent implements OnInit { this._sub = this._mConfig .init() .pipe( + mergeMap(() => { + // processRigths + + return this._mNomenclature.init(); + }), mergeMap(() => { // processRigths diff --git a/frontend/app/components/test/test-layout.component.ts b/frontend/app/components/test/test-layout.component.ts index c13bc305..50fadf06 100644 --- a/frontend/app/components/test/test-layout.component.ts +++ b/frontend/app/components/test/test-layout.component.ts @@ -69,7 +69,7 @@ export class TestLayoutComponent implements OnInit { layout_definition: '' }); formGroup.patchValue({ - layout_definition: x.utils.YML.dump(event.layout_from_list) + layout_definition: x.u.YML.dump(event.layout_from_list) }); } } diff --git a/frontend/app/services/form.service.ts b/frontend/app/services/form.service.ts index f714fc30..a9eaf7d4 100644 --- a/frontend/app/services/form.service.ts +++ b/frontend/app/services/form.service.ts @@ -225,13 +225,14 @@ export class ModulesFormService { // control pour array if (layout.type == 'array') { let controlData = 
utils.getAttr(data || {}, [...context.data_keys, computedLayout.key]) || []; - if (controlData.length == control.value.length) { - return; + if (controlData.length != control.value.length) { + control.clear(); + for (let [index, elem] of Object.entries(controlData)) { + let elemControl = this.createFormGroup(layout.items, context); + control.push(elemControl); + } } - control.clear(); for (let [index, elem] of Object.entries(controlData)) { - let elemControl = this.createFormGroup(layout.items, context); - control.push(elemControl); const arrayItemContext = { ...context, data_keys: utils.addKey(utils.copy(context.data_keys), `${layout.key}.${index}`), diff --git a/frontend/app/services/index.ts b/frontend/app/services/index.ts index 3d3ef926..12971aca 100644 --- a/frontend/app/services/index.ts +++ b/frontend/app/services/index.ts @@ -12,6 +12,7 @@ import { ModulesTableService } from './table.service'; import { ModulesObjectService } from './object.service'; import { ModulesSchemaService } from './schema.service'; import { ListFormService } from './list-form.service'; +import { ModulesNomenclatureService } from './nomenclature.service'; export default [ ModulesActionService, @@ -28,4 +29,5 @@ export default [ ModulesTableService, ModulesObjectService, ModulesSchemaService, + ModulesNomenclatureService, ]; diff --git a/frontend/app/services/layout.service.ts b/frontend/app/services/layout.service.ts index 2e137ca3..dcf64fe0 100644 --- a/frontend/app/services/layout.service.ts +++ b/frontend/app/services/layout.service.ts @@ -4,12 +4,14 @@ import { Subject } from '@librairies/rxjs'; import { ModulesConfigService } from '../services/config.service'; import { ModulesRequestService } from '../services/request.service'; import { ModulesObjectService } from './object.service'; +import { ModulesNomenclatureService } from './nomenclature.service'; @Injectable() export class ModulesLayoutService { _mConfig: ModulesConfigService; _mRequest: ModulesRequestService; _mObject: 
ModulesObjectService; + _mNomenclature: ModulesNomenclatureService; _utils: any; _utilsObject: any; @@ -20,6 +22,7 @@ export class ModulesLayoutService { constructor(private _injector: Injector) { this._mConfig = this._injector.get(ModulesConfigService); this._mObject = this._injector.get(ModulesObjectService); + this._mNomenclature = this._injector.get(ModulesNomenclatureService); } $reComputeLayout = new Subject(); @@ -57,6 +60,7 @@ export class ModulesLayoutService { today: utils.today, // renvoie la date du jour (defaut) departementsForRegion: utils.departementsForRegion, // liste des dept pour une region YML: utils.YML, + get_cd_nomenclature: this._mNomenclature.get_cd_nomenclature.bind(this._mNomenclature), }; this._utilsObject = this._mObject.utilsObject(); @@ -255,7 +259,7 @@ export class ModulesLayoutService { } strFunction = `{ - const {layout, data, globalData, utils, context, formGroup, o} = x; + const {layout, data, globalData, u, context, formGroup, o} = x; ${strFunction.substr(1)} `; @@ -270,17 +274,18 @@ export class ModulesLayoutService { if (typeof element == 'function') { const globalData = data; - const localData = utils.getAttr(globalData, context.keys); + const localData = utils.getAttr(globalData, context.data_keys); const formGroup = context.form_group_id && this._formControls[context.form_group_id]; const val = element({ layout, data: localData, globalData, - utils: this._utils, + u: this._utils, o: this._utilsObject, context, formGroup, }); + return val !== undefined ? 
val : null; // on veut eviter le undefined } diff --git a/frontend/app/services/nomenclature.service.ts b/frontend/app/services/nomenclature.service.ts new file mode 100644 index 00000000..2e989de1 --- /dev/null +++ b/frontend/app/services/nomenclature.service.ts @@ -0,0 +1,40 @@ +import { Injectable, Injector } from '@angular/core'; +import { ModulesDataService } from './data.service'; +import { mergeMap, map, filter, switchMap } from 'rxjs/operators'; +import { of } from 'rxjs'; + +@Injectable() +export class ModulesNomenclatureService { + _nomenclatures: any[] = []; + + _mData: ModulesDataService; + + constructor(private _injector: Injector) { + this._mData = this._injector.get(ModulesDataService); + this.init().subscribe(() => {}); + } + + init() { + if (this._nomenclatures.length) { + return of(true); + } + return this._mData + .dataRequest('get', 'MODULATOR', 'ref_nom.nomenclature', { + params: { fields: ['id_nomenclature', 'cd_nomenclature'] }, + }) + .pipe( + mergeMap((res) => { + this._nomenclatures = res.data; + return of(true); + }) + ); + } + + get_cd_nomenclature(id_nomenclature) { + if (!(this._nomenclatures.length && !!id_nomenclature)) { + return; + } + let nomenclature = this._nomenclatures.find((n) => n.id_nomenclature == id_nomenclature); + return nomenclature?.cd_nomenclature; + } +} diff --git a/frontend/app/utils/commons.ts b/frontend/app/utils/commons.ts index 21dd18bb..615065a9 100644 --- a/frontend/app/utils/commons.ts +++ b/frontend/app/utils/commons.ts @@ -37,6 +37,10 @@ const addKey = (keys, key) => { }; const getAttr = (obj, paths, index = 0) => { + if (paths == null && index == 0) { + console.log('?????'); + console.trace(); + } if (paths == null) { return obj; } From 256f5de6957611ad922ee86be5170a6c80a0c649 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Wed, 19 Apr 2023 15:43:19 +0200 Subject: [PATCH 077/142] pf diag clotures --- .../tests/test_form_constraint.layout.yml | 21 + config/references/data.reference.yml | 4 +- 
....site_template.module-tempate_defaults.yml | 6 +- .../migrations/data/schema_diagnostic.sql | 299 +++++++++--- .../90f6e5531f7c_diagnostic_m_sipaf.py | 13 +- contrib/m_sipaf/backend/m_sipaf/models.py | 129 ++++- contrib/m_sipaf/config/config.yml | 46 +- .../definitions/m_sipaf.diag.schema.yml | 81 +++- .../m_sipaf.diag_cloture.schema.yml | 29 ++ .../config/features/m_sipaf.utils.data.yml | 446 +++++------------- .../m_sipaf.diagnostic_edit.layout.yml | 92 +++- 11 files changed, 725 insertions(+), 441 deletions(-) create mode 100644 config/layouts/tests/test_form_constraint.layout.yml create mode 100644 contrib/m_sipaf/config/definitions/m_sipaf.diag_cloture.schema.yml diff --git a/config/layouts/tests/test_form_constraint.layout.yml b/config/layouts/tests/test_form_constraint.layout.yml new file mode 100644 index 00000000..f65d8cb9 --- /dev/null +++ b/config/layouts/tests/test_form_constraint.layout.yml @@ -0,0 +1,21 @@ +type: layout +code: test_form_constraint +title: Layout formulaire et contraintes +description: Layout de test pour l'affichage + +data: + +layout: + title: Test Form and display + items: + - direction: row + items: + - title: Formulaire + type: form + items: + - key: a + type: string + - key: b + type: string + required: __f__data.a==1 + disabled: __f__data.a!=1 diff --git a/config/references/data.reference.yml b/config/references/data.reference.yml index 7fd1112b..7cb3b080 100644 --- a/config/references/data.reference.yml +++ b/config/references/data.reference.yml @@ -33,7 +33,9 @@ properties: items: type: array items: - type: object + OneOf: + - type: object, + - type: array required: - items - schema_code diff --git a/contrib/m_monitoring/config/m_monitoring.site_template.module-tempate_defaults.yml b/contrib/m_monitoring/config/m_monitoring.site_template.module-tempate_defaults.yml index 7aaa3bdb..a146b8f6 100644 --- a/contrib/m_monitoring/config/m_monitoring.site_template.module-tempate_defaults.yml +++ 
b/contrib/m_monitoring/config/m_monitoring.site_template.module-tempate_defaults.yml @@ -41,7 +41,7 @@ site_filters_fields: sort: code_name filters: | __f__data?.region - ? `area_code in ${utils.departementsForRegion(data.region.area_code).join(';')}` + ? `area_code in ${u.departementsForRegion(data.region.area_code).join(';')}` : null - key: commune @@ -54,7 +54,7 @@ site_filters_fields: __f__data?.departement ? `area_code like ${data.departement.area_code}%` : data?.region - ? utils.departementsForRegion(data.region.area_code) + ? u.departementsForRegion(data.region.area_code) .map(departementCode => `area_code like ${departementCode}%`) .join(',|,') : null @@ -166,7 +166,7 @@ visit_form_fields: default: __f__context.current_user?.id_role hidden: true - key: date_min - default: __f__utils.today() + default: u.today() - date_max - key: id_module object_code: module diff --git a/contrib/m_sipaf/backend/m_sipaf/migrations/data/schema_diagnostic.sql b/contrib/m_sipaf/backend/m_sipaf/migrations/data/schema_diagnostic.sql index 432fff07..a6083c0a 100644 --- a/contrib/m_sipaf/backend/m_sipaf/migrations/data/schema_diagnostic.sql +++ b/contrib/m_sipaf/backend/m_sipaf/migrations/data/schema_diagnostic.sql @@ -1,56 +1,247 @@ -- schema diagnostic +-- table diagnostic +CREATE TABLE pr_sipaf.t_diagnostics ( + id_diagnostic SERIAL NOT NULL, + id_passage_faune INTEGER NOT NULL, + id_role INTEGER, + id_organisme INTEGER, + date_diagnostic DATE NOT NULL, + commentaire_diagnostic VARCHAR, + commentaire_perturbation_obstacle VARCHAR, + obstacle_autre VARCHAR, + perturbation_autre VARCHAR, + id_nomenclature_diagnostic_ouvrage_hydrau_racc_banq INTEGER, + amenagement_biodiv_autre VARCHAR +); - -- table diagnostic - - CREATE TABLE pr_sipaf.t_diagnostics ( - id_diagnostic SERIAL NOT NULL, - id_passage_faune INTEGER NOT NULL, - id_role INTEGER, - id_organisme INTEGER, - date_diagnostic DATE NOT NULL, - comment VARCHAR, - obstacle_autre VARCHAR - ); - - ALTER TABLE pr_sipaf.t_diagnostics 
- ADD CONSTRAINT pk_sipaf_t_diagnostic_id_diagnostic PRIMARY KEY (id_diagnostic); - - ALTER TABLE pr_sipaf.t_diagnostics - ADD CONSTRAINT fk_sipaf_t_diag_t_pf_id_passage_faune FOREIGN KEY (id_passage_faune) - REFERENCES pr_sipaf.t_passages_faune(id_passage_faune) - ON UPDATE CASCADE ON DELETE CASCADE; - - ALTER TABLE pr_sipaf.t_diagnostics - ADD CONSTRAINT fk_sipaf_t_diag_t_rol_id_role FOREIGN KEY (id_role) - REFERENCES utilisateurs.t_roles(id_role) - ON UPDATE CASCADE ON DELETE SET NULL; - - ALTER TABLE pr_sipaf.t_diagnostics - ADD CONSTRAINT fk_sipaf_t_diag_b_org_id_organisme FOREIGN KEY (id_organisme) - REFERENCES utilisateurs.bib_organismes(id_organisme) - ON UPDATE CASCADE ON DELETE SET NULL; - - - -- cor diag nomenclature obstacle - CREATE TABLE IF NOT EXISTS pr_sipaf.cor_diag_nomenclature_obstacle ( - id_diagnostic INTEGER NOT NULL NOT NULL, - id_nomenclature INTEGER NOT NULL NOT NULL - ); - - ALTER TABLE pr_sipaf.cor_diag_nomenclature_obstacle - ADD CONSTRAINT pk_pr_sipaf_cor_diag_nomenclature_obstacle_id_diagnostic_id_nomenclature PRIMARY KEY (id_diagnostic, id_nomenclature); - - ALTER TABLE pr_sipaf.cor_diag_nomenclature_obstacle - ADD CONSTRAINT fk_pr_sipaf_cor_diag_nomenclature_obstacle_id_diagnostic FOREIGN KEY (id_diagnostic) - REFERENCES pr_sipaf.t_diagnostics (id_diagnostic) - ON UPDATE CASCADE ON DELETE CASCADE; - - ALTER TABLE pr_sipaf.cor_diag_nomenclature_obstacle - ADD CONSTRAINT fk_pr_sipaf_cor_diag_nomenclature_obstacle_id_nomenclature FOREIGN KEY (id_nomenclature) - REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) - ON UPDATE CASCADE ON DELETE CASCADE; - - ALTER TABLE pr_sipaf.cor_diag_nomenclature_obstacle - ADD CONSTRAINT check_nom_type_pr_sipaf_cor_diag_nomenclature_obstacle_id_ure_pf_ype - CHECK (ref_nomenclatures.check_nomenclature_type_by_mnemonique(id_nomenclature,'PF_DIAG_OBSTACLE')) - NOT VALID; +ALTER TABLE + pr_sipaf.t_diagnostics +ADD + CONSTRAINT pk_sipaf_t_diagnostic_id_diagnostic PRIMARY KEY (id_diagnostic); + 
+ALTER TABLE + pr_sipaf.t_diagnostics +ADD + CONSTRAINT fk_sipaf_t_diag_t_pf_id_passage_faune FOREIGN KEY (id_passage_faune) REFERENCES pr_sipaf.t_passages_faune(id_passage_faune) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE + pr_sipaf.t_diagnostics +ADD + CONSTRAINT fk_sipaf_t_diag_t_rol_id_role FOREIGN KEY (id_role) REFERENCES utilisateurs.t_roles(id_role) ON UPDATE CASCADE ON DELETE +SET + NULL; + +ALTER TABLE + pr_sipaf.t_diagnostics +ADD + CONSTRAINT fk_sipaf_t_diag_b_org_id_organisme FOREIGN KEY (id_organisme) REFERENCES utilisateurs.bib_organismes(id_organisme) ON UPDATE CASCADE ON DELETE +SET + NULL; + +-- cor diag nomenclature obstacle +CREATE TABLE IF NOT EXISTS pr_sipaf.cor_diag_nomenclature_obstacle ( + id_diagnostic INTEGER NOT NULL NOT NULL, + id_nomenclature INTEGER NOT NULL NOT NULL +); + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_obstacle +ADD + CONSTRAINT pk_pr_sipaf_cor_diag_nomenclature_obstacle_id_diagnostic_id_nomenclature PRIMARY KEY (id_diagnostic, id_nomenclature); + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_obstacle +ADD + CONSTRAINT fk_pr_sipaf_cor_diag_nomenclature_obstacle_id_diagnostic FOREIGN KEY (id_diagnostic) REFERENCES pr_sipaf.t_diagnostics (id_diagnostic) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_obstacle +ADD + CONSTRAINT fk_pr_sipaf_cor_diag_nomenclature_obstacle_id_nomenclature FOREIGN KEY (id_nomenclature) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + +-- cor diag nomenclature perturbation +CREATE TABLE IF NOT EXISTS pr_sipaf.cor_diag_nomenclature_perturbation ( + id_diagnostic INTEGER NOT NULL NOT NULL, + id_nomenclature INTEGER NOT NULL NOT NULL +); + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_perturbation +ADD + CONSTRAINT pk_pr_sipaf_cor_diag_nomenclature_perturbation_id_diagnostic_id_nomenclature PRIMARY KEY (id_diagnostic, id_nomenclature); + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_perturbation +ADD 
+ CONSTRAINT fk_pr_sipaf_cor_diag_nomenclature_perturbation_id_diagnostic FOREIGN KEY (id_diagnostic) REFERENCES pr_sipaf.t_diagnostics (id_diagnostic) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_perturbation +ADD + CONSTRAINT fk_pr_sipaf_cor_diag_nomenclature_perturbation_id_nomenclature FOREIGN KEY (id_nomenclature) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE + pr_sipaf.t_diagnostics +ADD + CONSTRAINT fk_pr_sipaf_t_d_id_nomenclature_diagnostic_ouvrage_hydrau_racc_banq FOREIGN KEY ( + id_nomenclature_diagnostic_ouvrage_hydrau_racc_banq + ) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + +-- cor diag nomenclature ouvrage_hydrau_etat_berge +CREATE TABLE IF NOT EXISTS pr_sipaf.cor_diag_nomenclature_ouvrage_hydrau_etat_berge ( + id_diagnostic INTEGER NOT NULL NOT NULL, + id_nomenclature INTEGER NOT NULL NOT NULL +); + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_ouvrage_hydrau_etat_berge +ADD + CONSTRAINT pk_ouvrage_hydray_etat_berge_id_diagnostic_id_nomenclature PRIMARY KEY (id_diagnostic, id_nomenclature); + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_ouvrage_hydrau_etat_berge +ADD + CONSTRAINT fk_ouvrage_hydray_etat_berge_id_diagnostic FOREIGN KEY (id_diagnostic) REFERENCES pr_sipaf.t_diagnostics (id_diagnostic) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_ouvrage_hydrau_etat_berge +ADD + CONSTRAINT fk_ouvrage_hydray_etat_berge_id_nomenclature FOREIGN KEY (id_nomenclature) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + +-- cor diag nomenclature ouvrage_hydreau_dimensionnement +CREATE TABLE IF NOT EXISTS pr_sipaf.cor_diag_nomenclature_ouvrage_hydrau_dim ( + id_diagnostic INTEGER NOT NULL NOT NULL, + id_nomenclature INTEGER NOT NULL NOT NULL +); + +ALTER TABLE + 
pr_sipaf.cor_diag_nomenclature_ouvrage_hydrau_dim +ADD + CONSTRAINT pk_pr_sipaf_cor_diag_nomenclature_ouvrage_hydrau_dim_id_diagnostic_id_nomenclature PRIMARY KEY (id_diagnostic, id_nomenclature); + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_ouvrage_hydrau_dim +ADD + CONSTRAINT fk_pr_sipaf_cor_diag_nomenclature_ouvrage_hydrau_dim_id_diagnostic FOREIGN KEY (id_diagnostic) REFERENCES pr_sipaf.t_diagnostics (id_diagnostic) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_ouvrage_hydrau_dim +ADD + CONSTRAINT fk_pr_sipaf_cor_diag_nomenclature_ouvrage_hydrau_dim_id_nomenclature FOREIGN KEY (id_nomenclature) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + +-- cor diag nomenclature amenagement_biodiv +CREATE TABLE IF NOT EXISTS pr_sipaf.cor_diag_nomenclature_amenagement_biodiv ( + id_diagnostic INTEGER NOT NULL NOT NULL, + id_nomenclature INTEGER NOT NULL NOT NULL +); + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_amenagement_biodiv +ADD + CONSTRAINT pk_pr_sipaf_cor_diag_nomenclature_amenagement_biodiv_id_diagnostic_id_nomenclature PRIMARY KEY (id_diagnostic, id_nomenclature); + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_amenagement_biodiv +ADD + CONSTRAINT fk_pr_sipaf_cor_diag_nomenclature_amenagement_biodiv_id_diagnostic FOREIGN KEY (id_diagnostic) REFERENCES pr_sipaf.t_diagnostics (id_diagnostic) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_amenagement_biodiv +ADD + CONSTRAINT fk_pr_sipaf_cor_diag_nomenclature_amenagement_biodiv_id_nomenclature FOREIGN KEY (id_nomenclature) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + +-- clôture guidage +CREATE TABLE IF NOT EXISTS pr_sipaf.t_diagnostic_clotures ( + id_diagnostic INTEGER NOT NULL NOT NULL, + id_nomenclature_clotures_guidage_type INTEGER NOT NULL NOT NULL, + id_nomenclature_clotures_guidage_etat INTEGER NOT NULL NOT NULL, + 
clotures_guidage_type_autre VARCHAR, + clotures_guidage_etat_autre VARCHAR +); + +ALTER TABLE + pr_sipaf.t_diagnostic_clotures +ADD + CONSTRAINT fk_pr_sipaf_t_diagnostic_clotures_id_diagnostic FOREIGN KEY (id_diagnostic) REFERENCES pr_sipaf.t_diagnostics (id_diagnostic) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE + pr_sipaf.t_diagnostic_clotures +ADD + CONSTRAINT fk_pr_sipaf_t_d_c_g_id_nomenclature_clotures_guidage_type FOREIGN KEY (id_nomenclature_clotures_guidage_type) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE + pr_sipaf.t_diagnostic_clotures +ADD + CONSTRAINT fk_pr_sipaf_t_d_c_g_id_nomenclature_clotures_guidage_etat FOREIGN KEY (id_nomenclature_clotures_guidage_etat) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + +-- check constraint nomenclature type +ALTER TABLE + pr_sipaf.t_diagnostics +ADD + CONSTRAINT check_nom_type_diag_ouvr_hydrau_racc_banq_id_ure_pf_ype CHECK ( + ref_nomenclatures.check_nomenclature_type_by_mnemonique( + id_nomenclature_diagnostic_ouvrage_hydrau_racc_banq, + 'PF_DIAG_OUVRAGE_HYDRAULIQUE_RACCORDEMENT_BANQUETTE' + ) + ) NOT VALID; + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_obstacle +ADD + CONSTRAINT check_nom_type_pr_sipaf_cor_diag_nomenclature_obstacle_id_ure_pf_ype CHECK ( + ref_nomenclatures.check_nomenclature_type_by_mnemonique(id_nomenclature, 'PF_DIAG_OBSTACLE') + ) NOT VALID; + +ALTER TABLE + pr_sipaf.t_diagnostic_clotures +ADD + CONSTRAINT check_nom_type_diag_clot_gui_type CHECK ( + ref_nomenclatures.check_nomenclature_type_by_mnemonique( + id_nomenclature_clotures_guidage_type, + 'PF_DIAG_CLOTURES_GUIDAGE_TYPE' + ) + ) NOT VALID; + +ALTER TABLE + pr_sipaf.t_diagnostic_clotures +ADD + CONSTRAINT check_nom_type_diag_clot_gui_etat CHECK ( + ref_nomenclatures.check_nomenclature_type_by_mnemonique( + id_nomenclature_clotures_guidage_etat, + 'PF_DIAG_CLOTURES_GUIDAGE_ETAT' + ) + ) NOT VALID; + 
+ALTER TABLE + pr_sipaf.cor_diag_nomenclature_amenagement_biodiv +ADD + CONSTRAINT check_nom_type_pr_sipaf_cor_diag_nomenclature_amenagement_biodiv_id_ure_pf_ype CHECK ( + ref_nomenclatures.check_nomenclature_type_by_mnemonique(id_nomenclature, 'PF_DIAG_AMENAGEMENT_BIODIV') + ) NOT VALID; + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_ouvrage_hydrau_dim +ADD + CONSTRAINT check_nom_type_pr_sipaf_cor_diag_nomenclature_ouvrage_hydrau_dim_id_ure_pf_ype CHECK ( + ref_nomenclatures.check_nomenclature_type_by_mnemonique( + id_nomenclature, + 'PF_DIAG_OUVRAGE_HYDRAU_DIMENSIONNEMENT' + ) + ) NOT VALID; + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_ouvrage_hydrau_etat_berge +ADD + CONSTRAINT check_nom_type_ouvrage_hydrau_etat_berge_id_ure_pf_ype CHECK ( + ref_nomenclatures.check_nomenclature_type_by_mnemonique( + id_nomenclature, + 'PF_DIAG_OUVRAGE_HYDRAULIQUE_ETAT_BERGE' + ) + ) NOT VALID; + +ALTER TABLE + pr_sipaf.cor_diag_nomenclature_perturbation +ADD + CONSTRAINT check_nom_type_pr_sipaf_cor_diag_nomenclature_perturbation_id_ure_pf_ype CHECK ( + ref_nomenclatures.check_nomenclature_type_by_mnemonique(id_nomenclature, 'PF_DIAG_PERTURBATION') + ) NOT VALID; \ No newline at end of file diff --git a/contrib/m_sipaf/backend/m_sipaf/migrations/versions/90f6e5531f7c_diagnostic_m_sipaf.py b/contrib/m_sipaf/backend/m_sipaf/migrations/versions/90f6e5531f7c_diagnostic_m_sipaf.py index a127dfba..485defac 100644 --- a/contrib/m_sipaf/backend/m_sipaf/migrations/versions/90f6e5531f7c_diagnostic_m_sipaf.py +++ b/contrib/m_sipaf/backend/m_sipaf/migrations/versions/90f6e5531f7c_diagnostic_m_sipaf.py @@ -28,10 +28,17 @@ def upgrade(): def downgrade(): + if_exists = "" + # if_exists = "IF EXISTS" op.execute( - """ - DROP TABLE IF EXISTS pr_sipaf.cor_diag_nomenclature_obstacle; - DROP TABLE IF EXISTS pr_sipaf.t_diagnostics; + f""" + DROP TABLE {if_exists} pr_sipaf.cor_diag_nomenclature_obstacle; + DROP TABLE {if_exists} pr_sipaf.cor_diag_nomenclature_perturbation; + DROP TABLE {if_exists} 
pr_sipaf.cor_diag_nomenclature_ouvrage_hydrau_etat_berge; + DROP TABLE {if_exists} pr_sipaf.cor_diag_nomenclature_ouvrage_hydrau_dim; + DROP TABLE {if_exists} pr_sipaf.cor_diag_nomenclature_amenagement_biodiv; + DROP TABLE {if_exists} pr_sipaf.t_diagnostic_clotures; + DROP TABLE {if_exists} pr_sipaf.t_diagnostics; """ ) pass diff --git a/contrib/m_sipaf/backend/m_sipaf/models.py b/contrib/m_sipaf/backend/m_sipaf/models.py index a3c17465..6270d3db 100644 --- a/contrib/m_sipaf/backend/m_sipaf/models.py +++ b/contrib/m_sipaf/backend/m_sipaf/models.py @@ -274,11 +274,70 @@ class CorDiagObstacle(db.Model): ) +class CorDiagPerturbation(db.Model): + __tablename__ = "cor_diag_nomenclature_perturbation" + __table_args__ = {"schema": "pr_sipaf"} + + id_diagnostic = db.Column( + db.Integer, db.ForeignKey("pr_sipaf.t_diagnostics.id_diagnostic"), primary_key=True + ) + id_nomenclature = db.Column( + db.Integer, + db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + primary_key=True, + ) + + +class CorDiagOuvrageHydrauEtatBerge(db.Model): + __tablename__ = "cor_diag_nomenclature_ouvrage_hydrau_etat_berge" + __table_args__ = {"schema": "pr_sipaf"} + + id_diagnostic = db.Column( + db.Integer, db.ForeignKey("pr_sipaf.t_diagnostics.id_diagnostic"), primary_key=True + ) + id_nomenclature = db.Column( + db.Integer, + db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + primary_key=True, + ) + + +class CorDiagOuvrageHydrauDim(db.Model): + __tablename__ = "cor_diag_nomenclature_ouvrage_hydrau_dim" + __table_args__ = {"schema": "pr_sipaf"} + + id_diagnostic = db.Column( + db.Integer, db.ForeignKey("pr_sipaf.t_diagnostics.id_diagnostic"), primary_key=True + ) + id_nomenclature = db.Column( + db.Integer, + db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + primary_key=True, + ) + + +class CorDiagAmenagementBiodiv(db.Model): + __tablename__ = "cor_diag_nomenclature_amenagement_biodiv" + __table_args__ = {"schema": "pr_sipaf"} + + 
id_diagnostic = db.Column( + db.Integer, db.ForeignKey("pr_sipaf.t_diagnostics.id_diagnostic"), primary_key=True + ) + id_nomenclature = db.Column( + db.Integer, + db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + primary_key=True, + ) + + class Diagnostic(db.Model): __tablename__ = "t_diagnostics" __table_args__ = {"schema": "pr_sipaf"} + # communs id_diagnostic = db.Column(db.Integer, primary_key=True) + date_diagnostic = db.Column(db.Date, nullable=False) + commentaire_diagnostic = db.Column(db.Unicode) id_passage_faune = db.Column( db.Integer, @@ -289,20 +348,80 @@ class Diagnostic(db.Model): ), nullable=False, ) - passage_faune = db.relationship(PassageFaune, backref="diagnostics") - date_diagnostic = db.Column(db.Date, nullable=False) - id_role = db.Column(db.Integer, db.ForeignKey("utilisateurs.t_roles.id_role")) role = db.relationship(User) id_organisme = db.Column(db.Integer, db.ForeignKey("utilisateurs.bib_organismes.id_organisme")) organisme = db.relationship(Organisme) - comment = db.Column(db.Unicode) + # perturbation / obstacle + + commentaire_perturbation_obstacle = db.Column(db.Unicode) + obstacle_autre = db.Column(db.Unicode) + perturbation_autre = db.Column(db.Unicode) + + id_nomenclature_diagnostic_ouvrage_hydrau_racc_banq = db.Column( + db.Integer, db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature") + ) + nomenclature_diagnostic_ouvrage_hydrau_raccordement_banquette = db.relationship( + TNomenclatures, + foreign_keys=[id_nomenclature_diagnostic_ouvrage_hydrau_racc_banq], + ) nomenclatures_diagnostic_obstacle = db.relationship( TNomenclatures, secondary=CorDiagObstacle.__table__ ) - obstacle_autre = db.Column(db.Unicode) + + nomenclatures_diagnostic_perturbation = db.relationship( + TNomenclatures, secondary=CorDiagPerturbation.__table__ + ) + + nomenclatures_diagnostic_ouvrage_hydrau_etat_berge = db.relationship( + TNomenclatures, secondary=CorDiagOuvrageHydrauEtatBerge.__table__ + ) + + 
nomenclatures_diagnostic_ouvrage_hydrau_dim = db.relationship( + TNomenclatures, secondary=CorDiagOuvrageHydrauDim.__table__ + ) + + # Amenagements + amenagement_biodiv_autre = db.Column(db.Unicode) + + nomenclatures_diagnostic_amenagement_biodiv = db.relationship( + TNomenclatures, secondary=CorDiagAmenagementBiodiv.__table__ + ) + + +class DiagnosticCloture(db.Model): + __tablename__ = "t_diagnostic_clotures" + __table_args__ = {"schema": "pr_sipaf"} + + id_diagnostic = db.Column( + db.Integer, db.ForeignKey("pr_sipaf.t_diagnostics.id_diagnostic"), primary_key=True + ) + id_nomenclature_clotures_guidage_type = db.Column( + db.Integer, + db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + primary_key=True, + nullable=False, + ) + id_nomenclature_clotures_guidage_etat = db.Column( + db.Integer, + db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + primary_key=True, + nullable=False, + ) + + clotures_guidage_type_autre = db.Column(db.Unicode) + clotures_guidage_etat_autre = db.Column(db.Unicode) + + diagnostic = db.relationship(Diagnostic, backref="clotures") + + nomenclature_clotures_guidage_etat = db.relationship( + TNomenclatures, foreign_keys=[id_nomenclature_clotures_guidage_etat] + ) + nomenclature_clotures_guidage_type = db.relationship( + TNomenclatures, foreign_keys=[id_nomenclature_clotures_guidage_type] + ) diff --git a/contrib/m_sipaf/config/config.yml b/contrib/m_sipaf/config/config.yml index 62d8038a..8c192174 100644 --- a/contrib/m_sipaf/config/config.yml +++ b/contrib/m_sipaf/config/config.yml @@ -43,7 +43,7 @@ site_filters_fields: sort: code_name filters: | __f__data?.region - ? `area_code in ${utils.departementsForRegion(data.region.area_code).join(';')}` + ? 
`area_code in ${u.departementsForRegion(data.region.area_code).join(';')}` : null - key: commune @@ -219,7 +219,6 @@ site_details_fields: - date_diagnostic - organisme.nom_organisme - role.nom_complet - - comment prefilters: __f__`id_passage_faune = ${o.object(x, 'site').value}` sort: date_diagnostic- @@ -351,39 +350,26 @@ site_form_fields: type_widget: medias schema_dot_table: pr_sipaf.t_passages_faune details: [] -diagnostic_form_fields: + + + +diagnostic_details_fields: overflow: true items: - - hidden: true - items: - - id_diagnostic - - id_passage_faunes - - key: id_passage_faune - default: __f__context.params.id_passage_faune - - id_organisme - - id_role + - passage_faune.code_passage_faune - date_diagnostic + - organisme.nom_organisme + - role.nom_complet - direction: row items: - - key: nomenclatures_diagnostic_obstacle - additional_fields: - - cd_nomenclature - type: list_form - multiple: true - return_object: true + - nomenclatures_diagnostic_obstacle.label_fr - key: obstacle_autre hidden: __f__!data.nomenclatures_diagnostic_obstacle?.some(n => n.cd_nomenclature == 'AUT') - required: __f__data.nomenclatures_diagnostic_obstacle?.some(n => n.cd_nomenclature == 'AUT') - -diagnostic_details_fields: - overflow: true - items: - - hidden: true + - direction: row items: - - nomenclatures_diagnostic_obstacle.cd_nomenclature - - passage_faune.code_passage_faune - - organisme.nom_organisme - - role.nom_complet - - nomenclatures_diagnostic_obstacle.label_fr - - key: obstacle_autre - hidden: __f__!data.nomenclatures_diagnostic_obstacle?.some(n => n.cd_nomenclature == 'AUT') \ No newline at end of file + - nomenclatures_diagnostic_perturbation.label_fr + - key: perturbation_autre + hidden: __f__!data.nomenclatures_diagnostic_perturbation?.some(n => n.cd_nomenclature == 'AUT') + - nomenclatures_diagnostic_ouvrage_hydrau_etat_berge.label_fr + - nomenclature_diagnostic_ouvrage_hydrau_raccordement_banquette.label_fr + - nomenclatures_diagnostic_ouvrage_hydrau_dim.label_fr 
diff --git a/contrib/m_sipaf/config/definitions/m_sipaf.diag.schema.yml b/contrib/m_sipaf/config/definitions/m_sipaf.diag.schema.yml index 2e85b8d0..d577b5cd 100644 --- a/contrib/m_sipaf/config/definitions/m_sipaf.diag.schema.yml +++ b/contrib/m_sipaf/config/definitions/m_sipaf.diag.schema.yml @@ -4,27 +4,62 @@ title: schema diagnostic passage faune description: schema pour les diagnostics des passages à faune meta: - autoschema: true - model: m_sipaf.models.Diagnostic - module_code: m_sipaf - genre: M - label: diagnostic - label_field_name: date_diagnostic - unique: - - id_passage_faune - - date_diagnostic - - id_organism - unique_in_db: true + autoschema: true + model: m_sipaf.models.Diagnostic + module_code: m_sipaf + genre: M + label: diagnostic + label_field_name: date_diagnostic + unique: + - id_passage_faune + - date_diagnostic + - id_organism + unique_in_db: true properties: - date_diagnostic: - title: Date - id_role: - title: Agent - id_organisme: - title: Organisme - comment: - title: Commentaire - nomenclatures_diagnostic_obstacle: - title: Obstacle(s) - obstacle_autre: - title: Obstacle autre \ No newline at end of file + # commons + date_diagnostic: + title: Date + description: Date d'établissemnt du diagnostic de fonctionalité + id_role: + title: Responsable du suivi + description: Personne en charge du suivi + id_organisme: + title: Organisme + description: Organisme en charge du suivi + commentaire_diagnostic: + title: Commentaire (diagnostic) + description: champs libre pour information complémentaire indicatives + + # obstacles + commentaire_perturbation_obstacle: + title: Commentaire (perturbation / obstacle) + description: champs libre pour information complémentaire indicatives + nomenclatures_diagnostic_obstacle: + title: Obstacle(s) + description: Obstacles aux déplacement pouvant affecter la fonctionalité d'n ouvrage + obstacle_autre: + title: Obstacle autre + nomenclatures_diagnostic_perturbation: + title: Perturbation(s) + description: 
Éléments pouvant perturber la fonctionalité d'un ouvrage + perturbation_autre: + title: Perturbation autre + nomenclatures_diagnostic_ouvrage_hydrau_etat_berge: + title: État des berges + description: État des berges à l'entrée de l'ouvrage (ouvrage hydraulique) + id_nomenclature_diagnostic_ouvrage_hydrau_racc_banq: + title: Raccordement banquette-berge + description: État du raccordement entre la banquette et la berge aux sorties d'un ouvrage mixte hydraulique + nomenclatures_diagnostic_ouvrage_hydrau_dim: + title: Défaut de dimensionnement + description: Dimensions de l'ouvrage hydraulique inadaptées + + # amenagements + nomenclatures_diagnostic_amenagement_biodiv: + title: Types d'aménagements + description: Types d'aménagement complémentaires en faveur de la faune + amenagement_biodiv_autre: + title: Aménagement autre + clotures: + title: Clôtures + description: Présence de clôtures pouvant guider les animaux vers le passage \ No newline at end of file diff --git a/contrib/m_sipaf/config/definitions/m_sipaf.diag_cloture.schema.yml b/contrib/m_sipaf/config/definitions/m_sipaf.diag_cloture.schema.yml new file mode 100644 index 00000000..d06d492a --- /dev/null +++ b/contrib/m_sipaf/config/definitions/m_sipaf.diag_cloture.schema.yml @@ -0,0 +1,29 @@ +type: schema +code: m_sipaf.diag_cloture +title: schema diagnostic passage faune (clotures) +description: schema pour les diagnostics des passages à faune (clotures) + +meta: + autoschema: true + model: m_sipaf.models.DiagnosticCloture + module_code: m_sipaf + genre: M + label: Diagnostic de clôture + labels: Diagnostics de clôture + label_field_name: id_diagnostic + unique: + - id_diagnostic + - id_nomenclature_clotures_guidage_type + - id_nomenclature_clotures_guidage_etat + unique_in_db: true +properties: + clotures_guidage_type_autre: + title: Autre type de clôture + clotures_guidage_etat_autre: + title: Autre état de clôture + id_nomenclature_clotures_guidage_type: + title: Nature des clôtures + description: 
Nature des clôtures pouvant guider les animaux vers le passage + id_nomenclature_clotures_guidage_etat: + title: État des clôtures + description: État des clôtures pouvant guider les animaux vers le passage diff --git a/contrib/m_sipaf/config/features/m_sipaf.utils.data.yml b/contrib/m_sipaf/config/features/m_sipaf.utils.data.yml index 215a8521..02c75fb5 100644 --- a/contrib/m_sipaf/config/features/m_sipaf.utils.data.yml +++ b/contrib/m_sipaf/config/features/m_sipaf.utils.data.yml @@ -18,331 +18,135 @@ items: pk_field: id_passage_faune uuid_field_name: uuid_passage_faune - schema_code: ref_nom.type + keys: [mnemonique, label_default, definition_default] + defaults: + source: SIPAF items: - - mnemonique: PF_OUVRAGE_MATERIAUX - label_default: Matériaux - definition_default: "Matériaux composant l'ouvrage" - source: SIPAF - - mnemonique: PF_OUVRAGE_HYDRAULIQUE_POSITION - label_default: OH Position - definition_default: "Position de l'ouvrage hydrolique" - source: SIPAF - - mnemonique: PF_OUVRAGE_HYDRAULIQUE_BANQ_CARACT - label_default: OH Caractérisation banquette - definition_default: Caractérisation de la banquette pour un ouvrage hydraulique - source: SIPAF - - mnemonique: PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE - label_default: OH Type de banquette - definition_default: Type de banquette pour un ouvrage hydrolique - source: SIPAF - - mnemonique: PF_INFRASTRUCTURE_TYPE - label_default: "Type d'infrastructure" - definition_default: "Type d'infrastructure pour les passages à faune" - source: SIPAF - - mnemonique: PF_OUVRAGE_SPECIFICITE - label_default: Spécificité - definition_default: Exclusivité du passage pour le passage de la faune - source: SIPAF - - mnemonique: PF_OUVRAGE_TYPE - label_default: "Type d'ouvrage" - definition_default: "Type d'ouvrage d'art pour le passage faune" - source: SIPAF - - mnemonique: PF_TYPE_ACTOR - label_default: "Type d'acteur" - definition_default: "Type d'acteur pour les passages faune" - source: SIPAF - - mnemonique: PF_DIAG_OBSTACLE - 
label_default: "Diagnostic obstacle" - definition_default: "Diagnostic obstacle" - source: SIPAF + - [PF_OUVRAGE_MATERIAUX, Matériaux, "Matériaux composant l'ouvrage"] + - [PF_OUVRAGE_HYDRAULIQUE_POSITION, OH Position, "Position de l'ouvrage hydrolique"] + - [PF_OUVRAGE_HYDRAULIQUE_BANQ_CARACT, OH Caractérisation banquette, Caractérisation de la banquette pour un ouvrage hydraulique] + - [PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE, OH Type de banquette, Type de banquette pour un ouvrage hydrolique] + - [PF_INFRASTRUCTURE_TYPE, "Type d'infrastructure", "Type d'infrastructure pour les passages à faune"] + - [PF_OUVRAGE_SPECIFICITE, Spécificité, Exclusivité du passage pour le passage de la faune] + - [PF_OUVRAGE_TYPE, "Type d'ouvrage", "Type d'ouvrage d'art pour le passage faune"] + - [PF_TYPE_ACTOR, "Type d'acteur", "Type d'acteur pour les passages faune"] + - [PF_DIAG_OBSTACLE, Diagnostic obstacle, Diagnostic obstacle] + - [PF_DIAG_PERTURBATION, Diagnostic perturbation, Diagnostic perturbation] + - [PF_DIAG_OUVRAGE_HYDRAULIQUE_ETAT_BERGE, Diagnostic état berge, État des berges à l'entrée de l'ouvrage (ouvrage hydraulique)] + - [PF_DIAG_OUVRAGE_HYDRAULIQUE_RACCORDEMENT_BANQUETTE, Diagnostic Raccordement banquette-berge, État du raccordement entre la banquette et la berge aux sorties d'un ouvrage mixte hydraulique] + - [PF_DIAG_OUVRAGE_HYDRAU_DIMENSIONNEMENT, Défaut de dimensionnement, Défaut de dimensionnement] + - [PF_DIAG_AMENAGEMENT_BIODIV, Types d'aménagements, Types d'aménagement complémentaires en faveur de la faune] + - [PF_DIAG_CLOTURES_GUIDAGE_TYPE, Nature des clôtures, Nature des clôtures pouvant guider les animaux vers le passage] + - [PF_DIAG_CLOTURES_GUIDAGE_ETAT, État des clôtures, État des clôtures pouvant guider les animaux vers le passage] + # PF_DIAG_VEGETATION_PRESENTE_TABLIER_TYPE + # PF_DIAG_VEGETATION_PRESENTE_DEBOUCH_TYPE - schema_code: ref_nom.nomenclature defaults: source: SIPAF active: true + keys: + [id_type, cd_nomenclature, mnemonique, 
label_default, definition_default] items: - - id_type: PF_OUVRAGE_MATERIAUX - cd_nomenclature: BET - mnemonique: Béta. - label_default: Béton - definition_default: Béton - - id_type: PF_OUVRAGE_MATERIAUX - cd_nomenclature: MET - mnemonique: Mét. - label_default: Métal - definition_default: Métal - - id_type: PF_OUVRAGE_MATERIAUX - cd_nomenclature: PLT - mnemonique: Pla. - label_default: Plastique - definition_default: Plastique - - id_type: PF_OUVRAGE_MATERIAUX - cd_nomenclature: BOI - mnemonique: Boi. - label_default: Bois - definition_default: Bois - - id_type: PF_OUVRAGE_MATERIAUX - cd_nomenclature: MAC - mnemonique: Maç. - label_default: Maçonnerie - definition_default: Maçonnerie - - id_type: PF_OUVRAGE_MATERIAUX - cd_nomenclature: AUT - mnemonique: Aut. - label_default: Autre - definition_default: Autre - - id_type: PF_OUVRAGE_MATERIAUX - cd_nomenclature: IND - mnemonique: Ind. - label_default: Indéterminé - definition_default: Indéterminé - - id_type: PF_OUVRAGE_HYDRAULIQUE_POSITION - cd_nomenclature: RD - mnemonique: R. d. - label_default: Rive droite - definition_default: Rive droite - - id_type: PF_OUVRAGE_HYDRAULIQUE_POSITION - cd_nomenclature: RG - mnemonique: R. g. - label_default: Rive Gauche - definition_default: Rive Gauche - - id_type: PF_OUVRAGE_HYDRAULIQUE_POSITION - cd_nomenclature: RGD - mnemonique: "R. g. & d." - label_default: Rive gauche et rive droite - definition_default: Rive gauche et rive droite (la rive se détermine dans le sens amont/aval) - - id_type: PF_OUVRAGE_HYDRAULIQUE_BANQ_CARACT - cd_nomenclature: SIM - mnemonique: Spl. - label_default: Simple - definition_default: Banquette simple - - id_type: PF_OUVRAGE_HYDRAULIQUE_BANQ_CARACT - cd_nomenclature: DOU - mnemonique: Dbl. - label_default: Double - definition_default: Banquette double - - id_type: PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE - cd_nomenclature: NAT - mnemonique: Nat. 
- label_default: Banquette naturelle - definition_default: Banquette naturelle / bande enherbée - - id_type: PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE - cd_nomenclature: BET - mnemonique: Bet. - label_default: Banquette béton - definition_default: Banquette béton - - id_type: PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE - cd_nomenclature: ECB - mnemonique: Ecb. - label_default: Encorbellement - definition_default: Encorbellement - - id_type: PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE - cd_nomenclature: POF - mnemonique: Po. F. - label_default: Ponton flottant - definition_default: Ponton flottant - - id_type: PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE - cd_nomenclature: AUT - mnemonique: Aut. - label_default: Autre - definition_default: Autre - - id_type: PF_INFRASTRUCTURE_TYPE - cd_nomenclature: AU - mnemonique: Auto. - label_default: Autoroute - definition_default: Autoroute - - id_type: PF_INFRASTRUCTURE_TYPE - cd_nomenclature: RN - mnemonique: R. N. - label_default: Route Nationale - definition_default: Route Nationale - - id_type: PF_INFRASTRUCTURE_TYPE - cd_nomenclature: RD - mnemonique: R. D. - label_default: Route Départementale - definition_default: Route Départementale - - id_type: PF_INFRASTRUCTURE_TYPE - cd_nomenclature: VF - mnemonique: V. F. - label_default: Voie ferrée - definition_default: Voie ferrée - - id_type: PF_INFRASTRUCTURE_TYPE - cd_nomenclature: CA - mnemonique: Ca. - label_default: Canal / Rivère navigable - definition_default: Canal / Rivère navigable - - id_type: PF_OUVRAGE_SPECIFICITE - cd_nomenclature: SPE - mnemonique: Spé. - label_default: Spécifique - definition_default: Ouvrage construit que pour le passage des animaux - - id_type: PF_OUVRAGE_SPECIFICITE - cd_nomenclature: MIX - mnemonique: Mixt. - label_default: Mixte - definition_default: Ouvrage mixte construit pour le passage des animaux concomitamment à un ou plusieurs autres usages - - id_type: PF_OUVRAGE_SPECIFICITE - cd_nomenclature: ND - mnemonique: Non déd. 
- label_default: Non dédié - definition_default: Ouvrage non dédié au passage de la faune mais pouvant servir à cet usage - - id_type: PF_OUVRAGE_TYPE - cd_nomenclature: BUS - mnemonique: Bus. - label_default: Buse - definition_default: Buse - - id_type: PF_OUVRAGE_TYPE - cd_nomenclature: CAD - mnemonique: Cad. - label_default: Cadre - definition_default: Cadre - - id_type: PF_OUVRAGE_TYPE - cd_nomenclature: VOU+R - mnemonique: Voût. Rad. - label_default: Voûte avec radier - definition_default: Voûte maçonnée avec radier - - id_type: PF_OUVRAGE_TYPE - cd_nomenclature: AUT - mnemonique: Aut. - label_default: Autre (préciser) - definition_default: Autre (préciser) - - id_type: PF_OUVRAGE_TYPE - cd_nomenclature: POR - mnemonique: Por. - label_default: Portique - definition_default: Portique en béton - - id_type: PF_OUVRAGE_TYPE - cd_nomenclature: VOU - mnemonique: Voû. - label_default: Voûte sans radier - definition_default: Voûte maçonnée sans radier - - id_type: PF_OUVRAGE_TYPE - cd_nomenclature: DAL+P - mnemonique: Dal. pal. - label_default: Dalle et palpaplanche - definition_default: Dalle et palpaplanche - - id_type: PF_OUVRAGE_TYPE - cd_nomenclature: DAL - mnemonique: Dal. - label_default: Dalle - definition_default: Dalle - - id_type: PF_OUVRAGE_TYPE - cd_nomenclature: ARC - mnemonique: Arc. - label_default: Arc - definition_default: Arc - - id_type: PF_OUVRAGE_TYPE - cd_nomenclature: VIA - mnemonique: via. - label_default: Viaduc - definition_default: Viaduc - - id_type: PF_OUVRAGE_TYPE - cd_nomenclature: PON - mnemonique: pon. - label_default: Pont - definition_default: Pont - - id_type: PF_OUVRAGE_TYPE - cd_nomenclature: CAN - mnemonique: can. - label_default: Canalisation - definition_default: Canalisation - - id_type: PF_OUVRAGE_TYPE - cd_nomenclature: DALO - mnemonique: dalo. - label_default: Dalot - definition_default: Dalot - - id_type: PF_OUVRAGE_TYPE - cd_nomenclature: DIAB - mnemonique: diab. 
- label_default: Diabolo - definition_default: Diabolo - - id_type: PF_OUVRAGE_TYPE - cd_nomenclature: TRA - mnemonique: Tra. - label_default: Tranchée - definition_default: Tranchée - - id_type: PF_OUVRAGE_TYPE - cd_nomenclature: TUN - mnemonique: Tun. - label_default: Tunnel - definition_default: Tunnel - - id_type: PF_TYPE_ACTOR - cd_nomenclature: PRO - mnemonique: Prop. - label_default: Propriétaire - definition_default: Propriétaire du passage faune - - id_type: PF_TYPE_ACTOR - cd_nomenclature: CON - mnemonique: Conc. - label_default: Concessionaire - definition_default: Concessionnaire - - id_type: PF_TYPE_ACTOR - cd_nomenclature: INT - mnemonique: Int. - label_default: Intervenant - definition_default: Intervenant sur ce passage faune - - id_type: PF_TYPE_ACTOR - cd_nomenclature: GES - mnemonique: Ges. - label_default: Gestionnaire - definition_default: Gestionnaire du passage faune - - id_type: PF_TYPE_ACTOR - cd_nomenclature: ETA - mnemonique: État - label_default: État - definition_default: État - - id_type: PF_TYPE_ACTOR - cd_nomenclature: DEP - mnemonique: Dépt. - label_default: Département - definition_default: Département - - id_type: PF_DIAG_OBSTACLE - cd_nomenclature: NEA - mnemonique: Néa. - label_default: Néant - definition_default: Pas de présence d'obstacle - - id_type: PF_DIAG_OBSTACLE - cd_nomenclature: CLOT - mnemonique: Clôt. - label_default: Clôtures - definition_default: Clôture fermant l'ouvrage - - id_type: PF_DIAG_OBSTACLE - cd_nomenclature: TROT - mnemonique: Trot. - label_default: Trottoirs - definition_default: Trottoirs - - id_type: PF_DIAG_OBSTACLE - cd_nomenclature: TROT - mnemonique: Trot. - label_default: Trottoirs - definition_default: Trottoirs - - id_type: PF_DIAG_OBSTACLE - cd_nomenclature: GLIS - mnemonique: Glis. - label_default: Glissières - definition_default: Glissières - - id_type: PF_DIAG_OBSTACLE - cd_nomenclature: DENIV - mnemonique: Déniv. 
- label_default: Dénivelé - definition_default: Dénivelé - - id_type: PF_DIAG_OBSTACLE - cd_nomenclature: DEP - mnemonique: Dép. - label_default: Dépôts - definition_default: Dépôts - - id_type: PF_DIAG_OBSTACLE - cd_nomenclature: STAG - mnemonique: Stag. - label_default: Stagnation d'eau - definition_default: Stagnation d'eau - - id_type: PF_DIAG_OBSTACLE - cd_nomenclature: INFR - mnemonique: Infr. - label_default: Infrastructure - definition_default: Infrastructure au débouché - - id_type: PF_DIAG_OBSTACLE - cd_nomenclature: AUT - mnemonique: Aut. - label_default: Autre (Préciser) - definition_default: Autre (Préciser) + - [PF_OUVRAGE_MATERIAUX, BET, Béta., Béton, Béton] + - [PF_OUVRAGE_MATERIAUX, MET, Mét., Métal, Métal] + - [PF_OUVRAGE_MATERIAUX, PLT, Pla., Plastique, Plastique] + - [PF_OUVRAGE_MATERIAUX, BOI, Boi., Bois, Bois] + - [PF_OUVRAGE_MATERIAUX, MAC, Maç., Maçonnerie, Maçonnerie] + - [PF_OUVRAGE_MATERIAUX, AUT, Aut., Autre, Autre] + - [PF_OUVRAGE_MATERIAUX, IND, Ind., Indéterminé, Indéterminé] + + - [PF_OUVRAGE_HYDRAULIQUE_POSITION, RG, R. g., Rive Gauche, Rive Gauche] + - [PF_OUVRAGE_HYDRAULIQUE_POSITION, RGD, R. g. & d.", Rive gauche et rive droite, Rive gauche et rive droite (la rive se détermine dans le sens amont/aval)] + + - [PF_OUVRAGE_HYDRAULIQUE_BANQ_CARACT, DOU, Dbl., Double, Banquette double] + + - [PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE, BET, Bet., Banquette béton, Banquette béton] + - [PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE, ECB, Ecb., Encorbellement, Encorbellement] + - [PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE, POF, Po. F., Ponton flottant, Ponton flottant] + - [PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE, AUT, Aut., Autre, Autre] + + - [PF_INFRASTRUCTURE_TYPE, RN, R. N., Route Nationale, Route Nationale] + - [PF_INFRASTRUCTURE_TYPE, RD, R. D., Route Départementale, Route Départementale] + - [PF_INFRASTRUCTURE_TYPE, VF, V. 
F., Voie ferrée, Voie ferrée] + - [PF_INFRASTRUCTURE_TYPE, CA, Ca., Canal / Rivère navigable, Canal / Rivère navigable] + + - [PF_OUVRAGE_SPECIFICITE, MIX, Mixt., Mixte, Ouvrage mixte construit pour le passage des animaux concomitamment à un ou plusieurs autres usages] + - [PF_OUVRAGE_SPECIFICITE, ND, Non déd., Non dédié, Ouvrage non dédié au passage de la faune mais pouvant servir à cet usage] + + - [PF_OUVRAGE_TYPE, CAD, Cad., Cadre, Cadre] + - [PF_OUVRAGE_TYPE, VOU+R, Voût. Rad., Voûte avec radier, Voûte maçonnée avec radier] + - [PF_OUVRAGE_TYPE, AUT, Aut., Autre (préciser), Autre (préciser)] + - [PF_OUVRAGE_TYPE, POR, Por., Portique, Portique en béton] + - [PF_OUVRAGE_TYPE, VOU, Voû., Voûte sans radier, Voûte maçonnée sans radier] + - [PF_OUVRAGE_TYPE, DAL+P, Dal. pal., Dalle et palpaplanche, Dalle et palpaplanche] + - [PF_OUVRAGE_TYPE, DAL, Dal., Dalle, Dalle] + - [PF_OUVRAGE_TYPE, ARC, Arc., Arc, Arc] + - [PF_OUVRAGE_TYPE, VIA, via., Viaduc, Viaduc] + - [PF_OUVRAGE_TYPE, PON, pon., Pont, Pont] + - [PF_OUVRAGE_TYPE, CAN, can., Canalisation, Canalisation] + - [PF_OUVRAGE_TYPE, DALO, dalo., Dalot, Dalot] + - [PF_OUVRAGE_TYPE, DIAB, diab., Diabolo, Diabolo] + - [PF_OUVRAGE_TYPE, TRA, Tra., Tranchée, Tranchée] + - [PF_OUVRAGE_TYPE, TUN, Tun., Tunnel, Tunnel] + + - [PF_TYPE_ACTOR, CON, Conc., Concessionaire, Concessionnaire] + - [PF_TYPE_ACTOR, INT, Int., Intervenant, Intervenant sur ce passage faune] + - [PF_TYPE_ACTOR, GES, Ges., Gestionnaire, Gestionnaire du passage faune] + - [PF_TYPE_ACTOR, ETA, État, État, État] + - [PF_TYPE_ACTOR, DEP, Dépt., Département, Département] + + - [PF_DIAG_OBSTACLE, CLOT, Clôt., Clôtures, Clôture fermant l'ouvrage] + - [PF_DIAG_OBSTACLE, TROT, Trot., Trottoirs, Trottoirs] + - [PF_DIAG_OBSTACLE, TROT, Trot., Trottoirs, Trottoirs] + - [PF_DIAG_OBSTACLE, GLIS, Glis., Glissières, Glissières] + - [PF_DIAG_OBSTACLE, DENIV, Déniv., Dénivelé, Dénivelé] + - [PF_DIAG_OBSTACLE, DEP, Dép., Dépôts, Dépôts] + - [PF_DIAG_OBSTACLE, STAG, Stag., 
Stagnation d'eau, Stagnation d'eau] + - [PF_DIAG_OBSTACLE, INFR, Infr., Infrastructure, Infrastructure au débouché] + - [PF_DIAG_OBSTACLE, AUT, Aut., Autre (préciser), Autre (préciser)] + + - [PF_DIAG_PERTURBATION, C_P, Circ. Piet., Circulation Piéton, Circulation Piéton] + - [PF_DIAG_PERTURBATION, CV, Circ. Voit., Circulation Voiture, Circulation Voiture] + - [PF_DIAG_PERTURBATION, C2RQ, Circ. 2R. Qu., Circulation 2-roues & quad, Circulation 2-roues & quad] + - [PF_DIAG_PERTURBATION, CTR, Circ. tr., Circulation tracteur, Circulation tracteur] + - [PF_DIAG_PERTURBATION, CHA, Cha., Chasse, Chasse] + - [PF_DIAG_PERTURBATION, SONO, Sono., Sonore, "Perturbations sonores (vibration, bruit de circulation, etc.)"] + - [PF_DIAG_PERTURBATION, VISU, Visu., Visuelle, "Perturbations visuelles (lumières, éclairage, etc.)"] + - [PF_DIAG_PERTURBATION, AUT, Aut., Autre (préciser), Autre (préciser)] + + - [PF_DIAG_OUVRAGE_HYDRAULIQUE_ETAT_BERGE, STA, Sta., Stable, Stable] + - [PF_DIAG_OUVRAGE_HYDRAULIQUE_ETAT_BERGE, ERO, Éro., Érodé, Érodé] + - [PF_DIAG_OUVRAGE_HYDRAULIQUE_ETAT_BERGE, RIPC, Rip. Cont., Ripisylve continue, Ripisylve continue] + - [PF_DIAG_OUVRAGE_HYDRAULIQUE_ETAT_BERGE, RIPD, Rip. Dis., Ripisylve discontinue, Ripisylve discontinue] + - [PF_DIAG_OUVRAGE_HYDRAULIQUE_ETAT_BERGE, ENRO, Enroch., Enrochement, Enrochement] + - [PF_DIAG_OUVRAGE_HYDRAULIQUE_ETAT_BERGE, TECHV, Tech. Vég., Technique végétale, Technique végétale] + - [PF_DIAG_OUVRAGE_HYDRAULIQUE_ETAT_BERGE, ENHB, Herb., Enherbées, Enherbées] + + - [PF_DIAG_OUVRAGE_HYDRAULIQUE_RACCORDEMENT_BANQUETTE, BON, Bon, Bon, "Bon : raccordement amont et aval"] + - [PF_DIAG_OUVRAGE_HYDRAULIQUE_RACCORDEMENT_BANQUETTE, INS, Insuff., Insuffisant, "Insuffisant : raccordement d'un seul côté"] + - [PF_DIAG_OUVRAGE_HYDRAULIQUE_RACCORDEMENT_BANQUETTE, ABS, Abs., Absent, "Absent : pas de raccordement ni en amont ni en aval"] + + - [PF_DIAG_OUVRAGE_HYDRAU_DIMENSIONNEMENT, LARG_INF, Larg. 
Faib., Largeur trop faible, "Largeur trop faible: augmentation de la vitesse d'écoulement empêchant le franchissement piscicole"] + - [PF_DIAG_OUVRAGE_HYDRAU_DIMENSIONNEMENT, LARG_SUP, Larg. Élev., Largeur trop élevée, "Largeur trop élevée: diminution de la lame d'eau"] + - [PF_DIAG_OUVRAGE_HYDRAU_DIMENSIONNEMENT, HAUT_INF, Larg. Élev., Hauteur insuffisante, "Hauteur insuffisante: en période de crue, submersion des aménagements"] + - [PF_DIAG_OUVRAGE_HYDRAU_DIMENSIONNEMENT, DEF_PENTE, Déf. Pente, Ne respecte pas la pente, "Ouvrage ne respectant pas la pente du cours d'eau : érosion à l'entrée / sortie de l'ouvrage"] + + - [PF_DIAG_AMENAGEMENT_BIODIV, PAR, Para., Parapet d'accultation, Parapet d'accultation] + - [PF_DIAG_AMENAGEMENT_BIODIV, PLA, Planta., Plantations, Plantations] + - [PF_DIAG_AMENAGEMENT_BIODIV, MUR, Mur., Andains / muret, Andains / muret] + - [PF_DIAG_AMENAGEMENT_BIODIV, MAR, Mar., Mares, Mares] + - [PF_DIAG_AMENAGEMENT_BIODIV, AUT, Aut., Autre (préciser), Autre (préciser)] + + - [PF_DIAG_CLOTURES_GUIDAGE_TYPE, 'GFS', Gd. Faun. Simp., Grande faune simple, Grande faune simple] + - [PF_DIAG_CLOTURES_GUIDAGE_TYPE, 'PFS', Pt. Faun. Simp., Petite faune simple, Petite faune simple] + - [PF_DIAG_CLOTURES_GUIDAGE_TYPE, 'MAIP', Mail. 
Prog., Mailles progressives, Mailles progressives] + - [PF_DIAG_CLOTURES_GUIDAGE_TYPE, 'MUR', Mur., Muret, Muret] + - [PF_DIAG_CLOTURES_GUIDAGE_TYPE, AUT, Aut., Autre (préciser), Autre (préciser)] + + - [PF_DIAG_CLOTURES_GUIDAGE_ETAT, BON, Bon, Bon, Bon] + - [PF_DIAG_CLOTURES_GUIDAGE_ETAT, MOY, Moy., Moyen, Moyen] + - [PF_DIAG_CLOTURES_GUIDAGE_ETAT, DEG, Dég., Dégradée, Dégradée] + - [PF_DIAG_CLOTURES_GUIDAGE_ETAT, NOJ, Non Jo., Non jointive, Non jointive] + - [PF_DIAG_CLOTURES_GUIDAGE_ETAT, AUT, Aut., Autre (préciser), Autre (préciser)] + diff --git a/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_edit.layout.yml b/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_edit.layout.yml index 85433eb6..16efd4d0 100644 --- a/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_edit.layout.yml +++ b/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_edit.layout.yml @@ -3,10 +3,100 @@ code: m_sipaf.diagnostic_edit title: page edit diagnostic description: page edit diagnostic +aliases: + - &diag_commons + - hidden: true + items: + - id_diagnostic + - key: id_passage_faune + default: __f__context.params.id_passage_faune + - display: row + items: + - date_diagnostic + - id_organisme + - id_role + - commentaire_diagnostic + + - &diag_perturbation_obstacle + - direction: row + items: + - key: nomenclatures_diagnostic_obstacle + additional_fields: + - cd_nomenclature + type: list_form + multiple: true + return_object: true + - key: obstacle_autre + hidden: __f__!data.nomenclatures_diagnostic_obstacle?.some(n => n.cd_nomenclature == 'AUT') + required: __f__data.nomenclatures_diagnostic_obstacle?.some(n => n.cd_nomenclature == 'AUT') + - direction: row + items: + - key: nomenclatures_diagnostic_perturbation + additional_fields: + - cd_nomenclature + type: list_form + multiple: true + return_object: true + - key: perturbation_autre + hidden: __f__!data.nomenclatures_diagnostic_perturbation?.some(n => n.cd_nomenclature == 'AUT') + required: 
__f__data.nomenclatures_diagnostic_perturbation?.some(n => n.cd_nomenclature == 'AUT') + - nomenclatures_diagnostic_ouvrage_hydrau_etat_berge + - id_nomenclature_diagnostic_ouvrage_hydrau_racc_banq + - nomenclatures_diagnostic_ouvrage_hydrau_dim + - commentaire_perturbation_obstacle + + - &diag_amenagement + - direction: row + items: + - key: nomenclatures_diagnostic_amenagement_biodiv + additional_fields: + - cd_nomenclature + type: list_form + multiple: true + return_object: true + - key: amenagement_biodiv_autre + disabled: __f__!data.nomenclatures_diagnostic_amenagement_biodiv?.some(n => n.cd_nomenclature == 'AUT') + required: __f__data.nomenclatures_diagnostic_amenagement_biodiv?.some(n => n.cd_nomenclature == 'AUT') + - items: + - key: clotures + type: array + display: fieldset + items: + direction: row + items: + - id_nomenclature_clotures_guidage_type + - key: clotures_guidage_type_autre + disabled: "__f__!(u.get_cd_nomenclature(data?.id_nomenclature_clotures_guidage_type) == 'AUT')" + required: "__f__u.get_cd_nomenclature(data?.id_nomenclature_clotures_guidage_type) == 'AUT'" + - id_nomenclature_clotures_guidage_etat + - key: clotures_guidage_etat_autre + disabled: "__f__!(u.get_cd_nomenclature(data?.id_nomenclature_clotures_guidage_etat) == 'AUT')" + required: "__f__u.get_cd_nomenclature(data?.id_nomenclature_clotures_guidage_etat) == 'AUT'" + + + - &diag_synthese + items: + + layout: height_auto: true items: code: utils.object_form template_params: object_code: diagnostic - layout: __DIAGNOSTIC_FORM_FIELDS__ + layout: + overflow: true + items: + # - title: Diagnostic + # display: fieldset + # items: *diag_commons + # - title: Pertubations / Obstacles + # display: fieldset + # items: *diag_perturbation_obstacle + - title: Aménagements + display: fieldset + items: *diag_amenagement + # - title: Synthese + # display: fieldset + # items: *diag_synthese + \ No newline at end of file From 08516037f183077de6020dbbe4a96db5816decd0 Mon Sep 17 00:00:00 2001 From: 
"joel.clement" Date: Wed, 19 Apr 2023 15:47:39 +0200 Subject: [PATCH 078/142] fix pytest --- backend/gn_modulator/commands.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/backend/gn_modulator/commands.py b/backend/gn_modulator/commands.py index d06be406..14d11c53 100644 --- a/backend/gn_modulator/commands.py +++ b/backend/gn_modulator/commands.py @@ -47,9 +47,7 @@ def cmd_install_module(module_code=None, module_path=None, force=False): for module_code in ModuleMethods.registred_modules(): print(f"- {module_code}") - raise Exception - - return ModuleMethods.install_module(module_code, module_path, force) + # return ModuleMethods.install_module(module_code, module_path, force) @click.command("remove") From dcdfaf6549c151caa7122ec80e80f30fd9a9e970 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Wed, 19 Apr 2023 15:58:25 +0200 Subject: [PATCH 079/142] fix pytest --- backend/gn_modulator/definition/base.py | 5 +++-- backend/gn_modulator/imports/mixins/check.py | 4 +++- backend/gn_modulator/schema/base.py | 3 ++- backend/gn_modulator/schema/features/base.py | 3 ++- backend/gn_modulator/schema/models/base.py | 9 ++++++--- backend/gn_modulator/schema/repositories/utils.py | 3 ++- backend/gn_modulator/schema/serializers.py | 3 ++- backend/gn_modulator/schema/sql/base.py | 3 ++- backend/gn_modulator/schema/validation.py | 3 ++- backend/gn_modulator/utils/errors.py | 3 ++- 10 files changed, 26 insertions(+), 13 deletions(-) diff --git a/backend/gn_modulator/definition/base.py b/backend/gn_modulator/definition/base.py index 49eff9c4..ae235105 100644 --- a/backend/gn_modulator/definition/base.py +++ b/backend/gn_modulator/definition/base.py @@ -413,10 +413,11 @@ def global_check_definition(cls, definition_type, definition_code): ) # dépendancies - if dependencies := definition_type not in [ + dependencies = definition_type not in [ "template", "use_template", - ] and definition.get("dependencies"): + ] and definition.get("dependencies") + if 
dependencies: definition_codes = cls.definition_codes_for_type(definition_type) missing_dependencies = [ dependency for dependency in dependencies if dependency not in definition_codes diff --git a/backend/gn_modulator/imports/mixins/check.py b/backend/gn_modulator/imports/mixins/check.py index 20cffde3..ee195ac5 100644 --- a/backend/gn_modulator/imports/mixins/check.py +++ b/backend/gn_modulator/imports/mixins/check.py @@ -213,7 +213,9 @@ def check_resolve_keys(self): values = None # Dans le cas des nomenclatures on peut faire remonter les valeurs possible ?? - if code_type := sm.property(key).get("nomenclature_type"): + + code_type = sm.property(key).get("nomenclature_type") + if code_type: values = list( map( lambda x: { diff --git a/backend/gn_modulator/schema/base.py b/backend/gn_modulator/schema/base.py index 686c02e6..54909f6d 100644 --- a/backend/gn_modulator/schema/base.py +++ b/backend/gn_modulator/schema/base.py @@ -355,7 +355,8 @@ def process_csv_data(self, key, data, options={}, process_label=True): None, data[key], options=options, process_label=process_label ) - if labels := options.get("labels") and process_label: + labels = options.get("labels") and process_label + if labels: if data is True: return labels[0] if len(labels) > 0 else True elif data is False: diff --git a/backend/gn_modulator/schema/features/base.py b/backend/gn_modulator/schema/features/base.py index 31253d0c..919935ac 100644 --- a/backend/gn_modulator/schema/features/base.py +++ b/backend/gn_modulator/schema/features/base.py @@ -80,7 +80,8 @@ def get_foreign_key(self, key_process, rel_test_values, process_one=False): cache_key = "__".join([self.schema_code()] + list(map(lambda x: str(x), rel_test_values))) - if cache_value := get_global_cache(["import_pk_keys", self.schema_code(), cache_key]): + cache_value = get_global_cache(["import_pk_keys", self.schema_code(), cache_key]) + if cache_value: return cache_value if None in rel_test_values: diff --git 
a/backend/gn_modulator/schema/models/base.py b/backend/gn_modulator/schema/models/base.py index ed2af409..befc62a2 100644 --- a/backend/gn_modulator/schema/models/base.py +++ b/backend/gn_modulator/schema/models/base.py @@ -148,7 +148,8 @@ def process_relation_model(self, key, relationship_def, Model): def CorTable(self, relation_def): # cas cor_schema_code - if cor_schema_code := relation_def.get("cor_schema_code"): + cor_schema_code = relation_def.get("cor_schema_code") + if cor_schema_code: sm_cor = self.cls(cor_schema_code) Model = sm_cor.Model() CorTable = Model.__table__ @@ -205,11 +206,13 @@ def Model(self): return None # get Model from cache - if Model := get_global_cache(["schema", self.schema_code(), "model"]): + Model = get_global_cache(["schema", self.schema_code(), "model"]) + if Model: return Model # get Model from existing - if Model := self.get_existing_model(): + Model = self.get_existing_model() + if Model: return Model # dict_model used with type() to list properties and methods for class creation diff --git a/backend/gn_modulator/schema/repositories/utils.py b/backend/gn_modulator/schema/repositories/utils.py index d72c8841..8ed324b0 100644 --- a/backend/gn_modulator/schema/repositories/utils.py +++ b/backend/gn_modulator/schema/repositories/utils.py @@ -108,7 +108,8 @@ def custom_getattr( is_last_field = index == len(fields) - 1 # récupération depuis le cache associé à la query - if res := self.get_query_cache(query, cache_key): + res = self.get_query_cache(query, cache_key) + if res: return self.process_custom_getattr_res(res, query, only_fields) # si non en cache diff --git a/backend/gn_modulator/schema/serializers.py b/backend/gn_modulator/schema/serializers.py index 8fd50702..e566e0ea 100644 --- a/backend/gn_modulator/schema/serializers.py +++ b/backend/gn_modulator/schema/serializers.py @@ -299,7 +299,8 @@ def serialize(self, m, fields=None, as_geojson=False, geometry_field_name=None): sm_rel = self.cls(property["schema_code"]) 
fields_to_remove.append(field) - if default_fields := sm_rel.attr("meta.default_fields"): + default_fields = sm_rel.attr("meta.default_fields") + if default_fields: for rel_field in default_fields: fields_to_add.append(f"{field}.{rel_field}") else: diff --git a/backend/gn_modulator/schema/sql/base.py b/backend/gn_modulator/schema/sql/base.py index c49aa324..90f5794d 100644 --- a/backend/gn_modulator/schema/sql/base.py +++ b/backend/gn_modulator/schema/sql/base.py @@ -49,7 +49,8 @@ def non_auto_sql_schemas_dot_tables(cls): @classmethod def get_tables(cls): - if tables := get_global_cache(["schema_dot_tables"]): + tables = get_global_cache(["schema_dot_tables"]) + if tables: return tables sql_txt_tables = f""" diff --git a/backend/gn_modulator/schema/validation.py b/backend/gn_modulator/schema/validation.py index f0869299..7518f845 100644 --- a/backend/gn_modulator/schema/validation.py +++ b/backend/gn_modulator/schema/validation.py @@ -139,7 +139,8 @@ def set_definition_from_schema_code(self, definitions, schema_code): if schema_definition_id in definitions: return - if schema_definition := get_global_cache(["js_definition", schema_definition_id]): + schema_definition = get_global_cache(["js_definition", schema_definition_id]) + if schema_definition: definitions[schema_definition_id] = schema_definition deps = schema_definition["deps"] for dep in deps: diff --git a/backend/gn_modulator/utils/errors.py b/backend/gn_modulator/utils/errors.py index 78da0c5e..31826b7e 100644 --- a/backend/gn_modulator/utils/errors.py +++ b/backend/gn_modulator/utils/errors.py @@ -98,7 +98,8 @@ def errors_txt(): # on affiche les erreurs par fichier pour simplifier la lecture for definition_error_file_path in sorted(definition_error_file_paths): txt_errors += f"\n- {definition_error_file_path}\n" - if template_file_path := template_file_paths.get(definition_error_file_path): + template_file_path = template_file_paths.get(definition_error_file_path) + if template_file_path: txt_errors += 
f" {template_file_path}\n" txt_errors += "\n" From 3728e69804c840fac736dadde7a1874f57f05764 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Wed, 19 Apr 2023 16:05:26 +0200 Subject: [PATCH 080/142] pk diag cloture --- .../backend/m_sipaf/migrations/data/schema_diagnostic.sql | 8 +++++++- contrib/m_sipaf/backend/m_sipaf/models.py | 1 - 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/contrib/m_sipaf/backend/m_sipaf/migrations/data/schema_diagnostic.sql b/contrib/m_sipaf/backend/m_sipaf/migrations/data/schema_diagnostic.sql index a6083c0a..d894ec53 100644 --- a/contrib/m_sipaf/backend/m_sipaf/migrations/data/schema_diagnostic.sql +++ b/contrib/m_sipaf/backend/m_sipaf/migrations/data/schema_diagnostic.sql @@ -152,13 +152,18 @@ ADD -- clôture guidage CREATE TABLE IF NOT EXISTS pr_sipaf.t_diagnostic_clotures ( - id_diagnostic INTEGER NOT NULL NOT NULL, + id_diagnostic SERIAL NOT NULL NOT NULL, id_nomenclature_clotures_guidage_type INTEGER NOT NULL NOT NULL, id_nomenclature_clotures_guidage_etat INTEGER NOT NULL NOT NULL, clotures_guidage_type_autre VARCHAR, clotures_guidage_etat_autre VARCHAR ); +ALTER TABLE + pr_sipaf.t_diagnostic_clotures +ADD + CONSTRAINT pk_sipaf_t_diagnostic_clotures PRIMARY KEY (id_diagnostic, id_nomenclature_clotures_guidage_type); + ALTER TABLE pr_sipaf.t_diagnostic_clotures ADD @@ -174,6 +179,7 @@ ALTER TABLE ADD CONSTRAINT fk_pr_sipaf_t_d_c_g_id_nomenclature_clotures_guidage_etat FOREIGN KEY (id_nomenclature_clotures_guidage_etat) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + -- check constraint nomenclature type ALTER TABLE pr_sipaf.t_diagnostics diff --git a/contrib/m_sipaf/backend/m_sipaf/models.py b/contrib/m_sipaf/backend/m_sipaf/models.py index 6270d3db..7a6734b0 100644 --- a/contrib/m_sipaf/backend/m_sipaf/models.py +++ b/contrib/m_sipaf/backend/m_sipaf/models.py @@ -410,7 +410,6 @@ class DiagnosticCloture(db.Model): id_nomenclature_clotures_guidage_etat = 
db.Column( db.Integer, db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), - primary_key=True, nullable=False, ) From b69a7936da73b71155a698e16957b014158c2fe9 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 20 Apr 2023 13:51:48 +0200 Subject: [PATCH 081/142] diag --- .../migrations/data/schema_diagnostic.sql | 153 +++++++++++++++++- .../90f6e5531f7c_diagnostic_m_sipaf.py | 2 + contrib/m_sipaf/backend/m_sipaf/models.py | 90 ++++++++++- .../definitions/m_sipaf.diag.schema.yml | 21 ++- ..._sipaf.diag_vegetation_debouche.schema.yml | 25 +++ ...m_sipaf.diag_vegetation_tablier.schema.yml | 25 +++ .../config/features/m_sipaf.utils.data.yml | 31 +++- .../m_sipaf.diagnostic_edit.layout.yml | 143 +++++++++------- contrib/m_sipaf/config/m_sipaf.module.yml | 2 +- 9 files changed, 423 insertions(+), 69 deletions(-) create mode 100644 contrib/m_sipaf/config/definitions/m_sipaf.diag_vegetation_debouche.schema.yml create mode 100644 contrib/m_sipaf/config/definitions/m_sipaf.diag_vegetation_tablier.schema.yml diff --git a/contrib/m_sipaf/backend/m_sipaf/migrations/data/schema_diagnostic.sql b/contrib/m_sipaf/backend/m_sipaf/migrations/data/schema_diagnostic.sql index d894ec53..57a86076 100644 --- a/contrib/m_sipaf/backend/m_sipaf/migrations/data/schema_diagnostic.sql +++ b/contrib/m_sipaf/backend/m_sipaf/migrations/data/schema_diagnostic.sql @@ -10,8 +10,13 @@ CREATE TABLE pr_sipaf.t_diagnostics ( commentaire_perturbation_obstacle VARCHAR, obstacle_autre VARCHAR, perturbation_autre VARCHAR, - id_nomenclature_diagnostic_ouvrage_hydrau_racc_banq INTEGER, - amenagement_biodiv_autre VARCHAR + id_nomenclature_ouvrage_hydrau_racc_banq INTEGER, + amenagement_biodiv_autre VARCHAR, + commentaire_amenagement VARCHAR, + id_nomenclature_amenagement_entretient INTEGER, + id_nomenclature_franchissabilite INTEGER, + id_nomenclature_interet_faune INTEGER, + amenagement_faire VARCHAR ); ALTER TABLE @@ -83,10 +88,32 @@ ADD ALTER TABLE pr_sipaf.t_diagnostics ADD - CONSTRAINT 
fk_pr_sipaf_t_d_id_nomenclature_diagnostic_ouvrage_hydrau_racc_banq FOREIGN KEY ( - id_nomenclature_diagnostic_ouvrage_hydrau_racc_banq + CONSTRAINT fk_pr_sipaf_t_d_id_nomenclature_ouvrage_hydrau_racc_banq FOREIGN KEY ( + id_nomenclature_ouvrage_hydrau_racc_banq ) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; +ALTER TABLE + pr_sipaf.t_diagnostics +ADD + CONSTRAINT fk_pr_sipaf_t_d_id_nomenclature_amenagement_entretient FOREIGN KEY ( + id_nomenclature_amenagement_entretient + ) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE + pr_sipaf.t_diagnostics +ADD + CONSTRAINT fk_pr_sipaf_t_d_id_nomenclature_franchissablite FOREIGN KEY ( + id_nomenclature_franchissabilite + ) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE + pr_sipaf.t_diagnostics +ADD + CONSTRAINT fk_pr_sipaf_t_d_id_nomenclature_interet_faune FOREIGN KEY ( + id_nomenclature_interet_faune + ) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + + -- cor diag nomenclature ouvrage_hydrau_etat_berge CREATE TABLE IF NOT EXISTS pr_sipaf.cor_diag_nomenclature_ouvrage_hydrau_etat_berge ( id_diagnostic INTEGER NOT NULL NOT NULL, @@ -179,6 +206,62 @@ ALTER TABLE ADD CONSTRAINT fk_pr_sipaf_t_d_c_g_id_nomenclature_clotures_guidage_etat FOREIGN KEY (id_nomenclature_clotures_guidage_etat) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; +-- vegetation tablier +CREATE TABLE IF NOT EXISTS pr_sipaf.t_diagnostic_vegetation_presente_tablier ( + id_diagnostic INTEGER NOT NULL NOT NULL, + id_nomenclature_vegetation_type INTEGER NOT NULL NOT NULL, + id_nomenclature_vegetation_couvert INTEGER NOT NULL NOT NULL +); + +ALTER TABLE + pr_sipaf.t_diagnostic_vegetation_presente_tablier +ADD + CONSTRAINT pk_sipaf_t_diagnostic_vegetation_presente_tablier 
PRIMARY KEY (id_diagnostic, id_nomenclature_vegetation_type); + + +ALTER TABLE + pr_sipaf.t_diagnostic_vegetation_presente_tablier +ADD + CONSTRAINT fk_pr_sipaf_t_diagnostic_v_p_t_id_diagnostic FOREIGN KEY (id_diagnostic) REFERENCES pr_sipaf.t_diagnostics (id_diagnostic) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE + pr_sipaf.t_diagnostic_vegetation_presente_tablier +ADD + CONSTRAINT fk_pr_sipaf_t_diagnostic_v_p_t_id_nom_type FOREIGN KEY (id_nomenclature_vegetation_type) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE + pr_sipaf.t_diagnostic_vegetation_presente_tablier +ADD + CONSTRAINT fk_pr_sipaf_t_diagnostic_v_p_t_id_nom_couvert FOREIGN KEY (id_nomenclature_vegetation_couvert) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + +-- vegetation debouche +CREATE TABLE IF NOT EXISTS pr_sipaf.t_diagnostic_vegetation_presente_debouche ( + id_diagnostic INTEGER NOT NULL NOT NULL, + id_nomenclature_vegetation_type INTEGER NOT NULL NOT NULL, + id_nomenclature_vegetation_couvert INTEGER NOT NULL NOT NULL +); + +ALTER TABLE + pr_sipaf.t_diagnostic_vegetation_presente_debouche +ADD + CONSTRAINT pk_sipaf_t_diagnostic_vegetation_presente_debouche PRIMARY KEY (id_diagnostic, id_nomenclature_vegetation_type); + + +ALTER TABLE + pr_sipaf.t_diagnostic_vegetation_presente_debouche +ADD + CONSTRAINT fk_pr_sipaf_t_diagnostic_v_p_t_id_diagnostic FOREIGN KEY (id_diagnostic) REFERENCES pr_sipaf.t_diagnostics (id_diagnostic) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE + pr_sipaf.t_diagnostic_vegetation_presente_debouche +ADD + CONSTRAINT fk_pr_sipaf_t_diagnostic_v_p_t_id_nom_type FOREIGN KEY (id_nomenclature_vegetation_type) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE + pr_sipaf.t_diagnostic_vegetation_presente_debouche +ADD + CONSTRAINT fk_pr_sipaf_t_diagnostic_v_p_t_id_nom_couvert 
FOREIGN KEY (id_nomenclature_vegetation_couvert) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + -- check constraint nomenclature type ALTER TABLE @@ -186,11 +269,43 @@ ALTER TABLE ADD CONSTRAINT check_nom_type_diag_ouvr_hydrau_racc_banq_id_ure_pf_ype CHECK ( ref_nomenclatures.check_nomenclature_type_by_mnemonique( - id_nomenclature_diagnostic_ouvrage_hydrau_racc_banq, + id_nomenclature_ouvrage_hydrau_racc_banq, 'PF_DIAG_OUVRAGE_HYDRAULIQUE_RACCORDEMENT_BANQUETTE' ) ) NOT VALID; + +ALTER TABLE + pr_sipaf.t_diagnostics +ADD + CONSTRAINT check_nom_type_diag_amenagement_entretient CHECK ( + ref_nomenclatures.check_nomenclature_type_by_mnemonique( + id_nomenclature_amenagement_entretient, + 'PF_DIAG_AMENAGEMENT_ENTRETIENT' + ) + ) NOT VALID; + +ALTER TABLE + pr_sipaf.t_diagnostics +ADD + CONSTRAINT check_nom_type_diag_franchissabilite CHECK ( + ref_nomenclatures.check_nomenclature_type_by_mnemonique( + id_nomenclature_franchissabilite, + 'PF_DIAG_FRANCHISSABILITE' + ) + ) NOT VALID; + +ALTER TABLE + pr_sipaf.t_diagnostics +ADD + CONSTRAINT check_nom_type_diag_interet_faune CHECK ( + ref_nomenclatures.check_nomenclature_type_by_mnemonique( + id_nomenclature_interet_faune, + 'PF_DIAG_INTERET_FAUNE' + ) + ) NOT VALID; + + ALTER TABLE pr_sipaf.cor_diag_nomenclature_obstacle ADD @@ -250,4 +365,32 @@ ALTER TABLE ADD CONSTRAINT check_nom_type_pr_sipaf_cor_diag_nomenclature_perturbation_id_ure_pf_ype CHECK ( ref_nomenclatures.check_nomenclature_type_by_mnemonique(id_nomenclature, 'PF_DIAG_PERTURBATION') + ) NOT VALID; + +ALTER TABLE + pr_sipaf.t_diagnostic_vegetation_presente_tablier +ADD + CONSTRAINT check_nom_type_sipaf_diag_vege_pre_tab_type_nom_type CHECK ( + ref_nomenclatures.check_nomenclature_type_by_mnemonique(id_nomenclature_vegetation_type, 'PF_DIAG_AMENAGEMENT_VEGETATION_TYPE') + ) NOT VALID; + +ALTER TABLE + pr_sipaf.t_diagnostic_vegetation_presente_tablier +ADD + CONSTRAINT 
check_nom_type_sipaf_diag_vege_pre_tab_couvert_nom_type CHECK ( + ref_nomenclatures.check_nomenclature_type_by_mnemonique(id_nomenclature_vegetation_couvert, 'PF_DIAG_AMENAGEMENT_VEGETATION_COUVERT') + ) NOT VALID; + +ALTER TABLE + pr_sipaf.t_diagnostic_vegetation_presente_debouche +ADD + CONSTRAINT check_nom_type_sipaf_diag_vege_pre_tab_type_nom_type CHECK ( + ref_nomenclatures.check_nomenclature_type_by_mnemonique(id_nomenclature_vegetation_type, 'PF_DIAG_AMENAGEMENT_VEGETATION_TYPE') + ) NOT VALID; + +ALTER TABLE + pr_sipaf.t_diagnostic_vegetation_presente_debouche +ADD + CONSTRAINT check_nom_type_sipaf_diag_vege_pre_tab_couvert_nom_type CHECK ( + ref_nomenclatures.check_nomenclature_type_by_mnemonique(id_nomenclature_vegetation_couvert, 'PF_DIAG_AMENAGEMENT_VEGETATION_COUVERT') ) NOT VALID; \ No newline at end of file diff --git a/contrib/m_sipaf/backend/m_sipaf/migrations/versions/90f6e5531f7c_diagnostic_m_sipaf.py b/contrib/m_sipaf/backend/m_sipaf/migrations/versions/90f6e5531f7c_diagnostic_m_sipaf.py index 485defac..3743929e 100644 --- a/contrib/m_sipaf/backend/m_sipaf/migrations/versions/90f6e5531f7c_diagnostic_m_sipaf.py +++ b/contrib/m_sipaf/backend/m_sipaf/migrations/versions/90f6e5531f7c_diagnostic_m_sipaf.py @@ -37,6 +37,8 @@ def downgrade(): DROP TABLE {if_exists} pr_sipaf.cor_diag_nomenclature_ouvrage_hydrau_etat_berge; DROP TABLE {if_exists} pr_sipaf.cor_diag_nomenclature_ouvrage_hydrau_dim; DROP TABLE {if_exists} pr_sipaf.cor_diag_nomenclature_amenagement_biodiv; + DROP TABLE {if_exists} pr_sipaf.t_diagnostic_vegetation_presente_tablier; + DROP TABLE {if_exists} pr_sipaf.t_diagnostic_vegetation_presente_debouche; DROP TABLE {if_exists} pr_sipaf.t_diagnostic_clotures; DROP TABLE {if_exists} pr_sipaf.t_diagnostics; """ diff --git a/contrib/m_sipaf/backend/m_sipaf/models.py b/contrib/m_sipaf/backend/m_sipaf/models.py index 7a6734b0..38120bec 100644 --- a/contrib/m_sipaf/backend/m_sipaf/models.py +++ b/contrib/m_sipaf/backend/m_sipaf/models.py @@ 
-362,12 +362,12 @@ class Diagnostic(db.Model): obstacle_autre = db.Column(db.Unicode) perturbation_autre = db.Column(db.Unicode) - id_nomenclature_diagnostic_ouvrage_hydrau_racc_banq = db.Column( + id_nomenclature_ouvrage_hydrau_racc_banq = db.Column( db.Integer, db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature") ) nomenclature_diagnostic_ouvrage_hydrau_raccordement_banquette = db.relationship( TNomenclatures, - foreign_keys=[id_nomenclature_diagnostic_ouvrage_hydrau_racc_banq], + foreign_keys=[id_nomenclature_ouvrage_hydrau_racc_banq], ) nomenclatures_diagnostic_obstacle = db.relationship( @@ -393,6 +393,32 @@ class Diagnostic(db.Model): TNomenclatures, secondary=CorDiagAmenagementBiodiv.__table__ ) + id_nomenclature_amenagement_entretient = db.Column( + db.Integer, db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature") + ) + nomenclature_amenagement_entretient = db.relationship( + TNomenclatures, foreign_keys=[id_nomenclature_amenagement_entretient] + ) + + commentaire_amenagement = db.Column(db.Unicode) + + # synthese + id_nomenclature_interet_faune = db.Column( + db.Integer, db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature") + ) + nomenclature_interet_faune = db.relationship( + TNomenclatures, foreign_keys=[id_nomenclature_interet_faune] + ) + + id_nomenclature_franchissabilite = db.Column( + db.Integer, db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature") + ) + nomenclature_franchissabilite = db.relationship( + TNomenclatures, foreign_keys=[id_nomenclature_franchissabilite] + ) + + amenagement_faire = db.Column(db.Unicode) + class DiagnosticCloture(db.Model): __tablename__ = "t_diagnostic_clotures" @@ -424,3 +450,63 @@ class DiagnosticCloture(db.Model): nomenclature_clotures_guidage_type = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_clotures_guidage_type] ) + + +class DiagnosticVegetationTablier(db.Model): + __tablename__ = "t_diagnostic_vegetation_presente_tablier" + __table_args__ = 
{"schema": "pr_sipaf"} + + id_diagnostic = db.Column( + db.Integer, db.ForeignKey("pr_sipaf.t_diagnostics.id_diagnostic"), primary_key=True + ) + id_nomenclature_vegetation_type = db.Column( + db.Integer, + db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + primary_key=True, + nullable=False, + ) + id_nomenclature_vegetation_couvert = db.Column( + db.Integer, + db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + nullable=False, + ) + + diagnostic = db.relationship(Diagnostic, backref="vegetation_tablier") + + nomenclature_vegetation_type = db.relationship( + TNomenclatures, foreign_keys=[id_nomenclature_vegetation_type] + ) + + nomenclature_vegetation_couvert = db.relationship( + TNomenclatures, foreign_keys=[id_nomenclature_vegetation_couvert] + ) + + +class DiagnosticVegetationDebouche(db.Model): + __tablename__ = "t_diagnostic_vegetation_presente_debouche" + __table_args__ = {"schema": "pr_sipaf"} + + id_diagnostic = db.Column( + db.Integer, db.ForeignKey("pr_sipaf.t_diagnostics.id_diagnostic"), primary_key=True + ) + id_nomenclature_vegetation_type = db.Column( + db.Integer, + db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + primary_key=True, + nullable=False, + ) + id_nomenclature_vegetation_couvert = db.Column( + db.Integer, + db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + nullable=False, + ) + + diagnostic = db.relationship(Diagnostic, backref="vegetation_debouche") + + nomenclature_vegetation_type = db.relationship( + TNomenclatures, foreign_keys=[id_nomenclature_vegetation_type] + ) + + nomenclature_vegetation_couvert = db.relationship( + TNomenclatures, foreign_keys=[id_nomenclature_vegetation_couvert] + ) diff --git a/contrib/m_sipaf/config/definitions/m_sipaf.diag.schema.yml b/contrib/m_sipaf/config/definitions/m_sipaf.diag.schema.yml index d577b5cd..3940a975 100644 --- a/contrib/m_sipaf/config/definitions/m_sipaf.diag.schema.yml +++ 
b/contrib/m_sipaf/config/definitions/m_sipaf.diag.schema.yml @@ -47,7 +47,7 @@ properties: nomenclatures_diagnostic_ouvrage_hydrau_etat_berge: title: État des berges description: État des berges à l'entrée de l'ouvrage (ouvrage hydraulique) - id_nomenclature_diagnostic_ouvrage_hydrau_racc_banq: + id_nomenclature_ouvrage_hydrau_racc_banq: title: Raccordement banquette-berge description: État du raccordement entre la banquette et la berge aux sorties d'un ouvrage mixte hydraulique nomenclatures_diagnostic_ouvrage_hydrau_dim: @@ -62,4 +62,21 @@ properties: title: Aménagement autre clotures: title: Clôtures - description: Présence de clôtures pouvant guider les animaux vers le passage \ No newline at end of file + description: Présence de clôtures pouvant guider les animaux vers le passage + vegetation_tablier: + title: Vegetation (tablier) + description: Végétation présente sur le tablier + vegetation_debouche: + title: Vegetation (débouchés) + description: Végétation présente sur aux débouchés de l'ouvrage + + # synthese + id_nomenclature_franchissabilite: + title: Franchissibilité + description: Estimation de la franchissabilité de l'ouvrage pour les animaux + id_nomenclature_interet_faune: + title: Intérêt + description: Intérêt pour les espèces cibles + amenagement_faire: + title: Aménagements à faire + description: Détails aménagements ou autres mesures à réaliser pour rendre l'ouvrage plus fonctionel diff --git a/contrib/m_sipaf/config/definitions/m_sipaf.diag_vegetation_debouche.schema.yml b/contrib/m_sipaf/config/definitions/m_sipaf.diag_vegetation_debouche.schema.yml new file mode 100644 index 00000000..f13f8e91 --- /dev/null +++ b/contrib/m_sipaf/config/definitions/m_sipaf.diag_vegetation_debouche.schema.yml @@ -0,0 +1,25 @@ +type: schema +code: m_sipaf.diag_vegetation_debouche +title: schema diagnostic passage faune (vegetation_debouche) +description: schema pour les diagnostics des passages à faune (vegetation_debouche) + +meta: + autoschema: true + model: 
m_sipaf.models.DiagnosticVegetationDebouche + module_code: m_sipaf + genre: M + label: Diagnostic de vegetation débouchés + labels: Diagnostics de vegetation débouchés + label_field_name: id_diagnostic + unique: + - id_diagnostic + - id_nomenclature_vegetation_type + - id_nomenclature_vegetation_couvert + unique_in_db: true +properties: + id_nomenclature_vegetation_type: + title: Type de vegation + description: Type de vegation présente sur aux débouchés de l'ouvrage + id_nomenclature_vegetation_couvert: + title: Couverture végétale + description: Couverture végétale pour ce type de végétation diff --git a/contrib/m_sipaf/config/definitions/m_sipaf.diag_vegetation_tablier.schema.yml b/contrib/m_sipaf/config/definitions/m_sipaf.diag_vegetation_tablier.schema.yml new file mode 100644 index 00000000..af1f229f --- /dev/null +++ b/contrib/m_sipaf/config/definitions/m_sipaf.diag_vegetation_tablier.schema.yml @@ -0,0 +1,25 @@ +type: schema +code: m_sipaf.diag_vegetation_tablier +title: schema diagnostic passage faune (vegetation_tablier) +description: schema pour les diagnostics des passages à faune (vegetation_tablier) + +meta: + autoschema: true + model: m_sipaf.models.DiagnosticVegetationTablier + module_code: m_sipaf + genre: M + label: Diagnostic de vegetation tablier + labels: Diagnostics de vegetation tablier + label_field_name: id_diagnostic + unique: + - id_diagnostic + - id_nomenclature_vegetation_type + - id_nomenclature_vegetation_couvert + unique_in_db: true +properties: + id_nomenclature_vegetation_type: + title: Type de vegation + description: Type de vegation présente sur le tablier + id_nomenclature_vegetation_couvert: + title: Couverture végétale + description: Couverture végétale pour ce type de végétationd diff --git a/contrib/m_sipaf/config/features/m_sipaf.utils.data.yml b/contrib/m_sipaf/config/features/m_sipaf.utils.data.yml index 02c75fb5..3c45f98b 100644 --- a/contrib/m_sipaf/config/features/m_sipaf.utils.data.yml +++ 
b/contrib/m_sipaf/config/features/m_sipaf.utils.data.yml @@ -38,8 +38,11 @@ items: - [PF_DIAG_AMENAGEMENT_BIODIV, Types d'aménagements, Types d'aménagement complémentaires en faveur de la faune] - [PF_DIAG_CLOTURES_GUIDAGE_TYPE, Nature des clôtures, Nature des clôtures pouvant guider les animaux vers le passage] - [PF_DIAG_CLOTURES_GUIDAGE_ETAT, État des clôtures, État des clôtures pouvant guider les animaux vers le passage] - # PF_DIAG_VEGETATION_PRESENTE_TABLIER_TYPE - # PF_DIAG_VEGETATION_PRESENTE_DEBOUCH_TYPE + - [PF_DIAG_AMENAGEMENT_VEGETATION_TYPE, Type de végétation, Type de végétation présente] + - [PF_DIAG_AMENAGEMENT_VEGETATION_COUVERT, Couverture végétale (%), Couverture végétale (%)] + - [PF_DIAG_AMENAGEMENT_ENTRETIENT, Entretient dispositif et végétation, Entretient dispositif et végétation] + - [PF_DIAG_FRANCHISSABILITE, Franchissabilité, Estimation de la franchissabilité de l'ouvrage pour les animaux] + - [PF_DIAG_INTERET_FAUNE, Intérêt pour les espèces cibles, Intérêt pour les espèces cibles] - schema_code: ref_nom.nomenclature defaults: source: SIPAF @@ -149,4 +152,26 @@ items: - [PF_DIAG_CLOTURES_GUIDAGE_ETAT, NOJ, Non Jo., Non jointive, Non jointive] - [PF_DIAG_CLOTURES_GUIDAGE_ETAT, AUT, Aut., Autre (préciser), Autre (préciser)] - + - [PF_DIAG_AMENAGEMENT_VEGETATION_TYPE, NU, Nu, Sol nu, Sol nu] + - [PF_DIAG_AMENAGEMENT_VEGETATION_TYPE, HER, Herb.,Herbacé (<1m), Herbacé (<1m)] + - [PF_DIAG_AMENAGEMENT_VEGETATION_TYPE, BUI, Buis., Buissonant arbustif (1-3m), Buissonant arbustif (1-3m)] + - [PF_DIAG_AMENAGEMENT_VEGETATION_TYPE, ARB, Arb., Arboré (>3m), Arboré (>3m)] + + - [PF_DIAG_AMENAGEMENT_VEGETATION_COUVERT, "1", 0-25 , 0-25 %, 0-25 %] + - [PF_DIAG_AMENAGEMENT_VEGETATION_COUVERT, "2", 25-50, 25-50 %, 25-50 %] + - [PF_DIAG_AMENAGEMENT_VEGETATION_COUVERT, "3", 50-75, 50-75 %, 50-75 %] + - [PF_DIAG_AMENAGEMENT_VEGETATION_COUVERT, "4", 75-100, 75-100 %, 75-100 %] + + - [PF_DIAG_AMENAGEMENT_ENTRETIENT, BON, Bon., Bonne, Bonne] + - 
[PF_DIAG_AMENAGEMENT_ENTRETIENT, MOY, Moy., Moyenne, Moyenne] + - [PF_DIAG_AMENAGEMENT_ENTRETIENT, OCC, Occ., Occasionelle, Occasionelle] + - [PF_DIAG_AMENAGEMENT_ENTRETIENT, NUL, Nul., Nulle, Nulle] + + - [PF_DIAG_FRANCHISSABILITE, BON, Bon., Bonne, Bonne] + - [PF_DIAG_FRANCHISSABILITE, MOY, Moy., Moyenne, Moyenne] + - [PF_DIAG_FRANCHISSABILITE, OCC, Occ., Occasionelle, Occasionelle] + - [PF_DIAG_FRANCHISSABILITE, NUL, Nul., Nulle, Nulle] + + - [PF_DIAG_INTERET_FAUNE, FAI, Fai., Faible, Faible] + - [PF_DIAG_INTERET_FAUNE, MOY, Moy., Moyen, Moyen] + - [PF_DIAG_INTERET_FAUNE, FOR, For., Fort, Fort] diff --git a/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_edit.layout.yml b/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_edit.layout.yml index 16efd4d0..8d6f5650 100644 --- a/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_edit.layout.yml +++ b/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_edit.layout.yml @@ -15,38 +15,50 @@ aliases: - date_diagnostic - id_organisme - id_role - - commentaire_diagnostic + - key: commentaire_diagnostic + type: textarea - &diag_perturbation_obstacle - - direction: row - items: - - key: nomenclatures_diagnostic_obstacle - additional_fields: - - cd_nomenclature - type: list_form - multiple: true - return_object: true - - key: obstacle_autre - hidden: __f__!data.nomenclatures_diagnostic_obstacle?.some(n => n.cd_nomenclature == 'AUT') - required: __f__data.nomenclatures_diagnostic_obstacle?.some(n => n.cd_nomenclature == 'AUT') - - direction: row - items: - - key: nomenclatures_diagnostic_perturbation - additional_fields: - - cd_nomenclature - type: list_form - multiple: true - return_object: true - - key: perturbation_autre - hidden: __f__!data.nomenclatures_diagnostic_perturbation?.some(n => n.cd_nomenclature == 'AUT') - required: __f__data.nomenclatures_diagnostic_perturbation?.some(n => n.cd_nomenclature == 'AUT') - - nomenclatures_diagnostic_ouvrage_hydrau_etat_berge - - id_nomenclature_diagnostic_ouvrage_hydrau_racc_banq - 
- nomenclatures_diagnostic_ouvrage_hydrau_dim - - commentaire_perturbation_obstacle + items: + - display: fieldset + title: Obstacles + direction: row + items: + - key: nomenclatures_diagnostic_obstacle + additional_fields: + - cd_nomenclature + type: list_form + multiple: true + return_object: true + - key: obstacle_autre + disabled: __f__!data.nomenclatures_diagnostic_obstacle?.some(n => n.cd_nomenclature == 'AUT') + required: __f__data.nomenclatures_diagnostic_obstacle?.some(n => n.cd_nomenclature == 'AUT') + - display: fieldset + title: perturbation + direction: row + items: + - key: nomenclatures_diagnostic_perturbation + additional_fields: + - cd_nomenclature + type: list_form + multiple: true + return_object: true + - key: perturbation_autre + disabled: __f__!data.nomenclatures_diagnostic_perturbation?.some(n => n.cd_nomenclature == 'AUT') + required: __f__data.nomenclatures_diagnostic_perturbation?.some(n => n.cd_nomenclature == 'AUT') + - display: fieldset + title: Ouvrage hydrau + items: + - nomenclatures_diagnostic_ouvrage_hydrau_etat_berge + - id_nomenclature_ouvrage_hydrau_racc_banq + - nomenclatures_diagnostic_ouvrage_hydrau_dim + - type: textarea + key: commentaire_perturbation_obstacle - &diag_amenagement - - direction: row + - title: Aménagements + display: fieldset + direction: row items: - key: nomenclatures_diagnostic_amenagement_biodiv additional_fields: @@ -57,26 +69,47 @@ aliases: - key: amenagement_biodiv_autre disabled: __f__!data.nomenclatures_diagnostic_amenagement_biodiv?.some(n => n.cd_nomenclature == 'AUT') required: __f__data.nomenclatures_diagnostic_amenagement_biodiv?.some(n => n.cd_nomenclature == 'AUT') - - items: - - key: clotures + - key: clotures + type: array + display: fieldset + items: + direction: row + items: + - id_nomenclature_clotures_guidage_type + - key: clotures_guidage_type_autre + disabled: "__f__!(u.get_cd_nomenclature(data?.id_nomenclature_clotures_guidage_type) == 'AUT')" + required: 
"__f__u.get_cd_nomenclature(data?.id_nomenclature_clotures_guidage_type) == 'AUT'" + - id_nomenclature_clotures_guidage_etat + - key: clotures_guidage_etat_autre + disabled: "__f__!(u.get_cd_nomenclature(data?.id_nomenclature_clotures_guidage_etat) == 'AUT')" + required: "__f__u.get_cd_nomenclature(data?.id_nomenclature_clotures_guidage_etat) == 'AUT'" + - title: Végétation + display: fieldset + items: + - key: vegetation_tablier type: array display: fieldset items: direction: row items: - - id_nomenclature_clotures_guidage_type - - key: clotures_guidage_type_autre - disabled: "__f__!(u.get_cd_nomenclature(data?.id_nomenclature_clotures_guidage_type) == 'AUT')" - required: "__f__u.get_cd_nomenclature(data?.id_nomenclature_clotures_guidage_type) == 'AUT'" - - id_nomenclature_clotures_guidage_etat - - key: clotures_guidage_etat_autre - disabled: "__f__!(u.get_cd_nomenclature(data?.id_nomenclature_clotures_guidage_etat) == 'AUT')" - required: "__f__u.get_cd_nomenclature(data?.id_nomenclature_clotures_guidage_etat) == 'AUT'" - + - id_nomenclature_vegetation_type + - id_nomenclature_vegetation_couvert + - key: vegetation_debouche + type: array + display: fieldset + items: + direction: row + items: + - id_nomenclature_vegetation_type + - id_nomenclature_vegetation_couvert + - key: commentaire_amenagement + type: textarea - &diag_synthese - items: - + - id_nomenclature_franchissabilite + - id_nomenclature_interet_faune + - key: amenagement_faire + type: textarea layout: height_auto: true @@ -85,18 +118,16 @@ layout: template_params: object_code: diagnostic layout: - overflow: true - items: - # - title: Diagnostic - # display: fieldset - # items: *diag_commons - # - title: Pertubations / Obstacles - # display: fieldset - # items: *diag_perturbation_obstacle - - title: Aménagements - display: fieldset - items: *diag_amenagement - # - title: Synthese - # display: fieldset - # items: *diag_synthese - \ No newline at end of file + - type: breadcrumbs + flex: "0" + - overflow: 
true + display: tabs + items: + - label: Champs communs + items: *diag_commons + - label: Perturbations / Obstacles + items: *diag_perturbation_obstacle + - label: Aménagement + items: *diag_amenagement + - label: Synthese + items: *diag_synthese diff --git a/contrib/m_sipaf/config/m_sipaf.module.yml b/contrib/m_sipaf/config/m_sipaf.module.yml index b7ad7f3a..73e17224 100644 --- a/contrib/m_sipaf/config/m_sipaf.module.yml +++ b/contrib/m_sipaf/config/m_sipaf.module.yml @@ -26,7 +26,7 @@ objects: cruved: CRUD tree: - pf: + site: diagnostic: pages_definition: From bf46d225aaca504783b1ede2ee2b989667a4b197 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 20 Apr 2023 13:52:02 +0200 Subject: [PATCH 082/142] fix fields and tabs table --- backend/gn_modulator/module/config/utils.py | 35 ++++++++++++--------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/backend/gn_modulator/module/config/utils.py b/backend/gn_modulator/module/config/utils.py index 84f0ac19..7a47b8a9 100644 --- a/backend/gn_modulator/module/config/utils.py +++ b/backend/gn_modulator/module/config/utils.py @@ -369,17 +369,34 @@ def process_layout_fields(cls, module_code): context = {"module_code": module_code} for page_code, page_definition in pages.items(): keys = cls.get_layout_keys(page_definition["layout"], config_params, context, keys) + # if page_code == 'diagnostic_create': + # keyss = keys.get('m_sipaf', {}).get('diagnostic', {}).get('read') + # print('get keys from page', module_code, page_code) + # for k in sorted(keyss): + # print(f'- {k}') set_global_cache(["keys"], keys) @classmethod def get_layout_keys(cls, layout, params, context, keys): + if isinstance(layout, list): for item in layout: cls.get_layout_keys(item, params, context, keys) return keys + if isinstance(layout, dict): + if layout.get("object_code"): + context = {**context, "object_code": layout["object_code"]} + + if layout.get("type") == "form": + context = {**context, "form": True} + + if 
layout.get("module_code"): + context = {**context, "module_code": layout["module_code"]} + if isinstance(layout, dict) and layout.get("type") in ["dict", "array"]: + data_keys = context.get("data_keys", []) data_keys.append(layout["key"]) context = {**context, "data_keys": data_keys} @@ -388,7 +405,6 @@ def get_layout_keys(cls, layout, params, context, keys): if ( isinstance(layout, dict) and layout.get("type") == "list_form" - and layout.get("object_code") ): key_add = [] if layout.get("label_field_name"): @@ -398,14 +414,11 @@ def get_layout_keys(cls, layout, params, context, keys): if layout.get("additional_fields"): key_add += layout["additional_fields"] if key_add: + print(context.get('module_code'), context.get('object_code'), layout.get("key")) cls.get_layout_keys( key_add, params, - { - **context, - "module_code": layout.get("module_code") or context["module_code"], - "object_code": layout["object_code"], - }, + context, keys, ) if layout.get("return_object") and layout.get("additional_fields"): @@ -430,7 +443,7 @@ def get_layout_keys(cls, layout, params, context, keys): if not sm.has_property(key): # raise error ? - print(f"pb ? {sm} has no {key}") + # print(f"pb ? 
{sm} has no {key}") return keys if key not in object_keys["read"]: object_keys["read"].append(key) @@ -438,14 +451,6 @@ def get_layout_keys(cls, layout, params, context, keys): object_keys["write"].append(key) return keys - if layout.get("object_code"): - context = {**context, "object_code": layout["object_code"]} - - if layout.get("type") == "form": - context = {**context, "form": True} - - if layout.get("module_code"): - context = {**context, "module_code": layout["module_code"]} if layout.get("code"): template_params = {**params, **layout.get("template_params", {})} From f6632ab6747e6c9649fe91cf72c6ac54cbcb25a0 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 20 Apr 2023 13:52:23 +0200 Subject: [PATCH 083/142] fix fields and tabs table --- frontend/app/components/base/base.scss | 8 ++- .../layout/base/layout-section.component.html | 24 +++++---- .../layout/base/layout.component.ts | 54 ++++++++++--------- .../object/layout-object-table.component.html | 2 +- .../object/layout-object-table.component.ts | 1 + 5 files changed, 51 insertions(+), 38 deletions(-) diff --git a/frontend/app/components/base/base.scss b/frontend/app/components/base/base.scss index e55e9d85..729574bf 100644 --- a/frontend/app/components/base/base.scss +++ b/frontend/app/components/base/base.scss @@ -15,9 +15,13 @@ // border: 1px solid black; } -.layout-tab { +.layout-tab-out { + height: 100%; + padding: 10px; +} + +.layout-tab-in { height: 100%; - // padding: 10px; } .element-container { diff --git a/frontend/app/components/layout/base/layout-section.component.html b/frontend/app/components/layout/base/layout-section.component.html index 7743cfc3..4375e1c3 100644 --- a/frontend/app/components/layout/base/layout-section.component.html +++ b/frontend/app/components/layout/base/layout-section.component.html @@ -7,7 +7,7 @@
-
- - +
+
+ + +
diff --git a/frontend/app/components/layout/base/layout.component.ts b/frontend/app/components/layout/base/layout.component.ts index 082eb72b..c56493a1 100644 --- a/frontend/app/components/layout/base/layout.component.ts +++ b/frontend/app/components/layout/base/layout.component.ts @@ -436,24 +436,10 @@ export class ModulesLayoutComponent implements OnInit { // si on a reduit la fenetre // -> on remet à 0 - const overflowStyle = {}; - if (this.computedLayout.display != 'tabs') { - overflowStyle['overflow-y'] = 'scroll'; - } if (this.docHeightSave > docHeight || !this.docHeightSave) { setTimeout(() => { - this.computedLayout.style = { - ...(this.computedLayout.style || {}), - height: '200px', - ...overflowStyle, - }; - - this.layout.style = { - ...(this.layout.style || {}), - height: `200px`, - ...overflowStyle, - }; + this.setStyleHeight(200); }, 10); } @@ -462,20 +448,38 @@ export class ModulesLayoutComponent implements OnInit { setTimeout(() => { const parent = elem.closest('div.layout-item'); const height = parent?.clientHeight; - this.layout.style = { - ...(this.layout.style || {}), - height: `${height}px`, - ...overflowStyle, - }; - this.computedLayout.style = { - ...(this.computedLayout.style || {}), - height: `${height}px`, - ...overflowStyle, - }; + this.setStyleHeight(height); }, 200); } + setStyleHeight(height) { + let overflowStyle = {}; + if (this.computedLayout.display != 'tabs') { + overflowStyle['overflow-y'] = 'scroll'; + } + let style = { + height: `${height}px`, + ...overflowStyle, + }; + this.layout.style = { + ...(this.layout.style || {}), + ...style, + }; + this.computedLayout.style = { + ...(this.computedLayout.style || {}), + ...style, + }; + + if (this.computedLayout.display == 'tabs') { + let heightTab = height - 50; + let styleTab = { + height: `${heightTab}px` + } + this.computedLayout.style_tab = this.layout.style_tab = styleTab; + } + } + /** * Pour gérer les élément dont on souhaite que la taille correspondent à la taille de la fenètre 
*/ diff --git a/frontend/app/components/layout/object/layout-object-table.component.html b/frontend/app/components/layout/object/layout-object-table.component.html index 5f2117e3..fdef9bd0 100644 --- a/frontend/app/components/layout/object/layout-object-table.component.html +++ b/frontend/app/components/layout/object/layout-object-table.component.html @@ -1,5 +1,5 @@ -
+
diff --git a/frontend/app/components/layout/object/layout-object-table.component.ts b/frontend/app/components/layout/object/layout-object-table.component.ts index b37ca65f..6dcae1da 100644 --- a/frontend/app/components/layout/object/layout-object-table.component.ts +++ b/frontend/app/components/layout/object/layout-object-table.component.ts @@ -288,6 +288,7 @@ export class ModulesLayoutObjectTableComponent this.tableHeight = `${elem.clientHeight}px`; this.table.setHeight(elem.clientHeight); const pageSize = Math.floor((elem.clientHeight - 90) / 50); + console.log(pageSize, elem.clientHeight) if ( !this.computedLayout.page_size && From ab3acc19affd20671c1a3eaba3d5032f9c746583 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 20 Apr 2023 13:54:05 +0200 Subject: [PATCH 084/142] lint + clean print --- backend/gn_modulator/module/config/utils.py | 14 +------------- .../app/components/layout/base/layout.component.ts | 4 ++-- .../layout/object/layout-object-table.component.ts | 1 - 3 files changed, 3 insertions(+), 16 deletions(-) diff --git a/backend/gn_modulator/module/config/utils.py b/backend/gn_modulator/module/config/utils.py index 7a47b8a9..ec7c56a9 100644 --- a/backend/gn_modulator/module/config/utils.py +++ b/backend/gn_modulator/module/config/utils.py @@ -369,17 +369,11 @@ def process_layout_fields(cls, module_code): context = {"module_code": module_code} for page_code, page_definition in pages.items(): keys = cls.get_layout_keys(page_definition["layout"], config_params, context, keys) - # if page_code == 'diagnostic_create': - # keyss = keys.get('m_sipaf', {}).get('diagnostic', {}).get('read') - # print('get keys from page', module_code, page_code) - # for k in sorted(keyss): - # print(f'- {k}') set_global_cache(["keys"], keys) @classmethod def get_layout_keys(cls, layout, params, context, keys): - if isinstance(layout, list): for item in layout: cls.get_layout_keys(item, params, context, keys) @@ -396,16 +390,12 @@ def get_layout_keys(cls, layout, 
params, context, keys): context = {**context, "module_code": layout["module_code"]} if isinstance(layout, dict) and layout.get("type") in ["dict", "array"]: - data_keys = context.get("data_keys", []) data_keys.append(layout["key"]) context = {**context, "data_keys": data_keys} return cls.get_layout_keys(layout["items"], params, context, keys) - if ( - isinstance(layout, dict) - and layout.get("type") == "list_form" - ): + if isinstance(layout, dict) and layout.get("type") == "list_form": key_add = [] if layout.get("label_field_name"): key_add.append(layout["label_field_name"]) @@ -414,7 +404,6 @@ def get_layout_keys(cls, layout, params, context, keys): if layout.get("additional_fields"): key_add += layout["additional_fields"] if key_add: - print(context.get('module_code'), context.get('object_code'), layout.get("key")) cls.get_layout_keys( key_add, params, @@ -451,7 +440,6 @@ def get_layout_keys(cls, layout, params, context, keys): object_keys["write"].append(key) return keys - if layout.get("code"): template_params = {**params, **layout.get("template_params", {})} layout_from_code = SchemaMethods.get_layout_from_code( diff --git a/frontend/app/components/layout/base/layout.component.ts b/frontend/app/components/layout/base/layout.component.ts index c56493a1..56a1f16e 100644 --- a/frontend/app/components/layout/base/layout.component.ts +++ b/frontend/app/components/layout/base/layout.component.ts @@ -474,8 +474,8 @@ export class ModulesLayoutComponent implements OnInit { if (this.computedLayout.display == 'tabs') { let heightTab = height - 50; let styleTab = { - height: `${heightTab}px` - } + height: `${heightTab}px`, + }; this.computedLayout.style_tab = this.layout.style_tab = styleTab; } } diff --git a/frontend/app/components/layout/object/layout-object-table.component.ts b/frontend/app/components/layout/object/layout-object-table.component.ts index 6dcae1da..b37ca65f 100644 --- a/frontend/app/components/layout/object/layout-object-table.component.ts +++ 
b/frontend/app/components/layout/object/layout-object-table.component.ts @@ -288,7 +288,6 @@ export class ModulesLayoutObjectTableComponent this.tableHeight = `${elem.clientHeight}px`; this.table.setHeight(elem.clientHeight); const pageSize = Math.floor((elem.clientHeight - 90) / 50); - console.log(pageSize, elem.clientHeight) if ( !this.computedLayout.page_size && From cc364df7bb50f1f541b37a0625ddcdc2e0bf106a Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 20 Apr 2023 14:11:20 +0200 Subject: [PATCH 085/142] fix migration diag --- backend/gn_modulator/blueprint.py | 14 ++------------ backend/gn_modulator/utils/commons.py | 18 ++++++++++++++++++ .../90f6e5531f7c_diagnostic_m_sipaf.py | 2 +- doc/changelog.md | 14 ++++++++++++++ 4 files changed, 35 insertions(+), 13 deletions(-) diff --git a/backend/gn_modulator/blueprint.py b/backend/gn_modulator/blueprint.py index 5360fd8f..a3686b71 100644 --- a/backend/gn_modulator/blueprint.py +++ b/backend/gn_modulator/blueprint.py @@ -8,6 +8,7 @@ from gn_modulator import init_gn_modulator from gn_modulator.utils.api import process_dict_path from gn_modulator.utils.errors import get_errors, errors_txt +from gn_modulator.utils.commons import test_is_app_running from gn_modulator import MODULE_CODE from geonature.core.gn_permissions.decorators import check_cruved_scope from geonature.core.gn_commons.models.base import TModules @@ -33,19 +34,8 @@ def set_current_module(endpoint, values): ) -# On teste sys.argv pour éviter de charger les définitions -# si on est dans le cadre d'une commande -# On initialise dans le cadre d'une application lancée avec -# - gunicorn -# - celery -# - pytest -# - flask run -# - geonature run -test_init = any(sys.argv[0].endswith(x) for x in ["gunicorn", "celery", "pytest"]) or ( - len(sys.argv) >= 2 and sys.argv[1] == "run" -) -if test_init: +if test_is_app_running(): init_gn_modulator() if get_errors(): print(f"\n{errors_txt()}") diff --git a/backend/gn_modulator/utils/commons.py 
b/backend/gn_modulator/utils/commons.py index 6dd62587..0a3ddbed 100644 --- a/backend/gn_modulator/utils/commons.py +++ b/backend/gn_modulator/utils/commons.py @@ -4,6 +4,7 @@ import unicodedata +import sys from importlib import import_module @@ -79,3 +80,20 @@ def getAttr(obj, path, index=0): path_cur = path[index] cur = obj[path_cur] return getAttr(cur, path, index + 1) + +def test_is_app_running(): + """ + On teste sys.argv pour éviter de charger les définitions + si on est dans le cadre d'une commande + On initialise dans le cadre d'une application lancée avec + - gunicorn + - celery + - pytest + - flask run + - geonature run + """ + + + return any(sys.argv[0].endswith(x) for x in ["gunicorn", "celery", "pytest"]) or ( + len(sys.argv) >= 2 and sys.argv[1] == "run" + ) diff --git a/contrib/m_sipaf/backend/m_sipaf/migrations/versions/90f6e5531f7c_diagnostic_m_sipaf.py b/contrib/m_sipaf/backend/m_sipaf/migrations/versions/90f6e5531f7c_diagnostic_m_sipaf.py index 3743929e..d356ebc5 100644 --- a/contrib/m_sipaf/backend/m_sipaf/migrations/versions/90f6e5531f7c_diagnostic_m_sipaf.py +++ b/contrib/m_sipaf/backend/m_sipaf/migrations/versions/90f6e5531f7c_diagnostic_m_sipaf.py @@ -29,7 +29,7 @@ def upgrade(): def downgrade(): if_exists = "" - # if_exists = "IF EXISTS" + if_exists = "IF EXISTS" op.execute( f""" DROP TABLE {if_exists} pr_sipaf.cor_diag_nomenclature_obstacle; diff --git a/doc/changelog.md b/doc/changelog.md index 503430af..4907cfaf 100644 --- a/doc/changelog.md +++ b/doc/changelog.md @@ -26,6 +26,20 @@ - [ ] amélioration du composant list_form - [ ] ajout diagnostic sipaf +**⚠️ Notes de version** + +- Mettre à jour la base + +``` +geonature db autoupgrade +``` + +- Mettre à jour les `features` de `m_sipaf` + +``` +geonature modulator features m_sipaf.pf +``` + ## 1.0.5 (13-03-2023) From fa50c83da210d891d37a02d60114ceb7549f4a97 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 20 Apr 2023 14:18:32 +0200 Subject: [PATCH 086/142] lint --- 
backend/gn_modulator/blueprint.py | 1 - backend/gn_modulator/utils/commons.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/backend/gn_modulator/blueprint.py b/backend/gn_modulator/blueprint.py index a3686b71..08ceba5c 100644 --- a/backend/gn_modulator/blueprint.py +++ b/backend/gn_modulator/blueprint.py @@ -34,7 +34,6 @@ def set_current_module(endpoint, values): ) - if test_is_app_running(): init_gn_modulator() if get_errors(): diff --git a/backend/gn_modulator/utils/commons.py b/backend/gn_modulator/utils/commons.py index 0a3ddbed..9087c431 100644 --- a/backend/gn_modulator/utils/commons.py +++ b/backend/gn_modulator/utils/commons.py @@ -81,6 +81,7 @@ def getAttr(obj, path, index=0): cur = obj[path_cur] return getAttr(cur, path, index + 1) + def test_is_app_running(): """ On teste sys.argv pour éviter de charger les définitions @@ -93,7 +94,6 @@ def test_is_app_running(): - geonature run """ - return any(sys.argv[0].endswith(x) for x in ["gunicorn", "celery", "pytest"]) or ( - len(sys.argv) >= 2 and sys.argv[1] == "run" + len(sys.argv) >= 2 and sys.argv[1] == "run" ) From f5855b3521ef791259e41c3bf3b0ec98bfe88001 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 20 Apr 2023 14:23:49 +0200 Subject: [PATCH 087/142] ne plus cacher loption update --- contrib/m_sipaf/config/layouts/m_sipaf.site_list.layout.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/contrib/m_sipaf/config/layouts/m_sipaf.site_list.layout.yml b/contrib/m_sipaf/config/layouts/m_sipaf.site_list.layout.yml index e3ca316f..176cfe67 100644 --- a/contrib/m_sipaf/config/layouts/m_sipaf.site_list.layout.yml +++ b/contrib/m_sipaf/config/layouts/m_sipaf.site_list.layout.yml @@ -68,8 +68,8 @@ layout: # width: 1200px items: type: import - hidden_options: - - enable_update + # hidden_options: + # - enable_update flex: "0" - type: object From c528a0673cf764c06b8341e03a3ea44fe2e46e8f Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 20 Apr 2023 
17:12:42 +0200 Subject: [PATCH 088/142] modal delete --- .../nomenclature/ref_nom.nomenclature.schema.yml | 1 + config/layouts/tests/test_list_form.layout.yml | 1 - .../layouts/utils/utils.buttons_form.layout.yml | 16 +--------------- .../layouts/utils/utils.modal_delete.layout.yml | 10 +++++----- 4 files changed, 7 insertions(+), 21 deletions(-) diff --git a/config/definitions/utils/nomenclature/ref_nom.nomenclature.schema.yml b/config/definitions/utils/nomenclature/ref_nom.nomenclature.schema.yml index bec3c5d1..c95e8d48 100644 --- a/config/definitions/utils/nomenclature/ref_nom.nomenclature.schema.yml +++ b/config/definitions/utils/nomenclature/ref_nom.nomenclature.schema.yml @@ -26,5 +26,6 @@ meta: - nomenclature_type.mnemonique - nomenclature_type.label_fr label_field_name: label_fr + title_field_name: definition_fr relations: - nomenclature_type diff --git a/config/layouts/tests/test_list_form.layout.yml b/config/layouts/tests/test_list_form.layout.yml index 82ed3241..72cda52f 100644 --- a/config/layouts/tests/test_list_form.layout.yml +++ b/config/layouts/tests/test_list_form.layout.yml @@ -77,7 +77,6 @@ layout: flex: 1 - display: tabs flex: 2 - selected_tab: advance items: - label: basique items: *set_basique diff --git a/config/layouts/utils/utils.buttons_form.layout.yml b/config/layouts/utils/utils.buttons_form.layout.yml index ffd08971..e3f3a0fa 100644 --- a/config/layouts/utils/utils.buttons_form.layout.yml +++ b/config/layouts/utils/utils.buttons_form.layout.yml @@ -31,18 +31,4 @@ layout: action: type: modal modal_name: delete - - type: modal - modal_name: delete - title: Confirmer la suppression de l'élément - direction: row - items: - - type: button - title: Suppression - action: delete - icon: delete - color: warn - - type: button - title: Annuler - action: close - icon: refresh - color: primary + - code: utils.modal_delete diff --git a/config/layouts/utils/utils.modal_delete.layout.yml b/config/layouts/utils/utils.modal_delete.layout.yml index 
1e422e5b..56645ec5 100644 --- a/config/layouts/utils/utils.modal_delete.layout.yml +++ b/config/layouts/utils/utils.modal_delete.layout.yml @@ -8,13 +8,13 @@ layout: title: __f__`Confirmer la suppression ${o.du_label(x)} ${o.data_label(x)}` direction: row items: - - type: button - title: Suppression - action: delete - icon: delete - color: warn - type: button title: Annuler action: close icon: refresh color: primary + - type: button + title: Suppression + action: delete + icon: delete + color: warn From b1cf08b1c8a49ccac5472cf65873d9085cb92856 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 20 Apr 2023 17:14:01 +0200 Subject: [PATCH 089/142] lazy loading tabs fix #38 --- .../layout/base/layout-section.component.html | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/frontend/app/components/layout/base/layout-section.component.html b/frontend/app/components/layout/base/layout-section.component.html index 4375e1c3..dc074cb5 100644 --- a/frontend/app/components/layout/base/layout-section.component.html +++ b/frontend/app/components/layout/base/layout-section.component.html @@ -46,20 +46,20 @@ class="layout-section" #tabGroup > -
-
- - + +
+
+ + +
-
+ From 96854d3f33035a546b6e71867d6ddf3419dc35e8 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 25 Apr 2023 15:06:25 +0200 Subject: [PATCH 090/142] git fix update & is_new_data --- backend/gn_modulator/module/config/utils.py | 113 ++++++++++-------- .../gn_modulator/routes/utils/repository.py | 3 + .../gn_modulator/schema/repositories/base.py | 18 ++- .../gn_modulator/schema/repositories/utils.py | 102 ++++------------ backend/gn_modulator/schema/serializers.py | 18 ++- 5 files changed, 120 insertions(+), 134 deletions(-) diff --git a/backend/gn_modulator/module/config/utils.py b/backend/gn_modulator/module/config/utils.py index ec7c56a9..be3ddf37 100644 --- a/backend/gn_modulator/module/config/utils.py +++ b/backend/gn_modulator/module/config/utils.py @@ -359,7 +359,6 @@ def add_basic_fields(cls, module_code, object_code): @classmethod def process_layout_fields(cls, module_code): - keys = get_global_cache(["keys"], {}) module_config = cls.module_config(module_code) pages = module_config.get("pages", {}) @@ -367,35 +366,42 @@ def process_layout_fields(cls, module_code): config_defaults = module_config.get("config_defaults", {}) config_params = {**config_defaults, **config_params} context = {"module_code": module_code} - for page_code, page_definition in pages.items(): - keys = cls.get_layout_keys(page_definition["layout"], config_params, context, keys) - - set_global_cache(["keys"], keys) + for page_code in pages: + cls.get_layout_keys(pages[page_code]["layout"], config_params, context) @classmethod - def get_layout_keys(cls, layout, params, context, keys): + def get_layout_keys(cls, layout, params, context): + # if isinstance(layout, list): for item in layout: - cls.get_layout_keys(item, params, context, keys) - return keys + cls.get_layout_keys(item, params, context) + return + + # ajout d'une clé + if isinstance(layout, str): + return cls.add_key(context, layout) + + if layout.get("key") and layout.get("type") not in ["dict", "array"]: + 
cls.add_key(context, layout["key"]) - if isinstance(layout, dict): - if layout.get("object_code"): - context = {**context, "object_code": layout["object_code"]} + if layout.get("object_code"): + context = {**context, "object_code": layout["object_code"]} - if layout.get("type") == "form": - context = {**context, "form": True} + if layout.get("type") == "form": + context = {**context, "form": True} - if layout.get("module_code"): - context = {**context, "module_code": layout["module_code"]} + if layout.get("module_code"): + context = {**context, "module_code": layout["module_code"]} + # traitement dict array if isinstance(layout, dict) and layout.get("type") in ["dict", "array"]: data_keys = context.get("data_keys", []) data_keys.append(layout["key"]) context = {**context, "data_keys": data_keys} - return cls.get_layout_keys(layout["items"], params, context, keys) + return cls.get_layout_keys(layout["items"], params, context) - if isinstance(layout, dict) and layout.get("type") == "list_form": + # traitement list_form + if layout.get("type") == "list_form": key_add = [] if layout.get("label_field_name"): key_add.append(layout["label_field_name"]) @@ -407,49 +413,62 @@ def get_layout_keys(cls, layout, params, context, keys): cls.get_layout_keys( key_add, params, - context, - keys, + {**context, "data_keys": [*context.get("data_keys", []), layout["key"]]}, ) - if layout.get("return_object") and layout.get("additional_fields"): + if ( + layout.get("return_object") + and layout.get("additional_fields") + and not context.get("form") + ): additional_keys = list( map(lambda x: f"{layout['key']}.{x}", layout["additional_fields"]) ) - cls.get_layout_keys(additional_keys, params, context, keys) - - if isinstance(layout, str) or layout.get("key"): - key = layout if isinstance(layout, str) else layout["key"] - if context.get("data_keys"): - key = f"{''.join(context['data_keys'])}.{key}" - module_keys = keys[context["module_code"]] = keys.get(context["module_code"], {}) - 
object_keys = module_keys[context["object_code"]] = module_keys.get( - context["object_code"], {"read": [], "write": []} - ) - - object_config = cls.object_config(context["module_code"], context["object_code"]) - schema_code = object_config["schema_code"] - - sm = SchemaMethods(schema_code) - - if not sm.has_property(key): - # raise error ? - # print(f"pb ? {sm} has no {key}") - return keys - if key not in object_keys["read"]: - object_keys["read"].append(key) - if context.get("form"): - object_keys["write"].append(key) - return keys + cls.get_layout_keys(additional_keys, params, context) if layout.get("code"): template_params = {**params, **layout.get("template_params", {})} layout_from_code = SchemaMethods.get_layout_from_code( layout.get("code"), template_params ) - return cls.get_layout_keys(layout_from_code, params, context, keys) + return cls.get_layout_keys(layout_from_code, params, context) if layout.get("items"): - return cls.get_layout_keys(layout.get("items"), params, context, keys) + return cls.get_layout_keys(layout.get("items"), params, context) + + @classmethod + def add_key(cls, context, key): + keys = get_global_cache(["keys"]) + if context.get("data_keys"): + key = f"{''.join(context['data_keys'])}.{key}" + + module_keys = keys[context["module_code"]] = keys.get(context["module_code"], {}) + object_keys = module_keys[context["object_code"]] = module_keys.get( + context["object_code"], {"read": [], "write": []} + ) + + object_config = cls.object_config(context["module_code"], context["object_code"]) + schema_code = object_config["schema_code"] + + sm = SchemaMethods(schema_code) + + if not sm.has_property(key): + # raise error ? + print(f"pb ? 
{sm} has no {key}") + return keys + # ajout en lecture + if key not in object_keys["read"]: + object_keys["read"].append(key) + + # ajout en ecriture + if context.get("form"): + # key si relationship + write_key = key + if sm.is_relationship(key): + rel = SchemaMethods(sm.property(key)["schema_code"]) + write_key = f"{key}.{rel.pk_field_name()}" + if write_key not in object_keys["write"]: + object_keys["write"].append(write_key) return keys @classmethod diff --git a/backend/gn_modulator/routes/utils/repository.py b/backend/gn_modulator/routes/utils/repository.py index 9fb8db48..2e2ed230 100644 --- a/backend/gn_modulator/routes/utils/repository.py +++ b/backend/gn_modulator/routes/utils/repository.py @@ -119,6 +119,9 @@ def patch_rest(module_code, object_code, value): except sm.errors.SchemaUnsufficientCruvedRigth as e: return "Erreur Cruved : {}".format(str(e)), 403 + except Exception as e: + print(e) + raise (e) return sm.serialize(m, fields=params.get("fields"), as_geojson=params.get("as_geojson")) diff --git a/backend/gn_modulator/schema/repositories/base.py b/backend/gn_modulator/schema/repositories/base.py index 46d3e860..78c8c41b 100644 --- a/backend/gn_modulator/schema/repositories/base.py +++ b/backend/gn_modulator/schema/repositories/base.py @@ -108,11 +108,22 @@ def is_new_data(self, model, data): if model is None and data is not None: return True + # data_fields = self.get_data_fields(data) + # data_db = m = self.serialize(model, fields=fields)[key] + if isinstance(data, dict) and not isinstance(model, dict): for key, data_value in data.items(): if not hasattr(model, key): - return True - m = self.serialize(model, fields=[key])[key] + continue + fields = [key] + if self.is_relation_1_n(key) or self.is_relation_n_n(key): + fields = [] + for item in data_value: + for k in item: + kk = f"{key}.{k}" + if kk not in fields: + fields.append(kk) + m = self.serialize(model, fields=fields)[key] if self.is_new_data(m, data_value): return True return False @@ -165,7 
+176,6 @@ def update_row( # TODO deserialiser """ - self.validate_data(data, check_required=False) m = self.get_row( @@ -178,10 +188,10 @@ def update_row( ).one() if not self.is_new_data(m, data): + print("not new") return m, False db.session.flush() - self.unserialize(m, data, authorized_write_fields) if commit: diff --git a/backend/gn_modulator/schema/repositories/utils.py b/backend/gn_modulator/schema/repositories/utils.py index 8ed324b0..abb83faf 100644 --- a/backend/gn_modulator/schema/repositories/utils.py +++ b/backend/gn_modulator/schema/repositories/utils.py @@ -24,22 +24,24 @@ def get_query_cache(self, query, key): return None return query._cache.get(key) - def process_custom_getattr_res(self, res, query, only_fields=[]): + def process_custom_getattr_res(self, res, query, field_name, index, only_fields=[]): # si c'est une propriété - if not res["is_relationship"]: + fields = field_name.split(".") + is_relationship = self.is_val_relationship(res["val"]) + is_last_field = index == len(fields) - 1 + + if not is_relationship: # on ne peut pas avoir de field apres une propriété - if not res["is_last_field"]: - raise Exception( - f"pb fields {res['field_name']}, il ne devrait plus rester de champs" - ) + if not is_last_field: + raise Exception(f"pb fields {field_name}, il ne devrait plus rester de champs") return res["val"], query - if not res["is_last_field"]: + if not is_last_field: return self.custom_getattr( res["relation_alias"], - res["field_name"], - index=res["index"] + 1, + field_name, + index=index + 1, query=query, only_fields=only_fields, ) @@ -92,6 +94,9 @@ def eager_load_only(self, field_name, query, only_fields, index): return query + def is_val_relationship(self, val): + return hasattr(val, "mapper") and hasattr(val.mapper, "entity") + def custom_getattr( self, Model, field_name, query=None, only_fields="", index=0, condition=None ): @@ -110,25 +115,25 @@ def custom_getattr( # récupération depuis le cache associé à la query res = 
self.get_query_cache(query, cache_key) if res: - return self.process_custom_getattr_res(res, query, only_fields) + return self.process_custom_getattr_res(res, query, field_name, index, only_fields) # si non en cache # on le calcule # dictionnaire de résultat pour le cache res = { - "field_name": field_name, - "index": index, - "is_last_field": is_last_field, + # "field_name": field_name, + # "index": index, + # "is_last_field": is_last_field, "val": getattr(Model, current_field), } - res["is_relationship"] = hasattr(res["val"], "mapper") and hasattr( - res["val"].mapper, "entity" - ) + # res["is_relationship"] = hasattr(res["val"], "mapper") and hasattr( + # res["val"].mapper, "entity" + # ) # si c'est une propriété - if res["is_relationship"]: + if self.is_val_relationship(res["val"]): res["relation_model"] = res["val"].mapper.entity res["relation_alias"] = orm.aliased(res["relation_model"]) res["val_of_type"] = res["val"].of_type(res["relation_alias"]) @@ -138,70 +143,11 @@ def custom_getattr( query = self.set_query_cache(query, cache_key, res) # chargement des champs si is last field - if res["is_relationship"] and is_last_field and only_fields: + if self.is_val_relationship(res["val"]) and is_last_field and only_fields: query = self.eager_load_only(field_name, query, only_fields, index) # mise en cache et retour - return self.process_custom_getattr_res(res, query, only_fields) - - def custom_getattr2(self, Model, field_name, query=None, condition=None, fields=None): - """ - - obselete - - getattr pour un modèle, étendu pour pouvoir traiter les 'rel.field_name' - - on utilise des alias pour pouvoir gérer les cas plus compliqués - - query pour les filtres dans les api - condition pour les filtres dans les column_properties - - exemple: - - on a deux relations de type nomenclature - et l'on souhaite filtrer la requête par rapport aux deux - - TODO gerer plusieurs '.' 
- exemple - http://localhost:8000/modules/schemas.sipaf.pf/rest/?page=1&page_size=13&sorters=[{%22field%22:%22id_pf%22,%22dir%22:%22asc%22}]&filters=[{%22field%22:%22areas.type.coe_type%22,%22type%22:%22=%22,%22value%22:%22DEP%22}]&fields=[%22id_pf%22,%22nom_pf%22,%22ownership%22] - """ - - if "." not in field_name: - # cas simple - model_attribute = getattr(Model, field_name) - - return model_attribute, query - - else: - # cas avec un ou plusieurs '.', recursif - - field_names = field_name.split(".") - - rel = field_names[0] - relationship = getattr(Model, rel) - - col = ".".join(field_names[1:]) - - # pour recupérer le modèle correspondant à la relation - relation_entity = relationship.mapper.entity - - if query is not None and condition is None: - # on fait un alias - relation_entity = orm.aliased(relationship.mapper.entity) - # relation_entity = relationship.mapper.entity - - if fields: - query = query.join(relation_entity, isouter=True) - else: - query = query.join(relation_entity, isouter=True) - # query = query.join(relation_entity, relationship, isouter=True) - # if fields: - # query = query.options(orm.joinedload(relationship).load_only(*fields)) - elif condition: - # TODO gérer les alias si filtres un peu plus tordus ?? 
- query = and_(query, relationship._query_clause_element()) - - return self.custom_getattr(relation_entity, col, query, condition) + return self.process_custom_getattr_res(res, query, field_name, index, only_fields) def get_sorters(self, Model, sort, query): order_bys = [] diff --git a/backend/gn_modulator/schema/serializers.py b/backend/gn_modulator/schema/serializers.py index e566e0ea..009b612c 100644 --- a/backend/gn_modulator/schema/serializers.py +++ b/backend/gn_modulator/schema/serializers.py @@ -195,10 +195,17 @@ def MarshmallowSchema(self, force=False): def pre_load_make_object(self_marshmallow, data, **kwargs): for key in self.pk_field_names(): if key in data and data[key] is None: - print("\nmarsh remove pk null\n", key) data.pop(key, None) - # # pour les champs null avec default defini dans les proprietés + # enleve les clés si non dans only + for k in list(data.keys()): + if self_marshmallow.only and k not in self_marshmallow.only: + print(self, "pop not in only", k) + data.pop(k) + + # # pour les champs null avec default d + # + # efini dans les proprietés # for key, column_def in self.columns().items(): # if key in data and data[key] is None and column_def.get('default'): # data.pop(key, None) @@ -422,16 +429,17 @@ def as_geojson(self, data, geometry_field_name=None): geometry = data.pop(geometry_field_name) return {"type": "Feature", "geometry": geometry, "properties": data} - def unserialize(self, m, data, autorized_write_fields=None): + def unserialize(self, m, data, authorized_write_fields=None): """ unserialize using marshmallow """ kwargs = {} - if autorized_write_fields: - kwargs = {"only": autorized_write_fields, "unknown": EXCLUDE} + if authorized_write_fields: + kwargs = {"only": authorized_write_fields, "unknown": EXCLUDE} MS = self.MarshmallowSchema() ms = MS(**kwargs) + ms.load(data, instance=m) @classmethod From 38ad71934047ed519770502e05853dad9becc065 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 25 Apr 2023 15:15:51 +0200 
Subject: [PATCH 091/142] fix table css --- frontend/app/components/base/base.scss | 3 ++- .../app/components/layout/form/list-form.component.scss | 4 ++-- .../layout/object/layout-object-table.component.ts | 1 + frontend/app/services/list-form.service.ts | 1 + frontend/app/services/object.service.ts | 7 ++++--- 5 files changed, 10 insertions(+), 6 deletions(-) diff --git a/frontend/app/components/base/base.scss b/frontend/app/components/base/base.scss index 729574bf..baaada33 100644 --- a/frontend/app/components/base/base.scss +++ b/frontend/app/components/base/base.scss @@ -17,7 +17,8 @@ .layout-tab-out { height: 100%; - padding: 10px; + padding-bottom: 4px; + padding-top: 4px; } .layout-tab-in { diff --git a/frontend/app/components/layout/form/list-form.component.scss b/frontend/app/components/layout/form/list-form.component.scss index 186361d8..e6ebd72e 100644 --- a/frontend/app/components/layout/form/list-form.component.scss +++ b/frontend/app/components/layout/form/list-form.component.scss @@ -1,10 +1,10 @@ .reset-list-form-button { - font-size: 1.2em; + // font-size: 1.2em; color: gray; } :host ::ng-deep .mdc-text-field--disabled .reset-list-form-button { - font-size: 1.2em; + // font-size: 1.2em; color: lightgrey; } diff --git a/frontend/app/components/layout/object/layout-object-table.component.ts b/frontend/app/components/layout/object/layout-object-table.component.ts index b37ca65f..a09ad445 100644 --- a/frontend/app/components/layout/object/layout-object-table.component.ts +++ b/frontend/app/components/layout/object/layout-object-table.component.ts @@ -286,6 +286,7 @@ export class ModulesLayoutObjectTableComponent return; } this.tableHeight = `${elem.clientHeight}px`; + this.tableHeight = `${elem.clientHeight}px`; this.table.setHeight(elem.clientHeight); const pageSize = Math.floor((elem.clientHeight - 90) / 50); diff --git a/frontend/app/services/list-form.service.ts b/frontend/app/services/list-form.service.ts index a2f23b83..f4d3da5c 100644 --- 
a/frontend/app/services/list-form.service.ts +++ b/frontend/app/services/list-form.service.ts @@ -154,6 +154,7 @@ export class ListFormService { options.object_code = 'ref_nom.nomenclature'; schemaFilters.push(`nomenclature_type.mnemonique = ${options.nomenclature_type}`); options.module_code = this._mConfig.MODULE_CODE; + options.additional_fields = options.additional_fields || []; options.cache = true; } if (options.area_type) { diff --git a/frontend/app/services/object.service.ts b/frontend/app/services/object.service.ts index d370413c..460fe488 100644 --- a/frontend/app/services/object.service.ts +++ b/frontend/app/services/object.service.ts @@ -229,9 +229,10 @@ export class ModulesObjectService { const nbTotal = objectConfig.nb_total; const nbFiltered = objectConfig.nb_filtered; const labels = this.labels({ context }); - const objectTabLabel = nbTotal - ? `${utils.capitalize(labels)} (${nbFiltered}/${nbTotal})` - : `${utils.capitalize(labels)} (0)`; + const objectTabLabel = + nbFiltered == nbTotal + ? 
`${utils.capitalize(labels)} (${nbTotal || 0})` + : `${utils.capitalize(labels)} (${nbFiltered}/${nbTotal})`; return objectTabLabel; } From 7680daf9ff433b932697a1393569736ecd3ca6cd Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 25 Apr 2023 15:16:27 +0200 Subject: [PATCH 092/142] up diagnostics --- .../migrations/data/schema_diagnostic.sql | 31 ++++-- contrib/m_sipaf/backend/m_sipaf/models.py | 36 ++++--- contrib/m_sipaf/config/config.yml | 25 +---- .../definitions/m_sipaf.diag.schema.yml | 18 +++- .../m_sipaf.diagnostic_details.layout.yml | 36 ++++++- .../m_sipaf.diagnostic_edit.layout.yml | 52 ++++----- .../layouts/m_sipaf.site_list.layout.yml | 102 ++++++++++-------- 7 files changed, 181 insertions(+), 119 deletions(-) diff --git a/contrib/m_sipaf/backend/m_sipaf/migrations/data/schema_diagnostic.sql b/contrib/m_sipaf/backend/m_sipaf/migrations/data/schema_diagnostic.sql index 57a86076..af84c84a 100644 --- a/contrib/m_sipaf/backend/m_sipaf/migrations/data/schema_diagnostic.sql +++ b/contrib/m_sipaf/backend/m_sipaf/migrations/data/schema_diagnostic.sql @@ -15,8 +15,10 @@ CREATE TABLE pr_sipaf.t_diagnostics ( commentaire_amenagement VARCHAR, id_nomenclature_amenagement_entretient INTEGER, id_nomenclature_franchissabilite INTEGER, - id_nomenclature_interet_faune INTEGER, - amenagement_faire VARCHAR + id_nomenclature_interet_petite_faune INTEGER, + id_nomenclature_interet_grande_faune INTEGER, + amenagement_faire VARCHAR, + commentaire_synthese VARCHAR ); ALTER TABLE @@ -109,8 +111,15 @@ ADD ALTER TABLE pr_sipaf.t_diagnostics ADD - CONSTRAINT fk_pr_sipaf_t_d_id_nomenclature_interet_faune FOREIGN KEY ( - id_nomenclature_interet_faune + CONSTRAINT fk_pr_sipaf_t_d_id_nomenclature_interet_petite_faune FOREIGN KEY ( + id_nomenclature_interet_petite_faune + ) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE + pr_sipaf.t_diagnostics +ADD + CONSTRAINT 
fk_pr_sipaf_t_d_id_nomenclature_interet_grande_faune FOREIGN KEY ( + id_nomenclature_interet_grande_faune ) REFERENCES ref_nomenclatures.t_nomenclatures (id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; @@ -298,9 +307,19 @@ ADD ALTER TABLE pr_sipaf.t_diagnostics ADD - CONSTRAINT check_nom_type_diag_interet_faune CHECK ( + CONSTRAINT check_nom_type_diag_interet_petite_faune CHECK ( + ref_nomenclatures.check_nomenclature_type_by_mnemonique( + id_nomenclature_interet_petite_faune, + 'PF_DIAG_INTERET_FAUNE' + ) + ) NOT VALID; + +ALTER TABLE + pr_sipaf.t_diagnostics +ADD + CONSTRAINT check_nom_type_diag_interet_grande_faune CHECK ( ref_nomenclatures.check_nomenclature_type_by_mnemonique( - id_nomenclature_interet_faune, + id_nomenclature_interet_grande_faune, 'PF_DIAG_INTERET_FAUNE' ) ) NOT VALID; diff --git a/contrib/m_sipaf/backend/m_sipaf/models.py b/contrib/m_sipaf/backend/m_sipaf/models.py index 38120bec..912ea0a1 100644 --- a/contrib/m_sipaf/backend/m_sipaf/models.py +++ b/contrib/m_sipaf/backend/m_sipaf/models.py @@ -2,7 +2,7 @@ from datetime import datetime from sqlalchemy.dialects.postgresql import UUID, JSONB -from sqlalchemy.orm import column_property +from sqlalchemy.orm import column_property, backref from sqlalchemy import ( func, literal, @@ -21,10 +21,6 @@ from pypnnomenclature.models import TNomenclatures from ref_geo.models import LAreas, LLinears, BibAreasTypes, BibLinearsTypes -# class TActors(db.Model): -# __tablename__ = "t_actors" -# __table_args__ = {"schema": "pr_sipaf"} - class CorPfNomenclatureOuvrageType(db.Model): __tablename__ = "cor_pf_nomenclature_ouvrage_type" @@ -348,7 +344,9 @@ class Diagnostic(db.Model): ), nullable=False, ) - passage_faune = db.relationship(PassageFaune, backref="diagnostics") + passage_faune = db.relationship( + PassageFaune, backref=backref("diagnostics", cascade="all,delete,delete-orphan") + ) id_role = db.Column(db.Integer, db.ForeignKey("utilisateurs.t_roles.id_role")) role = db.relationship(User) @@ 
-403,11 +401,18 @@ class Diagnostic(db.Model): commentaire_amenagement = db.Column(db.Unicode) # synthese - id_nomenclature_interet_faune = db.Column( + id_nomenclature_interet_petite_faune = db.Column( db.Integer, db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature") ) - nomenclature_interet_faune = db.relationship( - TNomenclatures, foreign_keys=[id_nomenclature_interet_faune] + nomenclature_interet_petite_faune = db.relationship( + TNomenclatures, foreign_keys=[id_nomenclature_interet_petite_faune] + ) + + id_nomenclature_interet_grande_faune = db.Column( + db.Integer, db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature") + ) + nomenclature_interet_grande_faune = db.relationship( + TNomenclatures, foreign_keys=[id_nomenclature_interet_grande_faune] ) id_nomenclature_franchissabilite = db.Column( @@ -418,6 +423,7 @@ class Diagnostic(db.Model): ) amenagement_faire = db.Column(db.Unicode) + commentaire_synthese = db.Column(db.Unicode) class DiagnosticCloture(db.Model): @@ -442,7 +448,9 @@ class DiagnosticCloture(db.Model): clotures_guidage_type_autre = db.Column(db.Unicode) clotures_guidage_etat_autre = db.Column(db.Unicode) - diagnostic = db.relationship(Diagnostic, backref="clotures") + diagnostic = db.relationship( + Diagnostic, backref=backref("clotures", cascade="all,delete,delete-orphan") + ) nomenclature_clotures_guidage_etat = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_clotures_guidage_etat] @@ -471,7 +479,9 @@ class DiagnosticVegetationTablier(db.Model): nullable=False, ) - diagnostic = db.relationship(Diagnostic, backref="vegetation_tablier") + diagnostic = db.relationship( + Diagnostic, backref=backref("vegetation_tablier", cascade="all,delete,delete-orphan") + ) nomenclature_vegetation_type = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_vegetation_type] @@ -501,7 +511,9 @@ class DiagnosticVegetationDebouche(db.Model): nullable=False, ) - diagnostic = db.relationship(Diagnostic, 
backref="vegetation_debouche") + diagnostic = db.relationship( + Diagnostic, backref=backref("vegetation_debouche", cascade="all,delete,delete-orphan") + ) nomenclature_vegetation_type = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_vegetation_type] diff --git a/contrib/m_sipaf/config/config.yml b/contrib/m_sipaf/config/config.yml index 8c192174..11bfdea9 100644 --- a/contrib/m_sipaf/config/config.yml +++ b/contrib/m_sipaf/config/config.yml @@ -353,23 +353,8 @@ site_form_fields: -diagnostic_details_fields: - overflow: true - items: - - passage_faune.code_passage_faune - - date_diagnostic - - organisme.nom_organisme - - role.nom_complet - - direction: row - items: - - nomenclatures_diagnostic_obstacle.label_fr - - key: obstacle_autre - hidden: __f__!data.nomenclatures_diagnostic_obstacle?.some(n => n.cd_nomenclature == 'AUT') - - direction: row - items: - - nomenclatures_diagnostic_perturbation.label_fr - - key: perturbation_autre - hidden: __f__!data.nomenclatures_diagnostic_perturbation?.some(n => n.cd_nomenclature == 'AUT') - - nomenclatures_diagnostic_ouvrage_hydrau_etat_berge.label_fr - - nomenclature_diagnostic_ouvrage_hydrau_raccordement_banquette.label_fr - - nomenclatures_diagnostic_ouvrage_hydrau_dim.label_fr +diagnostic_table_fields: + - date_diagnostic + - passage_faune.code_passage_faune + - organisme.nom_organisme + - role.nom_complet diff --git a/contrib/m_sipaf/config/definitions/m_sipaf.diag.schema.yml b/contrib/m_sipaf/config/definitions/m_sipaf.diag.schema.yml index 3940a975..56acfbe0 100644 --- a/contrib/m_sipaf/config/definitions/m_sipaf.diag.schema.yml +++ b/contrib/m_sipaf/config/definitions/m_sipaf.diag.schema.yml @@ -17,6 +17,8 @@ meta: unique_in_db: true properties: # commons + id_passage_faune: + title: Passage faune date_diagnostic: title: Date description: Date d'établissemnt du diagnostic de fonctionalité @@ -69,14 +71,24 @@ properties: vegetation_debouche: title: Vegetation (débouchés) description: Végétation présente 
sur aux débouchés de l'ouvrage + commentaire_amenagement: + title: Commentaire (aménagement) + description: champs libre pour information complémentaire indicatives # synthese id_nomenclature_franchissabilite: title: Franchissibilité description: Estimation de la franchissabilité de l'ouvrage pour les animaux - id_nomenclature_interet_faune: - title: Intérêt - description: Intérêt pour les espèces cibles + id_nomenclature_interet_petite_faune: + title: Intérêt petite_faune + description: Intérêt pour la petite_faune + id_nomenclature_interet_grande_faune: + title: Intérêt grande_faune + description: Intérêt pour la grande_faune + commentaire_synthese: + title: Commentaire (synthèse) + description: champs libre pour information complémentaire indicatives + amenagement_faire: title: Aménagements à faire description: Détails aménagements ou autres mesures à réaliser pour rendre l'ouvrage plus fonctionel diff --git a/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_details.layout.yml b/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_details.layout.yml index 8062b711..75ec6933 100644 --- a/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_details.layout.yml +++ b/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_details.layout.yml @@ -3,10 +3,44 @@ code: m_sipaf.diagnostic_details title: page details diagnostic description: page details diagnostic +aliases: +- &diag_commons + - hidden: true + items: + - id_diagnostic + - key: id_passage_faune + default: __f__context.params.id_passage_faune + - display: row + items: + - passage_faune.code_passage_faune + - passage_faune.nom_usuel_passage_faune + - date_diagnostic + - organisme.nom_organisme + - role.nom_complet + - commentaire_diagnostic + +- &diag_perturbation_obstacle +- &diag_amenagement +- &diag_synthese + + layout: height_auto: true items: + - type: breadcrumbs + flex: "0" - code: utils.object_details template_params: object_code: diagnostic - layout: __DIAGNOSTIC_DETAILS_FIELDS__ + layout: + display: tabs + 
overflow: true + items: + - label: Champs communs + items: *diag_commons + - label: Perturbations / Obstacles + items: *diag_perturbation_obstacle + - label: Aménagement + items: *diag_amenagement + - label: Synthese + items: *diag_synthese diff --git a/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_edit.layout.yml b/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_edit.layout.yml index 8d6f5650..5ed78476 100644 --- a/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_edit.layout.yml +++ b/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_edit.layout.yml @@ -20,8 +20,7 @@ aliases: - &diag_perturbation_obstacle items: - - display: fieldset - title: Obstacles + - title: Obstacles direction: row items: - key: nomenclatures_diagnostic_obstacle @@ -33,8 +32,7 @@ aliases: - key: obstacle_autre disabled: __f__!data.nomenclatures_diagnostic_obstacle?.some(n => n.cd_nomenclature == 'AUT') required: __f__data.nomenclatures_diagnostic_obstacle?.some(n => n.cd_nomenclature == 'AUT') - - display: fieldset - title: perturbation + - title: Perturbation direction: row items: - key: nomenclatures_diagnostic_perturbation @@ -46,8 +44,7 @@ aliases: - key: perturbation_autre disabled: __f__!data.nomenclatures_diagnostic_perturbation?.some(n => n.cd_nomenclature == 'AUT') required: __f__data.nomenclatures_diagnostic_perturbation?.some(n => n.cd_nomenclature == 'AUT') - - display: fieldset - title: Ouvrage hydrau + - title: Ouvrage hydrau items: - nomenclatures_diagnostic_ouvrage_hydrau_etat_berge - id_nomenclature_ouvrage_hydrau_racc_banq @@ -57,7 +54,6 @@ aliases: - &diag_amenagement - title: Aménagements - display: fieldset direction: row items: - key: nomenclatures_diagnostic_amenagement_biodiv @@ -71,7 +67,6 @@ aliases: required: __f__data.nomenclatures_diagnostic_amenagement_biodiv?.some(n => n.cd_nomenclature == 'AUT') - key: clotures type: array - display: fieldset items: direction: row items: @@ -83,34 +78,31 @@ aliases: - key: clotures_guidage_etat_autre disabled: 
"__f__!(u.get_cd_nomenclature(data?.id_nomenclature_clotures_guidage_etat) == 'AUT')" required: "__f__u.get_cd_nomenclature(data?.id_nomenclature_clotures_guidage_etat) == 'AUT'" - - title: Végétation - display: fieldset + - key: vegetation_tablier + type: array items: - - key: vegetation_tablier - type: array - display: fieldset - items: - direction: row - items: - - id_nomenclature_vegetation_type - - id_nomenclature_vegetation_couvert - - key: vegetation_debouche - type: array - display: fieldset - items: - direction: row - items: - - id_nomenclature_vegetation_type - - id_nomenclature_vegetation_couvert - - key: commentaire_amenagement - type: textarea + direction: row + items: + - id_nomenclature_vegetation_type + - id_nomenclature_vegetation_couvert + - key: vegetation_debouche + type: array + items: + direction: row + items: + - id_nomenclature_vegetation_type + - id_nomenclature_vegetation_couvert + - key: commentaire_amenagement + type: textarea - &diag_synthese - id_nomenclature_franchissabilite - - id_nomenclature_interet_faune + - id_nomenclature_interet_petite_faune + - id_nomenclature_interet_grande_faune - key: amenagement_faire type: textarea - + - key: commentaire_synthese + type: textarea layout: height_auto: true items: diff --git a/contrib/m_sipaf/config/layouts/m_sipaf.site_list.layout.yml b/contrib/m_sipaf/config/layouts/m_sipaf.site_list.layout.yml index 176cfe67..d4fce92a 100644 --- a/contrib/m_sipaf/config/layouts/m_sipaf.site_list.layout.yml +++ b/contrib/m_sipaf/config/layouts/m_sipaf.site_list.layout.yml @@ -25,54 +25,62 @@ layout: items: - type: breadcrumbs flex: "0" - - direction: row + - display: tabs + overflow: true items: - - code: utils.button_create - flex: "0" - - type: button - description: Exporter les passages à faune - color: primary - flex: "0" - icon: download - hidden: __f__!o.is_action_allowed(x, 'E') - action: - type: modal - modal_name: exports - - type: modal - flex: "0" - modal_name: exports + - label: 
__f__o.tab_label(x) + object_code: site items: - - title: Exports pour les passages à faune - - type: button + - direction: row + items: + - code: utils.button_create + flex: "0" + - type: button + description: Exporter les passages à faune + color: primary + flex: "0" + icon: download + hidden: __f__!o.is_action_allowed(x, 'E') + action: + type: modal + modal_name: exports + - type: modal + flex: "0" + modal_name: exports + items: + - title: Exports pour les passages à faune + - type: button + flex: "0" + title: "Export complet" + description: Télécharger les passages à faune (les filtres sont appliqués) + href: __f__o.url_export(x, 'm_sipaf.pf') + - type: button + flex: "0" + title: "Export import" + description: Export destiné à l'import (les filtres sont appliqués) + href: __f__o.url_export(x, 'm_sipaf.pf_import') + - type: button + flex: "0" + icon: upload + color: primary + description: Importer des passages à faune + action: + type: modal + modal_name: import + hidden: __f__!o.is_action_allowed(x, 'I') + - type: modal + modal_name: import + items: + type: import flex: "0" - title: "Export complet" - description: Télécharger les passages à faune (les filtres sont appliqués) - href: __f__o.url_export(x, 'm_sipaf.pf') - - type: button - flex: "0" - title: "Export import" - description: Export destiné à l'import (les filtres sont appliqués) - href: __f__o.url_export(x, 'm_sipaf.pf_import') - - type: button - flex: "0" - icon: upload - color: primary - description: Importer des passages à faune - action: - type: modal - modal_name: import - hidden: __f__!o.is_action_allowed(x, 'I') - - type: modal - modal_name: import - # style: - # width: 1200px + - type: object + display: table + sort: code_passage_faune + items: __SITE_TABLE_FIELDS__ + - object_code: diagnostic + label: __f__o.tab_label(x) items: - type: import - # hidden_options: - # - enable_update - - flex: "0" - - type: object - display: table - sort: code_passage_faune - items: __SITE_TABLE_FIELDS__ + - 
type: object + display: table + sort: date_diagnostic- + items: __DIAGNOSTIC_TABLE_FIELDS__ From 14c6cd9a4368bf5828c026d3b80a6eb52ad4b6dd Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 25 Apr 2023 16:59:09 +0200 Subject: [PATCH 093/142] fix eager_load_only --- .../gn_modulator/schema/repositories/utils.py | 5 +- contrib/m_sipaf/backend/m_sipaf/models.py | 2 +- .../m_sipaf.diagnostic_details.layout.yml | 70 +++++++++++++++++-- 3 files changed, 71 insertions(+), 6 deletions(-) diff --git a/backend/gn_modulator/schema/repositories/utils.py b/backend/gn_modulator/schema/repositories/utils.py index abb83faf..a8336c27 100644 --- a/backend/gn_modulator/schema/repositories/utils.py +++ b/backend/gn_modulator/schema/repositories/utils.py @@ -87,7 +87,10 @@ def eager_load_only(self, field_name, query, only_fields, index): if not only_columns_i: rel_schema_code = self.property(key_cache_eager)["schema_code"] rel = self.cls(rel_schema_code) - only_columns_i = [getattr(cache["relation_alias"], rel.pk_field_name())] + only_columns_i = [ + getattr(cache["relation_alias"], pk_field_name) + for pk_field_name in rel.pk_field_names() + ] # chargement de relation en eager et choix des champs query = query.options(orm.contains_eager(*eagers).load_only(*only_columns_i)) diff --git a/contrib/m_sipaf/backend/m_sipaf/models.py b/contrib/m_sipaf/backend/m_sipaf/models.py index 912ea0a1..6ce3d605 100644 --- a/contrib/m_sipaf/backend/m_sipaf/models.py +++ b/contrib/m_sipaf/backend/m_sipaf/models.py @@ -363,7 +363,7 @@ class Diagnostic(db.Model): id_nomenclature_ouvrage_hydrau_racc_banq = db.Column( db.Integer, db.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature") ) - nomenclature_diagnostic_ouvrage_hydrau_raccordement_banquette = db.relationship( + nomenclature_ouvrage_hydrau_racc_banq = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_ouvrage_hydrau_racc_banq], ) diff --git a/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_details.layout.yml 
b/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_details.layout.yml index 75ec6933..7c717062 100644 --- a/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_details.layout.yml +++ b/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_details.layout.yml @@ -4,7 +4,7 @@ title: page details diagnostic description: page details diagnostic aliases: -- &diag_commons + - &diag_commons - hidden: true items: - id_diagnostic @@ -19,10 +19,71 @@ aliases: - role.nom_complet - commentaire_diagnostic -- &diag_perturbation_obstacle -- &diag_amenagement -- &diag_synthese + - &diag_perturbation_obstacle + - title: Obstacles + items: + - nomenclatures_diagnostic_obstacle.label_fr + - key: nomenclatures_diagnostic_obstacle.cd_nomenclature + hidden: true + - key: obstacle_autre + hidden: __f__!data.nomenclatures_diagnostic_obstacle?.some(n => n.cd_nomenclature == 'AUT') + - title: Perturbation + items: + - nomenclatures_diagnostic_perturbation.label_fr + - key: nomenclatures_diagnostic_perturbation.cd_nomenclature + hidden: true + - key: perturbation_autre + hidden: __f__!data.nomenclatures_diagnostic_perturbation?.some(n => n.cd_nomenclature == 'AUT') + + - title: Ouvrage hydrau + items: + - nomenclatures_diagnostic_ouvrage_hydrau_etat_berge.label_fr + - nomenclature_ouvrage_hydrau_racc_banq.label_fr + - nomenclatures_diagnostic_ouvrage_hydrau_dim.label_fr + - commentaire_perturbation_obstacle + + - &diag_amenagement + - title: Aménagements + direction: row + items: + - nomenclatures_diagnostic_amenagement_biodiv.label_fr + - key: nomenclatures_diagnostic_amenagement_biodiv.cd_nomenclature + hidden: true + - key: amenagement_biodiv_autre + hidden: __f__!data.nomenclatures_diagnostic_amenagement_biodiv?.some(n => n.cd_nomenclature == 'AUT') + - key: clotures + type: array + items: + direction: row + items: + - nomenclature_clotures_guidage_type.label_fr + - key: clotures_guidage_type_autre + hidden: "__f__!(u.get_cd_nomenclature(data?.id_nomenclature_clotures_guidage_type) == 'AUT')" 
+ - nomenclature_clotures_guidage_etat.label_fr + - key: clotures_guidage_etat_autre + hidden: "__f__!(u.get_cd_nomenclature(data?.id_nomenclature_clotures_guidage_etat) == 'AUT')" + - key: vegetation_tablier + type: array + items: + direction: row + items: + - nomenclature_vegetation_type.label_fr + - nomenclature_vegetation_couvert.label_fr + - key: vegetation_debouche + type: array + items: + direction: row + items: + - nomenclature_vegetation_type.label_fr + - nomenclature_vegetation_couvert.label_fr + - commentaire_amenagement + - &diag_synthese + - nomenclature_franchissabilite.label_fr + - nomenclature_interet_petite_faune.label_fr + - nomenclature_interet_grande_faune.label_fr + - amenagement_faire + - commentaire_synthese layout: height_auto: true @@ -35,6 +96,7 @@ layout: layout: display: tabs overflow: true + selected_tab: Aménagement items: - label: Champs communs items: *diag_commons From 5c56e114ea65b4d046ce181de78b494049b2f7c2 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 15 May 2023 09:35:56 +0200 Subject: [PATCH 094/142] display config error --- backend/gn_modulator/blueprint.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/backend/gn_modulator/blueprint.py b/backend/gn_modulator/blueprint.py index 08ceba5c..062b979b 100644 --- a/backend/gn_modulator/blueprint.py +++ b/backend/gn_modulator/blueprint.py @@ -53,7 +53,10 @@ def api_modules_config(config_path): # s'il y a des erreurs à l'initialisation du module => on le fait remonter if len(errors_init_module) > 0: - return f"Il y a {len(errors_init_module)} erreur(s) dans les définitions.", 500 + txt = f"Il y a {len(errors_init_module)} erreur(s) dans les définitions.
" + for error in errors_init_module: + txt += f"- {error.error_code} : {error.error_msg}
{error.file_path}" + return txt, 500 return process_dict_path( ModuleMethods.modules_config(), From d03b54a18891771da413348742a5003fedc7fc36 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 15 May 2023 09:37:36 +0200 Subject: [PATCH 095/142] display config error --- backend/gn_modulator/blueprint.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/gn_modulator/blueprint.py b/backend/gn_modulator/blueprint.py index 062b979b..24b5f243 100644 --- a/backend/gn_modulator/blueprint.py +++ b/backend/gn_modulator/blueprint.py @@ -55,7 +55,7 @@ def api_modules_config(config_path): if len(errors_init_module) > 0: txt = f"Il y a {len(errors_init_module)} erreur(s) dans les définitions.
" for error in errors_init_module: - txt += f"- {error.error_code} : {error.error_msg}
{error.file_path}" + txt += f"- {error['error_code']} : {error['error_msg']}
{error['file_path']}

" return txt, 500 return process_dict_path( From 27bdd096ee2317e2fa757a231a860be03a062cc8 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 15 May 2023 10:17:43 +0200 Subject: [PATCH 096/142] fix search authorized keys --- backend/gn_modulator/blueprint.py | 4 +++- backend/gn_modulator/module/config/utils.py | 2 +- config/definitions/utils/ref_geo/ref_geo.area.schema.yml | 3 +++ 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/backend/gn_modulator/blueprint.py b/backend/gn_modulator/blueprint.py index 24b5f243..0e99d70e 100644 --- a/backend/gn_modulator/blueprint.py +++ b/backend/gn_modulator/blueprint.py @@ -55,7 +55,9 @@ def api_modules_config(config_path): if len(errors_init_module) > 0: txt = f"Il y a {len(errors_init_module)} erreur(s) dans les définitions.
" for error in errors_init_module: - txt += f"- {error['error_code']} : {error['error_msg']}
{error['file_path']}

" + txt += ( + f"- {error['error_code']} : {error['error_msg']}
{error['file_path']}

" + ) return txt, 500 return process_dict_path( diff --git a/backend/gn_modulator/module/config/utils.py b/backend/gn_modulator/module/config/utils.py index be3ddf37..4f9f3484 100644 --- a/backend/gn_modulator/module/config/utils.py +++ b/backend/gn_modulator/module/config/utils.py @@ -413,7 +413,7 @@ def get_layout_keys(cls, layout, params, context): cls.get_layout_keys( key_add, params, - {**context, "data_keys": [*context.get("data_keys", []), layout["key"]]}, + {**context, "data_keys": []}, ) if ( layout.get("return_object") diff --git a/config/definitions/utils/ref_geo/ref_geo.area.schema.yml b/config/definitions/utils/ref_geo/ref_geo.area.schema.yml index 6e514062..010cbaa0 100644 --- a/config/definitions/utils/ref_geo/ref_geo.area.schema.yml +++ b/config/definitions/utils/ref_geo/ref_geo.area.schema.yml @@ -14,6 +14,9 @@ meta: unique: - id_type - area_code + authorized_fields: + - name_code + - code_name properties: name_code: type: string From 4d9f592274bbc66822373e6aece3745193b3e83c Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 15 May 2023 10:30:44 +0200 Subject: [PATCH 097/142] fix communes filters utils. -> u. --- contrib/m_sipaf/config/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/m_sipaf/config/config.yml b/contrib/m_sipaf/config/config.yml index 11bfdea9..eaf0b01b 100644 --- a/contrib/m_sipaf/config/config.yml +++ b/contrib/m_sipaf/config/config.yml @@ -58,7 +58,7 @@ site_filters_fields: __f__data?.departement ? `area_code like ${data.departement.area_code}%` : data?.region - ? utils.departementsForRegion(data.region.area_code) + ? 
u.departementsForRegion(data.region.area_code) .map(departementCode => `area_code like ${departementCode}%`) .join(',|,') : null From b12e28e29c8e6c0c07e37a29197214bdfc1b3749 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 30 May 2023 17:15:39 +0200 Subject: [PATCH 098/142] frontend tabs choice for lazy loading or not --- .../m_sipaf.diagnostic_edit.layout.yml | 1 + .../layout/base/layout-section.component.html | 76 ++++++++++++++----- .../layout/base/layout-section.component.ts | 2 +- 3 files changed, 59 insertions(+), 20 deletions(-) diff --git a/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_edit.layout.yml b/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_edit.layout.yml index 5ed78476..98077009 100644 --- a/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_edit.layout.yml +++ b/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_edit.layout.yml @@ -114,6 +114,7 @@ layout: flex: "0" - overflow: true display: tabs + lazy_loading: true items: - label: Champs communs items: *diag_commons diff --git a/frontend/app/components/layout/base/layout-section.component.html b/frontend/app/components/layout/base/layout-section.component.html index dc074cb5..f46dd6b9 100644 --- a/frontend/app/components/layout/base/layout-section.component.html +++ b/frontend/app/components/layout/base/layout-section.component.html @@ -32,21 +32,59 @@ - - - - + + + lazy + + + + +
+
+ + +
+
+
+
+
+
+
+ + + + +
-
-
-
-
+ +
+ + diff --git a/frontend/app/components/layout/base/layout-section.component.ts b/frontend/app/components/layout/base/layout-section.component.ts index 27035162..f40c31c3 100644 --- a/frontend/app/components/layout/base/layout-section.component.ts +++ b/frontend/app/components/layout/base/layout-section.component.ts @@ -32,7 +32,7 @@ export class ModulesLayoutSectionComponent extends ModulesLayoutComponent implem return item; } const computedItem = {}; - for (const key of ['label', 'hidden', 'disabled']) { + for (const key of ['label', 'hidden', 'disabled', 'lazy_loading']) { computedItem[key] = this._mLayout.evalLayoutElement({ element: item[key], layout: item, From 7975faed062b137564d9e4eb5545c431b7254e90 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Wed, 31 May 2023 09:59:22 +0200 Subject: [PATCH 099/142] raise exeption if module doesnt exists --- backend/gn_modulator/commands.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/backend/gn_modulator/commands.py b/backend/gn_modulator/commands.py index 14d11c53..83952054 100644 --- a/backend/gn_modulator/commands.py +++ b/backend/gn_modulator/commands.py @@ -39,13 +39,16 @@ def cmd_install_module(module_code=None, module_path=None, force=False): print(f"Le module demandé {module_code} n'existe pas.") print("Veuillez choisir un code parmi la liste suivante\n") - for module_code in ModuleMethods.unregistred_modules(): - print(f"- {module_code}") + for unregistred_module_code in ModuleMethods.unregistred_modules(): + print(f"- {unregistred_module_code}") print() print("Modules installés\n") - for module_code in ModuleMethods.registred_modules(): - print(f"- {module_code}") + for registred_module_code in ModuleMethods.registred_modules(): + print(f"- {registred_module_code}") + + if module_code: + raise Exception("Le module demandé {module_code} n'existe pas.") # return ModuleMethods.install_module(module_code, module_path, force) From c193b93585ffe2475bef269fb41b93107a633e26 Mon 
Sep 17 00:00:00 2001 From: "joel.clement" Date: Wed, 31 May 2023 10:15:02 +0200 Subject: [PATCH 100/142] remove monitoring from test --- .github/workflows/pytest.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index b77ff4fe..4f96eb31 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -129,14 +129,14 @@ jobs: - name: list modules run: geonature modulator install - - name: install m_monitoring - run: geonature modulator install -p ./contrib/m_monitoring + # - name: install m_monitoring + # run: geonature modulator install -p ./contrib/m_monitoring - - name: install m_monitoring test 1 - run: geonature modulator install m_monitoring_test_1 + # - name: install m_monitoring test 1 + # run: geonature modulator install m_monitoring_test_1 - - name: install m_monitoring test 2 - run: geonature modulator install m_monitoring_test_2 + # - name: install m_monitoring test 2 + # run: geonature modulator install m_monitoring_test_2 - name: install m_sipaf run: geonature modulator install -p ./contrib/m_sipaf From 087eafd356d42e04a0663172eab4afaaad26d60c Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Wed, 31 May 2023 10:22:46 +0200 Subject: [PATCH 101/142] 'ownership' -> 'scope' --- backend/gn_modulator/module/config/utils.py | 4 ++-- backend/gn_modulator/routes/utils/decorators.py | 2 +- backend/gn_modulator/schema/models/base.py | 2 +- backend/gn_modulator/schema/repositories/base.py | 9 ++++----- backend/gn_modulator/schema/repositories/cruved.py | 12 ++++++------ backend/gn_modulator/schema/serializers.py | 8 ++++---- ...oring.site_template.module-tempate_defaults.yml | 4 ++-- doc/changelog.md | 1 + .../object/layout-object-geojson.component.ts | 10 +++++----- .../layout/object/layout-object.component.ts | 2 +- frontend/app/services/object.service.ts | 14 +++++++------- frontend/app/services/table.service.ts | 10 +++++----- 12 files changed, 39 
insertions(+), 39 deletions(-) diff --git a/backend/gn_modulator/module/config/utils.py b/backend/gn_modulator/module/config/utils.py index 4f9f3484..8f18b5b3 100644 --- a/backend/gn_modulator/module/config/utils.py +++ b/backend/gn_modulator/module/config/utils.py @@ -346,13 +346,13 @@ def add_basic_fields(cls, module_code, object_code): # - label_field_name # - title_field_name # - champs d'unicité - # - ownership + # - scope for elem in [ sm.pk_field_name(), sm.label_field_name(), sm.title_field_name(), *sm.unique(), - "ownership", + "scope", ]: if elem is not None and elem not in authorized_read_fields: authorized_read_fields.append(elem) diff --git a/backend/gn_modulator/routes/utils/decorators.py b/backend/gn_modulator/routes/utils/decorators.py index ebcbcbd0..9b68afdc 100644 --- a/backend/gn_modulator/routes/utils/decorators.py +++ b/backend/gn_modulator/routes/utils/decorators.py @@ -32,7 +32,7 @@ def check_fields(module_code, object_code): # liste des champs invalides # - cad ne correspondent pas à un champs du schema unvalid_fields = sorted( - list(filter(lambda f: not sm.has_property(f) and f != "ownership", fields)) + list(filter(lambda f: not sm.has_property(f) and f != "scope", fields)) ) # liste des champs non autorisés diff --git a/backend/gn_modulator/schema/models/base.py b/backend/gn_modulator/schema/models/base.py index befc62a2..9ea43aba 100644 --- a/backend/gn_modulator/schema/models/base.py +++ b/backend/gn_modulator/schema/models/base.py @@ -235,7 +235,7 @@ def Model(self): Model = type(self.model_name(), (ModelBaseClass,), dict_model) # patch cruved - Model.ownership = 0 + Model.scope = 0 # store in cache before relations (avoid circular dependencies) set_global_cache(["schema", self.schema_code(), "model"], Model) diff --git a/backend/gn_modulator/schema/repositories/base.py b/backend/gn_modulator/schema/repositories/base.py index 78c8c41b..46586a98 100644 --- a/backend/gn_modulator/schema/repositories/base.py +++ 
b/backend/gn_modulator/schema/repositories/base.py @@ -188,7 +188,6 @@ def update_row( ).one() if not self.is_new_data(m, data): - print("not new") return m, False db.session.flush() @@ -224,15 +223,15 @@ def delete_row(self, value, field_name=None, module_code=MODULE_CODE, params={}, def process_query_columns(self, params, query, order_by): """ permet d'ajouter de colonnes selon les besoin - - ownership pour cruved (toujours?) + - scope pour cruved (toujours?) - row_number (si dans fields) """ fields = params.get("fields") or [] # cruved - if "ownership" in fields: - query = self.add_column_ownership(query) + if "scope" in fields: + query = self.add_column_scope(query) # row_number if "row_number" in fields: @@ -275,7 +274,7 @@ def query_list(self, module_code=MODULE_CODE, cruved_type="R", params={}, query_ # ).load_only(self.cls('ref_nom.nomenclature').Model().label_fr, self.cls('ref_nom.nomenclature').Model().cd_nomenclature) # ) - # ajout colonnes row_number, ownership (cruved) + # ajout colonnes row_number, scope (cruved) query = self.process_query_columns(params, query, order_bys) # prefiltrage diff --git a/backend/gn_modulator/schema/repositories/cruved.py b/backend/gn_modulator/schema/repositories/cruved.py index 3d34e873..1461e0ea 100644 --- a/backend/gn_modulator/schema/repositories/cruved.py +++ b/backend/gn_modulator/schema/repositories/cruved.py @@ -13,10 +13,10 @@ class SchemaRepositoriesCruved: methodes pour l'accès aux données TODO voire comment parametre les schema - pour avoir différentes façon de calculer cruved ownership + pour avoir différentes façon de calculer cruved scope """ - def expression_ownership(self): + def expression_scope(self): Model = self.Model() if self.attr("meta.check_cruved") is None: @@ -36,9 +36,9 @@ def expression_ownership(self): else_=3, ) - def add_column_ownership(self, query): + def add_column_scope(self, query): """ - ajout d'une colonne 'ownership' à la requête + ajout d'une colonne 'scope' à la requête afin de - 
filter dans la requete de liste - verifier les droit sur un donnée pour les action unitaire (post update delete) @@ -46,7 +46,7 @@ def add_column_ownership(self, query): - affichage de boutton, vérification d'accès aux pages etc .... """ - query = query.add_columns(self.expression_ownership().label("ownership")) + query = query.add_columns(self.expression_scope().label("scope")) return query @@ -63,6 +63,6 @@ def process_cruved_filter(self, cruved_type, module_code, query): cruved_for_type = user_cruved.get(cruved_type) if cruved_for_type < 3: - query = query.filter(self.expression_ownership() <= cruved_for_type) + query = query.filter(self.expression_scope() <= cruved_for_type) return query diff --git a/backend/gn_modulator/schema/serializers.py b/backend/gn_modulator/schema/serializers.py index 009b612c..d32ca8a5 100644 --- a/backend/gn_modulator/schema/serializers.py +++ b/backend/gn_modulator/schema/serializers.py @@ -253,10 +253,10 @@ def pre_load_make_object(self_marshmallow, data, **kwargs): ) # if self.attr('meta.check_cruved'): - marshmallow_schema_dict["ownership"] = fields.Integer(metadata={"dumps_only": True}) + marshmallow_schema_dict["scope"] = fields.Integer(metadata={"dumps_only": True}) marshmallow_schema_dict["row_number"] = fields.Integer(metadata={"dumps_only": True}) # else: - # marshmallow_schema_dict['ownership'] = 0 + # marshmallow_schema_dict['scope'] = 0 # store in cache before relation (avoid circular dependencies) @@ -328,7 +328,7 @@ def serialize(self, m, fields=None, as_geojson=False, geometry_field_name=None): data = self.MarshmallowSchema()(**kwargs).dump(m[0] if isinstance(m, tuple) else m) - # pour gérer les champs supplémentaire (ownership, row_number, etc....) + # pour gérer les champs supplémentaire (scope, row_number, etc....) 
if isinstance(m, tuple): keys = list(m.keys()) if len(keys) > 1: @@ -403,7 +403,7 @@ def serialize_list( map(lambda x: x[0] if isinstance(x, tuple) else x, m_list), many=True ) - # pour gérer les champs supplémentaire (ownership, row_number, etc....) + # pour gérer les champs supplémentaire (scope, row_number, etc....) if len(data_list) and isinstance(m_list[0], tuple): keys = list(m_list[0].keys()) if len(keys) > 1: diff --git a/contrib/m_monitoring/config/m_monitoring.site_template.module-tempate_defaults.yml b/contrib/m_monitoring/config/m_monitoring.site_template.module-tempate_defaults.yml index a146b8f6..2fd22006 100644 --- a/contrib/m_monitoring/config/m_monitoring.site_template.module-tempate_defaults.yml +++ b/contrib/m_monitoring/config/m_monitoring.site_template.module-tempate_defaults.yml @@ -88,7 +88,7 @@ site_form_fields: - hidden: true items: - id_site - - ownership + - scope - code - name - description @@ -128,7 +128,7 @@ site_details_fields: - hidden: true items: - id_site - - ownership + - scope - code - name - description diff --git a/doc/changelog.md b/doc/changelog.md index 4907cfaf..fd828ae2 100644 --- a/doc/changelog.md +++ b/doc/changelog.md @@ -22,6 +22,7 @@ - utilisation de `raise_load` - on charge le minimum de champs possibles - déplacement des config dans le dossier `media/modulator/config` +- changement de nom `ownership` -> `scope` - [ ] separation des tests par modules (m_sipaf, m_monitoring) - [ ] amélioration du composant list_form - [ ] ajout diagnostic sipaf diff --git a/frontend/app/components/layout/object/layout-object-geojson.component.ts b/frontend/app/components/layout/object/layout-object-geojson.component.ts index 8854dcd8..10a38281 100644 --- a/frontend/app/components/layout/object/layout-object-geojson.component.ts +++ b/frontend/app/components/layout/object/layout-object-geojson.component.ts @@ -146,7 +146,7 @@ export class ModulesLayoutObjectGeoJSONComponent var propertiesHTML = ''; propertiesHTML += '
    \n'; propertiesHTML += fields - .filter((fieldKey) => fieldKey != 'ownership') + .filter((fieldKey) => fieldKey != 'scope') .map((fieldKey) => { // gerer les '.' const fieldKeyLabel = fieldKey.split('.')[0]; @@ -157,16 +157,16 @@ export class ModulesLayoutObjectGeoJSONComponent .join('\n'); propertiesHTML += '
\n'; - const htmlDetails = this._mObject.checkAction(this.context, 'R', properties.ownership) + const htmlDetails = this._mObject.checkAction(this.context, 'R', properties.scope) .actionAllowed ? '' : ''; - const htmlEdit = this._mObject.checkAction(this.context, 'U', properties.ownership) + const htmlEdit = this._mObject.checkAction(this.context, 'U', properties.scope) .actionAllowed ? '' : ''; - const htmlDelete = this._mObject.checkAction(this.context, 'D', properties.ownership) + const htmlDelete = this._mObject.checkAction(this.context, 'D', properties.scope) .actionAllowed ? '' : ''; @@ -190,7 +190,7 @@ export class ModulesLayoutObjectGeoJSONComponent onPopupOpen(layer) { const value = layer.feature.properties[this.pkFieldName()]; const fields = this.popupFields(); // ?? computedItems - fields.push('ownership'); + fields.push('scope'); this._mData .getOne(this.moduleCode(), this.objectCode(), value, { fields }) .subscribe((data) => { diff --git a/frontend/app/components/layout/object/layout-object.component.ts b/frontend/app/components/layout/object/layout-object.component.ts index 251be0ce..e019f205 100644 --- a/frontend/app/components/layout/object/layout-object.component.ts +++ b/frontend/app/components/layout/object/layout-object.component.ts @@ -176,7 +176,7 @@ export class ModulesLayoutObjectComponent extends ModulesLayoutComponent impleme /** champs par defaut si non définis dans items */ defaultFields({ geometry = false } = {}) { - const defaultFields = [this.pkFieldName(), this.labelFieldName(), 'ownership']; + const defaultFields = [this.pkFieldName(), this.labelFieldName(), 'scope']; if (this.computedLayout.display == 'geojson' && geometry) { defaultFields.push(this.geometryFieldName()); } diff --git a/frontend/app/services/object.service.ts b/frontend/app/services/object.service.ts index 460fe488..b3331d5d 100644 --- a/frontend/app/services/object.service.ts +++ b/frontend/app/services/object.service.ts @@ -240,7 +240,7 @@ export class 
ModulesObjectService { if (!(context.module_code, context.object_code)) { return false; } - const checkAction = this.checkAction(context, action, data?.ownership); + const checkAction = this.checkAction(context, action, data?.scope); return checkAction.actionAllowed; } @@ -284,7 +284,7 @@ export class ModulesObjectService { * - tableaux * - boutton (detail / edit / etc...) */ - checkAction(context, action, ownership = null) { + checkAction(context, action, scope = null) { // 1) cruved defini pour cet objet ? const objectConfig = this.objectConfigContext(context); @@ -327,7 +327,7 @@ export class ModulesObjectService { // si les droit du module sont de 2 pour l'édition // et que l'appartenance de la données est 3 (données autres (ni l'utilisateur ni son organisme)) // alors le test echoue - // - si ownership est à null => on teste seulement si l'action est bien définie sur cet object + // - si scope est à null => on teste seulement si l'action est bien définie sur cet object // (ce qui a été testé précédemment) donc à true // par exemple pour les actions d'export @@ -341,12 +341,12 @@ export class ModulesObjectService { testUserCruved = true; // pour EDIT ET READ // si on a pas d'info d'appartenance - // ownership null => False (par sécurité) - } else if (ownership == null) { + // scope null => False (par sécurité) + } else if (scope == null) { testUserCruved = false; - // on compare ownership, l'appartenance qui doit être supérieur aet les droits du module + // on compare scope, l'appartenance qui doit être supérieur aet les droits du module } else { - testUserCruved = moduleCruvedAction >= ownership; + testUserCruved = moduleCruvedAction >= scope; } if (!testUserCruved) { diff --git a/frontend/app/services/table.service.ts b/frontend/app/services/table.service.ts index 117d9ec0..fb3fe4e4 100644 --- a/frontend/app/services/table.service.ts +++ b/frontend/app/services/table.service.ts @@ -53,7 +53,7 @@ export class ModulesTableService { * - Renvoie la définition de 
la colonne pour les actions: * voir, éditer, supprimer * - On utilise mPage.chekcLink pour voir si et comment on affiche l'action en question - * - L'appartenance (ownership) sera fournie par les données du rang de la cellule dans les fonction formatter et tooltip) + * - L'appartenance (scope) sera fournie par les données du rang de la cellule dans les fonction formatter et tooltip) * */ columnAction(context, action) { // test si l'action est possible (ou avant) @@ -78,8 +78,8 @@ export class ModulesTableService { return { headerSort: false, formatter: (cell, formatterParams, onRendered) => { - const ownership = cell._cell.row.data['ownership']; - const { actionAllowed, actionMsg } = this._mObject.checkAction(context, action, ownership); + const scope = cell._cell.row.data['scope']; + const { actionAllowed, actionMsg } = this._mObject.checkAction(context, action, scope); return ``; @@ -87,8 +87,8 @@ export class ModulesTableService { width: 22, hozAlign: 'center', tooltip: (cell) => { - const ownership = cell._cell.row.data['ownership']; - const { actionAllowed, actionMsg } = this._mObject.checkAction(context, action, ownership); + const scope = cell._cell.row.data['scope']; + const { actionAllowed, actionMsg } = this._mObject.checkAction(context, action, scope); return actionMsg; }, }; From 2807a3393d79d79b2746208010f49e16a3a44d85 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Wed, 31 May 2023 10:33:46 +0200 Subject: [PATCH 102/142] lint --- .../layout/object/layout-object-geojson.component.ts | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/frontend/app/components/layout/object/layout-object-geojson.component.ts b/frontend/app/components/layout/object/layout-object-geojson.component.ts index 10a38281..1d4363e6 100644 --- a/frontend/app/components/layout/object/layout-object-geojson.component.ts +++ b/frontend/app/components/layout/object/layout-object-geojson.component.ts @@ -157,17 +157,14 @@ export class 
ModulesLayoutObjectGeoJSONComponent .join('\n'); propertiesHTML += '\n'; - const htmlDetails = this._mObject.checkAction(this.context, 'R', properties.scope) - .actionAllowed + const htmlDetails = this._mObject.checkAction(this.context, 'R', properties.scope).actionAllowed ? '' : ''; - const htmlEdit = this._mObject.checkAction(this.context, 'U', properties.scope) - .actionAllowed + const htmlEdit = this._mObject.checkAction(this.context, 'U', properties.scope).actionAllowed ? '' : ''; - const htmlDelete = this._mObject.checkAction(this.context, 'D', properties.scope) - .actionAllowed + const htmlDelete = this._mObject.checkAction(this.context, 'D', properties.scope).actionAllowed ? '' : ''; From 403c5a65ae45b98fa305e97d237ff33b96a6a18c Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Wed, 31 May 2023 10:53:58 +0200 Subject: [PATCH 103/142] add error_values in import check errors --- backend/gn_modulator/imports/mixins/check.py | 14 ++++++++------ backend/gn_modulator/imports/mixins/utils.py | 13 +++++++++++-- 2 files changed, 19 insertions(+), 8 deletions(-) diff --git a/backend/gn_modulator/imports/mixins/check.py b/backend/gn_modulator/imports/mixins/check.py index ee195ac5..76c9bd2e 100644 --- a/backend/gn_modulator/imports/mixins/check.py +++ b/backend/gn_modulator/imports/mixins/check.py @@ -112,7 +112,7 @@ def check_types(self): # Ajout d'une erreur qui référence les lignes concernées nb_lines = res[0] lines = res[1] - values = res[2] + error_values = res[2] str_lines = lines and ", ".join(map(lambda x: str(x), lines)) or "" if nb_lines == 0: continue @@ -120,7 +120,7 @@ def check_types(self): error_code="ERR_IMPORT_INVALID_VALUE_FOR_TYPE", key=key, lines=lines, - values=values, + error_values=error_values, error_msg=f"Il y a des valeurs invalides pour la colonne {key} de type {sql_type}. 
{nb_lines} ligne(s) concernée(s) : [{str_lines}]", ) @@ -192,7 +192,7 @@ def check_resolve_keys(self): # - non nulles dans 'raw' # - et nulles dans 'process txt_check_resolve_keys = f""" -SELECT COUNT(*), ARRAY_AGG(r.id_import) +SELECT COUNT(*), ARRAY_AGG(r.id_import), ARRAY_AGG(r.{key}) FROM {raw_table} r JOIN {process_table} p ON r.id_import = p.id_import @@ -203,6 +203,7 @@ def check_resolve_keys(self): res = SchemaMethods.c_sql_exec_txt(txt_check_resolve_keys).fetchone() nb_lines = res[0] lines = res[1] + error_values = res[2] # s'il n'y a pas de résultat, on passe à la colonne suivante if nb_lines == 0: @@ -210,13 +211,13 @@ def check_resolve_keys(self): # sinon on ajoute une erreur référençant les lignes concernée - values = None + valid_values = None # Dans le cas des nomenclatures on peut faire remonter les valeurs possible ?? code_type = sm.property(key).get("nomenclature_type") if code_type: - values = list( + valid_values = list( map( lambda x: { "cd_nomenclature": x["cd_nomenclature"], @@ -230,5 +231,6 @@ def check_resolve_keys(self): key=key, lines=lines, error_msg="Clé étrangère non résolue", - values=values, + valid_values=valid_values, + error_values=error_values, ) diff --git a/backend/gn_modulator/imports/mixins/utils.py b/backend/gn_modulator/imports/mixins/utils.py index b65779a0..8883f485 100644 --- a/backend/gn_modulator/imports/mixins/utils.py +++ b/backend/gn_modulator/imports/mixins/utils.py @@ -121,7 +121,15 @@ def table_name(self, type, key=None): rel = f"_{key}" if key is not None else "" return f"{schema_import}.v_{self.id_import}_{type}_{self.schema_code.replace('.', '_')}{rel}" - def add_error(self, error_code=None, error_msg=None, key=None, lines=None, values=None): + def add_error( + self, + error_code=None, + error_msg=None, + key=None, + lines=None, + valid_values=None, + error_values=None, + ): """ ajout d'une erreur lorsque qu'elle est rencontrée """ @@ -131,7 +139,8 @@ def add_error(self, error_code=None, error_msg=None, 
key=None, lines=None, value "msg": error_msg, "key": key, "lines": lines, - "values": values, + "valid_values": valid_values, + "error_values": error_values, } ) self.status = "ERROR" From 1bc3b8d3163e7f60d497958a9dd467d859b6085a Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Wed, 31 May 2023 11:25:00 +0200 Subject: [PATCH 104/142] fix missing sipaf nomenclature --- .github/workflows/pytest2.yml | 164 ------------------ .../config/features/m_sipaf.utils.data.yml | 8 + 2 files changed, 8 insertions(+), 164 deletions(-) delete mode 100644 .github/workflows/pytest2.yml diff --git a/.github/workflows/pytest2.yml b/.github/workflows/pytest2.yml deleted file mode 100644 index f7cd9cb9..00000000 --- a/.github/workflows/pytest2.yml +++ /dev/null @@ -1,164 +0,0 @@ -name: pytest - -on: - push: - branches: - - develop - - main - pull_request: - branches: - - develop - - main - -env: - GEONATURE_CONFIG_FILE: ./dependencies/GeoNature/config/test_config.toml - -jobs: - build: - runs-on: ubuntu-latest - - strategy: - fail-fast: false - matrix: - include: - - name: "Debian 10" - python-version: "3.7" - postgres-version: 11 - postgis-version: 2.5 - - name: "Debian 11" - python-version: "3.9" - postgres-version: 13 - postgis-version: 3.2 - - name: ${{ matrix.name }} - - services: - postgres: - image: postgis/postgis:${{ matrix.postgres-version }}-${{ matrix.postgis-version }} - env: - POSTGRES_DB: geonature2db - POSTGRES_PASSWORD: geonatpasswd - POSTGRES_USER: geonatadmin - ports: - - 5432:5432 - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - steps: - - name: Clone gn_modulator repository - uses: actions/checkout@v3 - with: - submodules: recursive - - name: Add postgis_raster database extension - if: ${{ matrix.postgis-version >= 3 }} - run: | - psql -h localhost -U geonatadmin -d geonature2db -tc 'CREATE EXTENSION "postgis_raster";' - env: - PGPASSWORD: geonatpasswd - - name: Add database extensions - run: | - psql -h 
localhost -U geonatadmin -d geonature2db -tc 'CREATE EXTENSION "hstore";' - psql -h localhost -U geonatadmin -d geonature2db -tc 'CREATE EXTENSION "uuid-ossp";' - psql -h localhost -U geonatadmin -d geonature2db -tc 'CREATE EXTENSION "pg_trgm";' - psql -h localhost -U geonatadmin -d geonature2db -tc 'CREATE EXTENSION "unaccent";' - env: - PGPASSWORD: geonatpasswd - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Install GDAL - run: | - sudo apt update - sudo apt install -y libgdal-dev - - name: Install dependencies - run: | - python -m pip install --upgrade pip - python -m pip install \ - -e ..[tests] \ - -r requirements-dev.txt - working-directory: ./dependencies/GeoNature/backend - - - name: Install core modules - run: | - pip install -e ./dependencies/GeoNature/contrib/occtax - pip install -e ./dependencies/GeoNature/contrib/gn_module_occhab - pip install -e ./dependencies/GeoNature/contrib/gn_module_validation - - - name: Install GN Modulator - run: | - pip install -e . 
- - - name: Show database branches and dependencies - run: | - geonature db status --dependencies - - - name: Install database - run: | - geonature db upgrade geonature@head -x local-srid=2154 - geonature db autoupgrade -x local-srid=2154 - geonature taxref import-v15 --skip-bdc-statuts - geonature db upgrade geonature-samples@head - geonature db upgrade nomenclatures_taxonomie_data@head - geonature db upgrade ref_geo_fr_departments@head - geonature db upgrade ref_geo_fr_municipalities@head - geonature db upgrade ref_geo_inpn_grids_5@head - geonature db upgrade ref_sensitivity_inpn@head - env: - GEONATURE_CONFIG_FILE: ./dependencies/GeoNature/config/test_config.toml - - name: Show database status - run: | - geonature db status - env: - GEONATURE_CONFIG_FILE: ./dependencies/GeoNature/config/test_config.toml - - - name: Install modules db - run: | - geonature upgrade-modules-db - env: - GEONATURE_CONFIG_FILE: ./dependencies/GeoNature/config/test_config.toml - - - name: check gn_modulator - run: geonature modulator check - env: - GEONATURE_CONFIG_FILE: ./dependencies/GeoNature/config/test_config.toml - - - name: list modules - run: geonature modulator install - env: - GEONATURE_CONFIG_FILE: ./dependencies/GeoNature/config/test_config.toml - - - name: install m_monitoring - run: geonature modulator install contrib/m_monitring - env: - GEONATURE_CONFIG_FILE: ./dependencies/GeoNature/config/test_config.toml - - - - name: install m_monitoring test 1 - run: geonature modulator install m_monitoring_test_1 - env: - GEONATURE_CONFIG_FILE: ./dependencies/GeoNature/config/test_config.toml - - - name: install m_monitoring test 2 - run: geonature modulator install m_monitoring_test_2 - env: - GEONATURE_CONFIG_FILE: ./dependencies/GeoNature/config/test_config.toml - - - name: install m_sipaf - run: geonature modulator install contrib/m_sipaf - env: - GEONATURE_CONFIG_FILE: ./dependencies/GeoNature/config/test_config.toml - - - - name: Pytest gn_modulator - run: pytest -v --cov 
--cov-report xml - env: - GEONATURE_CONFIG_FILE: ./dependencies/GeoNature/config/test_config.toml - - - name: Upload coverage to Codecov - if: ${{ matrix.name == 'Debian 11' }} - uses: codecov/codecov-action@v2 - with: - flags: pytest diff --git a/contrib/m_sipaf/config/features/m_sipaf.utils.data.yml b/contrib/m_sipaf/config/features/m_sipaf.utils.data.yml index 3c45f98b..3bc229b4 100644 --- a/contrib/m_sipaf/config/features/m_sipaf.utils.data.yml +++ b/contrib/m_sipaf/config/features/m_sipaf.utils.data.yml @@ -59,15 +59,20 @@ items: - [PF_OUVRAGE_MATERIAUX, IND, Ind., Indéterminé, Indéterminé] - [PF_OUVRAGE_HYDRAULIQUE_POSITION, RG, R. g., Rive Gauche, Rive Gauche] + - [PF_OUVRAGE_HYDRAULIQUE_POSITION, RD, R. d., Rive Droite, Rive Droite] - [PF_OUVRAGE_HYDRAULIQUE_POSITION, RGD, R. g. & d.", Rive gauche et rive droite, Rive gauche et rive droite (la rive se détermine dans le sens amont/aval)] - [PF_OUVRAGE_HYDRAULIQUE_BANQ_CARACT, DOU, Dbl., Double, Banquette double] + - [PF_OUVRAGE_HYDRAULIQUE_BANQ_CARACT, SIM, Simp., Simple, Banquette simple] + - [PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE, NAT, Nat., Banquette naturelle, Banquette naturelle] - [PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE, BET, Bet., Banquette béton, Banquette béton] - [PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE, ECB, Ecb., Encorbellement, Encorbellement] - [PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE, POF, Po. F., Ponton flottant, Ponton flottant] - [PF_OUVRAGE_HYDRAULIQUE_BANQ_TYPE, AUT, Aut., Autre, Autre] + + - [PF_INFRASTRUCTURE_TYPE, AU, Auto., Autoroute, Autoroute] - [PF_INFRASTRUCTURE_TYPE, RN, R. N., Route Nationale, Route Nationale] - [PF_INFRASTRUCTURE_TYPE, RD, R. D., Route Départementale, Route Départementale] - [PF_INFRASTRUCTURE_TYPE, VF, V. 
F., Voie ferrée, Voie ferrée] @@ -75,7 +80,9 @@ items: - [PF_OUVRAGE_SPECIFICITE, MIX, Mixt., Mixte, Ouvrage mixte construit pour le passage des animaux concomitamment à un ou plusieurs autres usages] - [PF_OUVRAGE_SPECIFICITE, ND, Non déd., Non dédié, Ouvrage non dédié au passage de la faune mais pouvant servir à cet usage] + - [PF_OUVRAGE_SPECIFICITE, SPE, Spé., Spécifique, Ouvrage construit que pour le passage des animaux] + - [PF_OUVRAGE_TYPE, BUS, Bus., Buse, Buse] - [PF_OUVRAGE_TYPE, CAD, Cad., Cadre, Cadre] - [PF_OUVRAGE_TYPE, VOU+R, Voût. Rad., Voûte avec radier, Voûte maçonnée avec radier] - [PF_OUVRAGE_TYPE, AUT, Aut., Autre (préciser), Autre (préciser)] @@ -92,6 +99,7 @@ items: - [PF_OUVRAGE_TYPE, TRA, Tra., Tranchée, Tranchée] - [PF_OUVRAGE_TYPE, TUN, Tun., Tunnel, Tunnel] + - [PF_TYPE_ACTOR, PRO, Prop., Propriétaire, Propriétaire] - [PF_TYPE_ACTOR, CON, Conc., Concessionaire, Concessionnaire] - [PF_TYPE_ACTOR, INT, Int., Intervenant, Intervenant sur ce passage faune] - [PF_TYPE_ACTOR, GES, Ges., Gestionnaire, Gestionnaire du passage faune] From 69778b4bbaa37a60f6375aa088cd87483ecf2efd Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 1 Jun 2023 11:17:12 +0200 Subject: [PATCH 105/142] up permissions cruved_scope_for_user_in_module --- backend/gn_modulator/module/config/base.py | 1 - .../{perm.action.schema.yml => perm.action.schema.yml_} | 0 .../{perm.filter.schema.yml => perm.filter.schema.yml_} | 0 .../{perm.object.schema.yml => perm.object.schema.yml_} | 0 .../{perm.permission.schema.yml => perm.permission.schema.yml_} | 0 5 files changed, 1 deletion(-) rename config/definitions/utils/permission/{perm.action.schema.yml => perm.action.schema.yml_} (100%) rename config/definitions/utils/permission/{perm.filter.schema.yml => perm.filter.schema.yml_} (100%) rename config/definitions/utils/permission/{perm.object.schema.yml => perm.object.schema.yml_} (100%) rename config/definitions/utils/permission/{perm.permission.schema.yml => 
perm.permission.schema.yml_} (100%) diff --git a/backend/gn_modulator/module/config/base.py b/backend/gn_modulator/module/config/base.py index 20a86f14..0667d906 100644 --- a/backend/gn_modulator/module/config/base.py +++ b/backend/gn_modulator/module/config/base.py @@ -6,7 +6,6 @@ from flask import g from gn_modulator.schema import SchemaMethods from gn_modulator.utils.cache import get_global_cache, set_global_cache -from geonature.core.gn_permissions.tools import cruved_scope_for_user_in_module class ModulesConfigBase: diff --git a/config/definitions/utils/permission/perm.action.schema.yml b/config/definitions/utils/permission/perm.action.schema.yml_ similarity index 100% rename from config/definitions/utils/permission/perm.action.schema.yml rename to config/definitions/utils/permission/perm.action.schema.yml_ diff --git a/config/definitions/utils/permission/perm.filter.schema.yml b/config/definitions/utils/permission/perm.filter.schema.yml_ similarity index 100% rename from config/definitions/utils/permission/perm.filter.schema.yml rename to config/definitions/utils/permission/perm.filter.schema.yml_ diff --git a/config/definitions/utils/permission/perm.object.schema.yml b/config/definitions/utils/permission/perm.object.schema.yml_ similarity index 100% rename from config/definitions/utils/permission/perm.object.schema.yml rename to config/definitions/utils/permission/perm.object.schema.yml_ diff --git a/config/definitions/utils/permission/perm.permission.schema.yml b/config/definitions/utils/permission/perm.permission.schema.yml_ similarity index 100% rename from config/definitions/utils/permission/perm.permission.schema.yml rename to config/definitions/utils/permission/perm.permission.schema.yml_ From 71eac78c2080b85057d30bccee89a202560cb773 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 1 Jun 2023 15:39:57 +0200 Subject: [PATCH 106/142] tab label display ... 
when still unknown --- frontend/app/services/object.service.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/services/object.service.ts b/frontend/app/services/object.service.ts index b3331d5d..fa1481ae 100644 --- a/frontend/app/services/object.service.ts +++ b/frontend/app/services/object.service.ts @@ -231,7 +231,7 @@ export class ModulesObjectService { const labels = this.labels({ context }); const objectTabLabel = nbFiltered == nbTotal - ? `${utils.capitalize(labels)} (${nbTotal || 0})` + ? `${utils.capitalize(labels)} (${nbTotal != null ? nbTotal : '...'})` : `${utils.capitalize(labels)} (${nbFiltered}/${nbTotal})`; return objectTabLabel; } From ecba68b387738eee5caa7969e80a156484b0f305 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 13 Jun 2023 11:21:35 +0200 Subject: [PATCH 107/142] disabled button color && tooltip --- .../layouts/utils/utils.buttons_form.layout.yml | 2 +- frontend/app/components/base/base.scss | 17 +++++++++-------- .../layout/base/layout-section.component.html | 1 - .../layout/base/layout.component.html | 4 +--- .../object/layout-object-geojson.component.ts | 10 +++++----- 5 files changed, 16 insertions(+), 18 deletions(-) diff --git a/config/layouts/utils/utils.buttons_form.layout.yml b/config/layouts/utils/utils.buttons_form.layout.yml index e3f3a0fa..08866fca 100644 --- a/config/layouts/utils/utils.buttons_form.layout.yml +++ b/config/layouts/utils/utils.buttons_form.layout.yml @@ -18,7 +18,7 @@ layout: color: success title: Valider icon: done - description: Enregistrer le contenu du formulaire + description: "__f__formGroup.valid ? 
`Enregistrer le contenu du formulaire` : `Le formulaire comporte des erreurs`" action: submit disabled: __f__!formGroup.valid - flex: "0" diff --git a/frontend/app/components/base/base.scss b/frontend/app/components/base/base.scss index baaada33..83e54e7c 100644 --- a/frontend/app/components/base/base.scss +++ b/frontend/app/components/base/base.scss @@ -50,7 +50,6 @@ color: #fff; } - // couleur custom des bouttons .mat-mdc-button-base.mat-error { background-color: red; @@ -63,12 +62,15 @@ color: #fff; } - -.mat-button-base.mat-button-disabled { - color: rgba(0,0,0,.26); - background-color: rgba(0,0,0,.12); +.mat-mdc-raised-button[disabled][disabled] { + color: rgba(0, 0, 0, 0.26); + background-color: rgba(0, 0, 0, 0.12); } +.mat-stroked-button.mat-button-disabled.mat-button-disabled { + color: #ffffff; + background-color: #008cba; +} // :host ::ng-deep .button-just-icon { // font-size: 1.5rem !important; // } @@ -215,7 +217,6 @@ div.layout-items > div { background-color: lightgreen; } - .layout-message.error { color: darkred; background-color: lightcoral; @@ -308,7 +309,8 @@ div.layout-items > div { cursor: pointer; } -.geojsons {} +.geojsons { +} .test-layout { border: solid 1px lightgray; @@ -347,4 +349,3 @@ div.layout-items > div { background-color: lightgray; border-radius: 10px; } - diff --git a/frontend/app/components/layout/base/layout-section.component.html b/frontend/app/components/layout/base/layout-section.component.html index f46dd6b9..04e5c26b 100644 --- a/frontend/app/components/layout/base/layout-section.component.html +++ b/frontend/app/components/layout/base/layout-section.component.html @@ -34,7 +34,6 @@ - lazy -
+
' + ? '' : ''; const htmlEdit = this._mObject.checkAction(this.context, 'U', properties.scope).actionAllowed ? '' @@ -191,10 +191,10 @@ export class ModulesLayoutObjectGeoJSONComponent this._mData .getOne(this.moduleCode(), this.objectCode(), value, { fields }) .subscribe((data) => { - layer.setPopupContent(this.popupHTML(data)); - }); - this._mapService.L.DomEvent.on(layer.getPopup().getElement(), 'click', (e) => { - const action = e && e.target && e.target.attributes.getNamedItem('action')?.nodeValue; + layer.setPopupContent(this.popupHTML(data)); + }); + this._mapService.L.DomEvent.on(layer.getPopup().getElement(), 'click', (e) => { + const action = e && e.target && e.target.attributes.getNamedItem('action')?.nodeValue; if (action) { this._mAction.processAction({ action, From 570b993828d279bd9d584d41556b43acb9ecd874 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 13 Jun 2023 11:47:15 +0200 Subject: [PATCH 108/142] fix is_new_data --- backend/gn_modulator/schema/repositories/base.py | 6 +++--- .../layout/object/layout-object-geojson.component.ts | 8 ++++---- frontend/app/services/config.service.ts | 2 -- 3 files changed, 7 insertions(+), 9 deletions(-) diff --git a/backend/gn_modulator/schema/repositories/base.py b/backend/gn_modulator/schema/repositories/base.py index 46586a98..cfa1c8c4 100644 --- a/backend/gn_modulator/schema/repositories/base.py +++ b/backend/gn_modulator/schema/repositories/base.py @@ -117,13 +117,13 @@ def is_new_data(self, model, data): continue fields = [key] if self.is_relation_1_n(key) or self.is_relation_n_n(key): - fields = [] for item in data_value: for k in item: kk = f"{key}.{k}" - if kk not in fields: + if kk not in fields and self.has_property(kk): fields.append(kk) - m = self.serialize(model, fields=fields)[key] + m_ = self.serialize(model, fields=fields) + m = m_[key] if self.is_new_data(m, data_value): return True return False diff --git a/frontend/app/components/layout/object/layout-object-geojson.component.ts 
b/frontend/app/components/layout/object/layout-object-geojson.component.ts index 3d1bc20f..94846f10 100644 --- a/frontend/app/components/layout/object/layout-object-geojson.component.ts +++ b/frontend/app/components/layout/object/layout-object-geojson.component.ts @@ -191,10 +191,10 @@ export class ModulesLayoutObjectGeoJSONComponent this._mData .getOne(this.moduleCode(), this.objectCode(), value, { fields }) .subscribe((data) => { - layer.setPopupContent(this.popupHTML(data)); - }); - this._mapService.L.DomEvent.on(layer.getPopup().getElement(), 'click', (e) => { - const action = e && e.target && e.target.attributes.getNamedItem('action')?.nodeValue; + layer.setPopupContent(this.popupHTML(data)); + }); + this._mapService.L.DomEvent.on(layer.getPopup().getElement(), 'click', (e) => { + const action = e && e.target && e.target.attributes.getNamedItem('action')?.nodeValue; if (action) { this._mAction.processAction({ action, diff --git a/frontend/app/services/config.service.ts b/frontend/app/services/config.service.ts index 0d3ea85f..6a9d0034 100644 --- a/frontend/app/services/config.service.ts +++ b/frontend/app/services/config.service.ts @@ -139,11 +139,9 @@ export class ModulesConfigService { } moduleImg(moduleCode) { - console.log(this.AppConfig); const moduleImg = `${this.backendUrl()}/${ this.AppConfig.MEDIA_URL }/modulator/assets/${moduleCode.toLowerCase()}/module.jpg`; - console.log(moduleImg); return moduleImg; } From 63a258a58d4c1ec50943c5afa0e4bf8ced28c840 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 13 Jun 2023 12:24:33 +0200 Subject: [PATCH 109/142] =?UTF-8?q?table=20layout=20ne=20pas=20recharger?= =?UTF-8?q?=20les=20lignes=20si=20non=20n=C3=A9cessaire?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../components/layout/object/layout-object-table.component.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/frontend/app/components/layout/object/layout-object-table.component.ts 
b/frontend/app/components/layout/object/layout-object-table.component.ts index a09ad445..46e8263a 100644 --- a/frontend/app/components/layout/object/layout-object-table.component.ts +++ b/frontend/app/components/layout/object/layout-object-table.component.ts @@ -290,9 +290,12 @@ export class ModulesLayoutObjectTableComponent this.table.setHeight(elem.clientHeight); const pageSize = Math.floor((elem.clientHeight - 90) / 50); + const nbTotal = this._mObject.objectConfigContext(this.context).nb_total; + if ( !this.computedLayout.page_size && this.pageSize != pageSize && + nbTotal > pageSize && pageSize > 1 && !this.context.debug ) { From 3c59740f5f660245deca404344a67a282969f3d7 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 13 Jun 2023 12:25:16 +0200 Subject: [PATCH 110/142] diagnostic details -> pas de tab par defaut --- .../m_sipaf/config/layouts/m_sipaf.diagnostic_details.layout.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_details.layout.yml b/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_details.layout.yml index 7c717062..e3399d7b 100644 --- a/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_details.layout.yml +++ b/contrib/m_sipaf/config/layouts/m_sipaf.diagnostic_details.layout.yml @@ -96,7 +96,6 @@ layout: layout: display: tabs overflow: true - selected_tab: Aménagement items: - label: Champs communs items: *diag_commons From d03b82dd02268741607df07c096382eb06d57887 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 13 Jun 2023 12:26:06 +0200 Subject: [PATCH 111/142] fu custom icon --- frontend/app/services/map/base.ts | 9 --------- frontend/app/services/map/draw.ts | 2 +- 2 files changed, 1 insertion(+), 10 deletions(-) diff --git a/frontend/app/services/map/base.ts b/frontend/app/services/map/base.ts index 2bcf1da8..6b12746d 100644 --- a/frontend/app/services/map/base.ts +++ b/frontend/app/services/map/base.ts @@ -179,15 +179,6 @@ export default { map.isInitialized = true; - // 
init PM - const customIcon = L.icon({ - iconUrl: 'assets/marker-icon.png', - shadowUrl: 'assets/marker-shadow.png', - iconAnchor: [12, 41], - }); - - var customMarker = map.pm.Toolbar.copyDrawControl('drawMarker', { name: 'customMarker' }); - customMarker.drawInstance.setOptions({ markerStyle: { icon: customIcon } }); resolve(map); }, 100); }); diff --git a/frontend/app/services/map/draw.ts b/frontend/app/services/map/draw.ts index f898006b..a8cbacf5 100644 --- a/frontend/app/services/map/draw.ts +++ b/frontend/app/services/map/draw.ts @@ -4,7 +4,7 @@ import utils from '../../utils'; const defautDrawOptions = { position: 'topleft', customMarker: true, - drawMarker: false, + drawMarker: true, editMode: true, drawCircle: false, drawCircleMarker: false, From cd0c48934c683c7ca2322962280a81da1fc14722 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 13 Jun 2023 12:27:03 +0200 Subject: [PATCH 112/142] sipaf site edit detail set zoom to 12 --- contrib/m_sipaf/config/layouts/m_sipaf.site_details.layout.yml | 2 ++ contrib/m_sipaf/config/layouts/m_sipaf.site_edit.layout.yml | 1 + 2 files changed, 3 insertions(+) diff --git a/contrib/m_sipaf/config/layouts/m_sipaf.site_details.layout.yml b/contrib/m_sipaf/config/layouts/m_sipaf.site_details.layout.yml index 08536f8d..7416e012 100644 --- a/contrib/m_sipaf/config/layouts/m_sipaf.site_details.layout.yml +++ b/contrib/m_sipaf/config/layouts/m_sipaf.site_details.layout.yml @@ -9,8 +9,10 @@ layout: items: - type: map flex: 2 + zoom: 12 items: - type: object + zoom: true display: geojson object_code: site prefilters: __f__`id_passage_faune = ${o.object(x, 'site').value}` diff --git a/contrib/m_sipaf/config/layouts/m_sipaf.site_edit.layout.yml b/contrib/m_sipaf/config/layouts/m_sipaf.site_edit.layout.yml index 369d271b..15df8440 100644 --- a/contrib/m_sipaf/config/layouts/m_sipaf.site_edit.layout.yml +++ b/contrib/m_sipaf/config/layouts/m_sipaf.site_edit.layout.yml @@ -10,3 +10,4 @@ layout: template_params: object_code: site 
layout: __SITE_FORM_FIELDS__ + zoom: 12 From e32c5897e1a0c7de64eaae5aaf8b6a8b4a2260d5 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 13 Jun 2023 16:12:22 +0200 Subject: [PATCH 113/142] fix api query cache --- backend/gn_modulator/schema/base.py | 2 +- backend/gn_modulator/schema/repositories/base.py | 3 +++ backend/gn_modulator/schema/repositories/filters.py | 8 +------- backend/gn_modulator/schema/repositories/utils.py | 12 +++++++++--- 4 files changed, 14 insertions(+), 11 deletions(-) diff --git a/backend/gn_modulator/schema/base.py b/backend/gn_modulator/schema/base.py index 54909f6d..7bf54974 100644 --- a/backend/gn_modulator/schema/base.py +++ b/backend/gn_modulator/schema/base.py @@ -332,7 +332,7 @@ def process_csv_data(self, key, data, options={}, process_label=True): if isinstance(data, list): return ", ".join( - [self.process_csv_data(key, d, process_label=process_label) for d in data] + [str(self.process_csv_data(key, d, process_label=process_label)) for d in data] ) if isinstance(data, dict): diff --git a/backend/gn_modulator/schema/repositories/base.py b/backend/gn_modulator/schema/repositories/base.py index cfa1c8c4..db99a387 100644 --- a/backend/gn_modulator/schema/repositories/base.py +++ b/backend/gn_modulator/schema/repositories/base.py @@ -264,6 +264,9 @@ def query_list(self, module_code=MODULE_CODE, cruved_type="R", params={}, query_ query = self.process_fields(query, params.get("fields") or []) + # clear_query_cache + self.clear_query_cache(query) + order_bys, query = self.get_sorters(Model, params.get("sort", []), query) # if params.get('test'): diff --git a/backend/gn_modulator/schema/repositories/filters.py b/backend/gn_modulator/schema/repositories/filters.py index 696aa8d9..2dece377 100644 --- a/backend/gn_modulator/schema/repositories/filters.py +++ b/backend/gn_modulator/schema/repositories/filters.py @@ -182,13 +182,7 @@ def get_filter(self, Model, filter, query=None, condition=None): filter_out = cast(model_attribute, 
db.String) != (str(filter_value)) elif filter_type == "in": - filter_out = cast(model_attribute, db.String).in_( - [str(x) for x in filter_value] - # map( - # lambda x: str(x), - # filter_value - # ) - ) + filter_out = cast(model_attribute, db.String).in_([str(x) for x in filter_value]) else: raise SchemaRepositoryFilterTypeError( diff --git a/backend/gn_modulator/schema/repositories/utils.py b/backend/gn_modulator/schema/repositories/utils.py index a8336c27..df5de38f 100644 --- a/backend/gn_modulator/schema/repositories/utils.py +++ b/backend/gn_modulator/schema/repositories/utils.py @@ -17,6 +17,10 @@ def set_query_cache(self, query, key, value): query._cache[key] = value return query + def clear_query_cache(self, query): + if hasattr(query, "_cache"): + delattr(query, "_cache") + def get_query_cache(self, query, key): if not query: return @@ -142,14 +146,16 @@ def custom_getattr( res["val_of_type"] = res["val"].of_type(res["relation_alias"]) query = query.join(res["val_of_type"], isouter=True) - # mise en cache - query = self.set_query_cache(query, cache_key, res) + if only_fields: + query = self.set_query_cache(query, cache_key, res) # chargement des champs si is last field if self.is_val_relationship(res["val"]) and is_last_field and only_fields: + # mise en cache seulement dans ce cas + # query = self.set_query_cache(query, cache_key, res) query = self.eager_load_only(field_name, query, only_fields, index) - # mise en cache et retour + # retour return self.process_custom_getattr_res(res, query, field_name, index, only_fields) def get_sorters(self, Model, sort, query): From 1c703051d2bee571017e4c5c500fe9b2ab3d7a3b Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 13 Jun 2023 16:14:18 +0200 Subject: [PATCH 114/142] fix filters --- .../layout/object/layout-object-filters.component.ts | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/layout/object/layout-object-filters.component.ts 
b/frontend/app/components/layout/object/layout-object-filters.component.ts index 5b8afb69..bd115a37 100644 --- a/frontend/app/components/layout/object/layout-object-filters.component.ts +++ b/frontend/app/components/layout/object/layout-object-filters.component.ts @@ -60,8 +60,11 @@ export class ModulesLayoutObjectFiltersComponent .filter(([key, val]: any) => val != null) .map(([key, val]: any) => { const field = filterDefs[key]?.field || key; - const type = filterDefs[key]?.type || '='; - const value = filterDefs[key]?.key ? val[filterDefs[key].key] : utils.getAttr(data, key); + let value = filterDefs[key].key ? utils.getAttr(val, filterDefs[key].key) : val; + let type = filterDefs[key]?.type || Array.isArray(value) ? 'in' : '='; + if (Array.isArray(value)) { + value = value.join(';'); + } return { field, type, From f2aa4a325901c5289a94c0918a9bf6337d5fc9aa Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 13 Jun 2023 16:14:35 +0200 Subject: [PATCH 115/142] up hidden fields --- contrib/m_sipaf/config/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/m_sipaf/config/config.yml b/contrib/m_sipaf/config/config.yml index eaf0b01b..a0ce5b2d 100644 --- a/contrib/m_sipaf/config/config.yml +++ b/contrib/m_sipaf/config/config.yml @@ -93,6 +93,7 @@ site_filters_defs: field: linears.groups.id_group nomenclatures_ouvrage_materiaux: field: nomenclatures_ouvrage_materiaux.id_nomenclature + key: id_nomenclature site_table_fields: - code_passage_faune @@ -236,7 +237,6 @@ site_form_fields: items: - key: id_digitiser default: __f__context.current_user?.id_role - hidden: true - geom - key: id_passage_faune required: false From 8c620246408d87f9c98a1c228ae579a118526ab2 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 20 Jun 2023 14:54:52 +0200 Subject: [PATCH 116/142] sipaf obs --- backend/gn_modulator/definition/utils.py | 6 +++ backend/gn_modulator/module/config/utils.py | 1 + .../schema/models/column_properties.py | 12 +++-- 
.../schema/repositories/filters.py | 11 +++- .../gn_modulator/schema/repositories/utils.py | 34 +++++++----- backend/gn_modulator/utils/filters.py | 2 +- .../utils/synthese/syn.synthese.schema.yml | 19 ++++++- contrib/m_sipaf/config/config.yml | 27 +++++++++- .../layouts/m_sipaf.site_details.layout.yml | 20 ++++++- .../layouts/m_sipaf.site_edit.layout.yml | 2 +- contrib/m_sipaf/config/m_sipaf.module.yml | 9 ++++ .../layout/base/layout-map.component.html | 6 +-- .../layout/base/layout-map.component.scss | 44 +++++++++++---- .../layout/base/layout-modal.component.html | 4 +- .../layout/base/layout-modal.component.scss | 6 +-- .../layout/form/generic-form.component.scss | 6 +-- .../layout/import/layout-import.component.ts | 1 - .../object/layout-object-geojson.component.ts | 36 ++++++++----- .../object/layout-object-table.component.ts | 22 +------- frontend/app/services/map/base.ts | 1 - frontend/app/services/map/layer.ts | 6 +++ frontend/app/services/route.service.ts | 3 ++ frontend/app/services/table.service.ts | 53 +++++++++++++++---- frontend/app/utils/commons.ts | 2 +- 24 files changed, 240 insertions(+), 93 deletions(-) diff --git a/backend/gn_modulator/definition/utils.py b/backend/gn_modulator/definition/utils.py index 22ee12b8..fff0814b 100644 --- a/backend/gn_modulator/definition/utils.py +++ b/backend/gn_modulator/definition/utils.py @@ -2,6 +2,9 @@ import json import yaml from pathlib import Path + +from flask import current_app + from gn_modulator.utils.env import local_srid from gn_modulator.utils.commons import replace_in_dict from gn_modulator.utils.yaml import YmlLoader @@ -51,6 +54,9 @@ def load_definition_from_file(cls, file_path): # traitement du local_srid data = replace_in_dict(data, "__LOCAL_SRID__", local_srid()) data = replace_in_dict(data, "__REF_MODULE_CODE__", MODULE_CODE) + data = replace_in_dict( + data, "__CONFIG.URL_APPLICATION__", current_app.config["URL_APPLICATION"] + ) # on enleve aliases if isinstance(data, dict): diff --git 
a/backend/gn_modulator/module/config/utils.py b/backend/gn_modulator/module/config/utils.py index 8f18b5b3..f4d9b271 100644 --- a/backend/gn_modulator/module/config/utils.py +++ b/backend/gn_modulator/module/config/utils.py @@ -351,6 +351,7 @@ def add_basic_fields(cls, module_code, object_code): sm.pk_field_name(), sm.label_field_name(), sm.title_field_name(), + sm.geometry_field_name(), *sm.unique(), "scope", ]: diff --git a/backend/gn_modulator/schema/models/column_properties.py b/backend/gn_modulator/schema/models/column_properties.py index 9a5470a9..7449cd00 100644 --- a/backend/gn_modulator/schema/models/column_properties.py +++ b/backend/gn_modulator/schema/models/column_properties.py @@ -55,9 +55,10 @@ def cp_select(self, key, column_property_def, Model): if column_property_type == "concat": # label = ' ' - # 1 => ['', '', ''] + # 1 => ['', ' ', ''] # 2 => map getattr # 3 *dans concat + conditions = [] label = column_property_def["label"] index = 0 items = [] @@ -70,14 +71,19 @@ def cp_select(self, key, column_property_def, Model): items2.append(txt) txt = "" elif label[index] == ">": - model_attribute, _ = self.custom_getattr(Model, txt) + model_attribute, condition = self.custom_getattr(Model, txt) + if condition is not None: + conditions.append(condition) items2.append(txt) items.append(model_attribute) txt = "" else: txt += label[index] index += 1 - return func.concat(*items) + cp = func.concat(*items) + if conditions: + cp = select([cp]).where(and_(*conditions)) + return cp if column_property_type in ["st_astext"]: return func.st_astext(getattr(Model, column_property_def["key"])) diff --git a/backend/gn_modulator/schema/repositories/filters.py b/backend/gn_modulator/schema/repositories/filters.py index 2dece377..3fa1f902 100644 --- a/backend/gn_modulator/schema/repositories/filters.py +++ b/backend/gn_modulator/schema/repositories/filters.py @@ -2,7 +2,7 @@ repositories - filters """ import unidecode -from sqlalchemy import cast, and_, or_, not_ +from 
sqlalchemy import cast, and_, or_, not_, func from geonature.utils.env import db from ..errors import SchemaRepositoryFilterError, SchemaRepositoryFilterTypeError from sqlalchemy.sql.functions import ReturnTypeFromArgs @@ -184,6 +184,15 @@ def get_filter(self, Model, filter, query=None, condition=None): elif filter_type == "in": filter_out = cast(model_attribute, db.String).in_([str(x) for x in filter_value]) + elif filter_type == "dwithin": + x, y, radius = filter_value.split(";") + geo_filter = func.ST_DWithin( + func.ST_GeogFromWKB(model_attribute), + func.ST_GeogFromWKB(func.ST_MakePoint(x, y)), + radius, + ) + filter_out = geo_filter + else: raise SchemaRepositoryFilterTypeError( "Le type de filtre {} n'est pas géré".format(filter_type) diff --git a/backend/gn_modulator/schema/repositories/utils.py b/backend/gn_modulator/schema/repositories/utils.py index df5de38f..42ff863f 100644 --- a/backend/gn_modulator/schema/repositories/utils.py +++ b/backend/gn_modulator/schema/repositories/utils.py @@ -28,7 +28,7 @@ def get_query_cache(self, query, key): return None return query._cache.get(key) - def process_custom_getattr_res(self, res, query, field_name, index, only_fields=[]): + def process_custom_getattr_res(self, res, query, condition, field_name, index, only_fields=[]): # si c'est une propriété fields = field_name.split(".") is_relationship = self.is_val_relationship(res["val"]) @@ -38,19 +38,23 @@ def process_custom_getattr_res(self, res, query, field_name, index, only_fields= # on ne peut pas avoir de field apres une propriété if not is_last_field: raise Exception(f"pb fields {field_name}, il ne devrait plus rester de champs") - - return res["val"], query + return res["val"], query or condition if not is_last_field: + if not query: + condition = ( + and_(condition, res["val"].expression) if condition else res["val"].expression + ) return self.custom_getattr( res["relation_alias"], field_name, index=index + 1, query=query, + condition=condition, 
only_fields=only_fields, ) - return res["relation_alias"], query + return res["relation_alias"], query or condition def eager_load_only(self, field_name, query, only_fields, index): """ @@ -79,6 +83,7 @@ def eager_load_only(self, field_name, query, only_fields, index): ), filter( lambda x: key_cache_eager in x + and x.startswith(f"{key_cache_eager}.") and "." not in x.replace(f"{key_cache_eager}.", "") and hasattr( getattr(cache["relation_alias"], x.replace(f"{key_cache_eager}.", "")), @@ -105,7 +110,7 @@ def is_val_relationship(self, val): return hasattr(val, "mapper") and hasattr(val.mapper, "entity") def custom_getattr( - self, Model, field_name, query=None, only_fields="", index=0, condition=None + self, Model, field_name, query=None, condition=None, only_fields="", index=0 ): # liste des champs 'rel1.rel2.pro1' -> 'rel1', 'rel2', 'prop1' fields = field_name.split(".") @@ -115,14 +120,15 @@ def custom_getattr( # clé pour le cache cache_key = ".".join(fields[: index + 1]) - # test si c'est le dernier champs is_last_field = index == len(fields) - 1 # récupération depuis le cache associé à la query res = self.get_query_cache(query, cache_key) if res: - return self.process_custom_getattr_res(res, query, field_name, index, only_fields) + return self.process_custom_getattr_res( + res, query, condition, field_name, index, only_fields + ) # si non en cache # on le calcule @@ -142,21 +148,25 @@ def custom_getattr( # si c'est une propriété if self.is_val_relationship(res["val"]): res["relation_model"] = res["val"].mapper.entity - res["relation_alias"] = orm.aliased(res["relation_model"]) + res["relation_alias"] = ( + orm.aliased(res["relation_model"]) if query else res["relation_model"] + ) + # res["relation_alias"] = orm.aliased(res["relation_model"]) res["val_of_type"] = res["val"].of_type(res["relation_alias"]) - query = query.join(res["val_of_type"], isouter=True) + if query: + query = query.join(res["val_of_type"], isouter=True) if only_fields: query = 
self.set_query_cache(query, cache_key, res) # chargement des champs si is last field if self.is_val_relationship(res["val"]) and is_last_field and only_fields: - # mise en cache seulement dans ce cas - # query = self.set_query_cache(query, cache_key, res) query = self.eager_load_only(field_name, query, only_fields, index) # retour - return self.process_custom_getattr_res(res, query, field_name, index, only_fields) + return self.process_custom_getattr_res( + res, query, condition, field_name, index, only_fields + ) def get_sorters(self, Model, sort, query): order_bys = [] diff --git a/backend/gn_modulator/utils/filters.py b/backend/gn_modulator/utils/filters.py index eb60a8a3..e74bd5d6 100644 --- a/backend/gn_modulator/utils/filters.py +++ b/backend/gn_modulator/utils/filters.py @@ -58,7 +58,7 @@ def parse_filter(str_filter): index_min = None filter_type_min = None - for filter_type in ["=", "<", ">", ">=", "<=", "like", "ilike", "in", "~"]: + for filter_type in ["=", "<", ">", ">=", "<=", "like", "ilike", "in", "~", "dwithin"]: try: index = str_filter.index(f" {filter_type} ") except ValueError: diff --git a/config/definitions/utils/synthese/syn.synthese.schema.yml b/config/definitions/utils/synthese/syn.synthese.schema.yml index 67faa832..3fbc9709 100644 --- a/config/definitions/utils/synthese/syn.synthese.schema.yml +++ b/config/definitions/utils/synthese/syn.synthese.schema.yml @@ -9,7 +9,7 @@ meta: genre: M label: element de la synthese labels: elements de la synthese - label_field_name: cd_nom + label_field_name: taxref.nom_vern geometry_field_name: the_geom_4326 unique: - id_source @@ -23,3 +23,20 @@ properties: nomenclature_type: OCC_COMPORTEMENT nomenclature_determination_method: nomenclature_type: METH_DETERMIN + nomenclature_bio_condition: + title: État biologique + taxref: + title: taxon + dataset: + title: Jeu de données + cor_observers: + title: Observateur(s) + date_min: + title: Date (min) + url_source: + type: string + column_property: concat + 
title: Url source + # label: '__CONFIG.URL_APPLICATION__//' + label: '/' + diff --git a/contrib/m_sipaf/config/config.yml b/contrib/m_sipaf/config/config.yml index a0ce5b2d..7fe409c9 100644 --- a/contrib/m_sipaf/config/config.yml +++ b/contrib/m_sipaf/config/config.yml @@ -223,6 +223,31 @@ site_details_fields: prefilters: __f__`id_passage_faune = ${o.object(x, 'site').value}` sort: date_diagnostic- + - label: __f__o.tab_label(x) + object_code: synthese + hidden: "__f__!o.config({...context, object_code: 'site'})?.value_xy" + items: + - type: object + display: table + prefilters: | + __f__{ + const xy = o.config({...context, object_code: 'site'})?.value_xy; + return xy + ? `the_geom_4326 dwithin ${xy.x};${xy.y};1000` + : `id_synthese = -1` + } + actions: + R: + url: "#/synthese/occurrence/" + title: Liens vers le module de synthese + items: + - date_min + - dataset.dataset_name + - taxref.nom_vern + - nomenclature_bio_condition.label_fr + - cor_observers.nom_complet + sort: date_min- + site_form_fields: display: tabs overflow: true @@ -351,8 +376,6 @@ site_form_fields: schema_dot_table: pr_sipaf.t_passages_faune details: [] - - diagnostic_table_fields: - date_diagnostic - passage_faune.code_passage_faune diff --git a/contrib/m_sipaf/config/layouts/m_sipaf.site_details.layout.yml b/contrib/m_sipaf/config/layouts/m_sipaf.site_details.layout.yml index 7416e012..e593fc72 100644 --- a/contrib/m_sipaf/config/layouts/m_sipaf.site_details.layout.yml +++ b/contrib/m_sipaf/config/layouts/m_sipaf.site_details.layout.yml @@ -9,7 +9,7 @@ layout: items: - type: map flex: 2 - zoom: 12 + zoom: 14 items: - type: object zoom: true @@ -17,6 +17,24 @@ layout: object_code: site prefilters: __f__`id_passage_faune = ${o.object(x, 'site').value}` popup_fields: __SITE_MAP_POPUP_FIELDS__ + bring_to_front: true + tooltip_permanent: true + - type: object + display: geojson + object_code: synthese + prefilters: | + __f__{ + const xy = o.config({...context, object_code: 'site'})?.value_xy; + 
return xy + ? `the_geom_4326 dwithin ${xy.x};${xy.y};1000` + : `id_synthese = -1` + } + popup_fields: + - date_min + - dataset.dataset_name + - taxref.nom_vern + - nomenclature_bio_condition.label_fr + - cor_observers.nom_complet - flex: 3 items: diff --git a/contrib/m_sipaf/config/layouts/m_sipaf.site_edit.layout.yml b/contrib/m_sipaf/config/layouts/m_sipaf.site_edit.layout.yml index 15df8440..8b6141a9 100644 --- a/contrib/m_sipaf/config/layouts/m_sipaf.site_edit.layout.yml +++ b/contrib/m_sipaf/config/layouts/m_sipaf.site_edit.layout.yml @@ -10,4 +10,4 @@ layout: template_params: object_code: site layout: __SITE_FORM_FIELDS__ - zoom: 12 + zoom: 13 diff --git a/contrib/m_sipaf/config/m_sipaf.module.yml b/contrib/m_sipaf/config/m_sipaf.module.yml index 73e17224..564765ba 100644 --- a/contrib/m_sipaf/config/m_sipaf.module.yml +++ b/contrib/m_sipaf/config/m_sipaf.module.yml @@ -24,6 +24,15 @@ objects: diagnostic: schema_code: m_sipaf.diag cruved: CRUD + synthese: + schema_code: syn.synthese + label: observation + labels: observations + cruved: R + map: + style: + color: red + tree: site: diff --git a/frontend/app/components/layout/base/layout-map.component.html b/frontend/app/components/layout/base/layout-map.component.html index db6f68c7..d540b421 100644 --- a/frontend/app/components/layout/base/layout-map.component.html +++ b/frontend/app/components/layout/base/layout-map.component.html @@ -22,15 +22,15 @@
-
+
-
-
+
+
{{ _map?.coordinatesTxt }}
diff --git a/frontend/app/components/layout/base/layout-map.component.scss b/frontend/app/components/layout/base/layout-map.component.scss index e31b3e6c..e9487f15 100644 --- a/frontend/app/components/layout/base/layout-map.component.scss +++ b/frontend/app/components/layout/base/layout-map.component.scss @@ -1,18 +1,40 @@ @import '../../../../node_modules/@geoman-io/leaflet-geoman-free/dist/leaflet-geoman.css'; +// patch pour les modal en fixed absolute +// pour que la carte (ou ses contrôles ne passent pas devant la modale +.map .leaflet-map-pane, +.map .leaflet-top, +.map .leaflet-bottom { + z-index: 1; +} + +.map { + height: 100%; + z-index: 1 important; +} + +.map > div { + height: 100%; +} + +.map-container { + height: 100%; + position: relative; +} + .button-container { padding: Opx; } .coordinates { - position: absolute; - bottom: 10px; - z-index: 9999; - text-align: center; - width: 400px; - left: 50%; - margin-left: -200px; - background-color: rgba(255, 255, 255, 0.5); - border-radius: 5px; - border-color: gray; - } \ No newline at end of file + position: absolute; + bottom: 10px; + z-index: 999; + text-align: center; + width: 400px; + left: 50%; + margin-left: -200px; + background-color: rgba(255, 255, 255, 0.5); + border-radius: 5px; + border-color: gray; +} diff --git a/frontend/app/components/layout/base/layout-modal.component.html b/frontend/app/components/layout/base/layout-modal.component.html index 44ed3b39..53014a49 100644 --- a/frontend/app/components/layout/base/layout-modal.component.html +++ b/frontend/app/components/layout/base/layout-modal.component.html @@ -1,6 +1,6 @@ diff --git a/frontend/app/components/layout/base/layout-modal.component.scss b/frontend/app/components/layout/base/layout-modal.component.scss index 9cf504f1..1a3bde46 100644 --- a/frontend/app/components/layout/base/layout-modal.component.scss +++ b/frontend/app/components/layout/base/layout-modal.component.scss @@ -1,7 +1,7 @@ .modal-container { z-index: 100000; 
background-color: rgba(0,0,0,0.7); - position: absolute; + position: fixed; top:0px; left:0px; height: 100%; @@ -11,12 +11,12 @@ justify-content: center; } -.modal-container > div { +.modal-container > div{ width: fit-content; padding: 10px; opacity: 1; } -.modal-container > div > div{ +.modal-container > div > div { background-color: red; } diff --git a/frontend/app/components/layout/form/generic-form.component.scss b/frontend/app/components/layout/form/generic-form.component.scss index ac0fefe3..f9873669 100644 --- a/frontend/app/components/layout/form/generic-form.component.scss +++ b/frontend/app/components/layout/form/generic-form.component.scss @@ -1,7 +1,3 @@ :host::ng-deep .ng-invalid { border-left: none !important; -} - -// :host::ng-deep .ng-invalid > .ng-invalid { -// border-left: none; -// } \ No newline at end of file +} \ No newline at end of file diff --git a/frontend/app/components/layout/import/layout-import.component.ts b/frontend/app/components/layout/import/layout-import.component.ts index 4498854a..bd0a135f 100644 --- a/frontend/app/components/layout/import/layout-import.component.ts +++ b/frontend/app/components/layout/import/layout-import.component.ts @@ -101,7 +101,6 @@ export class ModulesLayoutImportComponent extends ModulesLayoutComponent impleme this._mLayout.stopActionProcessing(''); const response = importEvent.body as any; this.importData = { ...this.importData, ...response }; - console.log(this.importData); this.setStep(); if (response.status == 'DONE') { if (response.res.nb_unchanged != response.res.nb_process) { diff --git a/frontend/app/components/layout/object/layout-object-geojson.component.ts b/frontend/app/components/layout/object/layout-object-geojson.component.ts index 94846f10..baf6aed1 100644 --- a/frontend/app/components/layout/object/layout-object-geojson.component.ts +++ b/frontend/app/components/layout/object/layout-object-geojson.component.ts @@ -51,6 +51,10 @@ export class ModulesLayoutObjectGeoJSONComponent 
console.error(`le layer (${this.pkFieldName()}==${value}) n'est pas présent`); return; } + if (layer._latlng) { + this.setObject({ value_xy: { x: layer._latlng.lng, y: layer._latlng.lat } }); + } + layer.bringToFront(); layer.openPopup(); } @@ -72,10 +76,11 @@ export class ModulesLayoutObjectGeoJSONComponent const currentZoom = this._mapService.getZoom(this.context.map_id); const currentMapBounds = this._mapService.getMapBounds(this.context.map_id); - const layerStyle = this.computedLayout.style || this.context.map?.style; + const layerStyle = + this.computedLayout.style || this.objectConfig()?.map?.style || this.context.map?.style; const paneName = this.computedLayout.pane || this.context.map?.pane || `P1`; const bZoom = this.computedLayout.zoom || this.context.map?.zoom; - + const bTooltipPermanent = this.computedLayout.tooltip_permanent; const bring_to_front = this.computedLayout.bring_to_front || this.context.map?.bring_to_front; this.mapData = { geojson, @@ -98,7 +103,7 @@ export class ModulesLayoutObjectGeoJSONComponent }); /** tooltip */ - const label = feature.properties[label_field_name]; + const label = utils.getAttr(feature.properties, label_field_name); if (label) { const action = this._mapService.actionTooltipDisplayZoomThreshold( this.context.map_id, @@ -109,19 +114,21 @@ export class ModulesLayoutObjectGeoJSONComponent currentMapBounds ); layer - .bindTooltip(label, { + .bindTooltip(label.toString(), { direction: 'top', - permanent: action == 'display', + permanent: action == 'display' && bTooltipPermanent, className: 'anim-tooltip', }) .openTooltip(); /** tooltip - zoom et emprise */ - layer.onZoomMoveEnd = this._mapService.layerZoomMoveEndListener( - this.context.map_id, - layer, - this.tooltipDisplayZoomTreshold - ); + if (bTooltipPermanent) { + layer.onZoomMoveEnd = this._mapService.layerZoomMoveEndListener( + this.context.map_id, + layer, + this.tooltipDisplayZoomTreshold + ); + } } 
layer.bindPopup(this.popupHTML(feature.properties)).on('popupopen', (event) => { this.onPopupOpen(layer); @@ -130,7 +137,7 @@ export class ModulesLayoutObjectGeoJSONComponent }, }; const d = {}; - d[this.computedLayout.key] = this.mapData; + d[this.context.object_code] = this.mapData; this._mapService.processData(this.context.map_id, d, { // key: this.computedLayout.key, zoom: this.computedLayout.zoom, @@ -140,9 +147,10 @@ export class ModulesLayoutObjectGeoJSONComponent popupHTML(properties) { const fields = this.popupFields(); - const label = `${this.utils.capitalize(this.objectConfig().display.label)}: ${ - properties[this.labelFieldName()] - }`; + + const label = `${this.utils.capitalize( + this.objectConfig().display.label + )}: ${utils.getAttr(properties, this.labelFieldName())}`; var propertiesHTML = ''; propertiesHTML += '
    \n'; propertiesHTML += fields diff --git a/frontend/app/components/layout/object/layout-object-table.component.ts b/frontend/app/components/layout/object/layout-object-table.component.ts index 46e8263a..20613542 100644 --- a/frontend/app/components/layout/object/layout-object-table.component.ts +++ b/frontend/app/components/layout/object/layout-object-table.component.ts @@ -82,28 +82,8 @@ export class ModulesLayoutObjectTableComponent } onRowClick = (e, row) => { - let action = utils.getAttr(e, 'target.attributes.action.nodeValue') - ? utils.getAttr(e, 'target.attributes.action.nodeValue') - : e.target.getElementsByClassName('action').length - ? utils.getAttr(e.target.getElementsByClassName('action')[0], 'attributes.action.nodeValue') - : 'selected'; const value = this.getRowValue(row); - - if (['details', 'edit'].includes(action)) { - this._mAction.processAction({ - action, - context: this.context, - value, - }); - } - - if (action == 'delete') { - this._mLayout.openModal('delete', this.getRowData(row)); - } - - if (action == 'selected') { - this.setObject({ value }); - } + this.setObject({ value }); }; getRowValue(row) { diff --git a/frontend/app/services/map/base.ts b/frontend/app/services/map/base.ts index 6b12746d..dfd98472 100644 --- a/frontend/app/services/map/base.ts +++ b/frontend/app/services/map/base.ts @@ -167,7 +167,6 @@ export default { map.on('contextmenu', (event: any) => { map.coordinatesTxt = `${event.latlng.lng}, ${event.latlng.lat}`; navigator.clipboard.writeText(`${event.latlng.lng}, ${event.latlng.lat}`); - console.log(`${event.latlng.lng} ${event.latlng.lat}`); }); // init panes diff --git a/frontend/app/services/map/layer.ts b/frontend/app/services/map/layer.ts index c7cb828d..c93e28f5 100644 --- a/frontend/app/services/map/layer.ts +++ b/frontend/app/services/map/layer.ts @@ -254,6 +254,10 @@ export default { if (bring_to_front) { setTimeout(() => { layer.bringToFront(); + const tooltip = layer.getTooltip(); + if (tooltip) { + 
layer.unbindTooltip().bindTooltip(tooltip); + } }, 500); } if (!!onEachFeature) { @@ -308,6 +312,7 @@ export default { layerZoomMoveEndListener(mapId, layer, tooltipDisplayZoomTreshold) { // on garde en mémoire le dernier zoom + var lastZoomLevel; var lastMapBounds; @@ -316,6 +321,7 @@ export default { if (!tooltip) { return; } + const tooltipDisplayed = tooltip.options.permanent; const action = this.actionTooltipDisplayZoomThreshold( mapId, diff --git a/frontend/app/services/route.service.ts b/frontend/app/services/route.service.ts index b4687b50..1ff9fca8 100644 --- a/frontend/app/services/route.service.ts +++ b/frontend/app/services/route.service.ts @@ -113,6 +113,9 @@ export class ModulesRouteService { * patch pour pouvoir rediriger sur la meme url */ redirect(url) { + if (url[0] == '#') { + url = url.substring(1); + } this._router.navigateByUrl('/', { skipLocationChange: true }).then(() => { this._router.navigateByUrl(url); }); diff --git a/frontend/app/services/table.service.ts b/frontend/app/services/table.service.ts index fb3fe4e4..034158ed 100644 --- a/frontend/app/services/table.service.ts +++ b/frontend/app/services/table.service.ts @@ -2,14 +2,23 @@ import { Injectable, Injector } from '@angular/core'; import utils from '../utils'; import { ModulesConfigService } from './config.service'; import { ModulesObjectService } from './object.service'; +import { ModulesActionService } from './action.service'; +import { ModulesRouteService } from './route.service'; +import { ModulesLayoutService } from './layout.service'; @Injectable() export class ModulesTableService { + _mAction: ModulesActionService; _mConfig: ModulesConfigService; _mObject: ModulesObjectService; + _mRoute: ModulesRouteService; + _mLayout: ModulesLayoutService; constructor(private _injector: Injector) { this._mConfig = this._injector.get(ModulesConfigService); this._mObject = this._injector.get(ModulesObjectService); + this._mAction = this._injector.get(ModulesActionService); + this._mRoute = 
this._injector.get(ModulesRouteService); + this._mLayout = this._injector.get(ModulesLayoutService); } /** permet de passer des paramètre de tri du format tabulator @@ -55,7 +64,7 @@ export class ModulesTableService { * - On utilise mPage.chekcLink pour voir si et comment on affiche l'action en question * - L'appartenance (scope) sera fournie par les données du rang de la cellule dans les fonction formatter et tooltip) * */ - columnAction(context, action) { + columnAction(layout, context, action) { // test si l'action est possible (ou avant) const iconAction = { @@ -71,7 +80,8 @@ export class ModulesTableService { }; const { actionAllowed, actionMsg } = this._mObject.checkAction(context, action); - if (actionAllowed == null) { + + if (actionAllowed == null && !(layout?.actions && layout?.actions[action])) { return; } @@ -79,14 +89,40 @@ export class ModulesTableService { headerSort: false, formatter: (cell, formatterParams, onRendered) => { const scope = cell._cell.row.data['scope']; - const { actionAllowed, actionMsg } = this._mObject.checkAction(context, action, scope); - return ``; + }'>`; + return html; }, width: 22, hozAlign: 'center', + cellClick: (e, cell) => { + const data = cell._cell.row.data; + const value = this._mObject.objectId({ context, data }); + if (layout?.actions?.[action]) { + const href = layout.actions[action].url.replace('', value); + this._mRoute.redirect(href); + return; + } + + if (['R', 'U'].includes(action)) { + this._mAction.processAction({ + action: actionTxt[action], + context, + value, + }); + } + + if (action == 'D') { + this._mLayout.openModal('delete', data); + } + }, tooltip: (cell) => { + if (layout.actions?.[action]) { + return layout?.actions[action].title; + } const scope = cell._cell.row.data['scope']; const { actionAllowed, actionMsg } = this._mObject.checkAction(context, action, scope); return actionMsg; @@ -101,10 +137,10 @@ export class ModulesTableService { * U: update / edit * D: delete */ - columnsAction(context) { + 
columnsAction(layout, context) { const columnsAction = 'RUD' .split('') - .map((action) => this.columnAction(context, action)) + .map((action) => this.columnAction(layout, context, action)) .filter((columnAction) => !!columnAction); return columnsAction; } @@ -116,7 +152,7 @@ export class ModulesTableService { */ columnsTable(fields, layout, context) { //column definition in the columns array - return [...this.columnsAction(context), ...this.columns(fields, layout, context)]; + return [...this.columnsAction(layout, context), ...this.columns(fields, layout, context)]; } columnLayoutItem(layoutItem, context) { @@ -137,7 +173,6 @@ export class ModulesTableService { columns(fields, layout, context) { const columns = fields.map((item) => this.columnLayoutItem(item, context)); - return columns.map((col) => { const column = utils.copy(col); column.headerFilter = column.headerFilter && layout.display_filters; diff --git a/frontend/app/utils/commons.ts b/frontend/app/utils/commons.ts index 615065a9..c759fbeb 100644 --- a/frontend/app/utils/commons.ts +++ b/frontend/app/utils/commons.ts @@ -38,7 +38,7 @@ const addKey = (keys, key) => { const getAttr = (obj, paths, index = 0) => { if (paths == null && index == 0) { - console.log('?????'); + console.log('????? 
GetAtrr'); console.trace(); } if (paths == null) { From fda2b80f45143417afb07229fb4bf0434ca34abb Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Wed, 21 Jun 2023 15:33:20 +0200 Subject: [PATCH 117/142] permissions --- backend/gn_modulator/__init__.py | 2 +- ...0a6e3_gn_modulator_permission_available.py | 69 +++++++++++++++++++ backend/gn_modulator/module/base.py | 5 +- backend/gn_modulator/module/commands.py | 5 +- backend/gn_modulator/module/config/utils.py | 7 +- backend/gn_modulator/schema/__init__.py | 3 +- .../gn_modulator/schema/repositories/base.py | 13 +++- ...ion.schema.yml_ => perm.action.schema.yml} | 6 +- ...ect.schema.yml_ => perm.object.schema.yml} | 2 +- .../permission/perm.perm_dispo.schema.yml | 15 ++++ ...schema.yml_ => perm.permission.schema.yml} | 2 +- .../features/m_sipaf.permissions.data.yml | 14 ++++ contrib/m_sipaf/config/m_sipaf.module.yml | 1 + frontend/app/components/page.component.html | 6 +- frontend/app/components/page.component.ts | 6 +- 15 files changed, 139 insertions(+), 17 deletions(-) create mode 100644 backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py rename config/definitions/utils/permission/{perm.action.schema.yml_ => perm.action.schema.yml} (63%) rename config/definitions/utils/permission/{perm.object.schema.yml_ => perm.object.schema.yml} (81%) create mode 100644 config/definitions/utils/permission/perm.perm_dispo.schema.yml rename config/definitions/utils/permission/{perm.permission.schema.yml_ => perm.permission.schema.yml} (78%) create mode 100644 contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml diff --git a/backend/gn_modulator/__init__.py b/backend/gn_modulator/__init__.py index 6038274f..59cbaab1 100644 --- a/backend/gn_modulator/__init__.py +++ b/backend/gn_modulator/__init__.py @@ -19,7 +19,7 @@ def init_gn_modulator(): config_dir().mkdir(parents=True, exist_ok=True) symlink(config_modulator_dir, config_dir() / "modulator") - verbose = False + verbose = True # 
- definitions start_time = time.time() DefinitionMethods.init_definitions() diff --git a/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py b/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py new file mode 100644 index 00000000..bd47124d --- /dev/null +++ b/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py @@ -0,0 +1,69 @@ +"""gn_modulator permissions available + +Revision ID: b78eaab0a6e3 +Revises: 3920371728d8 +Create Date: 2023-06-20 15:19:21.097194 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "b78eaab0a6e3" +down_revision = "3920371728d8" +depends_on = None + + +def upgrade(): + pass + + op.execute( + """ +INSERT INTO + gn_permissions.t_permissions_available ( + id_module, + id_object, + id_action, + label, + scope_filter + ) + SELECT + m.id_module, + o.id_object, + a.id_action, + v.label, + v.scope_filter + FROM + ( + VALUES + ('MODULATOR', 'ALL', 'C', False, 'Droit sur le module MODULATOR en création'), + ('MODULATOR', 'ALL', 'R', False, 'Droit sur le module MODULATOR en lecture'), + ('MODULATOR', 'ALL', 'U', False, 'Droit sur le module MODULATOR en édition'), + ('MODULATOR', 'ALL', 'D', False, 'Droit sur le module MODULATOR en suppression') + ) AS v (module_code, object_code, action_code, scope_filter, label) + JOIN + gn_commons.t_modules m ON m.module_code = v.module_code + JOIN + gn_permissions.t_objects o ON o.code_object = v.object_code + JOIN + gn_permissions.bib_actions a ON a.code_action = v.action_code + """ + ) + + +def downgrade(): + # suppression des droits disponibles pour le module MODULATOR + + op.execute( + """ + DELETE FROM + gn_permissions.t_permissions_available pa + USING + gn_commons.t_modules m + WHERE + pa.id_module = m.id_module + AND + module_code = 'MODULATOR' + """ + ) diff --git a/backend/gn_modulator/module/base.py 
b/backend/gn_modulator/module/base.py index 17b0db7c..f860809e 100644 --- a/backend/gn_modulator/module/base.py +++ b/backend/gn_modulator/module/base.py @@ -101,6 +101,9 @@ def register_db_module(cls, module_code): @classmethod def delete_db_module(cls, module_code): schema_module = SchemaMethods("commons.module") + id_module = schema_module.get_row(module_code, "module_code").one().id_module + SchemaMethods("perm.perm_dispo").delete_row(id_module, "id_module", multiple=True) + schema_module.delete_row(module_code, field_name="module_code", params={}) @classmethod @@ -136,8 +139,8 @@ def process_module_features(cls, module_code): print("- Ajout de données depuis les features") + infos = {} for data_code in data_codes: - infos = {} infos[data_code] = SchemaMethods.process_features(data_code) SchemaMethods.log(SchemaMethods.txt_data_infos(infos)) diff --git a/backend/gn_modulator/module/commands.py b/backend/gn_modulator/module/commands.py index 35a87b7a..499b97ea 100644 --- a/backend/gn_modulator/module/commands.py +++ b/backend/gn_modulator/module/commands.py @@ -84,10 +84,7 @@ def remove_module(cls, module_code, force=False): # suppression du module en base print("- suppression du module {} en base".format(module_code)) - try: - cls.delete_db_module(module_code) - except Exception: - print("Le module n'est pas présent en base") + cls.delete_db_module(module_code) # suppression de la config diff --git a/backend/gn_modulator/module/config/utils.py b/backend/gn_modulator/module/config/utils.py index f4d9b271..b43863dd 100644 --- a/backend/gn_modulator/module/config/utils.py +++ b/backend/gn_modulator/module/config/utils.py @@ -288,6 +288,8 @@ def process_fields(cls, module_code): @classmethod def process_base_fields(cls, module_code): module_config = cls.module_config(module_code) + if not module_config["registred"]: + return for object_code in module_config["objects"]: object_config = cls.object_config(module_code, object_code) if "R" in object_config["cruved"]: @@ 
-296,7 +298,8 @@ def process_base_fields(cls, module_code): @classmethod def add_basic_fields(cls, module_code, object_code): sm = SchemaMethods(cls.schema_code(module_code, object_code)) - + if sm.definition is None: + return authorized_read_fields = ( get_global_cache( [ @@ -451,6 +454,8 @@ def add_key(cls, context, key): schema_code = object_config["schema_code"] sm = SchemaMethods(schema_code) + if sm.definition is None: + return if not sm.has_property(key): # raise error ? diff --git a/backend/gn_modulator/schema/__init__.py b/backend/gn_modulator/schema/__init__.py index a28abe24..96b2f2e1 100644 --- a/backend/gn_modulator/schema/__init__.py +++ b/backend/gn_modulator/schema/__init__.py @@ -71,8 +71,9 @@ def init(self): """ Initialise le schema et le place dans le cache """ - definition = self.definition + if definition is None: + return None schema_code = definition["code"] if not definition: diff --git a/backend/gn_modulator/schema/repositories/base.py b/backend/gn_modulator/schema/repositories/base.py index db99a387..dcbbeac2 100644 --- a/backend/gn_modulator/schema/repositories/base.py +++ b/backend/gn_modulator/schema/repositories/base.py @@ -198,7 +198,15 @@ def update_row( return m, True - def delete_row(self, value, field_name=None, module_code=MODULE_CODE, params={}, commit=True): + def delete_row( + self, + value, + field_name=None, + module_code=MODULE_CODE, + params={}, + commit=True, + multiple=False, + ): """ delete row (Model. 
== value) """ @@ -211,7 +219,8 @@ def delete_row(self, value, field_name=None, module_code=MODULE_CODE, params={}, query_type="delete", ) # pour être sûr qu'il n'y a qu'une seule ligne de supprimée - m.one() + if not multiple: + m.one() # https://stackoverflow.com/questions/49794899/flask-sqlalchemy-delete-query-failing-with-could-not-evaluate-current-criteria?noredirect=1&lq=1 m.delete(synchronize_session=False) db.session.flush() diff --git a/config/definitions/utils/permission/perm.action.schema.yml_ b/config/definitions/utils/permission/perm.action.schema.yml similarity index 63% rename from config/definitions/utils/permission/perm.action.schema.yml_ rename to config/definitions/utils/permission/perm.action.schema.yml index bcd03009..b5b18564 100644 --- a/config/definitions/utils/permission/perm.action.schema.yml_ +++ b/config/definitions/utils/permission/perm.action.schema.yml @@ -5,9 +5,9 @@ description: Définition du schema des actions meta: autoschema: true - model: geonature.core.gn_permissions.models.TActions + model: geonature.core.gn_permissions.models.PermAction label: action - label_field_name: label_action - genre: M + label_field_name: code_action + genre: F unique: - code_action diff --git a/config/definitions/utils/permission/perm.object.schema.yml_ b/config/definitions/utils/permission/perm.object.schema.yml similarity index 81% rename from config/definitions/utils/permission/perm.object.schema.yml_ rename to config/definitions/utils/permission/perm.object.schema.yml index d28474dd..f4def9c6 100644 --- a/config/definitions/utils/permission/perm.object.schema.yml_ +++ b/config/definitions/utils/permission/perm.object.schema.yml @@ -6,7 +6,7 @@ description: Définition du schema des object de permission meta: schema_code: perm.object autoschema: true - model: geonature.core.gn_permissions.models.TObjects + model: geonature.core.gn_permissions.models.PermObject label: object label_field_name: label_object genre: M diff --git 
a/config/definitions/utils/permission/perm.perm_dispo.schema.yml b/config/definitions/utils/permission/perm.perm_dispo.schema.yml new file mode 100644 index 00000000..51d35de4 --- /dev/null +++ b/config/definitions/utils/permission/perm.perm_dispo.schema.yml @@ -0,0 +1,15 @@ +type: schema +code: perm.perm_dispo +title: Schema perm.perm_dispo +description: Définition du schema des permissions disponibles + +meta: + autoschema: true + model: geonature.core.gn_permissions.models.PermissionAvailable + label: permission + label_field_name: id_permssion + genre: F + unique: + - id_module + - id_action + - id_object diff --git a/config/definitions/utils/permission/perm.permission.schema.yml_ b/config/definitions/utils/permission/perm.permission.schema.yml similarity index 78% rename from config/definitions/utils/permission/perm.permission.schema.yml_ rename to config/definitions/utils/permission/perm.permission.schema.yml index 9b71d7cf..23ab2dd5 100644 --- a/config/definitions/utils/permission/perm.permission.schema.yml_ +++ b/config/definitions/utils/permission/perm.permission.schema.yml @@ -5,7 +5,7 @@ description: Définition du schema des permissions meta: autoschema: true - model: geonature.core.gn_permissions.models.CorRoleActionFilterModuleObject + model: geonature.core.gn_permissions.models.Permission label: permission label_field_name: id_permssion genre: F diff --git a/contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml b/contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml new file mode 100644 index 00000000..6aac9816 --- /dev/null +++ b/contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml @@ -0,0 +1,14 @@ +type: data +code: m_sipaf.permissions +title: Data utils m_sipaf +description: feature pour sipaf (nomenclature, groupe de module) +items: + - schema_code: perm.perm_dispo + defaults: + id_module: m_sipaf + keys: [id_object, id_action, scope_filter, label] + items: + - [ALL, C, true, "Accès au module m_sipaf en création"] + - [ALL, 
R, true, "Accès au module m_sipaf en lecture"] + - [ALL, U, true, "Accès au module m_sipaf en édition"] + - [ALL, D, true, "Accès au module m_sipaf en suppression"] diff --git a/contrib/m_sipaf/config/m_sipaf.module.yml b/contrib/m_sipaf/config/m_sipaf.module.yml index 564765ba..267e7c8a 100644 --- a/contrib/m_sipaf/config/m_sipaf.module.yml +++ b/contrib/m_sipaf/config/m_sipaf.module.yml @@ -13,6 +13,7 @@ module: features: - m_sipaf.utils + - m_sipaf.permissions objects: site: diff --git a/frontend/app/components/page.component.html b/frontend/app/components/page.component.html index 71eff584..25676a38 100644 --- a/frontend/app/components/page.component.html +++ b/frontend/app/components/page.component.html @@ -5,10 +5,14 @@
    +
    + Vous n'avez pas les accès requis pour le module {{ moduleCode}} +
    + diff --git a/frontend/app/components/page.component.ts b/frontend/app/components/page.component.ts index a7867246..b24e7a46 100644 --- a/frontend/app/components/page.component.ts +++ b/frontend/app/components/page.component.ts @@ -39,7 +39,7 @@ export class PageComponent implements OnInit { data; // data pour le layout pageInitialized: boolean; // test si la page est initialisée (pour affichage) - + pageAuthorized: boolean; // test si on a au moins les accès en lecture sur le module moduleCode; pageCode; params; @@ -125,7 +125,11 @@ export class PageComponent implements OnInit { }) ) .subscribe(() => { + const cruved = this._gnModuleService.modules.find( + (m) => m.module_code == this.moduleCode + ).cruved; this.pageInitialized = true; + this.pageAuthorized = !!cruved.R; }); } From 56a64279351cda9add02e7a8c40fe8b36c49f29c Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Wed, 21 Jun 2023 15:45:54 +0200 Subject: [PATCH 118/142] up deps GN --- dependencies/GeoNature | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dependencies/GeoNature b/dependencies/GeoNature index aeb2cd19..367b67ea 160000 --- a/dependencies/GeoNature +++ b/dependencies/GeoNature @@ -1 +1 @@ -Subproject commit aeb2cd1939cb4d498ea575a02ac34a2d08494445 +Subproject commit 367b67ea3ca4d929ade15441138add838f6b29a8 From 635ed80db6a86fe0230970571edbbe1daa5acadb Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Wed, 21 Jun 2023 16:56:36 +0200 Subject: [PATCH 119/142] fix import display --- ...78eaab0a6e3_gn_modulator_permission_available.py | 9 +++++---- .../config/features/m_sipaf.permissions.data.yml | 13 +++++++------ frontend/app/services/object.service.ts | 8 ++++++-- 3 files changed, 18 insertions(+), 12 deletions(-) diff --git a/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py b/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py index bd47124d..bbf4c4ea 100644 --- 
a/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py +++ b/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py @@ -37,10 +37,11 @@ def upgrade(): FROM ( VALUES - ('MODULATOR', 'ALL', 'C', False, 'Droit sur le module MODULATOR en création'), - ('MODULATOR', 'ALL', 'R', False, 'Droit sur le module MODULATOR en lecture'), - ('MODULATOR', 'ALL', 'U', False, 'Droit sur le module MODULATOR en édition'), - ('MODULATOR', 'ALL', 'D', False, 'Droit sur le module MODULATOR en suppression') + ('MODULATOR', 'ALL', 'C', False, 'Accès création'), + ('MODULATOR', 'ALL', 'R', False, 'Accès lecture'), + ('MODULATOR', 'ALL', 'U', False, 'Accès édition'), + ('MODULATOR', 'ALL', 'D', False, 'Accès suppression') + ('MODULATOR', 'ALL', 'E', False, 'Accès export') ) AS v (module_code, object_code, action_code, scope_filter, label) JOIN gn_commons.t_modules m ON m.module_code = v.module_code diff --git a/contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml b/contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml index 6aac9816..22faf7ad 100644 --- a/contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml +++ b/contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml @@ -1,14 +1,15 @@ type: data code: m_sipaf.permissions -title: Data utils m_sipaf -description: feature pour sipaf (nomenclature, groupe de module) +title: Data permissions m_sipaf +description: Permissions disponibles pour le module m_sipaf items: - schema_code: perm.perm_dispo defaults: id_module: m_sipaf keys: [id_object, id_action, scope_filter, label] items: - - [ALL, C, true, "Accès au module m_sipaf en création"] - - [ALL, R, true, "Accès au module m_sipaf en lecture"] - - [ALL, U, true, "Accès au module m_sipaf en édition"] - - [ALL, D, true, "Accès au module m_sipaf en suppression"] + - [ALL, C, true, "Créer des passage à faune"] + - [ALL, R, true, "Accéder aux passage à faune"] + - [ALL, U, true, "Éditer les passages 
à faune"] + - [ALL, D, true, "Supprimer les passages à faune"] + - [ALL, E, true, "Exporter les passages à faune"] diff --git a/frontend/app/services/object.service.ts b/frontend/app/services/object.service.ts index fa1481ae..73a6eb3a 100644 --- a/frontend/app/services/object.service.ts +++ b/frontend/app/services/object.service.ts @@ -292,6 +292,7 @@ export class ModulesObjectService { const moduleConfig = this._mConfig.moduleConfig(context.module_code); const testObjectCruved = (objectConfig.cruved || '').includes(action); + if ('CRU'.includes(action)) { const moduleConfig = this._mConfig.moduleConfig(context.module_code); @@ -319,7 +320,10 @@ export class ModulesObjectService { // 2) l'utilisateur à t'il le droit // - les droit de l'utilisateur pour ce module et pour un action (CRUVED) - const moduleCruvedAction = moduleConfig.cruved[action]; + + // patch pour import on teste les droits en 'C' (creation) + const cruvedAction = action == 'I' ? 'C' : action; + const moduleCruvedAction = moduleConfig.cruved[cruvedAction]; // - on compare ce droit avec l'appartenance de la données // la possibilité d'action doit être supérieure à l'appartenance @@ -334,7 +338,7 @@ export class ModulesObjectService { let testUserCruved; // si les droit du module sont nul pour cet action => FALSE - if (moduleCruvedAction == 0) { + if (!moduleCruvedAction) { testUserCruved = false; // si l'action est CREATE, EXPORT, IMPORT (ne concerne pas une ligne précise) => TRUE } else if ('CEI'.includes(action)) { From b5c85c1642ce8ea29162cd4ad17d79c51741af5f Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Wed, 21 Jun 2023 17:14:30 +0200 Subject: [PATCH 120/142] fix migration permission modulator --- .../versions/b78eaab0a6e3_gn_modulator_permission_available.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py 
b/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py index bbf4c4ea..d96fbfa0 100644 --- a/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py +++ b/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py @@ -40,7 +40,7 @@ def upgrade(): ('MODULATOR', 'ALL', 'C', False, 'Accès création'), ('MODULATOR', 'ALL', 'R', False, 'Accès lecture'), ('MODULATOR', 'ALL', 'U', False, 'Accès édition'), - ('MODULATOR', 'ALL', 'D', False, 'Accès suppression') + ('MODULATOR', 'ALL', 'D', False, 'Accès suppression'), ('MODULATOR', 'ALL', 'E', False, 'Accès export') ) AS v (module_code, object_code, action_code, scope_filter, label) JOIN From 192eb8b1881c1c23162cec8d976dc0a8000401b0 Mon Sep 17 00:00:00 2001 From: Camille Monchicourt Date: Thu, 22 Jun 2023 14:49:51 +0200 Subject: [PATCH 121/142] Review SIPAF available permissions --- .../m_sipaf/config/features/m_sipaf.permissions.data.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml b/contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml index 22faf7ad..dbd7f469 100644 --- a/contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml +++ b/contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml @@ -8,8 +8,8 @@ items: id_module: m_sipaf keys: [id_object, id_action, scope_filter, label] items: - - [ALL, C, true, "Créer des passage à faune"] - - [ALL, R, true, "Accéder aux passage à faune"] - - [ALL, U, true, "Éditer les passages à faune"] - - [ALL, D, true, "Supprimer les passages à faune"] + - [ALL, C, true, "Créer des passages à faune"] + - [ALL, R, true, "Voir les passages à faune"] + - [ALL, U, true, "Modifier les passages à faune"] + - [ALL, D, true, "Supprimer des passages à faune"] - [ALL, E, true, "Exporter les passages à faune"] From fce06805ffda590e69eb7b1686a71f0ce18e0c88 Mon Sep 17 00:00:00 2001 From: 
"joel.clement" Date: Thu, 22 Jun 2023 15:50:14 +0200 Subject: [PATCH 122/142] permission available modulator R only --- .../b78eaab0a6e3_gn_modulator_permission_available.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py b/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py index d96fbfa0..ee10c860 100644 --- a/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py +++ b/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py @@ -37,11 +37,7 @@ def upgrade(): FROM ( VALUES - ('MODULATOR', 'ALL', 'C', False, 'Accès création'), - ('MODULATOR', 'ALL', 'R', False, 'Accès lecture'), - ('MODULATOR', 'ALL', 'U', False, 'Accès édition'), - ('MODULATOR', 'ALL', 'D', False, 'Accès suppression'), - ('MODULATOR', 'ALL', 'E', False, 'Accès export') + ('MODULATOR', 'ALL', 'R', False, 'Accéder aux modules') ) AS v (module_code, object_code, action_code, scope_filter, label) JOIN gn_commons.t_modules m ON m.module_code = v.module_code From 6471330af2c83e7336d36467dada77965d348eb5 Mon Sep 17 00:00:00 2001 From: Camille Monchicourt Date: Thu, 22 Jun 2023 15:53:17 +0200 Subject: [PATCH 123/142] Readme - Review doc install Harmonisation de la doc avec les autres modules --- README.md | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 30812848..80bfa7a0 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ ## Présentation Ce module GeoNature est un générateur de modules, qui permet construire dynamiquement des sous-modules disposant de leur propre modèle de données, -à partir de fichiers de configuration JSON. +à partir de fichiers de configuration YAML. 
Chaque sous-module dispose d'une page d'accueil avec une carte, liste et filtres des objets du sous-module : @@ -15,18 +15,30 @@ Et une fiche détail et de saisie de chaque objet : ## Installation -Compatible avec la version 2.11.2 (et plus) de GeoNature. +Compatible avec la version 2.13.0 (et plus) de GeoNature. -Se placer dans le répertoire backend de GeoNature et activer le virtualenv +- Téléchargez le module dans ``/home//``, en remplacant ``X.Y.Z`` par la version souhaitée ```bash -source venv/bin/activate +cd +wget https://github.com/PnX-SI/gn_modulator/archive/X.Y.Z.zip +unzip X.Y.Z.zip +rm X.Y.Z.zip ``` -Lancer la commande d'installation +- Renommez le répertoire du module ```bash -geonature install_gn_module MODULATOR +mv ~/gn_modulator-X.Y.Z ~/gn_modulator +``` + +- Lancez l'installation du module + +```bash +source ~/geonature/backend/venv/bin/activate +geonature install-gn-module ~/gn_modulator MODULATOR +sudo systemctl restart geonature +deactivate ``` - [Liste des commandes du module](./doc/commandes.md) From c2b9730a692c2a5c9f0b3dd1533967e5f2586f2b Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Thu, 22 Jun 2023 16:14:04 +0200 Subject: [PATCH 124/142] up geoman --- frontend/package-lock.json | 528 +------------------------------------ frontend/package.json | 2 +- 2 files changed, 8 insertions(+), 522 deletions(-) diff --git a/frontend/package-lock.json b/frontend/package-lock.json index f0bdbbbc..97b5e332 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -1,521 +1,13 @@ { "name": "gn_modulator", "version": "0.0.1", - "lockfileVersion": 2, + "lockfileVersion": 1, "requires": true, - "packages": { - "": { - "name": "gn_modulator", - "version": "0.0.1", - "license": "ISC", - "dependencies": { - "@geoman-io/leaflet-geoman-free": "^2.14.1", - "js-yaml": "^4.1.0", - "tabulator": "^0.2.40", - "tabulator-tables": "^4.9.3" - } - }, - "node_modules/@geoman-io/leaflet-geoman-free": { - "version": "2.14.1", - "resolved": 
"https://registry.npmjs.org/@geoman-io/leaflet-geoman-free/-/leaflet-geoman-free-2.14.1.tgz", - "integrity": "sha512-Uvynea84IVnT7CNkxyF68gU+qerhDi1ybRJRWbwvMG09isIZzCiVbVkdQzXqlLRMRwY/FIYFwlbCCuv94ML5Gw==", - "dependencies": { - "@turf/boolean-contains": "^6.5.0", - "@turf/kinks": "^6.5.0", - "@turf/line-intersect": "^6.5.0", - "@turf/line-split": "^6.5.0", - "lodash": "4.17.21", - "polygon-clipping": "0.15.3" - }, - "peerDependencies": { - "leaflet": "^1.2.0" - } - }, - "node_modules/@turf/bbox": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/bbox/-/bbox-6.5.0.tgz", - "integrity": "sha512-RBbLaao5hXTYyyg577iuMtDB8ehxMlUqHEJiMs8jT1GHkFhr6sYre3lmLsPeYEi/ZKj5TP5tt7fkzNdJ4GIVyw==", - "dependencies": { - "@turf/helpers": "^6.5.0", - "@turf/meta": "^6.5.0" - }, - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@turf/bearing": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/bearing/-/bearing-6.5.0.tgz", - "integrity": "sha512-dxINYhIEMzgDOztyMZc20I7ssYVNEpSv04VbMo5YPQsqa80KO3TFvbuCahMsCAW5z8Tncc8dwBlEFrmRjJG33A==", - "dependencies": { - "@turf/helpers": "^6.5.0", - "@turf/invariant": "^6.5.0" - }, - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@turf/boolean-contains": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/boolean-contains/-/boolean-contains-6.5.0.tgz", - "integrity": "sha512-4m8cJpbw+YQcKVGi8y0cHhBUnYT+QRfx6wzM4GI1IdtYH3p4oh/DOBJKrepQyiDzFDaNIjxuWXBh0ai1zVwOQQ==", - "dependencies": { - "@turf/bbox": "^6.5.0", - "@turf/boolean-point-in-polygon": "^6.5.0", - "@turf/boolean-point-on-line": "^6.5.0", - "@turf/helpers": "^6.5.0", - "@turf/invariant": "^6.5.0" - }, - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@turf/boolean-point-in-polygon": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/boolean-point-in-polygon/-/boolean-point-in-polygon-6.5.0.tgz", - "integrity": 
"sha512-DtSuVFB26SI+hj0SjrvXowGTUCHlgevPAIsukssW6BG5MlNSBQAo70wpICBNJL6RjukXg8d2eXaAWuD/CqL00A==", - "dependencies": { - "@turf/helpers": "^6.5.0", - "@turf/invariant": "^6.5.0" - }, - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@turf/boolean-point-on-line": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/boolean-point-on-line/-/boolean-point-on-line-6.5.0.tgz", - "integrity": "sha512-A1BbuQ0LceLHvq7F/P7w3QvfpmZqbmViIUPHdNLvZimFNLo4e6IQunmzbe+8aSStH9QRZm3VOflyvNeXvvpZEQ==", - "dependencies": { - "@turf/helpers": "^6.5.0", - "@turf/invariant": "^6.5.0" - }, - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@turf/destination": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/destination/-/destination-6.5.0.tgz", - "integrity": "sha512-4cnWQlNC8d1tItOz9B4pmJdWpXqS0vEvv65bI/Pj/genJnsL7evI0/Xw42RvEGROS481MPiU80xzvwxEvhQiMQ==", - "dependencies": { - "@turf/helpers": "^6.5.0", - "@turf/invariant": "^6.5.0" - }, - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@turf/distance": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/distance/-/distance-6.5.0.tgz", - "integrity": "sha512-xzykSLfoURec5qvQJcfifw/1mJa+5UwByZZ5TZ8iaqjGYN0vomhV9aiSLeYdUGtYRESZ+DYC/OzY+4RclZYgMg==", - "dependencies": { - "@turf/helpers": "^6.5.0", - "@turf/invariant": "^6.5.0" - }, - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@turf/helpers": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/helpers/-/helpers-6.5.0.tgz", - "integrity": "sha512-VbI1dV5bLFzohYYdgqwikdMVpe7pJ9X3E+dlr425wa2/sMJqYDhTO++ec38/pcPvPE6oD9WEEeU3Xu3gza+VPw==", - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@turf/invariant": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/invariant/-/invariant-6.5.0.tgz", - "integrity": 
"sha512-Wv8PRNCtPD31UVbdJE/KVAWKe7l6US+lJItRR/HOEW3eh+U/JwRCSUl/KZ7bmjM/C+zLNoreM2TU6OoLACs4eg==", - "dependencies": { - "@turf/helpers": "^6.5.0" - }, - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@turf/kinks": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/kinks/-/kinks-6.5.0.tgz", - "integrity": "sha512-ViCngdPt1eEL7hYUHR2eHR662GvCgTc35ZJFaNR6kRtr6D8plLaDju0FILeFFWSc+o8e3fwxZEJKmFj9IzPiIQ==", - "dependencies": { - "@turf/helpers": "^6.5.0" - }, - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@turf/line-intersect": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/line-intersect/-/line-intersect-6.5.0.tgz", - "integrity": "sha512-CS6R1tZvVQD390G9Ea4pmpM6mJGPWoL82jD46y0q1KSor9s6HupMIo1kY4Ny+AEYQl9jd21V3Scz20eldpbTVA==", - "dependencies": { - "@turf/helpers": "^6.5.0", - "@turf/invariant": "^6.5.0", - "@turf/line-segment": "^6.5.0", - "@turf/meta": "^6.5.0", - "geojson-rbush": "3.x" - }, - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@turf/line-segment": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/line-segment/-/line-segment-6.5.0.tgz", - "integrity": "sha512-jI625Ho4jSuJESNq66Mmi290ZJ5pPZiQZruPVpmHkUw257Pew0alMmb6YrqYNnLUuiVVONxAAKXUVeeUGtycfw==", - "dependencies": { - "@turf/helpers": "^6.5.0", - "@turf/invariant": "^6.5.0", - "@turf/meta": "^6.5.0" - }, - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@turf/line-split": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/line-split/-/line-split-6.5.0.tgz", - "integrity": "sha512-/rwUMVr9OI2ccJjw7/6eTN53URtGThNSD5I0GgxyFXMtxWiloRJ9MTff8jBbtPWrRka/Sh2GkwucVRAEakx9Sw==", - "dependencies": { - "@turf/bbox": "^6.5.0", - "@turf/helpers": "^6.5.0", - "@turf/invariant": "^6.5.0", - "@turf/line-intersect": "^6.5.0", - "@turf/line-segment": "^6.5.0", - "@turf/meta": "^6.5.0", - 
"@turf/nearest-point-on-line": "^6.5.0", - "@turf/square": "^6.5.0", - "@turf/truncate": "^6.5.0", - "geojson-rbush": "3.x" - }, - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@turf/meta": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/meta/-/meta-6.5.0.tgz", - "integrity": "sha512-RrArvtsV0vdsCBegoBtOalgdSOfkBrTJ07VkpiCnq/491W67hnMWmDu7e6Ztw0C3WldRYTXkg3SumfdzZxLBHA==", - "dependencies": { - "@turf/helpers": "^6.5.0" - }, - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@turf/nearest-point-on-line": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/nearest-point-on-line/-/nearest-point-on-line-6.5.0.tgz", - "integrity": "sha512-WthrvddddvmymnC+Vf7BrkHGbDOUu6Z3/6bFYUGv1kxw8tiZ6n83/VG6kHz4poHOfS0RaNflzXSkmCi64fLBlg==", - "dependencies": { - "@turf/bearing": "^6.5.0", - "@turf/destination": "^6.5.0", - "@turf/distance": "^6.5.0", - "@turf/helpers": "^6.5.0", - "@turf/invariant": "^6.5.0", - "@turf/line-intersect": "^6.5.0", - "@turf/meta": "^6.5.0" - }, - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@turf/square": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/square/-/square-6.5.0.tgz", - "integrity": "sha512-BM2UyWDmiuHCadVhHXKIx5CQQbNCpOxB6S/aCNOCLbhCeypKX5Q0Aosc5YcmCJgkwO5BERCC6Ee7NMbNB2vHmQ==", - "dependencies": { - "@turf/distance": "^6.5.0", - "@turf/helpers": "^6.5.0" - }, - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@turf/truncate": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@turf/truncate/-/truncate-6.5.0.tgz", - "integrity": "sha512-pFxg71pLk+eJj134Z9yUoRhIi8vqnnKvCYwdT4x/DQl/19RVdq1tV3yqOT3gcTQNfniteylL5qV1uTBDV5sgrg==", - "dependencies": { - "@turf/helpers": "^6.5.0", - "@turf/meta": "^6.5.0" - }, - "funding": { - "url": "https://opencollective.com/turf" - } - }, - "node_modules/@types/geojson": { - "version": 
"7946.0.8", - "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.8.tgz", - "integrity": "sha512-1rkryxURpr6aWP7R786/UQOkJ3PcpQiWkAXBmdWc7ryFWqN6a4xfK7BtjXvFBKO9LjQ+MWQSWxYeZX1OApnArA==" - }, - "node_modules/adler-32": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/adler-32/-/adler-32-1.1.0.tgz", - "integrity": "sha512-lRKKX9RZQBPy6CrdUqiDUsxVcZujjbkkUg++0zLLyi0EwRui+aFyEDJBXRXCqwp+pmmybdZgBNHxOAOQcgdJYg==", - "dependencies": { - "exit-on-epipe": "~1.0.1", - "printj": "~1.1.0" - }, - "bin": { - "adler32": "bin/adler32.njs" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" - }, - "node_modules/best-globals": { - "version": "0.10.34", - "resolved": "https://registry.npmjs.org/best-globals/-/best-globals-0.10.34.tgz", - "integrity": "sha512-B3Y9VQOYWb/qb0AAzVWGnryvHIOlBRbD09a9wofIrZkJLIoqBVQtDX8E9Eq/Ka0mkjIxrx3ZOnIsmbZ4VhT15g==", - "engines": { - "node": ">= 12" - } - }, - "node_modules/cfb": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/cfb/-/cfb-1.0.8.tgz", - "integrity": "sha512-oA7VomcgZRWTo8V20UYLlXu4ZOCFEAfwwrcxE8PcVzXW12WOhsi38PVnymb6Xoj8y7ghoZQOOOVRBMdLJ4jCjg==", - "dependencies": { - "commander": "^2.14.1", - "printj": "~1.1.2" - }, - "bin": { - "cfb": "bin/cfb.njs" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/cfb/node_modules/commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" - }, - "node_modules/codenautas-xlsx": { - "version": "0.11.12", - "resolved": "https://registry.npmjs.org/codenautas-xlsx/-/codenautas-xlsx-0.11.12.tgz", - "integrity": 
"sha512-9PcE3yxXknFnx86tS8Az5HxnNgbqV8I9RUYT1O+eqqCs7znNB98ERUYYQn8TzaFvNQ4TVoCNMy+2aFt1CJYSLQ==", - "dependencies": { - "adler-32": "~1.1.0", - "cfb": "~1.0.0", - "codepage": "~1.11.0", - "commander": "~2.11.0", - "crc-32": "~1.1.1", - "exit-on-epipe": "~1.0.1", - "ssf": "~0.10.1" - }, - "bin": { - "xlsx": "bin/xlsx.njs" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/codepage": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/codepage/-/codepage-1.11.1.tgz", - "integrity": "sha512-8O+HHxMgdoSy3w/tyiStZGOnE2uOMep8vAoBtoQXbeOT7q3Ir+jwseM0bUVmeYvhfB2UX04Cb7D72ZzJbxSi5w==", - "dependencies": { - "commander": "~2.11.0", - "exit-on-epipe": "~1.0.1", - "voc": "~1.0.0" - }, - "bin": { - "codepage": "bin/codepage.njs" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/commander": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", - "integrity": "sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==" - }, - "node_modules/crc-32": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.1.1.tgz", - "integrity": "sha512-DWXuRN3Wtu43YRfYZ9r17720WZqM0caEjIfT6Dk1J/3sAxIyyXbUWqIACbz3cjV8l7guJRW+9pZlYMluKJ69wg==", - "dependencies": { - "exit-on-epipe": "~1.0.1", - "printj": "~1.1.0" - }, - "bin": { - "crc32": "bin/crc32.njs" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/exit-on-epipe": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/exit-on-epipe/-/exit-on-epipe-1.0.1.tgz", - "integrity": "sha512-h2z5mrROTxce56S+pnvAV890uu7ls7f1kEvVGJbw1OlFH3/mlJ5bkXu0KRyW94v37zzHPiUd55iLn3DA7TjWpw==", - "engines": { - "node": ">=0.8" - } - }, - "node_modules/file-saver": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/file-saver/-/file-saver-2.0.5.tgz", - "integrity": "sha512-P9bmyZ3h/PRG+Nzga+rbdI4OEpNDzAVyy74uVO9ATgzLK6VtAsYybF/+TOCvrc0MO793d6+42lLyZTw7/ArVzA==" - }, - 
"node_modules/frac": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/frac/-/frac-1.1.2.tgz", - "integrity": "sha512-w/XBfkibaTl3YDqASwfDUqkna4Z2p9cFSr1aHDt0WoMTECnRfBOv2WArlZILlqgWlmdIlALXGpM2AOhEk5W3IA==", - "engines": { - "node": ">=0.8" - } - }, - "node_modules/geojson-rbush": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/geojson-rbush/-/geojson-rbush-3.2.0.tgz", - "integrity": "sha512-oVltQTXolxvsz1sZnutlSuLDEcQAKYC/uXt9zDzJJ6bu0W+baTI8LZBaTup5afzibEH4N3jlq2p+a152wlBJ7w==", - "dependencies": { - "@turf/bbox": "*", - "@turf/helpers": "6.x", - "@turf/meta": "6.x", - "@types/geojson": "7946.0.8", - "rbush": "^3.0.1" - } - }, - "node_modules/js-to-html": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/js-to-html/-/js-to-html-1.0.11.tgz", - "integrity": "sha512-B3cyNVI2uQLDFafnYTRLyRabWCp2E59VY0j3FFGD11rC9ISBNapiPF9+mh7w+SVN0cJeDDjQR2P1ZxHdEmocbQ==", - "engines": { - "node": ">= 8" - } - }, - "node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dependencies": { - "argparse": "^2.0.1" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/leaflet": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/leaflet/-/leaflet-1.9.3.tgz", - "integrity": "sha512-iB2cR9vAkDOu5l3HAay2obcUHZ7xwUBBjph8+PGtmW/2lYhbLizWtG7nTeYht36WfOslixQF9D/uSIzhZgGMfQ==", - "peer": true - }, - "node_modules/like-ar": { - "version": "0.2.19", - "resolved": "https://registry.npmjs.org/like-ar/-/like-ar-0.2.19.tgz", - "integrity": "sha512-JGW4ymj2AbDaKDGm+D4bn45MHxoHv25yua+aJ4nu1NwbJbQBGpGueLKneETcFygVj1AMFG9yuSWkWGC7IAbWkQ==", - "engines": { - "node": ">= 4.0.0" - } - }, - "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": 
"sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" - }, - "node_modules/polygon-clipping": { - "version": "0.15.3", - "resolved": "https://registry.npmjs.org/polygon-clipping/-/polygon-clipping-0.15.3.tgz", - "integrity": "sha512-ho0Xx5DLkgxRx/+n4O74XyJ67DcyN3Tu9bGYKsnTukGAW6ssnuak6Mwcyb1wHy9MZc9xsUWqIoiazkZB5weECg==", - "dependencies": { - "splaytree": "^3.1.0" - } - }, - "node_modules/printj": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/printj/-/printj-1.1.2.tgz", - "integrity": "sha512-zA2SmoLaxZyArQTOPj5LXecR+RagfPSU5Kw1qP+jkWeNlrq+eJZyY2oS68SU1Z/7/myXM4lo9716laOFAVStCQ==", - "bin": { - "printj": "bin/printj.njs" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/quickselect": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/quickselect/-/quickselect-2.0.0.tgz", - "integrity": "sha512-RKJ22hX8mHe3Y6wH/N3wCM6BWtjaxIyyUIkpHOvfFnxdI4yD4tBXEBKSbriGujF6jnSVkJrffuo6vxACiSSxIw==" - }, - "node_modules/rbush": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/rbush/-/rbush-3.0.1.tgz", - "integrity": "sha512-XRaVO0YecOpEuIvbhbpTrZgoiI6xBlz6hnlr6EHhd+0x9ase6EmeN+hdwwUaJvLcsFFQ8iWVF1GAK1yB0BWi0w==", - "dependencies": { - "quickselect": "^2.0.0" - } - }, - "node_modules/splaytree": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/splaytree/-/splaytree-3.1.1.tgz", - "integrity": "sha512-9FaQ18FF0+sZc/ieEeXHt+Jw2eSpUgUtTLDYB/HXKWvhYVyOc7h1hzkn5MMO3GPib9MmXG1go8+OsBBzs/NMww==" - }, - "node_modules/ssf": { - "version": "0.10.3", - "resolved": "https://registry.npmjs.org/ssf/-/ssf-0.10.3.tgz", - "integrity": "sha512-pRuUdW0WwyB2doSqqjWyzwCD6PkfxpHAHdZp39K3dp/Hq7f+xfMwNAWIi16DyrRg4gg9c/RvLYkJTSawTPTm1w==", - "dependencies": { - "frac": "~1.1.2" - }, - "bin": { - "ssf": "bin/ssf.njs" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/tabulator": { - "version": "0.2.40", - "resolved": 
"https://registry.npmjs.org/tabulator/-/tabulator-0.2.40.tgz", - "integrity": "sha512-62i/Vgnf55xPRaNd9xnGajMvul0GtZK1khud4Vv6f4EllkKYIvN3CDB5+jquyVMIqi0aBlThjgYZ6zm4BnRvIA==", - "dependencies": { - "best-globals": "~0.10.10", - "codenautas-xlsx": "0.11.12", - "file-saver": "~2.0.0", - "js-to-html": "~1.0.4", - "like-ar": "~0.2.9" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/tabulator-tables": { - "version": "4.9.3", - "resolved": "https://registry.npmjs.org/tabulator-tables/-/tabulator-tables-4.9.3.tgz", - "integrity": "sha512-iwwQqAEGGxlgrBpcmJJvMJrfjGLcCXOB3AOb/DGkXqBy1YKoYA36hIl7qXGp6Jo8dSkzFAlDT6pKLZgyhs9OnQ==" - }, - "node_modules/voc": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/voc/-/voc-1.0.0.tgz", - "integrity": "sha512-mQwxWlK+zosxxDTqiFb9ZQBNgd794scgkhVwca7h9sEhvA52f3VzbOK+TOWeS8eSrFXnfuKrxElSPc5oLAetfw==", - "bin": { - "voc": "voc.njs" - }, - "engines": { - "node": ">=0.8" - } - } - }, "dependencies": { "@geoman-io/leaflet-geoman-free": { - "version": "2.14.1", - "resolved": "https://registry.npmjs.org/@geoman-io/leaflet-geoman-free/-/leaflet-geoman-free-2.14.1.tgz", - "integrity": "sha512-Uvynea84IVnT7CNkxyF68gU+qerhDi1ybRJRWbwvMG09isIZzCiVbVkdQzXqlLRMRwY/FIYFwlbCCuv94ML5Gw==", + "version": "2.14.2", + "resolved": "https://registry.npmjs.org/@geoman-io/leaflet-geoman-free/-/leaflet-geoman-free-2.14.2.tgz", + "integrity": "sha512-6lIyG8RvSVdFjVjiQgBPyNASjymSyqzsiUeBW0pA+q41lB5fAg4SDC6SfJvWdEyDHa81Jb5FWjUkCc9O+u0gbg==", "requires": { "@turf/boolean-contains": "^6.5.0", "@turf/kinks": "^6.5.0", @@ -809,12 +301,6 @@ "argparse": "^2.0.1" } }, - "leaflet": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/leaflet/-/leaflet-1.9.3.tgz", - "integrity": "sha512-iB2cR9vAkDOu5l3HAay2obcUHZ7xwUBBjph8+PGtmW/2lYhbLizWtG7nTeYht36WfOslixQF9D/uSIzhZgGMfQ==", - "peer": true - }, "like-ar": { "version": "0.2.19", "resolved": "https://registry.npmjs.org/like-ar/-/like-ar-0.2.19.tgz", @@ -852,9 +338,9 @@ } }, 
"splaytree": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/splaytree/-/splaytree-3.1.1.tgz", - "integrity": "sha512-9FaQ18FF0+sZc/ieEeXHt+Jw2eSpUgUtTLDYB/HXKWvhYVyOc7h1hzkn5MMO3GPib9MmXG1go8+OsBBzs/NMww==" + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/splaytree/-/splaytree-3.1.2.tgz", + "integrity": "sha512-4OM2BJgC5UzrhVnnJA4BkHKGtjXNzzUfpQjCO8I05xYPsfS/VuQDwjCGGMi8rYQilHEV4j8NBqTFbls/PZEE7A==" }, "ssf": { "version": "0.10.3", diff --git a/frontend/package.json b/frontend/package.json index 34520d9d..d60484d8 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -9,7 +9,7 @@ "author": "PNX", "license": "ISC", "dependencies": { - "@geoman-io/leaflet-geoman-free": "^2.14.1", + "@geoman-io/leaflet-geoman-free": "^2.14.2", "js-yaml": "^4.1.0", "tabulator": "^0.2.40", "tabulator-tables": "^4.9.3" From a439181b79acf605540ce517aa40ee803f1432a6 Mon Sep 17 00:00:00 2001 From: Camille Monchicourt Date: Thu, 22 Jun 2023 21:42:10 +0200 Subject: [PATCH 125/142] Update m_sipaf.permissions.data.yml --- contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml b/contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml index dbd7f469..5b5b17c4 100644 --- a/contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml +++ b/contrib/m_sipaf/config/features/m_sipaf.permissions.data.yml @@ -8,7 +8,7 @@ items: id_module: m_sipaf keys: [id_object, id_action, scope_filter, label] items: - - [ALL, C, true, "Créer des passages à faune"] + - [ALL, C, true, "Créer et importer des passages à faune"] - [ALL, R, true, "Voir les passages à faune"] - [ALL, U, true, "Modifier les passages à faune"] - [ALL, D, true, "Supprimer des passages à faune"] From fdf090bc5ddbba59276dcab1e3a2a48405a6adab Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Fri, 23 Jun 2023 15:33:04 +0200 Subject: [PATCH 126/142] remove process 
assets --- backend/gn_modulator/module/__init__.py | 1 - backend/gn_modulator/module/commands.py | 3 --- backend/gn_modulator/utils/files.py | 1 + frontend/app/services/config.service.ts | 2 +- 4 files changed, 2 insertions(+), 5 deletions(-) diff --git a/backend/gn_modulator/module/__init__.py b/backend/gn_modulator/module/__init__.py index a41b42dc..42bbbcb8 100644 --- a/backend/gn_modulator/module/__init__.py +++ b/backend/gn_modulator/module/__init__.py @@ -27,7 +27,6 @@ def init_modules(cls): for module_code in cls.module_codes(): cls.init_module_config(module_code) - cls.process_module_assets(module_code) for module_code in cls.module_codes(): cls.process_fields(module_code) diff --git a/backend/gn_modulator/module/commands.py b/backend/gn_modulator/module/commands.py index 499b97ea..d35e7cb5 100644 --- a/backend/gn_modulator/module/commands.py +++ b/backend/gn_modulator/module/commands.py @@ -160,9 +160,6 @@ def install_module(cls, module_code=None, module_path=None, force=False): SchemaMethods.reinit_marshmallow_schemas() cls.process_module_features(module_code) - # assets - cls.process_module_assets(module_code) - # register module_config["registred"] = True diff --git a/backend/gn_modulator/utils/files.py b/backend/gn_modulator/utils/files.py index baec7472..7776f34a 100644 --- a/backend/gn_modulator/utils/files.py +++ b/backend/gn_modulator/utils/files.py @@ -4,5 +4,6 @@ def symlink(path_source, path_dest): """create (or recreate) symlink""" if os.path.islink(path_dest): + print(f'rm {path_dest}') os.remove(path_dest) os.symlink(path_source, path_dest) diff --git a/frontend/app/services/config.service.ts b/frontend/app/services/config.service.ts index 6a9d0034..951d1c27 100644 --- a/frontend/app/services/config.service.ts +++ b/frontend/app/services/config.service.ts @@ -141,7 +141,7 @@ export class ModulesConfigService { moduleImg(moduleCode) { const moduleImg = `${this.backendUrl()}/${ this.AppConfig.MEDIA_URL - 
}/modulator/assets/${moduleCode.toLowerCase()}/module.jpg`; + }/modulator/config/${moduleCode.toLowerCase()}/assets/module.jpg`; return moduleImg; } From 4e38e8e5a804b5262ff2dc24309a47f1f71bb039 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Fri, 23 Jun 2023 15:40:59 +0200 Subject: [PATCH 127/142] lint --- backend/gn_modulator/module/base.py | 32 ----------------------------- backend/gn_modulator/utils/files.py | 1 - 2 files changed, 33 deletions(-) diff --git a/backend/gn_modulator/module/base.py b/backend/gn_modulator/module/base.py index f860809e..91e6cb20 100644 --- a/backend/gn_modulator/module/base.py +++ b/backend/gn_modulator/module/base.py @@ -145,38 +145,6 @@ def process_module_features(cls, module_code): SchemaMethods.log(SchemaMethods.txt_data_infos(infos)) - @classmethod - def process_module_assets(cls, module_code): - """ - copie le dossier assets d'un module dans le repertoire media de geonature - dans le dossier 'media/modulator/assets/{module_code.lower()}' - """ - - if module_code == MODULE_CODE: - return [] - - module_assets_dir = Path(cls.module_dir_path(module_code)) / "assets" - module_img_path = Path(module_assets_dir / "module.jpg") - - # on teste si le fichier assets/module.jpg est bien présent - if not module_img_path.exists(): - return [ - { - "file_path": module_img_path.resolve(), - "msg": f"Le fichier de l'image du module {module_code} n'existe pas", - } - ] - - # s'il y a bien une image du module, - # - on crée le lien des assets vers le dossize static de geonature - assets_dir().mkdir(exist_ok=True, parents=True) - symlink( - module_assets_dir, - assets_dir() / module_code.lower(), - ) - - return [] - @classmethod def test_module_dependencies(cls, module_code): """ diff --git a/backend/gn_modulator/utils/files.py b/backend/gn_modulator/utils/files.py index 7776f34a..baec7472 100644 --- a/backend/gn_modulator/utils/files.py +++ b/backend/gn_modulator/utils/files.py @@ -4,6 +4,5 @@ def symlink(path_source, path_dest): """create 
(or recreate) symlink""" if os.path.islink(path_dest): - print(f'rm {path_dest}') os.remove(path_dest) os.symlink(path_source, path_dest) From 04f96e62f39e48d67eb97b5672a35a721e235965 Mon Sep 17 00:00:00 2001 From: Camille Monchicourt Date: Sun, 25 Jun 2023 23:18:51 +0200 Subject: [PATCH 128/142] Changelog 1.1.0 --- doc/changelog.md | 52 ++++++++++++++++++++++++++++-------------------- 1 file changed, 30 insertions(+), 22 deletions(-) diff --git a/doc/changelog.md b/doc/changelog.md index fd828ae2..52d3cc79 100644 --- a/doc/changelog.md +++ b/doc/changelog.md @@ -1,46 +1,54 @@ # Changelog -## 1.0.6 (unreleased) +## 1.1.0 (unreleased) +Nécessite la version 2.13.0 (ou plus) de GeoNature. -- Version de GN requise 2.12 +**🚀 Nouveautés** -**✨ Nouveauté** -- Première version de la fonctionalité d'import (commande + interface frontend) +- Ajout de fonctionalités d'import depuis des fichiers CSV (commande + interface frontend) (#25) +- Compatibilité avec GeoNature 2.13.0 et la refonte des permissions, en définissant les permissions disponibles du module (#232) +- Possibilité pour chaque sous-module de déclarer ses permissions disponibles +- [SIPAF] Ajout d'un onglet et du formulaire des diagnostics fonctionnels (#37) +- [SIPAF] Ajout d'un onglet listant les observations à proximité d'un passage à faune (#42) **✨ Améliorations** - Clarification dans la gestion des routes REST -- Meilleure gestion des `tabs` et des `scrolls` -- sécurisation des api (controle des `fields` en lecture et écriture) +- Meilleure gestion des `tabs` et des `scrolls` (#32) +- Sécurisation des API (controle des `fields` en lecture et écriture) (#29) - champs listés à partir de la config - écriture : si un champs demandé n'est pas dans la config -> erreur 403 - - lecture : ce champs n'est pas pris en compte (utilisation de `only` dans l'initialisation des champs mashmallow) -- requetes sql (fonction `query_list`) - - chargement des relations et des champs pour les requetes - - pour éviter les 
chargement n+1 (1 requête supplémentaire par relation) + - lecture : ce champs n'est pas pris en compte (utilisation de `only` dans l'initialisation des champs marshmallow) +- Requêtes SQL (fonction `query_list`) + - chargement des relations et des champs pour les requêtes + - pour éviter les chargements n+1 (1 requête supplémentaire par relation) - utilisation de `raise_load` - on charge le minimum de champs possibles -- déplacement des config dans le dossier `media/modulator/config` -- changement de nom `ownership` -> `scope` +- Déplacement des configurations dans le dossier `media/modulator/config` (de GeoNature ???) +- Changement de nom `ownership` -> `scope` - [ ] separation des tests par modules (m_sipaf, m_monitoring) - [ ] amélioration du composant list_form - [ ] ajout diagnostic sipaf -**⚠️ Notes de version** - -- Mettre à jour la base +**🐛 Corrections** -``` -geonature db autoupgrade -``` +- Correction des formulaires dans les onglets (#38) -- Mettre à jour les `features` de `m_sipaf` +**⚠️ Notes de version** -``` -geonature modulator features m_sipaf.pf -``` +Si vous mettez à jour le module : +- Mettre à jour la base de données (pas besoin de le dire car fait automatiquement lors de la MAJ par GN) + ``` + geonature db autoupgrade + ``` +- Mettre à jour les `features` de `m_sipaf` + ``` + geonature modulator features m_sipaf.pf + ``` +- Déplacer configuration dans dossier de GeoNature ?? +- Commande à lancer pour ajouter les permissions disponibles de SIPAF ?? 
## 1.0.5 (13-03-2023) From 48f1286f0ec6acf1500d4f12bd638c97d912dd8e Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 26 Jun 2023 11:03:44 +0200 Subject: [PATCH 129/142] sipaf fix filters --- contrib/m_sipaf/config/config.yml | 6 +++--- .../object/layout-object-filters.component.ts | 13 ++++++++++--- frontend/app/services/config.service.ts | 19 +++++++++++-------- 3 files changed, 24 insertions(+), 14 deletions(-) diff --git a/contrib/m_sipaf/config/config.yml b/contrib/m_sipaf/config/config.yml index 7fe409c9..0332a3fa 100644 --- a/contrib/m_sipaf/config/config.yml +++ b/contrib/m_sipaf/config/config.yml @@ -76,11 +76,11 @@ site_filters_fields: site_filters_defs: code_passage_faune: - type: ilike + type: '~' nom_usuel_passage_faune: - type: ilike + type: '~' code_ouvrage_gestionnaire: - type: ilike + type: '~' region: field: areas.id_area key: id_area diff --git a/frontend/app/components/layout/object/layout-object-filters.component.ts b/frontend/app/components/layout/object/layout-object-filters.component.ts index bd115a37..039322dd 100644 --- a/frontend/app/components/layout/object/layout-object-filters.component.ts +++ b/frontend/app/components/layout/object/layout-object-filters.component.ts @@ -59,12 +59,19 @@ export class ModulesLayoutObjectFiltersComponent this.filters = Object.entries(data) .filter(([key, val]: any) => val != null) .map(([key, val]: any) => { - const field = filterDefs[key]?.field || key; - let value = filterDefs[key].key ? utils.getAttr(val, filterDefs[key].key) : val; - let type = filterDefs[key]?.type || Array.isArray(value) ? 'in' : '='; + const filterDef = filterDefs[key] || {}; + const field = filterDef?.field || key; + let value = filterDef.key ? 
utils.getAttr(val, filterDef.key) : val; + let type = filterDef?.type; + if (Array.isArray(value)) { value = value.join(';'); + type = type || 'in'; } + + type = type || '='; + + value != null && console.log(field, type, value, key); return { field, type, diff --git a/frontend/app/services/config.service.ts b/frontend/app/services/config.service.ts index 951d1c27..2eab2078 100644 --- a/frontend/app/services/config.service.ts +++ b/frontend/app/services/config.service.ts @@ -1,7 +1,6 @@ import { Injectable } from '@angular/core'; -import { ModuleConfig } from '../module.config'; -import { ModuleService } from '@geonature/services/module.service'; +import { ModuleService as GnModuleService } from '@geonature/services/module.service'; import { of, forkJoin } from '@librairies/rxjs'; import { mergeMap } from '@librairies/rxjs/operators'; @@ -17,15 +16,15 @@ export class ModulesConfigService { }; constructor( - private _moduleService: ModuleService, + private _gnModuleService: GnModuleService, private _mRequest: ModulesRequestService, private AppConfig: GNConfigService ) {} /** Configuration */ - MODULE_CODE = ModuleConfig.MODULE_CODE; - MODULE_URL = ModuleConfig.MODULE_URL; + MODULE_CODE = 'MODULATOR'; + MODULE_URL = 'modulator'; init() { return forkJoin({ @@ -58,11 +57,11 @@ export class ModulesConfigService { setModuleCruved(modules) { for (const [moduleCode, moduleConfig] of Object.entries(modules)) { - const moduleGN = this._moduleService.getModule(moduleCode); + const moduleGN = this._gnModuleService.getModule(moduleCode); if (!moduleGN) { continue; } - (moduleConfig as any)['cruved'] = this._moduleService.getModule(moduleCode)['cruved']; + (moduleConfig as any)['cruved'] = this._gnModuleService.getModule(moduleCode)['cruved']; } } @@ -133,9 +132,13 @@ export class ModulesConfigService { return this.AppConfig; } + moduleURL() { + return this.AppConfig[this.MODULE_CODE].MODULE_URL; + } + /** Backend Module Url */ backendModuleUrl() { - return 
`${this.AppConfig.API_ENDPOINT}${ModuleConfig.MODULE_URL}`; + return `${this.AppConfig.API_ENDPOINT}${this.moduleURL()}`; } moduleImg(moduleCode) { From 7be876d93c340180f984a77d4708e26295491ffa Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 26 Jun 2023 11:22:08 +0200 Subject: [PATCH 130/142] remove bad permissions --- ...0a6e3_gn_modulator_permission_available.py | 32 ++++++++++++++++++- 1 file changed, 31 insertions(+), 1 deletion(-) diff --git a/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py b/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py index ee10c860..27272c7d 100644 --- a/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py +++ b/backend/gn_modulator/migrations/versions/b78eaab0a6e3_gn_modulator_permission_available.py @@ -7,7 +7,7 @@ """ from alembic import op import sqlalchemy as sa - +from gn_modulator import MODULE_CODE # revision identifiers, used by Alembic. 
revision = "b78eaab0a6e3" @@ -48,6 +48,36 @@ def upgrade(): """ ) + op.execute( + f""" + WITH bad_permissions AS ( + SELECT + p.id_permission + FROM + gn_permissions.t_permissions p + JOIN gn_commons.t_modules m + USING (id_module) + WHERE + m.module_code IN ('{MODULE_CODE}') + EXCEPT + SELECT + p.id_permission + FROM + gn_permissions.t_permissions p + JOIN gn_permissions.t_permissions_available pa ON + (p.id_module = pa.id_module + AND p.id_object = pa.id_object + AND p.id_action = pa.id_action) + ) + DELETE + FROM + gn_permissions.t_permissions p + USING bad_permissions bp + WHERE + bp.id_permission = p.id_permission; + """ + ) + def downgrade(): # suppression des droits disponibles pour le module MODULATOR From a53936966836cce08a956029254b46d1f14800d6 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 26 Jun 2023 11:25:06 +0200 Subject: [PATCH 131/142] fix label for perm & perm_dispo --- config/definitions/utils/permission/perm.perm_dispo.schema.yml | 2 +- config/definitions/utils/permission/perm.permission.schema.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/config/definitions/utils/permission/perm.perm_dispo.schema.yml b/config/definitions/utils/permission/perm.perm_dispo.schema.yml index 51d35de4..8e19b686 100644 --- a/config/definitions/utils/permission/perm.perm_dispo.schema.yml +++ b/config/definitions/utils/permission/perm.perm_dispo.schema.yml @@ -7,7 +7,7 @@ meta: autoschema: true model: geonature.core.gn_permissions.models.PermissionAvailable label: permission - label_field_name: id_permssion + label_field_name: label genre: F unique: - id_module diff --git a/config/definitions/utils/permission/perm.permission.schema.yml b/config/definitions/utils/permission/perm.permission.schema.yml index 23ab2dd5..adc67cd7 100644 --- a/config/definitions/utils/permission/perm.permission.schema.yml +++ b/config/definitions/utils/permission/perm.permission.schema.yml @@ -7,7 +7,7 @@ meta: autoschema: true model: 
geonature.core.gn_permissions.models.Permission label: permission - label_field_name: id_permssion + label_field_name: id_permission genre: F unique: - id_role From aa87be7eb0984293baacbedad5c8e009735838a4 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 26 Jun 2023 11:30:04 +0200 Subject: [PATCH 132/142] sipaf hide observation for release --- contrib/m_sipaf/config/config.yml | 4 +++- .../m_sipaf/config/layouts/m_sipaf.site_details.layout.yml | 2 ++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/contrib/m_sipaf/config/config.yml b/contrib/m_sipaf/config/config.yml index 0332a3fa..a2974d1f 100644 --- a/contrib/m_sipaf/config/config.yml +++ b/contrib/m_sipaf/config/config.yml @@ -225,7 +225,9 @@ site_details_fields: - label: __f__o.tab_label(x) object_code: synthese - hidden: "__f__!o.config({...context, object_code: 'site'})?.value_xy" + # M_SIPAF_OBS à reprendre après la release + hidden: true + # hidden: "__f__!o.config({...context, object_code: 'site'})?.value_xy" items: - type: object display: table diff --git a/contrib/m_sipaf/config/layouts/m_sipaf.site_details.layout.yml b/contrib/m_sipaf/config/layouts/m_sipaf.site_details.layout.yml index e593fc72..63ea27c4 100644 --- a/contrib/m_sipaf/config/layouts/m_sipaf.site_details.layout.yml +++ b/contrib/m_sipaf/config/layouts/m_sipaf.site_details.layout.yml @@ -20,6 +20,8 @@ layout: bring_to_front: true tooltip_permanent: true - type: object + # M_SIPAF_OBS à reprendre après la release + hidden: true display: geojson object_code: synthese prefilters: | From d5e9afa7e3f4e2200fc7fd2fa141334dc7254c1c Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 26 Jun 2023 12:08:37 +0200 Subject: [PATCH 133/142] get authorized fields from popup_list --- backend/gn_modulator/module/config/utils.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/backend/gn_modulator/module/config/utils.py b/backend/gn_modulator/module/config/utils.py index b43863dd..1255c4c9 100644 --- 
a/backend/gn_modulator/module/config/utils.py +++ b/backend/gn_modulator/module/config/utils.py @@ -436,12 +436,14 @@ def get_layout_keys(cls, layout, params, context): ) return cls.get_layout_keys(layout_from_code, params, context) - if layout.get("items"): - return cls.get_layout_keys(layout.get("items"), params, context) + for field_list_type in ["items", "popup_fields"]: + if layout.get(field_list_type): + return cls.get_layout_keys(layout.get(field_list_type), params, context) @classmethod def add_key(cls, context, key): keys = get_global_cache(["keys"]) + if context.get("data_keys"): key = f"{''.join(context['data_keys'])}.{key}" @@ -459,7 +461,7 @@ def add_key(cls, context, key): if not sm.has_property(key): # raise error ? - print(f"pb ? {sm} has no {key}") + # print(f"pb ? {sm} has no {key}") return keys # ajout en lecture From bc42f3cf112f7055a18d83a979292fc361f009bf Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 26 Jun 2023 14:54:10 +0200 Subject: [PATCH 134/142] changelog upgrade sipaf --- doc/changelog.md | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/doc/changelog.md b/doc/changelog.md index 52d3cc79..6bc0b81d 100644 --- a/doc/changelog.md +++ b/doc/changelog.md @@ -17,7 +17,7 @@ Nécessite la version 2.13.0 (ou plus) de GeoNature. - Clarification dans la gestion des routes REST - Meilleure gestion des `tabs` et des `scrolls` (#32) - Sécurisation des API (controle des `fields` en lecture et écriture) (#29) - - champs listés à partir de la config + - champs listés à partir de la config - écriture : si un champs demandé n'est pas dans la config -> erreur 403 - lecture : ce champs n'est pas pris en compte (utilisation de `only` dans l'initialisation des champs marshmallow) - Requêtes SQL (fonction `query_list`) @@ -25,11 +25,9 @@ Nécessite la version 2.13.0 (ou plus) de GeoNature. 
- pour éviter les chargements n+1 (1 requête supplémentaire par relation) - utilisation de `raise_load` - on charge le minimum de champs possibles -- Déplacement des configurations dans le dossier `media/modulator/config` (de GeoNature ???) +- Déplacement des configurations dans le dossier `media/modulator/config` de GeoNature - Changement de nom `ownership` -> `scope` -- [ ] separation des tests par modules (m_sipaf, m_monitoring) -- [ ] amélioration du composant list_form -- [ ] ajout diagnostic sipaf +- amélioration du composant list_form **🐛 Corrections** @@ -39,16 +37,23 @@ Nécessite la version 2.13.0 (ou plus) de GeoNature. Si vous mettez à jour le module : +- Déplacer configuration dans dossier de GeoNature ?? - Mettre à jour la base de données (pas besoin de le dire car fait automatiquement lors de la MAJ par GN) ``` geonature db autoupgrade ``` -- Mettre à jour les `features` de `m_sipaf` +- Mettre à jour le module sipaf ``` - geonature modulator features m_sipaf.pf + geonature modulator install m_sipaf ``` -- Déplacer configuration dans dossier de GeoNature ?? -- Commande à lancer pour ajouter les permissions disponibles de SIPAF ?? 
+ + - cette commande va effectuer les actions suivantes: + - créer le dossier `/backend/media/modulator` + - déplacer la config du sous-module dans le dossier `/backend/media/modulator/config` + - mettre à jour les `features` du module et notamment: + - ajouter des nomenclatures pour les permissions + - corriger de nomenclatures pour les passages à faune + - ajouter des permissions disponibles pour le module ## 1.0.5 (13-03-2023) From ef848adf60ce966fa694037523f3d88ced045a7d Mon Sep 17 00:00:00 2001 From: Camille Monchicourt Date: Mon, 26 Jun 2023 22:00:25 +0200 Subject: [PATCH 135/142] Changelog 1.1.0 --- doc/changelog.md | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/doc/changelog.md b/doc/changelog.md index 6bc0b81d..34e6c21f 100644 --- a/doc/changelog.md +++ b/doc/changelog.md @@ -10,13 +10,12 @@ Nécessite la version 2.13.0 (ou plus) de GeoNature. - Compatibilité avec GeoNature 2.13.0 et la refonte des permissions, en définissant les permissions disponibles du module (#232) - Possibilité pour chaque sous-module de déclarer ses permissions disponibles - [SIPAF] Ajout d'un onglet et du formulaire des diagnostics fonctionnels (#37) -- [SIPAF] Ajout d'un onglet listant les observations à proximité d'un passage à faune (#42) **✨ Améliorations** - Clarification dans la gestion des routes REST - Meilleure gestion des `tabs` et des `scrolls` (#32) -- Sécurisation des API (controle des `fields` en lecture et écriture) (#29) +- Sécurisation des API (contrôle des `fields` en lecture et écriture) (#29) - champs listés à partir de la config - écriture : si un champs demandé n'est pas dans la config -> erreur 403 - lecture : ce champs n'est pas pris en compte (utilisation de `only` dans l'initialisation des champs marshmallow) @@ -27,7 +26,7 @@ Nécessite la version 2.13.0 (ou plus) de GeoNature. 
- on charge le minimum de champs possibles - Déplacement des configurations dans le dossier `media/modulator/config` de GeoNature - Changement de nom `ownership` -> `scope` -- amélioration du composant list_form +- Amélioration du composant list_form **🐛 Corrections** @@ -37,20 +36,14 @@ Nécessite la version 2.13.0 (ou plus) de GeoNature. Si vous mettez à jour le module : -- Déplacer configuration dans dossier de GeoNature ?? -- Mettre à jour la base de données (pas besoin de le dire car fait automatiquement lors de la MAJ par GN) - ``` - geonature db autoupgrade - ``` -- Mettre à jour le module sipaf +- Mettre à jour le module SIPAF ``` geonature modulator install m_sipaf ``` - - - cette commande va effectuer les actions suivantes: + - Cette commande va effectuer les actions suivantes : - créer le dossier `/backend/media/modulator` - déplacer la config du sous-module dans le dossier `/backend/media/modulator/config` - - mettre à jour les `features` du module et notamment: + - mettre à jour les `features` du module et notamment : - ajouter des nomenclatures pour les permissions - corriger de nomenclatures pour les passages à faune - ajouter des permissions disponibles pour le module From 5fd1c32d8ec9327a726776c6d4c72edeb2dd1a1b Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Mon, 26 Jun 2023 23:46:23 +0200 Subject: [PATCH 136/142] fix marker --- frontend/app/services/map/draw.ts | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/frontend/app/services/map/draw.ts b/frontend/app/services/map/draw.ts index a8cbacf5..aa637303 100644 --- a/frontend/app/services/map/draw.ts +++ b/frontend/app/services/map/draw.ts @@ -72,6 +72,21 @@ export default { if (!utils.fastDeepEqual(drawOptions, map.drawOptions)) { map.drawOptions = drawOptions; map.pm.addControls(drawOptions); + if (drawOptions.drawMarker) { + const iconRetinaUrl = './marker-icon-2x.png'; + const iconUrl = './marker-icon.png'; + const shadowUrl = './marker-shadow.png'; + 
map.pm.enableDraw('Marker', { + iconRetinaUrl: iconRetinaUrl, + iconUrl: iconUrl, + shadowUrl: shadowUrl, + iconSize: [25, 41], + iconAnchor: [12, 41], + popupAnchor: [1, -34], + tooltipAnchor: [16, -28], + shadowSize: [41, 41], + }); + } } // init $editedLayer From af23c92301bf57e3d3aed12b8040ae25c7dcc0b8 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 27 Jun 2023 00:46:35 +0200 Subject: [PATCH 137/142] fix marker again --- frontend/app/services/map/draw.ts | 25 +++++++++---------------- 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/frontend/app/services/map/draw.ts b/frontend/app/services/map/draw.ts index aa637303..9c2aefac 100644 --- a/frontend/app/services/map/draw.ts +++ b/frontend/app/services/map/draw.ts @@ -1,10 +1,12 @@ import { BehaviorSubject } from '@librairies/rxjs'; import utils from '../../utils'; +import { CustomMarkerIcon } from '@geonature_common/map/marker/marker.component'; const defautDrawOptions = { position: 'topleft', customMarker: true, - drawMarker: true, + drawMarker: false, + drawMarker2: true, editMode: true, drawCircle: false, drawCircleMarker: false, @@ -69,24 +71,15 @@ export default { options.geometry_type == 'geometry' || options.geometry_type.includes('linestring'); } + if(!map.initDrawMarker2) { + map.initDrawMarker2 = true; + map.pm.Toolbar.copyDrawControl('drawMarker',{name: "drawMarker2"}) + .drawInstance.setOptions({markerStyle: {icon : new CustomMarkerIcon()}}) + } + if (!utils.fastDeepEqual(drawOptions, map.drawOptions)) { map.drawOptions = drawOptions; map.pm.addControls(drawOptions); - if (drawOptions.drawMarker) { - const iconRetinaUrl = './marker-icon-2x.png'; - const iconUrl = './marker-icon.png'; - const shadowUrl = './marker-shadow.png'; - map.pm.enableDraw('Marker', { - iconRetinaUrl: iconRetinaUrl, - iconUrl: iconUrl, - shadowUrl: shadowUrl, - iconSize: [25, 41], - iconAnchor: [12, 41], - popupAnchor: [1, -34], - tooltipAnchor: [16, -28], - shadowSize: [41, 41], - }); - } } // init 
$editedLayer From 17bb838a0d9935067787060212ffc32946056fe6 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 27 Jun 2023 09:33:35 +0200 Subject: [PATCH 138/142] lint & up GN --- dependencies/GeoNature | 2 +- frontend/app/services/map/draw.ts | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/dependencies/GeoNature b/dependencies/GeoNature index 367b67ea..b327a6a6 160000 --- a/dependencies/GeoNature +++ b/dependencies/GeoNature @@ -1 +1 @@ -Subproject commit 367b67ea3ca4d929ade15441138add838f6b29a8 +Subproject commit b327a6a6c0f4c4ae6a10161b793ca442715a3fc9 diff --git a/frontend/app/services/map/draw.ts b/frontend/app/services/map/draw.ts index 9c2aefac..1b08f66e 100644 --- a/frontend/app/services/map/draw.ts +++ b/frontend/app/services/map/draw.ts @@ -71,10 +71,11 @@ export default { options.geometry_type == 'geometry' || options.geometry_type.includes('linestring'); } - if(!map.initDrawMarker2) { + if (!map.initDrawMarker2) { map.initDrawMarker2 = true; - map.pm.Toolbar.copyDrawControl('drawMarker',{name: "drawMarker2"}) - .drawInstance.setOptions({markerStyle: {icon : new CustomMarkerIcon()}}) + map.pm.Toolbar.copyDrawControl('drawMarker', { name: 'drawMarker2' }).drawInstance.setOptions( + { markerStyle: { icon: new CustomMarkerIcon() } } + ); } if (!utils.fastDeepEqual(drawOptions, map.drawOptions)) { From 7f3ccbbada7a6a4f4f6656c5849db4e3b47acc88 Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 27 Jun 2023 10:28:14 +0200 Subject: [PATCH 139/142] saisie carto + fluide --- .../app/components/layout/base/layout-map.component.ts | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/frontend/app/components/layout/base/layout-map.component.ts b/frontend/app/components/layout/base/layout-map.component.ts index f40594ac..2c02bfa6 100644 --- a/frontend/app/components/layout/base/layout-map.component.ts +++ b/frontend/app/components/layout/base/layout-map.component.ts @@ -16,7 +16,6 @@ export class 
ModulesLayoutMapComponent extends ModulesLayoutComponent implements firstEdit = true; - editedLayerSubscription; modalData = {}; modalsLayout: any; @@ -62,8 +61,8 @@ export class ModulesLayoutMapComponent extends ModulesLayoutComponent implements // souscrire aux changements de geometrie // (si ce n'est pas déjà fait) - if (!this.editedLayerSubscription) { - this.editedLayerSubscription = this._map.$editedLayer.subscribe((layer) => { + if (!this._subs['edited_layer']) { + this._subs['edited_layer'] = this._map.$editedLayer.subscribe((layer) => { layer && this.onEditedLayerChange(layer); }); } @@ -118,6 +117,11 @@ export class ModulesLayoutMapComponent extends ModulesLayoutComponent implements } this.data[this.computedLayout.key] = dataGeom; + + this._mapService.processData(this.mapId, this.data, { + key: this.computedLayout.key, + }); + this.dataSave[this.computedLayout.key] = dataGeom; this._mLayout.reComputeLayout('map'); } } From 7c0291e518453c3f596419e0e5db1f7b11a6f81e Mon Sep 17 00:00:00 2001 From: "joel.clement" Date: Tue, 27 Jun 2023 10:31:16 +0200 Subject: [PATCH 140/142] map fix marker button --- frontend/app/services/map/draw.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/app/services/map/draw.ts b/frontend/app/services/map/draw.ts index 1b08f66e..815af14a 100644 --- a/frontend/app/services/map/draw.ts +++ b/frontend/app/services/map/draw.ts @@ -26,6 +26,7 @@ const hiddenDrawOptions = { drawRectangle: false, customMarker: false, drawMarker: false, + drawMarker2: false, drawPolygon: false, drawPolyline: false, drawText: false, From 0a17605780d709a7f7ba77632cdafdd1b3da20fe Mon Sep 17 00:00:00 2001 From: Camille Monchicourt Date: Tue, 27 Jun 2023 12:02:07 +0200 Subject: [PATCH 141/142] Update VERSION / 1.1.0 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 1464c521..9084fa2f 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.0.5 \ No newline at end of file +1.1.0 From 
e5090c843faacc9909ab48ff28a60d2fa524a82e Mon Sep 17 00:00:00 2001 From: Camille Monchicourt Date: Tue, 27 Jun 2023 13:25:24 +0200 Subject: [PATCH 142/142] Changelog 1.1.0 --- doc/changelog.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/changelog.md b/doc/changelog.md index 34e6c21f..88b59124 100644 --- a/doc/changelog.md +++ b/doc/changelog.md @@ -1,6 +1,6 @@ # Changelog -## 1.1.0 (unreleased) +## 1.1.0 (2023-06-27) Nécessite la version 2.13.0 (ou plus) de GeoNature.