Skip to content

Commit

Permalink
wip #96 delete
Browse files Browse the repository at this point in the history
  • Loading branch information
joelclems committed Feb 7, 2024
1 parent c5bc901 commit 926be25
Show file tree
Hide file tree
Showing 3 changed files with 28 additions and 5 deletions.
4 changes: 1 addition & 3 deletions backend/gn_modulator/routes/utils/repository.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,6 +205,4 @@ def get_page_number_and_list(module_code, object_code, value):
id_role=id_role,
)

return get_list_rest(
module_code, object_code, additional_params={"page": page_number}, id_role=id_role
)
return get_list_rest(module_code, object_code, additional_params={"page": page_number})
20 changes: 19 additions & 1 deletion backend/gn_modulator/schema/repositories.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,7 +232,25 @@ def delete_row(
# https://stackoverflow.com/questions/49794899/flask-sqlalchemy-delete-query-failing-with-could-not-evaluate-current-criteria?noredirect=1&lq=1
if not multiple:
subquery_delete.one()
subquery_delete.delete(synchronize_session=False)

res = subquery_delete.all()

if not res:
return

Model = self.Model()

q_delete = Model.query
ors = []
for r in res:
ands = []
for pk_field_name in Model.pk_field_names():
f = getattr(Model, pk_field_name) == getattr(r, pk_field_name)
ands.append(f)
ors.append(f)

q_delete = q_delete.filter(sa.or_(*ors))
q_delete.delete(synchronize_session=False)
db.session.flush()

if commit:
Expand Down
9 changes: 8 additions & 1 deletion backend/gn_modulator/tests/test_repository.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
- list ??
"""

import os
import pytest
from .utils.repository import test_schema_repository
from .data import commons as data_commons
Expand Down Expand Up @@ -41,6 +42,7 @@ def test_repo_gn_meta_ca(self):
def test_repo_gn_meta_jdd(self):
    """Round-trip the generic schema repository for the 'meta.jdd' schema.

    Builds a creation payload and an update payload from the data_meta
    fixtures, then delegates the create/update/verify cycle to the shared
    test_schema_repository helper.
    """
    creation_payload = data_meta.jdd()
    update_payload = data_meta.jdd_update()
    test_schema_repository("meta.jdd", creation_payload, update_payload)

# @pytest.mark.skip()
def test_repo_diag(self, users, passages_faune_with_diagnostic):
sm = SchemaMethods("m_sipaf.diag")
fields = ["scope", "id_diagnostic"]
Expand All @@ -61,6 +63,7 @@ def test_repo_diag(self, users, passages_faune_with_diagnostic):

assert True

# @pytest.mark.skip()
def test_repo_pf_update(self, passages_faune_with_diagnostic):
sm = SchemaMethods("m_sipaf.pf")

Expand All @@ -72,6 +75,7 @@ def test_repo_pf_update(self, passages_faune_with_diagnostic):
assert sm.is_new_data(m, data) is False
sm.update_row(uuid_pf, data, "uuid_passage_faune", "m_sipaf")

# @pytest.mark.skip()
def test_repo_diag_cloture(self, passages_faune_with_diagnostic):
sm = SchemaMethods("m_sipaf.diag")
sm_org = SchemaMethods("user.organisme")
Expand Down Expand Up @@ -130,6 +134,7 @@ def test_repo_diag_cloture(self, passages_faune_with_diagnostic):
assert sm.is_new_data(m, data)
sm.update_row(m.id_diagnostic, data)

# @pytest.mark.skip()
def test_repo_pf_rel(self, passages_faune_with_diagnostic, users):
sm = SchemaMethods("m_sipaf.pf")
uuids_filter_value = ";".join(
Expand Down Expand Up @@ -261,6 +266,7 @@ def test_repo_pf_nomenclature_spe(self):
res_nom = res["nomenclature_ouvrage_specificite"]
assert res_nom is None

# @pytest.mark.skip()
def test_repo_pf_cruved(self, passages_faune_with_diagnostic, users):
sm = SchemaMethods("m_sipaf.pf")
uuids_filter_value = ";".join(
Expand Down Expand Up @@ -326,6 +332,7 @@ def test_repo_pf_filter_has_diagnostic(self, passages_faune_with_diagnostic, use
res = sm.serialize_list(m_list, fields)
assert len(res) == 2

# @pytest.mark.skip()
def test_repo_synthese_d_within(
self, passages_faune_with_diagnostic, synthese_data, users, g_permissions
):
Expand Down Expand Up @@ -377,7 +384,7 @@ def test_repo_synthese_scope(self, synthese_data, users, datasets):
assert len(res[user]) == 9
assert all(r["scope"] == 2 for r in res[user])

@pytest.mark.skip()
# @pytest.mark.skip()
def test_repo_synthese_permission(self, synthese_sensitive_data, users, g_permissions):
for key in synthese_sensitive_data:
s = synthese_sensitive_data[key]
Expand Down

0 comments on commit 926be25

Please sign in to comment.