From 7006b00e65b96eebd6a6dde0893088ec5b334148 Mon Sep 17 00:00:00 2001 From: Juha Louhiranta Date: Fri, 27 Jan 2023 13:59:27 +0200 Subject: [PATCH] Handle flake8 errors - Fix simple errors - Ignore too complex functions from QA - Raise allowed level of complexity - Remove duplicated test code - Replace custom active_language function with override from Django Refs LINK-1123 --- events/api.py | 96 ++++++++++--------- events/custom_elasticsearch_search_backend.py | 18 ++-- events/importer/base.py | 12 +-- events/importer/espoo.py | 8 +- events/importer/harrastushaku.py | 4 +- events/importer/helmet.py | 6 +- events/importer/kulke.py | 6 +- events/importer/lippupiste.py | 4 +- events/importer/matko.py | 2 +- events/importer/mikkelinyt.py | 10 +- events/importer/osoite.py | 6 +- events/importer/sync.py | 2 +- events/importer/util.py | 21 ++-- events/importer/yso.py | 15 ++- .../management/commands/add_helfi_topics.py | 2 +- .../commands/add_helsinki_audience.py | 2 +- .../commands/add_helsinki_topics.py | 2 +- events/models.py | 18 ++-- events/parsers.py | 2 +- events/renderers/json.py | 2 +- events/signals.py | 6 +- events/tests/common.py | 6 +- events/tests/test_event_get.py | 6 +- events/tests/test_event_put.py | 10 +- events/tests/test_events_search.py | 4 +- events/tests/utils.py | 16 ++-- events/utils.py | 2 +- helevents/api.py | 2 +- linkedevents/api.py | 2 +- linkedevents/settings.py | 6 +- linkedevents/test_settings.py | 3 +- multilingual_haystack/backends.py | 2 +- notifications/models.py | 2 +- notifications/tests/test_notifications.py | 2 +- registrations/models.py | 8 +- .../tests/test_registration_admin_side.py | 45 ++------- setup.cfg | 2 +- 37 files changed, 160 insertions(+), 202 deletions(-) diff --git a/events/api.py b/events/api.py index cde0fb278..ab417205c 100644 --- a/events/api.py +++ b/events/api.py @@ -12,6 +12,7 @@ from datetime import timedelta from functools import partial, reduce from operator import or_ +from typing import Iterable, Optional from uuid import UUID import bleach @@ -133,14 +134,14 @@ def register_view(klass, name, base_name=None): def get_serializer_for_model(model, version="v1"): - Viewset = viewset_classes_by_model.get(model) - if Viewset is None: + viewset_cls = viewset_classes_by_model.get(model) + if viewset_cls is None: return None serializer = None - if hasattr(Viewset, "get_serializer_class_for_version"): - serializer = Viewset.get_serializer_class_for_version(version) - elif hasattr(Viewset, "serializer_class"): - serializer = Viewset.serializer_class + if hasattr(viewset_cls, "get_serializer_class_for_version"): + serializer = viewset_cls.get_serializer_class_for_version(version) + elif hasattr(viewset_cls, "serializer_class"): + serializer = viewset_cls.serializer_class return serializer @@ -240,7 +241,10 @@ def get_publisher_query(publisher): return q -def clean_text_fields(data, allowed_html_fields=[]): +def clean_text_fields(data, allowed_html_fields: Optional[Iterable[str]] = None): + if allowed_html_fields is None: + allowed_html_fields = [] + for k, v in data.items(): if isinstance(v, str) and any(c in v for c in "<>&"): # only specified fields may contain allowed tags @@ -328,7 +332,7 @@ def __init__(self, *args, **kwargs): self.related_serializer = kwargs.pop("serializer", None) self.hide_ld_context = kwargs.pop("hide_ld_context", False) self.expanded = kwargs.pop("expanded", False) - super(JSONLDRelatedField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def use_pk_only_optimization(self): if 
self.is_expanded(): @@ -352,7 +356,7 @@ def to_representation(self, obj): return self.related_serializer( obj, hide_ld_context=self.hide_ld_context, context=context ).data - link = super(JSONLDRelatedField, self).to_representation(obj) + link = super().to_representation(obj) if link is None: return None return {"@id": link} @@ -379,7 +383,7 @@ def get_queryset(self): # For certain related fields we preload the queryset to avoid *.objects.all() query which can easily overload # the memory as database grows. if isinstance(self._kwargs["serializer"], str): - return super(JSONLDRelatedField, self).get_queryset() + return super().get_queryset() current_model = self._kwargs["serializer"].Meta.model preloaded_fields = { Place: "location", @@ -390,7 +394,7 @@ def get_queryset(self): if current_model in preloaded_fields.keys(): return self.context.get(preloaded_fields[current_model]) else: - return super(JSONLDRelatedField, self).get_queryset() + return super().get_queryset() class EnumChoiceField(serializers.Field): @@ -405,7 +409,7 @@ class EnumChoiceField(serializers.Field): def __init__(self, choices, prefix="", **kwargs): self.choices = choices self.prefix = prefix - super(EnumChoiceField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_representation(self, obj): if obj is None: @@ -443,7 +447,7 @@ def to_internal_value(self, data): class MPTTModelSerializer(serializers.ModelSerializer): def __init__(self, *args, **kwargs): - super(MPTTModelSerializer, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) for field_name in "lft", "rght", "tree_id", "level": if field_name in self.fields: del self.fields[field_name] @@ -451,7 +455,7 @@ def __init__(self, *args, **kwargs): class TranslatedModelSerializer(serializers.ModelSerializer): def __init__(self, *args, **kwargs): - super(TranslatedModelSerializer, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) model = self.Meta.model try: trans_opts = translator.get_options_for_model(model) @@ -470,7 +474,7 @@ def __init__(self, *args, **kwargs): del self.fields[field_name] def to_representation(self, obj): - ret = super(TranslatedModelSerializer, self).to_representation(obj) + ret = super().to_representation(obj) if obj is None: return ret return self.translated_fields_to_representation(obj, ret) @@ -542,7 +546,7 @@ def translated_fields_to_representation(self, obj, ret): d[lang] = val # If no text provided, leave the field as null - for key, val in d.items(): + for _key, val in d.items(): if val is not None: break else: @@ -587,14 +591,14 @@ def __init__( context=None, partial=False, many=None, - skip_fields=set(), + skip_fields: Optional[set] = None, allow_add_remove=False, hide_ld_context=False, **kwargs, ): - super(LinkedEventsSerializer, self).__init__( - instance=instance, context=context, **kwargs - ) + super().__init__(instance=instance, context=context, **kwargs) + if skip_fields is None: + skip_fields = set() if context is None: return if "request" in context: @@ -666,7 +670,7 @@ def to_representation(self, obj): Renderer is the right place for this but now loop is done just once. Reversal conversion is done in parser. 
""" - ret = super(LinkedEventsSerializer, self).to_representation(obj) + ret = super().to_representation(obj) if "id" in ret and "request" in self.context: try: ret["@id"] = reverse( @@ -753,14 +757,14 @@ def validate_publisher(self, value): set(self.user.get_admin_organizations_and_descendants()) | set( map( - lambda x: getattr(x, "replaced_by"), + lambda x: x.replaced_by, self.user.get_admin_organizations_and_descendants(), ) ) | set(self.user.organization_memberships.all()) | set( map( - lambda x: getattr(x, "replaced_by"), + lambda x: x.replaced_by, self.user.organization_memberships.all(), ) ) @@ -844,7 +848,7 @@ def initial(self, request, *args, **kwargs): # if srid is not specified, this will yield munigeo default 4326 self.srs = srid_to_srs(self.request.query_params.get("srid", None)) # check for NUL strings that crash psycopg2 - for key, param in self.request.query_params.items(): + for _key, param in self.request.query_params.items(): if "\x00" in param: raise ParseError( "A string literal cannot contain NUL (0x00) characters. " @@ -1206,7 +1210,7 @@ def filter_queryset(self, queryset): @action(methods=["post"], detail=True, permission_classes=[GuestPost]) def reserve_seats(self, request, pk=None, version=None): - def NoneToUnlim(val): + def none_to_unlim(val): # Null value in the waiting_list_capacity or maximum_attendee_capacity # signifies that the amount of seats is unimited if val is None: @@ -1220,7 +1224,7 @@ def NoneToUnlim(val): raise NotFound(detail=f"Registration {pk} doesn't exist.", code=404) waitlist = request.data.get("waitlist", False) if waitlist: - waitlist_seats = NoneToUnlim(registration.waiting_list_capacity) + waitlist_seats = none_to_unlim(registration.waiting_list_capacity) else: waitlist_seats = 0 # if waitlist is False, waiting list is not to be used @@ -1252,8 +1256,8 @@ def NoneToUnlim(val): data["seats_at_event"] = ( min(free_seats, code.seats) if free_seats > 0 else 0 ) - l = code.seats - data["seats_at_event"] - data["waitlist_spots"] = l if l else 0 + waitlist_spots = code.seats - data["seats_at_event"] + data["waitlist_spots"] = waitlist_spots if waitlist_spots else 0 return Response(data, status=status.HTTP_201_CREATED) @@ -1755,7 +1759,7 @@ class PlaceRetrieveViewSet( serializer_class = PlaceSerializer def get_serializer_context(self): - context = super(PlaceRetrieveViewSet, self).get_serializer_context() + context = super().get_serializer_context() context.setdefault("skip_fields", set()).add("origin_id") return context @@ -2274,7 +2278,7 @@ def list(self, request, *args, **kwargs): class EventLinkSerializer(serializers.ModelSerializer): def to_representation(self, obj): - ret = super(EventLinkSerializer, self).to_representation(obj) + ret = super().to_representation(obj) if not ret["name"]: ret["name"] = None return ret @@ -2487,7 +2491,7 @@ class EventSerializer( last_modified_by = serializers.StringRelatedField(required=False, allow_null=True) def __init__(self, *args, skip_empties=False, **kwargs): - super(EventSerializer, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) # The following can be used when serializing when # testing and debugging. 
self.skip_empties = skip_empties @@ -2766,7 +2770,7 @@ def update(self, instance, validated_data): return instance def to_representation(self, obj): - ret = super(EventSerializer, self).to_representation(obj) + ret = super().to_representation(obj) if obj.deleted: keys_to_preserve = [ @@ -2846,13 +2850,13 @@ def _format_images_v0_1(data): data["image"] = images[0].get("url", None) -class EventSerializerV0_1(EventSerializer): +class EventSerializerV0_1(EventSerializer): # noqa: N801 def __init__(self, *args, **kwargs): kwargs.setdefault("context", {}).setdefault("include", []).append("image") - super(EventSerializerV0_1, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def to_representation(self, obj): - ret = super(EventSerializerV0_1, self).to_representation(obj) + ret = super().to_representation(obj) _format_images_v0_1(ret) return ret @@ -2863,9 +2867,7 @@ class LinkedEventsOrderingFilter(filters.OrderingFilter): class EventOrderingFilter(LinkedEventsOrderingFilter): def filter_queryset(self, request, queryset, view): - queryset = super(EventOrderingFilter, self).filter_queryset( - request, queryset, view - ) + queryset = super().filter_queryset(request, queryset, view) ordering = self.get_ordering(request, queryset, view) if not ordering: ordering = [] @@ -2917,7 +2919,7 @@ def _terms_to_regex(terms, operator, fuzziness=3): return regex.compile(expr, regex.IGNORECASE) -def _filter_event_queryset(queryset, params, srs=None): +def _filter_event_queryset(queryset, params, srs=None): # noqa: C901 """ Filter events queryset by params (e.g. self.request.query_params ingit EventViewSet) @@ -3215,7 +3217,7 @@ def _filter_event_queryset(queryset, params, srs=None): queryset = queryset.filter(type_id__in=search_vals) else: - queryset = queryset.filter(type_id=Event.Type_Id.GENERAL) + queryset = queryset.filter(type_id=Event.TypeId.GENERAL) val = params.get("last_modified_since", None) # This should be in format which dateutil.parser recognizes, e.g. 
@@ -3249,9 +3251,9 @@ def _filter_event_queryset(queryset, params, srs=None): if not end: # postponed events are considered to be "far" in the future and should be included if end is *not* given - postponed_Q = Q(event_status=Event.Status.POSTPONED) + postponed_q = Q(event_status=Event.Status.POSTPONED) else: - postponed_Q = Q() + postponed_q = Q() if start: dt = utils.parse_time(start, is_start=True)[0] @@ -3263,7 +3265,7 @@ def _filter_event_queryset(queryset, params, srs=None): Q(end_time__gt=dt, has_end_time=True) | Q(end_time__gt=dt, has_start_time=False) | Q(start_time__gte=dt) - | postponed_Q + | postponed_q ) if end: @@ -3687,7 +3689,7 @@ def get_serializer_class(self): return EventViewSet.get_serializer_class_for_version(self.request.version) def get_serializer_context(self): - context = super(EventViewSet, self).get_serializer_context() + context = super().get_serializer_context() context.setdefault("skip_fields", set()).update( set(["headline", "secondary_headline"]) ) @@ -3738,7 +3740,7 @@ def filter_queryset(self, queryset): """ TODO: convert to use proper filter framework """ - original_queryset = super(EventViewSet, self).filter_queryset(queryset) + original_queryset = super().filter_queryset(queryset) if self.request.method in SAFE_METHODS: # we cannot use distinct for performance reasons public_queryset = original_queryset.filter( @@ -4051,9 +4053,9 @@ def to_representation(self, search_result): return data -class SearchSerializerV0_1(SearchSerializer): +class SearchSerializerV0_1(SearchSerializer): # noqa: N801 def to_representation(self, search_result): - ret = super(SearchSerializerV0_1, self).to_representation(search_result) + ret = super().to_representation(search_result) if "resource_type" in ret: ret["object_type"] = ret["resource_type"] del ret["resource_type"] diff --git a/events/custom_elasticsearch_search_backend.py b/events/custom_elasticsearch_search_backend.py index ae6fd26ec..63a257e8a 100644 --- a/events/custom_elasticsearch_search_backend.py +++ b/events/custom_elasticsearch_search_backend.py @@ -15,9 +15,7 @@ class CustomEsSearchBackend(es_backend.ElasticsearchSearchBackend): """ def __init__(self, connection_alias, **connection_options): - super(CustomEsSearchBackend, self).__init__( - connection_alias, **connection_options - ) + super().__init__(connection_alias, **connection_options) self.custom_mappings = connection_options.get("MAPPINGS") settings = connection_options.get("SETTINGS") if settings: @@ -25,9 +23,7 @@ def __init__(self, connection_alias, **connection_options): update(default_settings, settings) def build_schema(self, fields): - content_field_name, mappings = super(CustomEsSearchBackend, self).build_schema( - fields - ) + content_field_name, mappings = super().build_schema(fields) if not self.custom_mappings: return (content_field_name, mappings) @@ -41,9 +37,7 @@ def build_schema(self, fields): return (content_field_name, mappings) def build_search_kwargs(self, query_string, decay_functions=None, **kwargs): - kwargs = super(CustomEsSearchBackend, self).build_search_kwargs( - query_string, **kwargs - ) + kwargs = super().build_search_kwargs(query_string, **kwargs) if not decay_functions: return kwargs @@ -61,11 +55,11 @@ def build_search_kwargs(self, query_string, decay_functions=None, **kwargs): class CustomEsSearchQuery(es_backend.ElasticsearchSearchQuery): def __init__(self, **kwargs): - super(CustomEsSearchQuery, self).__init__(**kwargs) + super().__init__(**kwargs) self.decay_functions = [] def build_params(self, *args, **kwargs): - 
search_kwargs = super(CustomEsSearchQuery, self).build_params(*args, **kwargs) + search_kwargs = super().build_params(*args, **kwargs) if self.decay_functions: search_kwargs["decay_functions"] = self.decay_functions return search_kwargs @@ -74,7 +68,7 @@ def add_decay_function(self, function_dict): self.decay_functions.append(function_dict) def _clone(self, **kwargs): - clone = super(CustomEsSearchQuery, self)._clone(**kwargs) + clone = super()._clone(**kwargs) clone.decay_functions = self.decay_functions[:] return clone diff --git a/events/importer/base.py b/events/importer/base.py index ec17766b1..a0e5e1162 100644 --- a/events/importer/base.py +++ b/events/importer/base.py @@ -40,11 +40,11 @@ def recur_dict(): class Importer(object): def __init__(self, options): - super(Importer, self).__init__() + super().__init__() self.options = options importer_langs = set(self.supported_languages) - configured_langs = set(l[0] for l in settings.LANGUAGES) + configured_langs = set(lang[0] for lang in settings.LANGUAGES) # Intersection is all the languages possible for the importer to use. self.languages = {} for lang_code in importer_langs & configured_langs: @@ -55,7 +55,7 @@ def __init__(self, options): self.target_srid = settings.PROJECTION_SRID gps_srs = SpatialReference(4326) target_srs = SpatialReference(self.target_srid) - if getattr(settings, "BOUNDING_BOX"): + if settings.BOUNDING_BOX: self.bounding_box = Polygon.from_bbox(settings.BOUNDING_BOX) self.bounding_box.srid = self.target_srid target_to_gps_ct = CoordTransform(target_srs, gps_srs) @@ -189,7 +189,7 @@ def _update_image(self, image, image_data): return image - def link_recurring_events(self, events, instance_fields=[]): + def link_recurring_events(self, events): """Finds events that are instances of a common parent event by comparing the fields that do not differ between instances, for example different nights of the same play. @@ -205,7 +205,7 @@ def event_name(e): events.sort(key=event_name) parent_events = [] - for name_fi, subevents in itertools.groupby(events, event_name): + for _name_fi, subevents in itertools.groupby(events, event_name): subevents = list(subevents) if len(subevents) < 2: parent_events.extend(subevents) @@ -306,7 +306,7 @@ def _update_fields(self, obj, info, skip_fields): continue self._set_field(obj, field_name, info[field_name]) - def save_event(self, info): + def save_event(self, info): # noqa: C901 info = info.copy() args = dict(data_source=info["data_source"], origin_id=info["origin_id"]) diff --git a/events/importer/espoo.py b/events/importer/espoo.py index 06444df74..e103a6ad6 100644 --- a/events/importer/espoo.py +++ b/events/importer/espoo.py @@ -197,12 +197,12 @@ def mark_deleted(obj): def clean_street_address(address): - LATIN1_CHARSET = "a-zàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + latin1_charset = "a-zàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" address = address.strip() pattern = re.compile( r"([%s\ -]*[0-9-\ ]*\ ?[a-z]{0,2}),?\ *(0?2[0-9]{3})?\ *(espoo|esbo)?" 
- % LATIN1_CHARSET, + % latin1_charset, re.I, ) match = pattern.match(address) @@ -247,7 +247,7 @@ class EspooImporter(Importer): location_cache = {} def _build_cache_places(self): - loc_id_list = [l[1] for l in LOCATIONS.values()] + loc_id_list = [location[1] for location in LOCATIONS.values()] place_list = Place.objects.filter(data_source=self.tprek_data_source).filter( origin_id__in=loc_id_list ) @@ -499,7 +499,7 @@ def _get_classification_keywords(self, classification_node_name, lang): self.keyword_by_id.update(dict({k.id: k for k in keywords})) return keywords - def _import_event(self, lang, event_el, events): + def _import_event(self, lang, event_el, events): # noqa: C901 # Times are in Helsinki timezone def to_utc(dt): return LOCAL_TZ.localize(dt, is_dst=None).astimezone(pytz.utc) diff --git a/events/importer/harrastushaku.py b/events/importer/harrastushaku.py index 13994b629..7b24af376 100644 --- a/events/importer/harrastushaku.py +++ b/events/importer/harrastushaku.py @@ -358,7 +358,7 @@ def get_event_data(self, activity_data): ) | self.get_event_audiences_from_keywords(keywords) keywords |= audience event_data = { - "type_id": Event.Type_Id.COURSE, + "type_id": Event.TypeId.COURSE, "name": get_string("name", localized=True), "description": get_string("description", localized=True), "audience_max_age": get_int("agemax"), @@ -627,7 +627,7 @@ def get_event_audiences_from_ages(self, activity_data): def get_event_audiences_from_keywords(self, keywords): return {kw for kw in keywords if kw.id in KEYWORDS_TO_ADD_TO_AUDIENCE} - @lru_cache() + @lru_cache() # noqa: B019 def match_keyword(self, text): return self.keyword_matcher.match(text, language="fi") diff --git a/events/importer/helmet.py b/events/importer/helmet.py index a2e357351..c1f720105 100644 --- a/events/importer/helmet.py +++ b/events/importer/helmet.py @@ -207,7 +207,7 @@ def setup(self): self.city, _ = Organization.objects.get_or_create(defaults=defaults, **org_args) # Build a cached list of Places - loc_id_list = [l[1] for l in LOCATIONS.values()] + loc_id_list = [location[1] for location in LOCATIONS.values()] place_list = Place.objects.filter(data_source=self.tprek_data_source).filter( origin_id__in=loc_id_list ) @@ -262,7 +262,7 @@ def _get_extended_properties(event_el): continue return ext_props - def _import_event(self, lang, event_el, events): + def _import_event(self, lang, event_el, events): # noqa: C901 def dt_parse(dt_str): """Convert a string to UTC datetime""" # Times are in UTC+02:00 timezone @@ -419,7 +419,7 @@ def set_attr(field_name, val): # The event is only online, do not consider other locations event["location"]["id"] = INTERNET_LOCATION_ID else: - for k, v in LOCATIONS.items(): + for _k, v in LOCATIONS.items(): if classification["NodeId"] in v[0]: event["location"]["id"] = self.tprek_by_id[str(v[1])] break diff --git a/events/importer/kulke.py b/events/importer/kulke.py index 9602b5ed4..7c2c449b8 100644 --- a/events/importer/kulke.py +++ b/events/importer/kulke.py @@ -466,7 +466,7 @@ def _html_format(text): formatted_paragraphs.append(formatted_paragraph) return "".join(formatted_paragraphs) - def _import_event(self, lang, event_el, events, is_course=False): + def _import_event(self, lang, event_el, events, is_course=False): # noqa: C901 def text(t): return unicodetext(event_el.find("event" + t)) @@ -811,7 +811,7 @@ def simple(field): # The name may vary within a recurring event; hence, take the common part if expand_model_fields(super_event, ["headline"])[0] not in common_fields: - words = 
getattr(events.first(), "headline").split(" ") + words = events.first().headline.split(" ") name = "" while words and all( headline.startswith(name + words[0]) @@ -820,7 +820,7 @@ def simple(field): name += words.pop(0) + " " logger.warning(words) logger.warning(name) - setattr(super_event, "name", name) + super_event.name = name for lang in self.languages.keys(): headline = getattr(super_event, "headline_{}".format(lang)) diff --git a/events/importer/lippupiste.py b/events/importer/lippupiste.py index 6602c2f44..de102d814 100644 --- a/events/importer/lippupiste.py +++ b/events/importer/lippupiste.py @@ -386,12 +386,12 @@ def _get_place_id_from_source_event(self, source_event): "address match, pick the name with most common words and least different words, if any:" ) if len(matches_by_address) > 1: - for common_words, match_list in sorted( + for _common_words, match_list in sorted( matches_by_partial_name.items(), key=(lambda x: int(x[0])), reverse=True, ): - for different_words, sublist in sorted( + for _different_words, sublist in sorted( match_list.items(), key=(lambda x: int(x[0])) ): address_and_word_matches = set(matches_by_address) & set( diff --git a/events/importer/matko.py b/events/importer/matko.py index ed7c4dd59..ea002cc3a 100644 --- a/events/importer/matko.py +++ b/events/importer/matko.py @@ -116,7 +116,7 @@ class MatkoImporter(Importer): supported_languages = ["fi", "sv", "en"] def __init__(self, *args, **kwargs): - super(MatkoImporter, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.timezone = pytz.timezone("Europe/Helsinki") def put(self, rdict, key, val): diff --git a/events/importer/mikkelinyt.py b/events/importer/mikkelinyt.py index 031e5011c..df7accc07 100644 --- a/events/importer/mikkelinyt.py +++ b/events/importer/mikkelinyt.py @@ -37,7 +37,7 @@ class MikkeliNytImporter(Importer): supported_languages = ["fi"] def __init__(self, *args, **kwargs): - super(MikkeliNytImporter, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.timezone = pytz.timezone("Europe/Helsinki") def items_from_url(self, url): @@ -121,8 +121,8 @@ def upsert_event(self, item): address = self.strip_html(item["address"]) city = self.strip_html(item["city"]) place = self.strip_html(item["place"]) - zipCode = self.strip_html(item["zip"]) - location = self.upsert_place(location_origin_id, address, city, place, zipCode) + zip_code = self.strip_html(item["zip"]) + location = self.upsert_place(location_origin_id, address, city, place, zip_code) categories = item["category"] keywords = self.upsert_keywords(categories) @@ -193,7 +193,7 @@ def upsert_keyword(self, origin_id, name): keywords = Keyword.objects.filter(id__exact="%s" % _id).order_by("id") return keywords.first() - def upsert_place(self, origin_id, address, city, place, zipCode): + def upsert_place(self, origin_id, address, city, place, zip_code): result = recur_dict() _id = "mikkelinyt:{}".format(origin_id) @@ -201,7 +201,7 @@ def upsert_place(self, origin_id, address, city, place, zipCode): result["origin_id"] = origin_id result["name"]["fi"] = place result["street_address"]["fi"] = address - result["postal_code"] = zipCode + result["postal_code"] = zip_code result["address_locality"]["fi"] = city result["publisher"] = self.organization result["data_source"] = self.data_source diff --git a/events/importer/osoite.py b/events/importer/osoite.py index bbf097bca..d589efb93 100644 --- a/events/importer/osoite.py +++ b/events/importer/osoite.py @@ -65,10 +65,10 @@ def get_whole_address(self, address, 
language): def pk_get(self, resource_name, res_id=None): # support all munigeo resources, not just addresses - Klass = import_string("munigeo.models." + resource_name) + klass = import_string("munigeo.models." + resource_name) if res_id is not None: - return Klass.objects.get(origin_id=res_id) - return Klass.objects.all() + return klass.objects.get(origin_id=res_id) + return klass.objects.all() def delete_and_replace(self, obj): obj.deleted = True diff --git a/events/importer/sync.py b/events/importer/sync.py index cec584c7a..96ca73be3 100644 --- a/events/importer/sync.py +++ b/events/importer/sync.py @@ -43,7 +43,7 @@ def get(self, obj_id): def finish(self, force=False): delete_list = [] - for obj_id, obj in self.obj_dict.items(): + for _obj_id, obj in self.obj_dict.items(): if obj._found: # We have to reset _found so we don't mark or match the same object across several synchers. # Only relevant if consecutive synchers get different querysets; diff --git a/events/importer/util.py b/events/importer/util.py index 3acfdfc3e..921c0e6a6 100644 --- a/events/importer/util.py +++ b/events/importer/util.py @@ -5,7 +5,6 @@ from bs4 import BeautifulSoup from django.core.validators import URLValidator, ValidationError -from django.utils.translation.trans_real import activate, deactivate from langdetect import detect from langdetect.lang_detect_exception import LangDetectException @@ -128,8 +127,12 @@ def address_eq(a, b): return False for key in ["locality", "street_address"]: languages = a[key].viewkeys() | b[key].viewkeys() - for l in languages: - if l in a[key] and l in b[key] and not text_match(a[key][l], b[key][l]): + for lang in languages: + if ( + lang in a[key] + and lang in b[key] + and not text_match(a[key][lang], b[key][lang]) + ): return False return True @@ -186,15 +189,3 @@ def replace_location( % (replace.id, str(replace), by.id) ) return True - - -class active_language: - def __init__(self, language): - self.language = language - - def __enter__(self): - activate(self.language) - return self.language - - def __exit__(self, type, value, traceback): - deactivate() diff --git a/events/importer/yso.py b/events/importer/yso.py index cafda3dd4..dcf7eb210 100644 --- a/events/importer/yso.py +++ b/events/importer/yso.py @@ -4,6 +4,7 @@ import rdflib import requests from django.core.exceptions import ObjectDoesNotExist, ValidationError +from django.utils.translation import override from django_orghierarchy.models import Organization from rdflib import RDF from rdflib.namespace import DCTERMS, OWL, SKOS @@ -12,7 +13,6 @@ from .base import Importer, register_importer from .sync import ModelSyncher -from .util import active_language # Per module logger logger = logging.getLogger(__name__) @@ -71,7 +71,7 @@ def is_aggregate_concept(graph, subject): def get_replacement(graph, subject): - for subject, verb, object in graph.triples((subject, DCTERMS.isReplacedBy, None)): + for _subject, _verb, object in graph.triples((subject, DCTERMS.isReplacedBy, None)): return object @@ -155,7 +155,6 @@ def save_keywords(self, graph): ) keyword_labels = {} - labels_to_create = set() for subject, label in graph.subject_objects(SKOS.altLabel): if (subject, RDF.type, SKOS.Concept) in graph: try: @@ -203,9 +202,9 @@ def save_keyword_label_relationships_in_bulk(self, keyword_labels): yids = Keyword.objects.all().values_list("id", flat=True) labels = KeywordLabel.objects.all().values("id", "name", "language") label_id_from_name_and_language = { - (l["name"], l["language"]): l["id"] for l in labels + (label["name"], 
label["language"]): label["id"] for label in labels } - KeywordAltLabels = Keyword.alt_labels.through + keyword_alt_labels_model = Keyword.alt_labels.through relations_to_create = [] for yid, url_labels in keyword_labels.items(): if yid not in yids: @@ -220,8 +219,8 @@ def save_keyword_label_relationships_in_bulk(self, keyword_labels): ), ) if params["keyword_id"] and params["keywordlabel_id"]: - relations_to_create.append(KeywordAltLabels(**params)) - KeywordAltLabels.objects.bulk_create(relations_to_create) + relations_to_create.append(keyword_alt_labels_model(**params)) + keyword_alt_labels_model.objects.bulk_create(relations_to_create) def create_keyword(self, graph, subject): if is_deprecated(graph, subject): @@ -236,7 +235,7 @@ def create_keyword(self, graph, subject): def update_keyword(self, keyword, graph, subject): for _, literal in graph.preferredLabel(subject): - with active_language(literal.language): + with override(literal.language, deactivate=True): if keyword.name != str(literal): logger.debug( "(re)naming keyword " + keyword.name + " to " + str(literal) diff --git a/events/management/commands/add_helfi_topics.py b/events/management/commands/add_helfi_topics.py index 576fe18ac..afee745aa 100644 --- a/events/management/commands/add_helfi_topics.py +++ b/events/management/commands/add_helfi_topics.py @@ -63,7 +63,7 @@ class Command(BaseCommand): help = "Creates www.hel.fi topic keywords and keyword set used by the UI." - @lru_cache() + @lru_cache() # noqa: B019 def get_keyword_obj(self, keyword_id): try: keyword = Keyword.objects.get(id=keyword_id) diff --git a/events/management/commands/add_helsinki_audience.py b/events/management/commands/add_helsinki_audience.py index 45af79b33..ac30a8299 100644 --- a/events/management/commands/add_helsinki_audience.py +++ b/events/management/commands/add_helsinki_audience.py @@ -50,7 +50,7 @@ class Command(BaseCommand): help = "Creates SOTE keywords and Helsinki audience keyword set and adds YSO audience keywords to events." - @lru_cache() + @lru_cache() # noqa: B019 def get_keyword_obj(self, keyword_id): try: keyword = Keyword.objects.get(id=keyword_id) diff --git a/events/management/commands/add_helsinki_topics.py b/events/management/commands/add_helsinki_topics.py index c0b54309d..d2a1f5581 100644 --- a/events/management/commands/add_helsinki_topics.py +++ b/events/management/commands/add_helsinki_topics.py @@ -41,7 +41,7 @@ class Command(BaseCommand): help = "Creates Helsinki topics keyword set." 
- @lru_cache() + @lru_cache() # noqa: B019 def get_keyword_obj(self, keyword_id): try: keyword = Keyword.objects.get(id=keyword_id) diff --git a/events/models.py b/events/models.py index b48e93a7e..b6037fbaa 100644 --- a/events/models.py +++ b/events/models.py @@ -249,7 +249,7 @@ def save(self, *args, **kwargs): if self.url and self.image: raise ValidationError(_("You can only provide image or url, not both.")) self.last_modified_time = BaseModel.now() - super(Image, self).save(*args, **kwargs) + super().save(*args, **kwargs) def is_user_editable(self): return bool(self.data_source and self.data_source.user_editable) @@ -743,15 +743,15 @@ class SuperEventType: (SuperEventType.UMBRELLA, _("Umbrella event")), ) - class Type_Id: + class TypeId: GENERAL = 1 COURSE = 2 VOLUNTEERING = 3 TYPE_IDS = ( - (Type_Id.GENERAL, _("General")), - (Type_Id.COURSE, _("Course")), - (Type_Id.VOLUNTEERING, _("Volunteering")), + (TypeId.GENERAL, _("General")), + (TypeId.COURSE, _("Course")), + (TypeId.VOLUNTEERING, _("Volunteering")), ) # Properties from schema.org/Thing @@ -851,7 +851,7 @@ class Type_Id: blank=False, null=False, db_index=False, - default=Type_Id.GENERAL, + default=TypeId.GENERAL, choices=TYPE_IDS, ) @@ -973,7 +973,7 @@ def save(self, *args, **kwargs): # if self.location__divisions__ocd_id__endswith == MUNIGEO_MUNI: # self.local = True - super(Event, self).save(*args, **kwargs) + super().save(*args, **kwargs) # needed to cache location event numbers if not old_location and self.location: @@ -1180,7 +1180,7 @@ class Meta: def save(self, *args, **kwargs): self.last_exported_time = BaseModel.now() - super(ExportInfo, self).save(*args, **kwargs) + super().save(*args, **kwargs) class EventAggregate(models.Model): @@ -1214,4 +1214,4 @@ def save(self, *args, **kwargs): ) except SMTPException as e: logger.error(e, exc_info=True) - super(Feedback, self).save(*args, **kwargs) + super().save(*args, **kwargs) diff --git a/events/parsers.py b/events/parsers.py index 1899488f1..e24bd1cc3 100644 --- a/events/parsers.py +++ b/events/parsers.py @@ -31,7 +31,7 @@ class CamelCaseJSONParser(JSONParser): def parse(self, stream, media_type=None, parser_context=None): parser_context = parser_context or {} if "disable_camelcase" in parser_context["request"].query_params: - return super(CamelCaseJSONParser, self).parse(media_type, parser_context) + return super().parse(media_type, parser_context) else: encoding = parser_context.get("encoding", settings.DEFAULT_CHARSET) try: diff --git a/events/renderers/json.py b/events/renderers/json.py index a9329ad15..2b1b76869 100644 --- a/events/renderers/json.py +++ b/events/renderers/json.py @@ -6,7 +6,7 @@ class JSONRenderer(renderers.JSONRenderer): charset = "utf-8" def render(self, data, media_type=None, renderer_context=None): - return super(JSONRenderer, self).render(data, media_type, renderer_context) + return super().render(data, media_type, renderer_context) class JSONLDRenderer(JSONRenderer): diff --git a/events/signals.py b/events/signals.py index 881725907..9c3367245 100644 --- a/events/signals.py +++ b/events/signals.py @@ -28,10 +28,12 @@ def organization_post_save(sender, instance, created, **kwargs): def user_post_save(sender, instance, created, **kwargs): if created: - User = get_user_model() + user_model = get_user_model() recipient_list = [ item[0] - for item in User.objects.filter(is_superuser=True).values_list("email") + for item in user_model.objects.filter(is_superuser=True).values_list( + "email" + ) ] notification_type = NotificationType.USER_CREATED 
context = {"user": instance} diff --git a/events/tests/common.py b/events/tests/common.py index 05c8fad84..d8f4962f8 100644 --- a/events/tests/common.py +++ b/events/tests/common.py @@ -8,12 +8,12 @@ class TestDataMixin: def set_up_test_data(self): # dummy inputs - TEXT = "testing" + text = "testing" # data source - self.test_ds, _ = DataSource.objects.get_or_create(id=TEXT) + self.test_ds, _ = DataSource.objects.get_or_create(id=text) # organization self.test_org, _ = Organization.objects.get_or_create( - id=TEXT, data_source=self.test_ds + id=text, data_source=self.test_ds ) diff --git a/events/tests/test_event_get.py b/events/tests/test_event_get.py index 5aab567fd..f5224a0e7 100644 --- a/events/tests/test_event_get.py +++ b/events/tests/test_event_get.py @@ -951,7 +951,7 @@ def test_keywordset_search( @pytest.mark.django_db -def test_keyword_OR_set_search( +def test_keyword_or_set_search( api_client, event, event2, @@ -975,9 +975,9 @@ def test_keyword_OR_set_search( @pytest.mark.django_db def test_event_get_by_type(api_client, event, event2, event3): # default type is General, only general events should be present in the default search - event2.type_id = Event.Type_Id.COURSE + event2.type_id = Event.TypeId.COURSE event2.save() - event3.type_id = Event.Type_Id.VOLUNTEERING + event3.type_id = Event.TypeId.VOLUNTEERING event3.save() get_list_and_assert_events("", [event]) get_list_and_assert_events("event_type=general", [event]) diff --git a/events/tests/test_event_put.py b/events/tests/test_event_put.py index 1a2e42ee5..b85122d71 100644 --- a/events/tests/test_event_put.py +++ b/events/tests/test_event_put.py @@ -220,8 +220,8 @@ def test__update_an_event_complex_dict(api_client, complex_event_dict, user): response = create_with_post(api_client, complex_event_dict) # dummy inputs - TEXT = "text updated" - URL = "http://localhost" + text = "text updated" + url = "http://localhost" # set up updates data2 = response.data @@ -235,9 +235,9 @@ def test__update_an_event_complex_dict(api_client, complex_event_dict, user): data2["offers"] = [ { "is_free": False, - "price": {"en": TEXT, "sv": TEXT, "fi": TEXT}, - "description": {"en": TEXT, "fi": TEXT}, - "info_url": {"en": URL, "sv": URL, "fi": URL}, + "price": {"en": text, "sv": text, "fi": text}, + "description": {"en": text, "fi": text}, + "info_url": {"en": url, "sv": url, "fi": url}, } ] data2["keywords"] = data2["keywords"][:1] diff --git a/events/tests/test_events_search.py b/events/tests/test_events_search.py index 3be412986..7e584e915 100644 --- a/events/tests/test_events_search.py +++ b/events/tests/test_events_search.py @@ -15,7 +15,7 @@ from .common import TestDataMixin # Make sure we don't overwrite our main indices -for key, val in settings.HAYSTACK_CONNECTIONS.items(): +for _key, val in settings.HAYSTACK_CONNECTIONS.items(): if "INDEX_NAME" in val: val["INDEX_NAME"] = "test_%s" % val["INDEX_NAME"] @@ -45,7 +45,7 @@ def setUp(self): # simple backend doesn't have an index, so we cannot test indexing # rebuild_index.Command().handle(interactive=False) - super(EventSearchTests, self).setUp() + super().setUp() def _get_response(self, query): return self.client.get("/v1/search/", {"q": query}, format="json") diff --git a/events/tests/utils.py b/events/tests/utils.py index db6542d6c..468be87a3 100644 --- a/events/tests/utils.py +++ b/events/tests/utils.py @@ -10,7 +10,7 @@ def assert_event_data_is_equal(d1, d2, version="v1"): # TODO: start using version parameter # make sure the saved data is equal to the one we posted before - FIELDS = ( 
+ fields = ( "data_source", "publisher", "location", @@ -33,19 +33,19 @@ def assert_event_data_is_equal(d1, d2, version="v1"): "end_time", ) if version == "v1": - FIELDS += ("images",) + fields += ("images",) elif version == "v0.1": - FIELDS += ( + fields += ( "image", "headline", "secondary_headline", "origin_id", ) - assert_data_is_equal(d1, d2, FIELDS) + assert_data_is_equal(d1, d2, fields) def assert_place_data_is_equal(d1, d2, version="v1"): - FIELDS = ( + fields = ( "data_source", "publisher", "email", @@ -55,12 +55,12 @@ def assert_place_data_is_equal(d1, d2, version="v1"): "street_address", "address_locality", ) - assert_data_is_equal(d1, d2, FIELDS) + assert_data_is_equal(d1, d2, fields) def assert_keyword_data_is_equal(d1, d2, version="v1"): - FIELDS = ("data_source", "publisher", "name") - assert_data_is_equal(d1, d2, FIELDS) + fields = ("data_source", "publisher", "name") + assert_data_is_equal(d1, d2, fields) def assert_data_is_equal(d1, d2, fields): diff --git a/events/utils.py b/events/utils.py index 98b4270e0..d46bc1f31 100644 --- a/events/utils.py +++ b/events/utils.py @@ -33,7 +33,7 @@ def get_value_from_tuple_list(list_of_tuples, search_key, value_index): :return: Value from either side of tuple """ - for i, v in enumerate(list_of_tuples): + for _i, v in enumerate(list_of_tuples): if str(v[value_index ^ 1]) == str(search_key): return v[value_index] diff --git a/helevents/api.py b/helevents/api.py index 360b6c699..ed8fe115c 100644 --- a/helevents/api.py +++ b/helevents/api.py @@ -15,7 +15,7 @@ class UserSerializer(serializers.ModelSerializer): display_name = serializers.ReadOnlyField(source="get_display_name") def to_representation(self, obj): - rep = super(UserSerializer, self).to_representation(obj) + rep = super().to_representation(obj) default_org = obj.get_default_organization() if default_org: rep["organization"] = default_org.id diff --git a/linkedevents/api.py b/linkedevents/api.py index 02b883ea1..cf8672297 100644 --- a/linkedevents/api.py +++ b/linkedevents/api.py @@ -17,7 +17,7 @@ class LinkedEventsAPIRouter(DefaultRouter): ) def __init__(self): - super(LinkedEventsAPIRouter, self).__init__() + super().__init__() self.registered_api_views = set() self._register_all_views() diff --git a/linkedevents/settings.py b/linkedevents/settings.py index 37050abab..979d288b1 100644 --- a/linkedevents/settings.py +++ b/linkedevents/settings.py @@ -10,7 +10,7 @@ from django.conf.global_settings import LANGUAGES as GLOBAL_LANGUAGES from django.core.exceptions import ImproperlyConfigured from django_jinja.builtins import DEFAULT_EXTENSIONS -from easy_thumbnails.conf import Settings as thumbnail_settings +from easy_thumbnails.conf import Settings as thumbnail_settings # noqa: N813 from sentry_sdk.integrations.django import DjangoIntegration CONFIG_FILE_NAME = "config_dev.toml" @@ -214,7 +214,7 @@ def get_git_revision_hash() -> str: # thus some gyrations language_map = {x: y for x, y in GLOBAL_LANGUAGES} try: - LANGUAGES = tuple((l, language_map[l]) for l in env("LANGUAGES")) + LANGUAGES = tuple((lang, language_map[lang]) for lang in env("LANGUAGES")) except KeyError as e: raise ImproperlyConfigured(f'unknown language code "{e.args[0]}"') LANGUAGE_CODE = env("LANGUAGES")[0] @@ -414,7 +414,7 @@ def dummy_haystack_connection_for_lang(language_code): } } -for language in [l[0] for l in LANGUAGES]: +for language in [lang[0] for lang in LANGUAGES]: if env("ELASTICSEARCH_URL"): connection = haystack_connection_for_lang(language) else: diff --git a/linkedevents/test_settings.py 
b/linkedevents/test_settings.py index 0d0903144..84d32dbc1 100644 --- a/linkedevents/test_settings.py +++ b/linkedevents/test_settings.py @@ -1,3 +1,4 @@ +# flake8: noqa """ Django settings module for pytest """ @@ -13,6 +14,6 @@ def dummy_haystack_connection_without_warnings_for_lang(language_code): } -for language in [l[0] for l in LANGUAGES]: +for language in [lang[0] for lang in LANGUAGES]: connection = dummy_haystack_connection_without_warnings_for_lang(language) HAYSTACK_CONNECTIONS.update(connection) diff --git a/multilingual_haystack/backends.py b/multilingual_haystack/backends.py index e2f3ca583..eef4de763 100644 --- a/multilingual_haystack/backends.py +++ b/multilingual_haystack/backends.py @@ -82,7 +82,7 @@ def __init__(self, **kwargs): self.query = base_engine.query - super(LanguageSearchEngine, self).__init__(**kwargs) + super().__init__(**kwargs) class SimpleSearchBackendWithoutWarnings(SimpleSearchBackend): diff --git a/notifications/models.py b/notifications/models.py index ce39cb5ee..0a3623527 100644 --- a/notifications/models.py +++ b/notifications/models.py @@ -2,7 +2,7 @@ from django.conf import settings from django.db import models -from django.utils import timezone, translation +from django.utils import timezone from django.utils.formats import date_format from django.utils.html import strip_tags from django.utils.translation import activate diff --git a/notifications/tests/test_notifications.py b/notifications/tests/test_notifications.py index 82f01f382..20c074713 100644 --- a/notifications/tests/test_notifications.py +++ b/notifications/tests/test_notifications.py @@ -14,7 +14,7 @@ @pytest.fixture(scope="function") def notification_type(): - setattr(NotificationType, "TEST", "test") + NotificationType.TEST = "test" yield NotificationType.TEST delattr(NotificationType, "TEST") diff --git a/registrations/models.py b/registrations/models.py index 08fc47213..af4363980 100644 --- a/registrations/models.py +++ b/registrations/models.py @@ -1,8 +1,6 @@ -from datetime import datetime from smtplib import SMTPException from uuid import uuid4 -import pytz from django.conf import settings from django.contrib.sites.models import Site from django.core.mail import send_mail @@ -187,9 +185,9 @@ def send_notification(self, confirmation_type): email_variables["instructions"] = self.registration.instructions event_type_name = { - str(Event.Type_Id.GENERAL): "tapahtumaan", - str(Event.Type_Id.COURSE): "kurssille", - str(Event.Type_Id.VOLUNTEERING): "vapaaehtoistehtävään", + str(Event.TypeId.GENERAL): "tapahtumaan", + str(Event.TypeId.COURSE): "kurssille", + str(Event.TypeId.VOLUNTEERING): "vapaaehtoistehtävään", } email_variables["event_type"] = event_type_name[self.registration.event.type_id] diff --git a/registrations/tests/test_registration_admin_side.py b/registrations/tests/test_registration_admin_side.py index 3be147ccc..6b48f4abb 100644 --- a/registrations/tests/test_registration_admin_side.py +++ b/registrations/tests/test_registration_admin_side.py @@ -1,14 +1,14 @@ import uuid from copy import deepcopy +from datetime import datetime, timedelta import environ import pytest from dateutil.parser import parse -from django.conf import settings from django.core import mail -from events.models import Language -from events.tests.conftest import * +from events.models import Event, Language +from events.tests.conftest import * # noqa from events.tests.utils import versioned_reverse as reverse from registrations.models import Registration, SignUp @@ -107,10 +107,10 @@ def 
test_list_all_registrations(api_client, user, user2, event, event2, event3): @pytest.mark.django_db def test_successful_sign_up(api_client, user, event): url = reverse("registration-list") - l = Language() - l.id = "fi" - l.name = "finnish" - l.save() + lang = Language() + lang.id = "fi" + lang.name = "finnish" + lang.save() api_client.force_authenticate(user) registration_data = {"event": event.id} @@ -528,35 +528,6 @@ def test_get_signup_info_with_cancel_code_no_auth(api_client, user, event): signup_url = reverse("signup-list") response = api_client.post(signup_url, sign_up_payload, format="json") - delete_payload = {"cancellation_code": response.data["cancellation_code"]} - - response = api_client.get( - f'{signup_url}?cancellation_code={response.data["cancellation_code"]}' - ) - assert response.data["name"] == "Michael Jackson" - - -@pytest.mark.django_db -def test_get_signup_info_with_cancel_code_no_auth(api_client, user, event): - registration_url = reverse("registration-list") - - api_client.force_authenticate(user) - registration_data = {"event": event.id} - - response = api_client.post(registration_url, registration_data, format="json") - registration_id = response.data["id"] - - api_client.force_authenticate(user=None) - sign_up_payload = { - "registration": registration_id, - "name": "Michael Jackson", - "email": "test@test.com", - } - signup_url = reverse("signup-list") - response = api_client.post(signup_url, sign_up_payload, format="json") - - delete_payload = {"cancellation_code": response.data["cancellation_code"]} - response = api_client.get( f'{signup_url}?cancellation_code={response.data["cancellation_code"]}' ) @@ -692,7 +663,7 @@ def test_filter_registrations(api_client, user, user2, event, event2): response = api_client.post(registration_url, registration_data, format="json") registration_id2 = response.data["id"] - event2.type_id = Event.Type_Id.COURSE + event2.type_id = Event.TypeId.COURSE event2.save() response = api_client.get(registration_url) diff --git a/setup.cfg b/setup.cfg index 151f0f1e5..1f31620b0 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,7 +1,7 @@ [flake8] exclude = migrations,tests max-line-length = 120 -max-complexity = 10 +max-complexity = 20 extend-ignore = E203 [tool:pytest]
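
Note on the dropped mutable default arguments (clean_text_fields's allowed_html_fields=[] and LinkedEventsSerializer's skip_fields=set() above): flake8-bugbear's B006 flags these because a default value is built once, at function definition time, and then shared by every call. A minimal sketch of the pitfall and of the None-sentinel pattern the patch adopts; the function names here are illustrative, not from the codebase:

from typing import Optional


def leaky(value, seen=[]):  # noqa: B006 - deliberately buggy, for illustration
    seen.append(value)
    return seen


def fixed(value, seen: Optional[list] = None):
    if seen is None:  # fresh list per call, as in the patched code
        seen = []
    seen.append(value)
    return seen


assert leaky("a") == ["a"]
assert leaky("b") == ["a", "b"]  # state leaked from the first call
assert fixed("a") == ["a"]
assert fixed("b") == ["b"]  # no shared state between calls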
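
Note on "Replace custom active_language function with override from Django": django.utils.translation.override activates the given language on entry and, when called with deactivate=True, calls deactivate() on exit, matching the __exit__ of the events.importer.util.active_language helper removed above. A sketch of the replacement as now used in events/importer/yso.py, assuming a configured Django settings module with "fi" among the available languages:

from django.utils.translation import get_language, override

# Before: with active_language(literal.language): ...
# After: the stock context manager; deactivate=True reproduces the old
# helper's __exit__, which called deactivate() unconditionally instead of
# restoring whatever language was active before.
with override("fi", deactivate=True):
    assert get_language() == "fi"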
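
Note on the # noqa: B019 markers added to the @lru_cache() methods: flake8-bugbear warns here because the cache is stored at class level and its keys include self, so every instance that ever called the method is kept alive for the lifetime of the process. The patch suppresses the warning, presumably because the importer and management-command objects are few and short-lived; a leak-free alternative, should it ever matter, is a per-instance cache. The class below is a hypothetical stand-in, not code from this repository:

from functools import lru_cache


class MatcherDemo:
    def __init__(self):
        # Leak-free variant: the cache is bound to the instance and is
        # garbage-collected together with it.
        self.match_per_instance = lru_cache()(self._match)

    @lru_cache()  # noqa: B019 - class-level cache keeps self alive
    def match(self, text):
        return text.lower()

    def _match(self, text):
        return text.lower()


demo = MatcherDemo()
assert demo.match("ABC") == demo.match_per_instance("ABC") == "abc"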