diff --git a/tapir/statistics/migrations/0006_remove_fancygraphcache_view_name_and_more.py b/tapir/statistics/migrations/0006_remove_fancygraphcache_view_name_and_more.py
new file mode 100644
index 000000000..4e54eb3f2
--- /dev/null
+++ b/tapir/statistics/migrations/0006_remove_fancygraphcache_view_name_and_more.py
@@ -0,0 +1,23 @@
+# Generated by Django 5.1.5 on 2025-01-30 17:47
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("statistics", "0005_merge_20241209_2116"),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name="fancygraphcache",
+            name="view_name",
+        ),
+        migrations.AddField(
+            model_name="fancygraphcache",
+            name="data_provider_name_name",
+            field=models.CharField(default="unused", max_length=500),
+            preserve_default=False,
+        ),
+    ]
diff --git a/tapir/statistics/models.py b/tapir/statistics/models.py
index 7c8fb2e25..9741eb454 100644
--- a/tapir/statistics/models.py
+++ b/tapir/statistics/models.py
@@ -33,7 +33,7 @@ class PurchaseBasket(models.Model):
 
 
 class FancyGraphCache(models.Model):
-    view_name = models.CharField(max_length=255)
+    data_provider_name = models.CharField(max_length=500)
     date = models.DateField()
     value = models.IntegerField()
 
diff --git a/tapir/statistics/services/data_providers/data_provider_abcd_members.py b/tapir/statistics/services/data_providers/data_provider_abcd_members.py
index 5100d9c88..773ca3720 100644
--- a/tapir/statistics/services/data_providers/data_provider_abcd_members.py
+++ b/tapir/statistics/services/data_providers/data_provider_abcd_members.py
@@ -15,7 +15,7 @@
 class DataProviderAbcdMembers(BaseDataProvider):
     @classmethod
     def get_display_name(cls):
-        return _("Number of ABCD members")
+        return _("ABCD members")
 
     @classmethod
     def get_description(cls):
diff --git a/tapir/statistics/urls.py b/tapir/statistics/urls.py
index 7e277ce84..d525d5f75 100644
--- a/tapir/statistics/urls.py
+++ b/tapir/statistics/urls.py
@@ -1,14 +1,11 @@
 from django.urls import path
 
-import tapir.statistics.views.fancy_graph.base_view
 from tapir.statistics import views
-from tapir.statistics.views import fancy_graph
+from tapir.statistics.views.available_columns_view import AvailableColumnsView
 from tapir.statistics.views.available_datasets_view import AvailableDatasetsView
 from tapir.statistics.views.dataset_export_view import DatasetExportView
-from tapir.statistics.views.fancy_export.base_view import (
-    AvailableColumnsView,
-    FancyExportView,
-)
+from tapir.statistics.views.fancy_export_view import FancyExportView
+from tapir.statistics.views.fancy_graph_view import FancyGraphView
 
 app_name = "statistics"
 urlpatterns = [
@@ -54,7 +51,7 @@
     ),
     path(
         "fancy_graph",
-        fancy_graph.base_view.FancyGraphView.as_view(),
+        FancyGraphView.as_view(),
         name="fancy_graph",
     ),
     path(
diff --git a/tapir/statistics/views/available_columns_view.py b/tapir/statistics/views/available_columns_view.py
new file mode 100644
index 000000000..477451352
--- /dev/null
+++ b/tapir/statistics/views/available_columns_view.py
@@ -0,0 +1,27 @@
+from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
+from drf_spectacular.utils import extend_schema
+from rest_framework import status
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+from tapir.settings import PERMISSION_COOP_MANAGE
+from tapir.statistics.serializers import ColumnSerializer, DatapointExportSerializer
+
+
+class AvailableColumnsView(LoginRequiredMixin, PermissionRequiredMixin, APIView):
+    permission_required = PERMISSION_COOP_MANAGE
+
+    @extend_schema(
+        responses={200: ColumnSerializer(many=True)},
+    )
+    def get(self, request):
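+        # Build one selectable column per field declared on DatapointExportSerializer.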
+        objects = [
+            {"column_name": column_name}
+            for column_name in DatapointExportSerializer().get_fields().keys()
+        ]
+
+        return Response(
+            ColumnSerializer(objects, many=True).data,
+            status=status.HTTP_200_OK,
+        )
diff --git a/tapir/statistics/views/fancy_graph/base_view.py b/tapir/statistics/views/dataset_graph_point_view.py
similarity index 61%
rename from tapir/statistics/views/fancy_graph/base_view.py
rename to tapir/statistics/views/dataset_graph_point_view.py
index ef60b2436..e0beae0b2 100644
--- a/tapir/statistics/views/fancy_graph/base_view.py
+++ b/tapir/statistics/views/dataset_graph_point_view.py
@@ -1,56 +1,52 @@
 import datetime
-from abc import ABC, abstractmethod
+from abc import ABC
+from typing import Type
 
 from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
-from django.db.models import QuerySet
 from django.utils import timezone
-from django.views import generic
 from drf_spectacular.utils import extend_schema, OpenApiParameter
 from rest_framework import status
 from rest_framework.response import Response
 from rest_framework.views import APIView
 
-from tapir.coop.models import ShareOwner
 from tapir.settings import PERMISSION_COOP_MANAGE
 from tapir.statistics.models import FancyGraphCache
+from tapir.statistics.services.data_providers.base_data_provider import (
+    data_providers,
+    BaseDataProvider,
+)
 
 
-class FancyGraphView(LoginRequiredMixin, PermissionRequiredMixin, generic.TemplateView):
+class DatasetGraphPointView(LoginRequiredMixin, PermissionRequiredMixin, APIView, ABC):
     permission_required = PERMISSION_COOP_MANAGE
-    template_name = "statistics/fancy_graph.html"
 
-    def get_context_data(self, **kwargs):
-        context_data = super().get_context_data(**kwargs)
-
-        return context_data
-
-
-class DatapointView(LoginRequiredMixin, PermissionRequiredMixin, APIView, ABC):
-    permission_required = PERMISSION_COOP_MANAGE
-
-    @abstractmethod
-    def get_queryset(self, reference_time: datetime.datetime) -> QuerySet[ShareOwner]:
-        pass
-
-    def calculate_datapoint(self, reference_time: datetime.datetime) -> int:
-        return self.get_queryset(reference_time).distinct().count()
-
-    def get_datapoint(self, reference_time: datetime.datetime):
+    @staticmethod
+    def calculate_datapoint(
+        data_provider: Type[BaseDataProvider], reference_time: datetime.datetime
+    ) -> int:
+        return data_provider.get_queryset(reference_time).distinct().count()
+
+    def get_datapoint(
+        self, data_provider: Type[BaseDataProvider], reference_time: datetime.datetime
+    ):
         reference_date = reference_time.date()
-        view_name = f"{self.__class__.__module__}.{self.__class__.__name__}"
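+        # Cache entries are keyed by the data provider's module-qualified class name.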
+        data_provider_name = (
+            f"{data_provider.__module__}.{data_provider.__name__}"
+        )
 
         if reference_date < timezone.now().date():
             # Only use the cache for dates in the past:
             # someone may make changes and check the results on the graph on the same day.
             cached_value = FancyGraphCache.objects.filter(
-                view_name=view_name, date=reference_date
+                data_provider_name=data_provider_name, date=reference_date
             ).first()
             if cached_value:
                 return cached_value.value
 
-        value = self.calculate_datapoint(reference_time)
+        value = self.calculate_datapoint(data_provider, reference_time)
         FancyGraphCache.objects.create(
-            view_name=view_name, date=reference_date, value=value
+            data_provider_name=data_provider_name, date=reference_date, value=value
         )
         return value
 
@@ -70,19 +65,24 @@ def transfer_attributes(source, target, attributes):
         parameters=[
             OpenApiParameter(name="at_date", required=True, type=datetime.date),
             OpenApiParameter(name="relative", required=True, type=bool),
+            OpenApiParameter(name="dataset", required=True, type=str),
         ],
     )
     def get(self, request):
         reference_time = self.get_reference_time(request)
         relative = request.query_params.get("relative") == "true"
 
-        result = self.get_datapoint(reference_time)
+        data_provider = data_providers[request.query_params.get("dataset")]
+
+        result = self.get_datapoint(data_provider, reference_time)
 
         if relative:
             previous_datapoint_time = (
                 reference_time - datetime.timedelta(days=1)
             ).replace(day=1)
-            previous_datapoint = self.get_datapoint(previous_datapoint_time)
+            previous_datapoint = self.get_datapoint(
+                data_provider, previous_datapoint_time
+            )
             result = result - previous_datapoint
 
         return Response(
diff --git a/tapir/statistics/views/fancy_export/__init__.py b/tapir/statistics/views/fancy_export/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/tapir/statistics/views/fancy_export/base_view.py b/tapir/statistics/views/fancy_export/base_view.py
deleted file mode 100644
index ccd7e1b4a..000000000
--- a/tapir/statistics/views/fancy_export/base_view.py
+++ /dev/null
@@ -1,101 +0,0 @@
-import datetime
-from abc import ABC
-from typing import List
-
-from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
-from django.views import generic
-from drf_spectacular.utils import extend_schema, OpenApiParameter
-from rest_framework import status
-from rest_framework.response import Response
-from rest_framework.views import APIView
-
-from tapir.coop.models import ShareOwner
-from tapir.settings import PERMISSION_COOP_MANAGE
-from tapir.statistics.serializers import DatapointExportSerializer, ColumnSerializer
-from tapir.statistics.services.datapoint_export_column_builder import (
-    DatapointExportColumnBuilder,
-)
-from tapir.statistics.views.fancy_graph.base_view import DatapointView
-
-
-class FancyExportView(
-    LoginRequiredMixin, PermissionRequiredMixin, generic.TemplateView
-):
-    permission_required = PERMISSION_COOP_MANAGE
-    template_name = "statistics/fancy_export.html"
-
-    def get_context_data(self, **kwargs):
-        context_data = super().get_context_data(**kwargs)
-
-        return context_data
-
-
-class DatapointExportView(DatapointView, ABC):
-    @extend_schema(
-        responses={200: DatapointExportSerializer(many=True)},
-        parameters=[
-            OpenApiParameter(name="at_date", required=True, type=datetime.date),
-            OpenApiParameter(name="export_columns", required=True, type=str, many=True),
-        ],
-    )
-    def get(self, request):
-        reference_time = self.get_reference_time(request)
-        export_columns = request.query_params.getlist("export_columns")
-
-        queryset = self.get_queryset(reference_time).distinct().order_by("id")
-
-        return Response(
-            DatapointExportSerializer(
-                self.build_serializer_data(queryset, export_columns, reference_time),
-                many=True,
-            ).data,
-            status=status.HTTP_200_OK,
-        )
-
-    def build_serializer_data(
-        self, queryset, export_columns: List[str], reference_time: datetime.datetime
-    ):
-        return [
-            self.build_single_entry_data(share_owner, export_columns, reference_time)
-            for share_owner in queryset
-        ]
-
-    def build_single_entry_data(
-        self,
-        share_owner: ShareOwner,
-        export_columns: List[str],
-        reference_time: datetime.datetime,
-    ):
-        return {
-            column_name: self.build_column_data(
-                share_owner, column_name, reference_time
-            )
-            for column_name in export_columns
-        }
-
-    @staticmethod
-    def build_column_data(
-        share_owner: ShareOwner, column_name: str, reference_time: datetime.datetime
-    ):
-        function_name = f"build_column_{column_name}"
-        return getattr(DatapointExportColumnBuilder, function_name)(
-            share_owner=share_owner, reference_time=reference_time
-        )
-
-
-class AvailableColumnsView(LoginRequiredMixin, PermissionRequiredMixin, APIView):
-    permission_required = PERMISSION_COOP_MANAGE
-
-    @extend_schema(
-        responses={200: ColumnSerializer(many=True)},
-    )
-    def get(self, request):
-        objects = [
-            {"column_name": column_name}
-            for column_name in DatapointExportSerializer().get_fields().keys()
-        ]
-
-        return Response(
-            ColumnSerializer(objects, many=True).data,
-            status=status.HTTP_200_OK,
-        )
diff --git a/tapir/statistics/views/fancy_export_view.py b/tapir/statistics/views/fancy_export_view.py
new file mode 100644
index 000000000..b492eeb70
--- /dev/null
+++ b/tapir/statistics/views/fancy_export_view.py
@@ -0,0 +1,16 @@
+from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
+from django.views import generic
+
+from tapir.settings import PERMISSION_COOP_MANAGE
+
+
+class FancyExportView(
+    LoginRequiredMixin, PermissionRequiredMixin, generic.TemplateView
+):
+    permission_required = PERMISSION_COOP_MANAGE
+    template_name = "statistics/fancy_export.html"
+
+    def get_context_data(self, **kwargs):
+        context_data = super().get_context_data(**kwargs)
+
+        return context_data
diff --git a/tapir/statistics/views/fancy_graph/__init__.py b/tapir/statistics/views/fancy_graph/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/tapir/statistics/views/fancy_graph_view.py b/tapir/statistics/views/fancy_graph_view.py
new file mode 100644
index 000000000..8d21583ae
--- /dev/null
+++ b/tapir/statistics/views/fancy_graph_view.py
@@ -0,0 +1,14 @@
+from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
+from django.views import generic
+
+from tapir.settings import PERMISSION_COOP_MANAGE
+
+
+class FancyGraphView(LoginRequiredMixin, PermissionRequiredMixin, generic.TemplateView):
+    permission_required = PERMISSION_COOP_MANAGE
+    template_name = "statistics/fancy_graph.html"
+
+    def get_context_data(self, **kwargs):
+        context_data = super().get_context_data(**kwargs)
+
+        return context_data