diff --git a/products/error_tracking/backend/api/error_tracking.py b/products/error_tracking/backend/api/error_tracking.py
index c45c5668bdc4b..fb225d83dc680 100644
--- a/products/error_tracking/backend/api/error_tracking.py
+++ b/products/error_tracking/backend/api/error_tracking.py
@@ -1016,6 +1016,8 @@ def finish_upload(self, request, **kwargs):
 
     @action(methods=["POST"], detail=False, parser_classes=[JSONParser])
     def bulk_start_upload(self, request, **kwargs):
+        if request.user.pk:
+            posthoganalytics.identify_context(request.user.pk)
         # Earlier ones send a list of chunk IDs, all associated with one release
         # Extract a list of chunk IDs from the request json
         chunk_ids: list[str] = request.data.get("chunk_ids") or []
@@ -1024,7 +1026,6 @@ def bulk_start_upload(self, request, **kwargs):
 
         posthoganalytics.capture(
             "error_tracking_symbol_set_upload_started",
-            distinct_id=request.user.pk,
             properties={"team_id": self.team.id, "endpoint": "bulk_start_upload"},
             groups=groups(self.team.organization, self.team),
         )
@@ -1049,6 +1050,8 @@ def bulk_start_upload(self, request, **kwargs):
 
     @action(methods=["POST"], detail=False, parser_classes=[JSONParser])
     def bulk_finish_upload(self, request, **kwargs):
+        if request.user.pk:
+            posthoganalytics.identify_context(request.user.pk)
         # Get the map of symbol_set_id:content_hashes
         content_hashes = request.data.get("content_hashes", {})
         if content_hashes is None:
@@ -1107,7 +1110,6 @@ def bulk_finish_upload(self, request, **kwargs):
 
         posthoganalytics.capture(
             "error_tracking_symbol_set_uploaded",
-            distinct_id=request.user.pk,
             groups=groups(self.team.organization, self.team),
         )
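
For illustration only, a minimal standalone sketch of the pattern these hunks move to (not the actual view code; the helper name, the explicit new_context() block, and the behavior that captures inherit the identified distinct ID are assumptions about the posthoganalytics contexts API referenced in the diff):

    import posthoganalytics

    def record_upload_started(user_pk, team_id):
        # Hypothetical helper mirroring the change above.
        with posthoganalytics.new_context():
            if user_pk:
                # Attach the user's distinct ID to the current analytics context;
                # later capture calls in this context are attributed to that user,
                # so no per-call distinct_id kwarg is needed.
                posthoganalytics.identify_context(user_pk)
            posthoganalytics.capture(
                "error_tracking_symbol_set_upload_started",
                properties={"team_id": team_id, "endpoint": "bulk_start_upload"},
            )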