
Commit f656b2d

JonasBashashjar authored and committed
api: refactor POST chunk-upload to fail early (#102536)
This PR refactors the chunk upload endpoint to process uploaded files sequentially and fail early as soon as any validation rule is violated.
1 parent f969fac commit f656b2d
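
The shape of the change: a cheap chunk-count check runs before any gzip decompression, and the remaining checks (per-chunk blob size, running request total) happen as a generator yields each chunk, so the endpoint can return 400 without touching the chunks that follow. Below is a minimal standalone sketch of that pattern; the names (iter_uploads, validate_sequentially) and the limits are illustrative, not Sentry's actual helpers or settings.

    # Illustrative sketch of the fail-early pattern in this PR; the names
    # and limits here are hypothetical, not Sentry's code or settings.
    import gzip
    from typing import Iterable, Iterator

    MAX_CHUNKS = 64                   # hypothetical per-request chunk count limit
    MAX_BLOB_SIZE = 8 * 1024 * 1024   # hypothetical per-chunk size limit
    MAX_REQUEST = 32 * 1024 * 1024    # hypothetical per-request total size limit


    def iter_uploads(raw: Iterable[bytes], gzipped: Iterable[bytes]) -> Iterator[bytes]:
        # Plain chunks pass through; gzipped chunks are decompressed lazily,
        # one at a time, only when the consumer actually asks for them.
        for chunk in raw:
            yield chunk
        for chunk in gzipped:
            yield gzip.decompress(chunk)


    def validate_sequentially(raw: list, gzipped: list) -> str:
        # Cheapest check first: counting fields needs no decompression at all.
        if len(raw) + len(gzipped) > MAX_CHUNKS:
            return "Too many chunks"

        total = 0
        for chunk in iter_uploads(raw, gzipped):
            if len(chunk) > MAX_BLOB_SIZE:
                return "Chunk size too large"  # later chunks are never produced
            total += len(chunk)
            if total > MAX_REQUEST:
                return "Request too large"     # running total also fails early
        return "ok"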

File tree

1 file changed: +29 −19 lines


src/sentry/api/endpoints/chunk.py

@@ -183,36 +183,37 @@ def post(self, request: Request, organization) -> Response:
         logger.info("chunkupload.start")

         files = []
-        if request.FILES:
-            files = request.FILES.getlist("file")
-            files += [GzipChunk(chunk) for chunk in request.FILES.getlist("file_gzip")]
+        checksums = []
+        total_size = 0
+
+        # Validate if chunks exceed the maximum chunk limit before attempting to decompress them.
+        num_files = len(request.FILES.getlist("file")) + len(request.FILES.getlist("file_gzip"))
+
+        if num_files > MAX_CHUNKS_PER_REQUEST:
+            logger.info("chunkupload.end", extra={"status": status.HTTP_400_BAD_REQUEST})
+            return Response({"error": "Too many chunks"}, status=status.HTTP_400_BAD_REQUEST)

-        if len(files) == 0:
-            # No files uploaded is ok
+        # No files uploaded is ok
+        if num_files == 0:
             logger.info("chunkupload.end", extra={"status": status.HTTP_200_OK})
             return Response(status=status.HTTP_200_OK)

-        logger.info("chunkupload.post.files", extra={"len": len(files)})
-
-        # Validate file size
-        checksums = []
-        size = 0
-        for chunk in files:
-            size += chunk.size
+        for chunk, name in get_files(request):
             if chunk.size > settings.SENTRY_CHUNK_UPLOAD_BLOB_SIZE:
                 logger.info("chunkupload.end", extra={"status": status.HTTP_400_BAD_REQUEST})
                 return Response(
                     {"error": "Chunk size too large"}, status=status.HTTP_400_BAD_REQUEST
                 )
-            checksums.append(chunk.name)

-        if size > MAX_REQUEST_SIZE:
-            logger.info("chunkupload.end", extra={"status": status.HTTP_400_BAD_REQUEST})
-            return Response({"error": "Request too large"}, status=status.HTTP_400_BAD_REQUEST)
+            total_size += chunk.size
+            if total_size > MAX_REQUEST_SIZE:
+                logger.info("chunkupload.end", extra={"status": status.HTTP_400_BAD_REQUEST})
+                return Response({"error": "Request too large"}, status=status.HTTP_400_BAD_REQUEST)

-        if len(files) > MAX_CHUNKS_PER_REQUEST:
-            logger.info("chunkupload.end", extra={"status": status.HTTP_400_BAD_REQUEST})
-            return Response({"error": "Too many chunks"}, status=status.HTTP_400_BAD_REQUEST)
+            files.append(chunk)
+            checksums.append(name)
+
+        logger.info("chunkupload.post.files", extra={"len": len(files)})

         try:
             FileBlob.from_files(zip(files, checksums), organization=organization, logger=logger)
@@ -222,3 +223,12 @@ def post(self, request: Request, organization) -> Response:

         logger.info("chunkupload.end", extra={"status": status.HTTP_200_OK})
         return Response(status=status.HTTP_200_OK)
+
+
+def get_files(request: Request):
+    for chunk in request.FILES.getlist("file"):
+        yield chunk, chunk.name
+
+    for chunk in request.FILES.getlist("file_gzip"):
+        decompressed_chunk = GzipChunk(chunk)
+        yield decompressed_chunk, chunk.name
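
Because get_files is a generator, GzipChunk decompression is deferred until the validation loop reaches each gzipped part, so a request rejected on the count check never pays any decompression cost. For illustration, a hypothetical client-side sketch with python-requests; the URL, organization slug, and token are placeholders rather than values taken from this PR.

    # Hypothetical client sketch; the URL, org slug, and token are placeholders.
    import requests

    url = "https://sentry.example.com/api/0/organizations/my-org/chunk-upload/"
    headers = {"Authorization": "Bearer <token>"}

    # Send more multipart "file" parts than the server allows: after this PR
    # the request fails on the count check alone, before any chunk is read
    # for size or any "file_gzip" part is decompressed.
    files = [("file", (f"chunk{i}", b"\x00" * 1024)) for i in range(1000)]
    resp = requests.post(url, headers=headers, files=files)
    print(resp.status_code, resp.json())  # e.g. 400 {"error": "Too many chunks"}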
