Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 10 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -189,4 +189,13 @@ cache
*.iml

# MacOS Finder
**/.DS_Store
**/.DS_Store

# Local benchmarks
out_benchmark.json
out_storage.json
local_deployment.json
experiments.json

# Editors
.vscode
6 changes: 6 additions & 0 deletions benchmarks/000.microbenchmarks/010.sleep/bun/function.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
// setTimeout-backed sleep: returns a promise that settles after `ms` milliseconds.
const timer = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

exports.handler = async function(event) {
var sleep = event.sleep;
return timer(sleep*1000);
};
9 changes: 9 additions & 0 deletions benchmarks/000.microbenchmarks/010.sleep/bun/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
{
"name": "",
"version": "1.0.0",
"description": "",
"author": "",
"license": "",
"dependencies": {
}
}
6 changes: 5 additions & 1 deletion benchmarks/000.microbenchmarks/010.sleep/config.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
{
"timeout": 120,
"memory": 128,
"languages": ["python", "nodejs"],
"languages": [
"python",
"nodejs",
"bun"
],
"modules": []
}
29 changes: 29 additions & 0 deletions benchmarks/100.webapps/110.dynamic-html/bun/function.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
const Mustache = require('mustache'),
fs = require('fs'),
path = require('path');

// Pseudo-random integer between b and e (bounds inclusive, approximately uniform).
function random(b, e) {
    const span = e - b;
    return Math.round(span * Math.random() + b);
}

exports.handler = async function(event) {
var random_numbers = new Array(event.random_len);
for(var i = 0; i < event.random_len; ++i) {
random_numbers[i] = random(0, 100);
}
var input = {
cur_time: new Date().toLocaleString(),
username: event.username,
random_numbers: random_numbers
};

var file = path.resolve(__dirname, 'templates', 'template.html');
return new Promise((resolve, reject) => {
fs.readFile(file, "utf-8",
function(err, data) {
if(err) reject(err);
resolve(Mustache.render(data, input));
}
);
});
};
10 changes: 10 additions & 0 deletions benchmarks/100.webapps/110.dynamic-html/bun/init.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
#!/bin/bash

# Copy the static Mustache templates into the benchmark deployment directory.
#   $1 - destination directory
#   $2 - "true" to enable verbose logging
DIR=$1
VERBOSE=$2
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
path="${SCRIPT_DIR}/templates/"
if [ "$VERBOSE" = true ]; then
    echo "Update ${DIR} with static templates ${path}"
fi
# Quote both paths so directories containing spaces do not break the copy.
cp -r "${SCRIPT_DIR}/templates" "${DIR}"
10 changes: 10 additions & 0 deletions benchmarks/100.webapps/110.dynamic-html/bun/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
{
"name": "",
"version": "1.0.0",
"description": "",
"author": "",
"license": "",
"dependencies": {
"mustache": "^3.2.1"
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
<!DOCTYPE html>
<html>
<head>
<title>Randomly generated data.</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link href="http://netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.min.css" rel="stylesheet" media="screen">
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Use HTTPS for Bootstrap CDN and consider updating Bootstrap version.

The template has two issues:

  1. Using http:// instead of https:// for the Bootstrap CDN creates a mixed content vulnerability and will trigger browser warnings when the page is served over HTTPS.
  2. Bootstrap 3.0.0 is from 2013 and has known security vulnerabilities.
🔒 Proposed fix
-    <link href="http://netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.min.css" rel="stylesheet" media="screen">
+    <link href="https://cdn.jsdelivr.net/npm/bootstrap@3.4.1/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-HSMxcRTRxnN+Bdg0JdbxYKrThecOKuH5zCYotlSAcp1+c8xmyTe9GYg1l9a69psu" crossorigin="anonymous">

Or update to Bootstrap 5 (latest stable):

-    <link href="http://netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.min.css" rel="stylesheet" media="screen">
+    <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-9ndCyUaIbzAi2FUVXJi0CjmCapSmO7SnpJef0486qhLnuZ2cdeRhO02iuK6FUUVM" crossorigin="anonymous">
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
<link href="http://netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.min.css" rel="stylesheet" media="screen">
<link href="https://cdn.jsdelivr.net/npm/bootstrap@3.4.1/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-HSMxcRTRxnN+Bdg0JdbxYKrThecOKuH5zCYotlSAcp1+c8xmyTe9GYg1l9a69psu" crossorigin="anonymous">
Suggested change
<link href="http://netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.min.css" rel="stylesheet" media="screen">
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-9ndCyUaIbzAi2FUVXJi0CjmCapSmO7SnpJef0486qhLnuZ2cdeRhO02iuK6FUUVM" crossorigin="anonymous">
🤖 Prompt for AI Agents
In @benchmarks/100.webapps/110.dynamic-html/bun/templates/template.html at line
6, The link tag in template.html uses an insecure HTTP Bootstrap CDN and an
outdated version; update the href on the <link> element to use HTTPS (https://)
and replace the v3.0.0 CDN URL with a current, supported Bootstrap release (for
example a Bootstrap 5 CDN URL) to eliminate mixed-content warnings and address
known vulnerabilities.

<style type="text/css">
.container {
max-width: 500px;
padding-top: 100px;
}
</style>
</head>
<body>
<div class="container">
<p>Welcome {{username}}!</p>
<p>Data generated at: {{cur_time}}!</p>
<p>Requested random numbers:</p>
<ul>
{{#random_numbers}}
<li>{{.}}</li>
{{/random_numbers}}
</ul>
</div>
</body>
</html>
6 changes: 5 additions & 1 deletion benchmarks/100.webapps/110.dynamic-html/config.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
{
"timeout": 10,
"memory": 128,
"languages": ["python", "nodejs"],
"languages": [
"python",
"nodejs",
"bun"
],
"modules": []
}
36 changes: 36 additions & 0 deletions benchmarks/100.webapps/120.uploader/bun/function.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
const fs = require('fs'),
path = require('path'),
request = require('request'),
storage = require('./storage');

let storage_handler = new storage.storage();

// Wrap a stream's lifecycle in a promise: fulfilled (with no value) once the
// stream emits 'close', rejected with the emitted error on 'error'.
function streamToPromise(stream) {
    return new Promise((resolve, reject) => {
        stream.on("error", reject);
        stream.on("close", () => resolve());
    });
}

exports.handler = async function(event) {
let bucket = event.bucket.bucket
let output_prefix = event.bucket.output
let url = event.object.url
let upload_key = path.basename(url)
let download_path = path.join('/tmp', upload_key)

var file = fs.createWriteStream(download_path);
request(url).pipe(file);
let promise = streamToPromise(file);
Comment on lines +24 to +26
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Add error handling for HTTP request.

The request() call can fail due to network errors, invalid URLs, or HTTP errors, but there's no error handler attached to the request stream. This could lead to unhandled promise rejections.

🛡️ Proposed fix
  var file = fs.createWriteStream(download_path);
- request(url).pipe(file);
+ const req = request(url);
+ req.on('error', (err) => {
+   file.destroy();
+   throw new Error(`Download failed: ${err.message}`);
+ });
+ req.pipe(file);
  let promise = streamToPromise(file);
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
var file = fs.createWriteStream(download_path);
request(url).pipe(file);
let promise = streamToPromise(file);
var file = fs.createWriteStream(download_path);
const req = request(url);
req.on('error', (err) => {
file.destroy();
throw new Error(`Download failed: ${err.message}`);
});
req.pipe(file);
let promise = streamToPromise(file);
🤖 Prompt for AI Agents
In @benchmarks/100.webapps/120.uploader/bun/function.js around lines 24 - 26,
The HTTP request stream created by request(url) is missing error handling which
can cause unhandled rejections; attach 'error' listeners to the request stream
and the file stream used by fs.createWriteStream(download_path) and ensure
streamToPromise(file) is rejected on those errors (e.g., add request.on('error',
...) and file.on('error', ...) and propagate/abort the other stream as needed);
also handle non-2xx HTTP responses from the request stream (check 'response' or
statusCode on the request) and reject the promise with a clear error so callers
of streamToPromise(file) get proper failure information.

var keyName;
let upload = promise.then(
async () => {
[keyName, promise] = storage_handler.upload(bucket, path.join(output_prefix, upload_key), download_path);
await promise;
}
);
await upload;
return {bucket: bucket, url: url, key: keyName}
Comment on lines +27 to +35
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Initialize keyName or handle undefined case.

keyName is declared on line 27 but only assigned inside the promise callback on line 30. If an error occurs during the upload, keyName will be undefined when returned on line 35, which could cause issues downstream.

🔧 Proposed fix

Option 1: Initialize with a default value:

- var keyName;
+ var keyName = null;
  let upload = promise.then(
    async () => {
      [keyName, promise] = storage_handler.upload(bucket, path.join(output_prefix, upload_key), download_path);
      await promise;
    }
  );
  await upload;
+ if (!keyName) {
+   throw new Error('Upload failed: key name not set');
+ }
  return {bucket: bucket, url: url, key: keyName}

Option 2: Restructure to avoid the issue:

- var keyName;
- let upload = promise.then(
-   async () => {
-     [keyName, promise] = storage_handler.upload(bucket, path.join(output_prefix, upload_key), download_path);
-     await promise;
-   }
- );
- await upload;
- return {bucket: bucket, url: url, key: keyName}
+ await promise;
+ const [keyName, uploadPromise] = storage_handler.upload(bucket, path.join(output_prefix, upload_key), download_path);
+ await uploadPromise;
+ return {bucket: bucket, url: url, key: keyName}
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
var keyName;
let upload = promise.then(
async () => {
[keyName, promise] = storage_handler.upload(bucket, path.join(output_prefix, upload_key), download_path);
await promise;
}
);
await upload;
return {bucket: bucket, url: url, key: keyName}
var keyName = null;
let upload = promise.then(
async () => {
[keyName, promise] = storage_handler.upload(bucket, path.join(output_prefix, upload_key), download_path);
await promise;
}
);
await upload;
if (!keyName) {
throw new Error('Upload failed: key name not set');
}
return {bucket: bucket, url: url, key: keyName}
Suggested change
var keyName;
let upload = promise.then(
async () => {
[keyName, promise] = storage_handler.upload(bucket, path.join(output_prefix, upload_key), download_path);
await promise;
}
);
await upload;
return {bucket: bucket, url: url, key: keyName}
await promise;
const [keyName, uploadPromise] = storage_handler.upload(bucket, path.join(output_prefix, upload_key), download_path);
await uploadPromise;
return {bucket: bucket, url: url, key: keyName}
🤖 Prompt for AI Agents
In @benchmarks/100.webapps/120.uploader/bun/function.js around lines 27 - 35,
keyName is declared but only set inside the async promise callback (via
storage_handler.upload), so if the upload fails you return undefined; initialize
keyName to a safe default (e.g., null or empty string) or restructure to capture
the upload result directly: call const [resultKey, uploadPromise] =
storage_handler.upload(bucket, path.join(output_prefix, upload_key),
download_path); await uploadPromise; then set keyName = resultKey (or wrap in
try/catch to handle errors and set a fallback) before returning {bucket, url,
key: keyName}; ensure references to storage_handler.upload, keyName,
upload/promise are updated accordingly.

};
10 changes: 10 additions & 0 deletions benchmarks/100.webapps/120.uploader/bun/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
{
"name": "",
"version": "1.0.0",
"description": "",
"author": "",
"license": "",
"dependencies": {
"request": "^2.88.0"
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

🧩 Analysis chain

🌐 Web query:

npm request package deprecation status 2020

💡 Result:

Short answer: The npm package request was officially deprecated in February 2020 (deprecation message added on Feb 11, 2020). See the package pages and the maintainers' deprecation discussion (GitHub issue #3142). [1][2][3]

Sources

  • request package on npm (deprecation message). [1]
  • GitHub issue "Request’s Past, Present and Future" (issue #3142 discussing deprecation/maintenance). [2]
  • Coverage/reporting of the deprecation (Feb 2020). [3]

🏁 Script executed:

# Check the context of this package.json and see how request is used
find benchmarks/100.webapps/120.uploader/bun -type f -name "*.js" -o -name "*.ts" | head -20

Repository: spcl/serverless-benchmarks

Length of output: 121


🏁 Script executed:

# Search for usage of request package in the uploader code
rg "require.*request|import.*request" benchmarks/100.webapps/120.uploader/bun/ 2>/dev/null || echo "No direct request imports found"

Repository: spcl/serverless-benchmarks

Length of output: 157


🏁 Script executed:

# Check the full package.json to understand the context
cat benchmarks/100.webapps/120.uploader/bun/package.json

Repository: spcl/serverless-benchmarks

Length of output: 212


🏁 Script executed:

cat benchmarks/100.webapps/120.uploader/bun/function.js

Repository: spcl/serverless-benchmarks

Length of output: 1057


Replace deprecated request package with native fetch API.

The request package was officially deprecated in February 2020 and is no longer maintained, creating security risks from unpatched vulnerabilities. The code uses it simply to download and pipe a file: request(url).pipe(file), which is easily replaceable with Bun's native fetch API.

♻️ Replace with Bun's native fetch
  const fs = require('fs'),
        path = require('path'),
-       request = require('request'),
        storage = require('./storage');

Then update the download:

  var file = fs.createWriteStream(download_path);
- request(url).pipe(file);
+ const response = await fetch(url);
+ response.body.pipe(file);

Remove "request": "^2.88.0" from package.json dependencies.

Committable suggestion skipped: line range outside the PR's diff.

🤖 Prompt for AI Agents
In @benchmarks/100.webapps/120.uploader/bun/package.json at line 8, Remove the
deprecated "request" dependency from package.json and replace any usage of
request(url).pipe(file) with Bun's native fetch streaming: perform fetch(url),
check response.ok, then stream the response body to a writable file stream via
response.body.pipeTo(writableStream) (or use Bun.file/Bun.write APIs to create
the writable stream). Update any code that imports or references the "request"
module to use this fetch-based download flow instead.

}
}
10 changes: 8 additions & 2 deletions benchmarks/100.webapps/120.uploader/config.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,12 @@
{
"timeout": 30,
"memory": 128,
"languages": ["python", "nodejs"],
"modules": ["storage"]
"languages": [
"python",
"nodejs",
"bun"
],
"modules": [
"storage"
]
}
28 changes: 28 additions & 0 deletions benchmarks/200.multimedia/210.thumbnailer/bun/function.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
const sharp = require('sharp'),
path = require('path'),
storage = require('./storage');

let storage_handler = new storage.storage();

exports.handler = async function(event) {

bucket = event.bucket.bucket
input_prefix = event.bucket.input
output_prefix = event.bucket.output
let key = event.object.key
width = event.object.width
height = event.object.height
Comment on lines +9 to +14
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

Critical: Missing variable declarations create implicit globals.

Variables bucket, input_prefix, output_prefix, width, and height are assigned without const, let, or var declarations. This creates implicit global variables, which can lead to:

  • Unintended side effects and difficult-to-debug issues
  • Pollution of the global scope
  • Potential conflicts in concurrent execution contexts
🐛 Proposed fix
-  bucket = event.bucket.bucket
-  input_prefix = event.bucket.input
-  output_prefix = event.bucket.output
+  const bucket = event.bucket.bucket;
+  const input_prefix = event.bucket.input;
+  const output_prefix = event.bucket.output;
   let key = event.object.key
-  width = event.object.width
-  height = event.object.height
+  const width = event.object.width;
+  const height = event.object.height;
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
bucket = event.bucket.bucket
input_prefix = event.bucket.input
output_prefix = event.bucket.output
let key = event.object.key
width = event.object.width
height = event.object.height
const bucket = event.bucket.bucket;
const input_prefix = event.bucket.input;
const output_prefix = event.bucket.output;
let key = event.object.key
const width = event.object.width;
const height = event.object.height;
🤖 Prompt for AI Agents
In @benchmarks/200.multimedia/210.thumbnailer/bun/function.js around lines 9 -
14, The assignments in the thumbnailer entry (the block that sets bucket =
event.bucket.bucket, input_prefix = event.bucket.input, output_prefix =
event.bucket.output, and width/height from event.object) are missing
declarations and create implicit globals; fix by declaring them with appropriate
keywords (e.g., const bucket = event.bucket.bucket, const input_prefix =
event.bucket.input, const output_prefix = event.bucket.output, and use const or
let for width and height depending on whether they are reassigned) in the same
location (the top of the function handling the event) so they are block-scoped
and do not pollute the global scope.

let pos = key.lastIndexOf('.');
let upload_key = key.substr(0, pos < 0 ? key.length : pos) + '.png';

const sharp_resizer = sharp().resize(width, height).png();
let read_promise = storage_handler.downloadStream(bucket, path.join(input_prefix, key));
let [writeStream, promise, uploadName] = storage_handler.uploadStream(bucket, path.join(output_prefix, upload_key));
read_promise.then(
(input_stream) => {
input_stream.pipe(sharp_resizer).pipe(writeStream);
}
);
await promise;
return {bucket: output_prefix, key: uploadName}
};
10 changes: 10 additions & 0 deletions benchmarks/200.multimedia/210.thumbnailer/bun/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
{
"name": "",
"version": "1.0.0",
"description": "",
"author": "",
"license": "",
"dependencies": {
"sharp": "0.32.6"
}
Comment on lines +7 to +9
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

🧩 Analysis chain

🌐 Web query:

sharp version 0.32.6 Bun compatibility native dependencies

💡 Result:

Short answer: [email protected] can work in Bun, but it depends on Bun running sharp’s native install steps (prebuilt libvips or a native build). Bun has strong Node-API/N‑API support and now reports a Node version compatible with recent prebuilt binaries, and Bun’s package manager will run known postinstall scripts (including optimizations for sharp) or can be told to trust the package. If Bun does not run the lifecycle script, sharp’s install will fail and you’ll need to allow the lifecycle script or build libvips locally.

What to do

  • Try: bun add sharp (or bun install). Bun includes optimizations for popular packages (sharp) so prebuilt libvips is often downloaded automatically. [1][2]
  • If install fails with missing prebuilt binaries, add "sharp" to package.json trustedDependencies (or run bun pm trust sharp) so Bun runs the package lifecycle (postinstall) that fetches/copies libvips. [2]
  • If you must build from source, sharp requires a C++17 toolchain, node-gyp/node-addon-api and libvips — be prepared to install those build deps. [1]
  • Note occasional edge cases: users have reported install/path errors with [email protected] when custom binary hosts or nonstandard installs are used. If you hit that, inspect the install logs and either provide a reachable sharp_dist_base_url or build locally. [4][1]

Why this works

  • sharp ships a Node-API native addon plus prebuilt libvips binaries; Bun has substantially improved Node-API/native-addon compatibility and reports a recent Node version (helpful for prebuilt binary selection). [1][3]

Sources

  • sharp installation docs (prebuilt libvips, build-from-source requirements). [1]
  • Bun package-manager lifecycle / trustedDependencies docs (how Bun runs or blocks dependency lifecycle scripts; optimizations for popular packages). [2]
  • Bun release notes about Node-API / reported Node version improvements. [3]
  • Example GitHub issue showing [email protected] install errors in practice (illustrates failure modes). [4]

Ensure sharp's postinstall script runs in Bun.

The sharp library (version 0.32.6) requires postinstall scripts to download or build prebuilt libvips binaries. Bun may need explicit configuration: either add "sharp" to trustedDependencies in package.json or run bun pm trust sharp to allow the lifecycle script to execute. If this is not done, the installation may fail. Additionally, there are occasional edge cases reported with sharp@0.32.6 (e.g., custom binary hosts or nonstandard setups); if issues arise, check the install logs and ensure sharp_dist_base_url is correctly configured or build locally.

🤖 Prompt for AI Agents
In @benchmarks/200.multimedia/210.thumbnailer/bun/package.json around lines 7 -
9, The sharp dependency ("sharp": "0.32.6") needs Bun to allow its postinstall
lifecycle script; either add "sharp" to package.json's trustedDependencies array
(so Bun runs postinstall) or instruct the environment to run `bun pm trust
sharp` before install, and if installation still fails check install logs and
configure sharp_dist_base_url or build libvips locally for custom binary hosts;
update package.json or CI setup accordingly to ensure sharp's postinstall
executes under Bun.

}
10 changes: 8 additions & 2 deletions benchmarks/200.multimedia/210.thumbnailer/config.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,12 @@
{
"timeout": 60,
"memory": 256,
"languages": ["python", "nodejs"],
"modules": ["storage"]
"languages": [
"python",
"nodejs",
"bun"
],
"modules": [
"storage"
]
}
47 changes: 47 additions & 0 deletions benchmarks/wrappers/aws/bun/handler.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@

const path = require('path'), fs = require('fs');

function process_output(data, http_trigger) {
if(http_trigger)
return JSON.stringify(data);
else
return data;
}

exports.handler = async function(event, context) {
var begin = Date.now()/1000;
var start = process.hrtime();
var http_trigger = "body" in event;
var input_data = http_trigger ? JSON.parse(event.body) : event
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Add error handling for JSON parsing.

JSON.parse can throw a SyntaxError if event.body contains malformed JSON. This would cause the handler to fail without returning a proper error response.

🛡️ Proposed fix
-  var input_data = http_trigger ? JSON.parse(event.body) : event
+  var input_data = http_trigger ? (() => {
+    try {
+      return JSON.parse(event.body);
+    } catch (e) {
+      throw new Error(`Invalid JSON in request body: ${e.message}`);
+    }
+  })() : event
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
var input_data = http_trigger ? JSON.parse(event.body) : event
var input_data = http_trigger ? (() => {
try {
return JSON.parse(event.body);
} catch (e) {
throw new Error(`Invalid JSON in request body: ${e.message}`);
}
})() : event
🤖 Prompt for AI Agents
In @benchmarks/wrappers/aws/bun/handler.js at line 15, Wrap the JSON.parse call
that assigns input_data when http_trigger is true in a try/catch to handle
malformed JSON; catch the SyntaxError from parsing event.body, log or record the
parsing error, and return a proper HTTP 400/Bad Request response (or set an
appropriate error payload) instead of letting the handler throw. Specifically,
update the code around the input_data assignment (the http_trigger conditional
that uses JSON.parse(event.body)) to try parsing, catch errors, and handle them
with a clear error response and logging.

var func = require('./function/function')
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion | 🟠 Major

Move require statement to module level.

Loading the function module inside the handler adds unnecessary overhead to every invocation. In AWS Lambda, the module scope is preserved between invocations in warm containers, so requiring at the top level improves performance.

⚡ Proposed refactor

Move the require to the top of the file:

 const path = require('path'), fs = require('fs');
+const func = require('./function/function');

 function process_output(data, http_trigger) {
   if(http_trigger)
     return JSON.stringify(data);
   else
     return data;
 }

 exports.handler = async function(event, context) {
   var begin = Date.now()/1000;
   var start = process.hrtime();
   var http_trigger = "body" in event;
   var input_data = http_trigger ? JSON.parse(event.body) : event
-  var func = require('./function/function')
   var ret = func.handler(input_data);
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
var func = require('./function/function')
const path = require('path'), fs = require('fs');
const func = require('./function/function');
function process_output(data, http_trigger) {
if(http_trigger)
return JSON.stringify(data);
else
return data;
}
exports.handler = async function(event, context) {
var begin = Date.now()/1000;
var start = process.hrtime();
var http_trigger = "body" in event;
var input_data = http_trigger ? JSON.parse(event.body) : event
var ret = func.handler(input_data);
🤖 Prompt for AI Agents
In @benchmarks/wrappers/aws/bun/handler.js at line 16, The
require('./function/function') call is inside the handler and causes
per-invocation overhead; move it to module scope by hoisting the var func =
require('./function/function') to the top of the file (outside the exported
handler function) so the module is loaded once and reused across warm AWS Lambda
invocations, leaving the handler (e.g., the exported function that currently
references func) unchanged.

var ret = func.handler(input_data);
return ret.then(
(result) => {
var elapsed = process.hrtime(start);
var end = Date.now()/1000;
var micro = elapsed[1] / 1e3 + elapsed[0] * 1e6;

var is_cold = false;
var fname = path.join('/tmp','cold_run');
if(!fs.existsSync(fname)) {
is_cold = true;
fs.closeSync(fs.openSync(fname, 'w'));
}
return {
statusCode: 200,
body: process_output({
begin: begin,
end: end,
compute_time: micro,
results_time: 0,
result: {output: result},
is_cold: is_cold,
request_id: context.awsRequestId
}, http_trigger)
};
},
(error) => {
throw(error);
}
);
}
40 changes: 40 additions & 0 deletions benchmarks/wrappers/aws/bun/runtime.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
/**
* Custom runtime while loop for AWS lambda.
* Listens for function events, executes handler, and returns results.
*
* ENV variables based on https://docs.aws.amazon.com/lambda/latest/dg/configuration-envvars.html#configuration-envvars-runtime
* API endpoints based on https://docs.aws.amazon.com/lambda/latest/dg/runtimes-api.html
*/

import { handler } from "./handler.js";

const RUNTIME_API = process.env.AWS_LAMBDA_RUNTIME_API;
const API_BASE = `http://${RUNTIME_API}/2018-06-01/runtime`;

while (true) {
const nextResponse = await fetch(`${API_BASE}/invocation/next`);
const event = await nextResponse.json();
const requestId = nextResponse.headers.get("Lambda-Runtime-Aws-Request-Id");
Comment on lines +15 to +17
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Add error handling for Lambda API invocation fetch.

The fetch to retrieve the next invocation lacks error handling. Network failures or API errors will crash the runtime loop, causing the Lambda function to fail permanently.

🛡️ Proposed fix with error handling
-  const nextResponse = await fetch(`${API_BASE}/invocation/next`);
-  const event = await nextResponse.json();
-  const requestId = nextResponse.headers.get("Lambda-Runtime-Aws-Request-Id");
+  let nextResponse, event, requestId;
+  try {
+    nextResponse = await fetch(`${API_BASE}/invocation/next`);
+    event = await nextResponse.json();
+    requestId = nextResponse.headers.get("Lambda-Runtime-Aws-Request-Id");
+  } catch (error) {
+    console.error("Failed to fetch next invocation:", error);
+    // Brief delay before retrying to avoid tight loop on persistent errors
+    await new Promise(resolve => setTimeout(resolve, 1000));
+    continue;
+  }
🤖 Prompt for AI Agents
In @benchmarks/wrappers/aws/bun/runtime.js around lines 15 - 17, The fetch call
to `${API_BASE}/invocation/next` (producing nextResponse, event, requestId)
lacks error handling and can crash the runtime loop; wrap the fetch and
subsequent nextResponse.json()/headers access in a try/catch, detect non-OK HTTP
responses (nextResponse.ok) and throw or handle them, and on any error log the
failure (including the error and request URL), optionally wait/retry or continue
the loop so the runtime doesn't exit; ensure the code that uses event and
requestId only runs when nextResponse was successfully retrieved and parsed.


// NOTE: if the handler needs more context fields, they can be added here.
const context = { awsRequestId: requestId };

try {
// Invoke the user handler with the decoded event and minimal context.
const response = await handler(event, context);

// Report the handler's result back to the runtime API for this invocation.
await fetch(`${API_BASE}/invocation/${requestId}/response`, {
method: "POST",
body: JSON.stringify(response),
});
} catch (error) {
console.error(error);
// Report the failure so Lambda records this invocation as errored.
await fetch(`${API_BASE}/invocation/${requestId}/error`, {
method: "POST",
body: JSON.stringify({
errorMessage: error.message,
errorType: "Runtime.UserCodeError",
// stack may be absent on non-Error throwables — fall back to an empty list
stackTrace: error.stack ? error.stack.split("\n") : [],
}),
});
Comment on lines +25 to +38
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Add error handling for response/error posting.

The fetch calls that post responses and errors lack error handling. If these POST requests fail, the runtime will crash instead of continuing to process subsequent invocations.

🛡️ Proposed fix with error handling
  try {
    const response = await handler(event, context);

-    await fetch(`${API_BASE}/invocation/${requestId}/response`, {
-      method: "POST",
-      body: JSON.stringify(response),
-    });
+    try {
+      await fetch(`${API_BASE}/invocation/${requestId}/response`, {
+        method: "POST",
+        body: JSON.stringify(response),
+      });
+    } catch (postError) {
+      console.error("Failed to post response:", postError);
+    }
  } catch (error) {
    console.error(error);
-    await fetch(`${API_BASE}/invocation/${requestId}/error`, {
-      method: "POST",
-      body: JSON.stringify({
-        errorMessage: error.message,
-        errorType: "Runtime.UserCodeError",
-        stackTrace: error.stack ? error.stack.split("\n") : [],
-      }),
-    });
+    try {
+      await fetch(`${API_BASE}/invocation/${requestId}/error`, {
+        method: "POST",
+        body: JSON.stringify({
+          errorMessage: error.message,
+          errorType: "Runtime.UserCodeError",
+          stackTrace: error.stack ? error.stack.split("\n") : [],
+        }),
+      });
+    } catch (postError) {
+      console.error("Failed to post error:", postError);
+    }
  }
🤖 Prompt for AI Agents
In @benchmarks/wrappers/aws/bun/runtime.js around lines 25 - 38, The POSTs to
`${API_BASE}/invocation/${requestId}/response` and
`${API_BASE}/invocation/${requestId}/error` are awaited without error handling
so any network failure will throw and crash the runtime; wrap each await fetch
that sends the response and the error payload in its own try/catch (or append a
.catch) so failures are logged (include requestId and endpoint/context) but do
not rethrow, ensuring the runtime continues processing subsequent invocations;
reference the API_BASE variable, the requestId, response object and the caught
error to produce clear log messages.

}
}
50 changes: 50 additions & 0 deletions benchmarks/wrappers/aws/bun/storage.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@

const aws = require('aws-sdk'),
fs = require('fs'),
path = require('path'),
uuid = require('uuid'),
util = require('util'),
stream = require('stream');

class aws_storage {

// Holds a single AWS SDK v2 S3 client reused by all storage operations.
constructor() {
this.S3 = new aws.S3();
}

unique_name(file) {
let name = path.parse(file);
let uuid_name = uuid.v4().split('-')[0];
return path.join(name.dir, util.format('%s.%s%s', name.name, uuid_name, name.ext));
}

upload(bucket, file, filepath) {
var upload_stream = fs.createReadStream(filepath);
let uniqueName = this.unique_name(file);
let params = {Bucket: bucket, Key: uniqueName, Body: upload_stream};
var upload = this.S3.upload(params);
return [uniqueName, upload.promise()];
};

download(bucket, file, filepath) {
var file = fs.createWriteStream(filepath);
this.S3.getObject( {Bucket: bucket, Key: file} ).createReadStream().pipe(file);
};
Comment on lines +29 to +32
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

Critical: Variable shadowing breaks the download function.

Line 30 redeclares file as the write stream, shadowing the function parameter that contains the S3 key. When line 31 attempts to use file as the S3 key, it will pass the WriteStream object instead, causing S3 getObject to fail.

🐛 Proposed fix
 download(bucket, file, filepath) {
-  var file = fs.createWriteStream(filepath);
-  this.S3.getObject( {Bucket: bucket, Key: file} ).createReadStream().pipe(file);
+  var writeStream = fs.createWriteStream(filepath);
+  this.S3.getObject( {Bucket: bucket, Key: file} ).createReadStream().pipe(writeStream);
 };
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
download(bucket, file, filepath) {
var file = fs.createWriteStream(filepath);
this.S3.getObject( {Bucket: bucket, Key: file} ).createReadStream().pipe(file);
};
download(bucket, file, filepath) {
var writeStream = fs.createWriteStream(filepath);
this.S3.getObject( {Bucket: bucket, Key: file} ).createReadStream().pipe(writeStream);
};
🧰 Tools
🪛 Biome (2.1.2)

[error] 30-30: Shouldn't redeclare 'file'. Consider to delete it or rename it.

'file' is defined here:

(lint/suspicious/noRedeclare)

🤖 Prompt for AI Agents
In @benchmarks/wrappers/aws/bun/storage.js around lines 29 - 32, The download
function is shadowing the parameter file with a local var file (the write
stream), so this.S3.getObject is receiving the WriteStream instead of the S3
key; rename the local write stream variable (e.g., writeStream or dest) and use
the original file parameter as the Key in the this.S3.getObject call, and prefer
let/const for the stream variable in the download method to avoid future
shadowing.


uploadStream(bucket, file) {
var write_stream = new stream.PassThrough();
let uniqueName = this.unique_name(file);
// putObject won't work correctly for streamed data (length has to be known before)
// https://stackoverflow.com/questions/38442512/difference-between-upload-and-putobject-for-uploading-a-file-to-s3
var upload = this.S3.upload( {Bucket: bucket, Key: uniqueName, Body: write_stream} );
return [write_stream, upload.promise(), uniqueName];
};

// We return a promise to match the API for other providers
downloadStream(bucket, file) {
// AWS.Request -> read stream
let downloaded = this.S3.getObject( {Bucket: bucket, Key: file} ).createReadStream();
return Promise.resolve(downloaded);
};
};
exports.storage = aws_storage;
Loading