diff --git a/.gitignore b/.gitignore
index 274165ed8..e548cc581 100644
--- a/.gitignore
+++ b/.gitignore
@@ -189,4 +189,13 @@ cache
*.iml
# MacOS Finder
-**/.DS_Store
\ No newline at end of file
+**/.DS_Store
+
+# Local benchmarks
+out_benchmark.json
+out_storage.json
+local_deployment.json
+experiments.json
+
+# Editors
+.vscode
diff --git a/benchmarks/000.microbenchmarks/010.sleep/bun/function.js b/benchmarks/000.microbenchmarks/010.sleep/bun/function.js
new file mode 100644
index 000000000..26960a0c5
--- /dev/null
+++ b/benchmarks/000.microbenchmarks/010.sleep/bun/function.js
@@ -0,0 +1,6 @@
+const timer = ms => new Promise( res => setTimeout(res, ms));
+
+exports.handler = async function(event) {
+ var sleep = event.sleep;
+ return timer(sleep*1000);
+};
diff --git a/benchmarks/000.microbenchmarks/010.sleep/bun/package.json b/benchmarks/000.microbenchmarks/010.sleep/bun/package.json
new file mode 100644
index 000000000..967cd8b7a
--- /dev/null
+++ b/benchmarks/000.microbenchmarks/010.sleep/bun/package.json
@@ -0,0 +1,9 @@
+{
+ "name": "",
+ "version": "1.0.0",
+ "description": "",
+ "author": "",
+ "license": "",
+ "dependencies": {
+ }
+}
diff --git a/benchmarks/000.microbenchmarks/010.sleep/config.json b/benchmarks/000.microbenchmarks/010.sleep/config.json
index 93ce2f561..478754526 100644
--- a/benchmarks/000.microbenchmarks/010.sleep/config.json
+++ b/benchmarks/000.microbenchmarks/010.sleep/config.json
@@ -1,6 +1,10 @@
{
"timeout": 120,
"memory": 128,
- "languages": ["python", "nodejs"],
+ "languages": [
+ "python",
+ "nodejs",
+ "bun"
+ ],
"modules": []
}
diff --git a/benchmarks/100.webapps/110.dynamic-html/bun/function.js b/benchmarks/100.webapps/110.dynamic-html/bun/function.js
new file mode 100644
index 000000000..65dd7760e
--- /dev/null
+++ b/benchmarks/100.webapps/110.dynamic-html/bun/function.js
@@ -0,0 +1,29 @@
+const Mustache = require('mustache'),
+ fs = require('fs'),
+ path = require('path');
+
+function random(b, e) {
+ return Math.round(Math.random() * (e - b) + b);
+}
+
+exports.handler = async function(event) {
+ var random_numbers = new Array(event.random_len);
+ for(var i = 0; i < event.random_len; ++i) {
+ random_numbers[i] = random(0, 100);
+ }
+ var input = {
+ cur_time: new Date().toLocaleString(),
+ username: event.username,
+ random_numbers: random_numbers
+ };
+
+ var file = path.resolve(__dirname, 'templates', 'template.html');
+ return new Promise((resolve, reject) => {
+ fs.readFile(file, "utf-8",
+ function(err, data) {
+ if(err) reject(err);
+ resolve(Mustache.render(data, input));
+ }
+ );
+ });
+};
diff --git a/benchmarks/100.webapps/110.dynamic-html/bun/init.sh b/benchmarks/100.webapps/110.dynamic-html/bun/init.sh
new file mode 100755
index 000000000..7b047bff1
--- /dev/null
+++ b/benchmarks/100.webapps/110.dynamic-html/bun/init.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+DIR=$1
+VERBOSE=$2
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+path="${SCRIPT_DIR}/templates/"
+if [ "$VERBOSE" = true ]; then
+ echo "Update ${DIR} with static templates ${path}"
+fi
+cp -r ${SCRIPT_DIR}/templates ${DIR}
diff --git a/benchmarks/100.webapps/110.dynamic-html/bun/package.json b/benchmarks/100.webapps/110.dynamic-html/bun/package.json
new file mode 100644
index 000000000..ad7fad86b
--- /dev/null
+++ b/benchmarks/100.webapps/110.dynamic-html/bun/package.json
@@ -0,0 +1,10 @@
+{
+ "name": "",
+ "version": "1.0.0",
+ "description": "",
+ "author": "",
+ "license": "",
+ "dependencies": {
+ "mustache": "^3.2.1"
+ }
+}
diff --git a/benchmarks/100.webapps/110.dynamic-html/bun/templates/template.html b/benchmarks/100.webapps/110.dynamic-html/bun/templates/template.html
new file mode 100644
index 000000000..46199563c
--- /dev/null
+++ b/benchmarks/100.webapps/110.dynamic-html/bun/templates/template.html
@@ -0,0 +1,26 @@
+
+
+
+ Randomly generated data.
+
+
+
+
+
+
+
Welcome {{username}}!
+
Data generated at: {{cur_time}}!
+
Requested random numbers:
+
+ {{#random_numbers}}
+ - {{.}}
+ {{/random_numbers}}
+
+
+
+
diff --git a/benchmarks/100.webapps/110.dynamic-html/config.json b/benchmarks/100.webapps/110.dynamic-html/config.json
index 25254c247..b8481f442 100644
--- a/benchmarks/100.webapps/110.dynamic-html/config.json
+++ b/benchmarks/100.webapps/110.dynamic-html/config.json
@@ -1,6 +1,10 @@
{
"timeout": 10,
"memory": 128,
- "languages": ["python", "nodejs"],
+ "languages": [
+ "python",
+ "nodejs",
+ "bun"
+ ],
"modules": []
}
diff --git a/benchmarks/100.webapps/120.uploader/bun/function.js b/benchmarks/100.webapps/120.uploader/bun/function.js
new file mode 100644
index 000000000..2dd3a6503
--- /dev/null
+++ b/benchmarks/100.webapps/120.uploader/bun/function.js
@@ -0,0 +1,36 @@
+const fs = require('fs'),
+ path = require('path'),
+ request = require('request'),
+ storage = require('./storage');
+
+let storage_handler = new storage.storage();
+
+function streamToPromise(stream) {
+ return new Promise(function(resolve, reject) {
+ stream.on("close", () => {
+ resolve();
+ });
+ stream.on("error", reject);
+ })
+}
+
+exports.handler = async function(event) {
+ let bucket = event.bucket.bucket
+ let output_prefix = event.bucket.output
+ let url = event.object.url
+ let upload_key = path.basename(url)
+ let download_path = path.join('/tmp', upload_key)
+
+ var file = fs.createWriteStream(download_path);
+ request(url).pipe(file);
+ let promise = streamToPromise(file);
+ var keyName;
+ let upload = promise.then(
+ async () => {
+ [keyName, promise] = storage_handler.upload(bucket, path.join(output_prefix, upload_key), download_path);
+ await promise;
+ }
+ );
+ await upload;
+ return {bucket: bucket, url: url, key: keyName}
+};
diff --git a/benchmarks/100.webapps/120.uploader/bun/package.json b/benchmarks/100.webapps/120.uploader/bun/package.json
new file mode 100644
index 000000000..7dcc22b1d
--- /dev/null
+++ b/benchmarks/100.webapps/120.uploader/bun/package.json
@@ -0,0 +1,10 @@
+{
+ "name": "",
+ "version": "1.0.0",
+ "description": "",
+ "author": "",
+ "license": "",
+ "dependencies": {
+ "request": "^2.88.0"
+ }
+}
diff --git a/benchmarks/100.webapps/120.uploader/config.json b/benchmarks/100.webapps/120.uploader/config.json
index cbc635670..5a97742f9 100644
--- a/benchmarks/100.webapps/120.uploader/config.json
+++ b/benchmarks/100.webapps/120.uploader/config.json
@@ -1,6 +1,12 @@
{
"timeout": 30,
"memory": 128,
- "languages": ["python", "nodejs"],
- "modules": ["storage"]
+ "languages": [
+ "python",
+ "nodejs",
+ "bun"
+ ],
+ "modules": [
+ "storage"
+ ]
}
diff --git a/benchmarks/200.multimedia/210.thumbnailer/bun/function.js b/benchmarks/200.multimedia/210.thumbnailer/bun/function.js
new file mode 100644
index 000000000..d4171dea1
--- /dev/null
+++ b/benchmarks/200.multimedia/210.thumbnailer/bun/function.js
@@ -0,0 +1,28 @@
+const sharp = require('sharp'),
+ path = require('path'),
+ storage = require('./storage');
+
+let storage_handler = new storage.storage();
+
+exports.handler = async function(event) {
+
+ bucket = event.bucket.bucket
+ input_prefix = event.bucket.input
+ output_prefix = event.bucket.output
+ let key = event.object.key
+ width = event.object.width
+ height = event.object.height
+ let pos = key.lastIndexOf('.');
+ let upload_key = key.substr(0, pos < 0 ? key.length : pos) + '.png';
+
+ const sharp_resizer = sharp().resize(width, height).png();
+ let read_promise = storage_handler.downloadStream(bucket, path.join(input_prefix, key));
+ let [writeStream, promise, uploadName] = storage_handler.uploadStream(bucket, path.join(output_prefix, upload_key));
+ read_promise.then(
+ (input_stream) => {
+ input_stream.pipe(sharp_resizer).pipe(writeStream);
+ }
+ );
+ await promise;
+ return {bucket: output_prefix, key: uploadName}
+};
diff --git a/benchmarks/200.multimedia/210.thumbnailer/bun/package.json b/benchmarks/200.multimedia/210.thumbnailer/bun/package.json
new file mode 100644
index 000000000..7b69df5aa
--- /dev/null
+++ b/benchmarks/200.multimedia/210.thumbnailer/bun/package.json
@@ -0,0 +1,10 @@
+{
+ "name": "",
+ "version": "1.0.0",
+ "description": "",
+ "author": "",
+ "license": "",
+ "dependencies": {
+ "sharp": "0.32.6"
+ }
+}
diff --git a/benchmarks/200.multimedia/210.thumbnailer/config.json b/benchmarks/200.multimedia/210.thumbnailer/config.json
index 8edb99e52..68f36e4e4 100644
--- a/benchmarks/200.multimedia/210.thumbnailer/config.json
+++ b/benchmarks/200.multimedia/210.thumbnailer/config.json
@@ -1,6 +1,12 @@
{
"timeout": 60,
"memory": 256,
- "languages": ["python", "nodejs"],
- "modules": ["storage"]
+ "languages": [
+ "python",
+ "nodejs",
+ "bun"
+ ],
+ "modules": [
+ "storage"
+ ]
}
diff --git a/benchmarks/wrappers/aws/bun/handler.js b/benchmarks/wrappers/aws/bun/handler.js
new file mode 100644
index 000000000..1138de026
--- /dev/null
+++ b/benchmarks/wrappers/aws/bun/handler.js
@@ -0,0 +1,47 @@
+
+const path = require('path'), fs = require('fs');
+
+function process_output(data, http_trigger) {
+ if(http_trigger)
+ return JSON.stringify(data);
+ else
+ return data;
+}
+
+exports.handler = async function(event, context) {
+ var begin = Date.now()/1000;
+ var start = process.hrtime();
+ var http_trigger = "body" in event;
+ var input_data = http_trigger ? JSON.parse(event.body) : event
+ var func = require('./function/function')
+ var ret = func.handler(input_data);
+ return ret.then(
+ (result) => {
+ var elapsed = process.hrtime(start);
+ var end = Date.now()/1000;
+ var micro = elapsed[1] / 1e3 + elapsed[0] * 1e6;
+
+ var is_cold = false;
+ var fname = path.join('/tmp','cold_run');
+ if(!fs.existsSync(fname)) {
+ is_cold = true;
+ fs.closeSync(fs.openSync(fname, 'w'));
+ }
+ return {
+ statusCode: 200,
+ body: process_output({
+ begin: begin,
+ end: end,
+ compute_time: micro,
+ results_time: 0,
+ result: {output: result},
+ is_cold: is_cold,
+ request_id: context.awsRequestId
+ }, http_trigger)
+ };
+ },
+ (error) => {
+ throw(error);
+ }
+ );
+}
diff --git a/benchmarks/wrappers/aws/bun/runtime.js b/benchmarks/wrappers/aws/bun/runtime.js
new file mode 100644
index 000000000..32f3d951c
--- /dev/null
+++ b/benchmarks/wrappers/aws/bun/runtime.js
@@ -0,0 +1,40 @@
+/**
+ * Custom runtime while loop for AWS lambda.
+ * Listens for function events, executes handler, and returns results.
+ *
+ * ENV variables based on https://docs.aws.amazon.com/lambda/latest/dg/configuration-envvars.html#configuration-envvars-runtime
+ * API endpoints based on https://docs.aws.amazon.com/lambda/latest/dg/runtimes-api.html
+ */
+
+import { handler } from "./handler.js";
+
+const RUNTIME_API = process.env.AWS_LAMBDA_RUNTIME_API;
+const API_BASE = `http://${RUNTIME_API}/2018-06-01/runtime`;
+
+while (true) {
+ const nextResponse = await fetch(`${API_BASE}/invocation/next`);
+ const event = await nextResponse.json();
+ const requestId = nextResponse.headers.get("Lambda-Runtime-Aws-Request-Id");
+
+ // NOTE: If more context is needed inside the handler, they can be added here
+ const context = { awsRequestId: requestId };
+
+ try {
+ const response = await handler(event, context);
+
+ await fetch(`${API_BASE}/invocation/${requestId}/response`, {
+ method: "POST",
+ body: JSON.stringify(response),
+ });
+ } catch (error) {
+ console.error(error);
+ await fetch(`${API_BASE}/invocation/${requestId}/error`, {
+ method: "POST",
+ body: JSON.stringify({
+ errorMessage: error.message,
+ errorType: "Runtime.UserCodeError",
+ stackTrace: error.stack ? error.stack.split("\n") : [],
+ }),
+ });
+ }
+}
diff --git a/benchmarks/wrappers/aws/bun/storage.js b/benchmarks/wrappers/aws/bun/storage.js
new file mode 100644
index 000000000..0b4407fc8
--- /dev/null
+++ b/benchmarks/wrappers/aws/bun/storage.js
@@ -0,0 +1,50 @@
+
+const aws = require('aws-sdk'),
+ fs = require('fs'),
+ path = require('path'),
+ uuid = require('uuid'),
+ util = require('util'),
+ stream = require('stream');
+
+class aws_storage {
+
+ constructor() {
+ this.S3 = new aws.S3();
+ }
+
+ unique_name(file) {
+ let name = path.parse(file);
+ let uuid_name = uuid.v4().split('-')[0];
+ return path.join(name.dir, util.format('%s.%s%s', name.name, uuid_name, name.ext));
+ }
+
+ upload(bucket, file, filepath) {
+ var upload_stream = fs.createReadStream(filepath);
+ let uniqueName = this.unique_name(file);
+ let params = {Bucket: bucket, Key: uniqueName, Body: upload_stream};
+ var upload = this.S3.upload(params);
+ return [uniqueName, upload.promise()];
+ };
+
+ download(bucket, file, filepath) {
+        var write_stream = fs.createWriteStream(filepath);
+        this.S3.getObject( {Bucket: bucket, Key: file} ).createReadStream().pipe(write_stream);
+ };
+
+ uploadStream(bucket, file) {
+ var write_stream = new stream.PassThrough();
+ let uniqueName = this.unique_name(file);
+ // putObject won't work correctly for streamed data (length has to be known before)
+ // https://stackoverflow.com/questions/38442512/difference-between-upload-and-putobject-for-uploading-a-file-to-s3
+ var upload = this.S3.upload( {Bucket: bucket, Key: uniqueName, Body: write_stream} );
+ return [write_stream, upload.promise(), uniqueName];
+ };
+
+ // We return a promise to match the API for other providers
+ downloadStream(bucket, file) {
+ // AWS.Request -> read stream
+ let downloaded = this.S3.getObject( {Bucket: bucket, Key: file} ).createReadStream();
+ return Promise.resolve(downloaded);
+ };
+};
+exports.storage = aws_storage;
diff --git a/benchmarks/wrappers/local/bun/storage.js b/benchmarks/wrappers/local/bun/storage.js
new file mode 100644
index 000000000..9fb9d45f5
--- /dev/null
+++ b/benchmarks/wrappers/local/bun/storage.js
@@ -0,0 +1,61 @@
+
+const minio = require('minio'),
+ path = require('path'),
+ uuid = require('uuid'),
+ util = require('util'),
+ stream = require('stream');
+
+class minio_storage {
+
+ constructor() {
+ let address = process.env.MINIO_ADDRESS;
+ let access_key = process.env.MINIO_ACCESS_KEY;
+ let secret_key = process.env.MINIO_SECRET_KEY;
+ this.client = new minio.Client(
+ {
+ endPoint: address.split(':')[0],
+ port: parseInt(address.split(':')[1], 10),
+ accessKey: access_key,
+ secretKey: secret_key,
+ useSSL: false
+ }
+ );
+ }
+
+ unique_name(file) {
+ let name = path.parse(file);
+ let uuid_name = uuid.v4().split('-')[0];
+ return path.join(name.dir, util.format('%s.%s%s', name.name, uuid_name, name.ext));
+ }
+
+ upload(bucket, file, filepath) {
+ let uniqueName = this.unique_name(file);
+ return [uniqueName, this.client.fPutObject(bucket, uniqueName, filepath)];
+ };
+
+ download(bucket, file, filepath) {
+ return this.client.fGetObject(bucket, file, filepath);
+ };
+
+ uploadStream(bucket, file) {
+ var write_stream = new stream.PassThrough();
+ let uniqueName = this.unique_name(file);
+ let promise = this.client.putObject(bucket, uniqueName, write_stream, write_stream.size);
+ return [write_stream, promise, uniqueName];
+ };
+
+ downloadStream(bucket, file) {
+ var read_stream = new stream.PassThrough();
+ return this.client.getObject(bucket, file);
+ };
+
+ static get_instance() {
+ if(!this.instance) {
+            this.instance = new minio_storage();
+ }
+ return this.instance;
+ }
+
+
+};
+exports.storage = minio_storage;
diff --git a/config/systems.json b/config/systems.json
index 5a38b4965..23dfb5381 100644
--- a/config/systems.json
+++ b/config/systems.json
@@ -64,10 +64,42 @@
],
"packages": []
}
+ },
+ "bun": {
+ "base_images": {
+ "x64": {
+ "1.2": "oven/bun:1.2",
+ "1.3": "oven/bun:1.3"
+ },
+ "arm64": {
+ "1.2": "oven/bun:1.2",
+ "1.3": "oven/bun:1.3"
+ }
+ },
+ "images": [
+ "run",
+ "build"
+ ],
+ "username": "docker_user",
+ "deployment": {
+ "files": [
+ "storage.js"
+ ],
+ "packages": {
+ "uuid": "3.4.0",
+ "strftime": "0.10.3",
+ "express": "5.2.1",
+ "minio": "7.0.16"
+ }
+ }
}
},
- "architecture": ["x64"],
- "deployments": ["package"]
+ "architecture": [
+ "x64"
+ ],
+ "deployments": [
+ "package"
+ ]
},
"aws": {
"languages": {
@@ -121,10 +153,42 @@
"uuid": "3.4.0"
}
}
+ },
+ "bun": {
+ "base_images": {
+ "x64": {
+ "1.2": "oven/bun:1.2",
+ "1.3": "oven/bun:1.3"
+ },
+ "arm64": {
+ "1.2": "oven/bun:1.2",
+ "1.3": "oven/bun:1.3"
+ }
+ },
+ "images": [
+ "build"
+ ],
+ "deployment": {
+ "files": [
+ "handler.js",
+ "storage.js",
+ "runtime.js"
+ ],
+ "packages": {
+ "uuid": "3.4.0",
+ "aws-sdk": "2.1693.0"
+ }
+ }
}
},
- "architecture": ["x64", "arm64"],
- "deployments": ["package", "container"]
+ "architecture": [
+ "x64",
+ "arm64"
+ ],
+ "deployments": [
+ "package",
+ "container"
+ ]
},
"azure": {
"languages": {
@@ -188,8 +252,12 @@
"username": "docker_user"
}
},
- "architecture": ["x64"],
- "deployments": ["package"]
+ "architecture": [
+ "x64"
+ ],
+ "deployments": [
+ "package"
+ ]
},
"gcp": {
"languages": {
@@ -252,8 +320,12 @@
"username": "docker_user"
}
},
- "architecture": ["x64"],
- "deployments": ["package"]
+ "architecture": [
+ "x64"
+ ],
+ "deployments": [
+ "package"
+ ]
},
"openwhisk": {
"languages": {
@@ -313,7 +385,11 @@
}
}
},
- "architecture": ["x64"],
- "deployments": ["container"]
+ "architecture": [
+ "x64"
+ ],
+ "deployments": [
+ "container"
+ ]
}
}
diff --git a/dockerfiles/aws/bun/Dockerfile.build b/dockerfiles/aws/bun/Dockerfile.build
new file mode 100644
index 000000000..34c77fae9
--- /dev/null
+++ b/dockerfiles/aws/bun/Dockerfile.build
@@ -0,0 +1,15 @@
+ARG BASE_IMAGE
+FROM ${BASE_IMAGE}
+
+COPY --from=tianon/gosu:1.19-debian /usr/local/bin/gosu /usr/local/bin/gosu
+RUN apt-get update && apt-get install -y curl unzip
+
+RUN mkdir -p /sebs/
+COPY dockerfiles/bun_installer.sh /sebs/installer.sh
+COPY dockerfiles/entrypoint.sh /sebs/entrypoint.sh
+RUN chmod +x /sebs/entrypoint.sh
+
+# useradd and groupmod is installed in /usr/sbin which is not in PATH
+ENV SCRIPT_FILE=/mnt/function/package.sh
+CMD /bin/bash /sebs/installer.sh
+ENTRYPOINT ["/sebs/entrypoint.sh"]
diff --git a/dockerfiles/aws/bun/Dockerfile.function b/dockerfiles/aws/bun/Dockerfile.function
new file mode 100644
index 000000000..1195015c0
--- /dev/null
+++ b/dockerfiles/aws/bun/Dockerfile.function
@@ -0,0 +1,8 @@
+ARG BASE_IMAGE
+FROM $BASE_IMAGE
+
+WORKDIR /app
+COPY . function/
+COPY runtime.js handler.js ./
+
+ENTRYPOINT [ "bun", "--bun", "runtime.js" ]
diff --git a/dockerfiles/bun_installer.sh b/dockerfiles/bun_installer.sh
new file mode 100644
index 000000000..549b130f2
--- /dev/null
+++ b/dockerfiles/bun_installer.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+cd /mnt/function
+
+if [ "${TARGET_ARCHITECTURE}" == "arm64" ]; then
+ bun install --arch=arm64
+ BUN_ARCH_URL="https://github.com/oven-sh/bun/releases/latest/download/bun-linux-aarch64.zip"
+elif [ "${TARGET_ARCHITECTURE}" = "x64" ]; then
+ bun install --arch=x64 --platform=linux
+ BUN_ARCH_URL="https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip"
+else
+ echo "Unsupported architecture: $TARGET_ARCHITECTURE"
+ exit 1
+fi
+
+# install bun directly
+curl -L -o bun.zip $BUN_ARCH_URL
+unzip -j bun.zip */bun
+chmod +x bun
+rm bun.zip
+
+rm -f bun.lock
+# moves to correct directory on AWS if needed
+echo -e '#!/bin/bash\nif [ -n "$LAMBDA_TASK_ROOT" ]; then cd "$LAMBDA_TASK_ROOT"; fi\n./bun --bun runtime.js' > bootstrap
+chmod +x bootstrap
diff --git a/dockerfiles/local/bun/Dockerfile.build b/dockerfiles/local/bun/Dockerfile.build
new file mode 100755
index 000000000..34c77fae9
--- /dev/null
+++ b/dockerfiles/local/bun/Dockerfile.build
@@ -0,0 +1,15 @@
+ARG BASE_IMAGE
+FROM ${BASE_IMAGE}
+
+COPY --from=tianon/gosu:1.19-debian /usr/local/bin/gosu /usr/local/bin/gosu
+RUN apt-get update && apt-get install -y curl unzip
+
+RUN mkdir -p /sebs/
+COPY dockerfiles/bun_installer.sh /sebs/installer.sh
+COPY dockerfiles/entrypoint.sh /sebs/entrypoint.sh
+RUN chmod +x /sebs/entrypoint.sh
+
+# useradd and groupmod is installed in /usr/sbin which is not in PATH
+ENV SCRIPT_FILE=/mnt/function/package.sh
+CMD /bin/bash /sebs/installer.sh
+ENTRYPOINT ["/sebs/entrypoint.sh"]
diff --git a/dockerfiles/local/bun/Dockerfile.run b/dockerfiles/local/bun/Dockerfile.run
new file mode 100755
index 000000000..51a6c1914
--- /dev/null
+++ b/dockerfiles/local/bun/Dockerfile.run
@@ -0,0 +1,30 @@
+ARG BASE_IMAGE
+FROM ${BASE_IMAGE}
+
+WORKDIR /sebs
+
+ARG DEBIAN_FRONTEND=noninteractive
+RUN deps=''\
+ && apt-get update\
+ && apt-get install -y --no-install-recommends curl net-tools gosu python3 sudo ${deps}\
+ && apt-get purge -y --auto-remove ${deps}
+
+# function node_modules includes both the package specific and local-required packages
+ENV NODE_PATH=/function/node_modules
+
+COPY dockerfiles/local/*.py ./
+COPY dockerfiles/local/run.sh .
+COPY dockerfiles/local/nodejs/*.js ./
+COPY dockerfiles/local/nodejs/package.json .
+
+# bun specific files
+COPY dockerfiles/local/bun/*.js ./
+COPY dockerfiles/local/bun/run_server.sh .
+COPY dockerfiles/local/bun/timeit.sh .
+COPY dockerfiles/local/bun/runners.json .
+
+COPY dockerfiles/local/entrypoint.sh entrypoint.sh
+RUN chmod +x entrypoint.sh
+RUN chmod +x run.sh
+
+ENTRYPOINT ["/sebs/entrypoint.sh"]
diff --git a/dockerfiles/local/bun/run_server.sh b/dockerfiles/local/bun/run_server.sh
new file mode 100755
index 000000000..c4dd91fcd
--- /dev/null
+++ b/dockerfiles/local/bun/run_server.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+cd /sebs
+bun /sebs/server.js "$@"
diff --git a/dockerfiles/local/bun/runners.json b/dockerfiles/local/bun/runners.json
new file mode 100644
index 000000000..aafb17bf8
--- /dev/null
+++ b/dockerfiles/local/bun/runners.json
@@ -0,0 +1,6 @@
+{
+ "time" : {"warm" : "time-in-proc.js", "cold" : "time-out-proc.py"},
+ "memory": "analyzer-runner.js",
+ "disk-io": "analyzer-runner.js",
+ "config": ["bun", "config.js"]
+}
diff --git a/dockerfiles/local/bun/server.js b/dockerfiles/local/bun/server.js
new file mode 100644
index 000000000..c98b3fa72
--- /dev/null
+++ b/dockerfiles/local/bun/server.js
@@ -0,0 +1,48 @@
+const http = require('http'),
+ strftime = require('strftime'),
+ express = require('express'),
+ f = require('/function/function/function');
+//import { v4 as uuidv4 } from 'uuid';
+const { v4: uuidv4 } = require('uuid');
+
+
+var app = express();
+app.use(express.json());
+
+app.post('/alive', function (req, res) {
+ res.send(JSON.stringify({
+ status: "ok"
+ }));
+});
+
+app.post('/', function (req, res) {
+
+ let begin = Date.now();
+ let ret = f.handler(req.body);
+ ret.then((func_res) => {
+
+ let end = Date.now();
+ res.setHeader('Content-Type', 'application/json');
+ res.end(JSON.stringify({
+ begin: strftime('%s.%L', new Date(begin)),
+ end: strftime('%s.%L', new Date(end)),
+ request_id: uuidv4(),
+ is_cold: false,
+ result: {
+ output: func_res
+ }
+ }));
+ },
+ (reason) => {
+ console.log('Function invocation failed!');
+ console.log(reason);
+ process.exit(1);
+ }
+ );
+});
+
+app.listen(port=process.argv[2], function () {
+ console.log(`Server listening on port ${process.argv[2]}.`);
+});
+
+
diff --git a/dockerfiles/local/bun/timeit.sh b/dockerfiles/local/bun/timeit.sh
new file mode 100644
index 000000000..a458d35fa
--- /dev/null
+++ b/dockerfiles/local/bun/timeit.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+OUT=$1
+#ts=$(date +%s%N);
+export TIMEFORMAT='%3R,%3U,%3S'
+time bun --expose-gc -e "var fs = require('fs'), f = require('./function/function');
+async function test() {
+ var input = JSON.parse(fs.readFileSync('input.json', 'utf-8'));
+ return await f.handler(input);
+}
+test().then( (data) => console.log(data) );" > $OUT
+#tt=$((($(date +%s%N) - $ts)/1000)) ; echo $tt
diff --git a/dockerfiles/local/runner.py b/dockerfiles/local/runner.py
index 96261fc33..2c882c700 100644
--- a/dockerfiles/local/runner.py
+++ b/dockerfiles/local/runner.py
@@ -4,7 +4,7 @@
from utils import *
def get_language(lang):
- languages = {'python': 'python3', 'nodejs': 'nodejs'}
+ languages = {'python': 'python3', 'nodejs': 'nodejs', 'bun': 'bun'}
return languages[lang]
def get_runner(experiment, options=None):
diff --git a/docs/modularity.md b/docs/modularity.md
index 4febc1b7f..1e6e37bb0 100644
--- a/docs/modularity.md
+++ b/docs/modularity.md
@@ -197,6 +197,8 @@ Then, we need to add the new language in [`config/systems.json`](/config/systems
Once done, we can build the image with `tools/build_docker_images.py`.
+*Note: Building the Docker files might fail due to the scylladb volume having the wrong permissions. This can be circumvented using `chown -R $(id -u):$(id -g) scylladb-volume`.*
+
#### Benchmark Wrappers
For each language and cloud platform, we need to implement benchmark wrappers.
diff --git a/sebs.py b/sebs.py
index 80fb11ed3..81c84afe0 100755
--- a/sebs.py
+++ b/sebs.py
@@ -64,7 +64,7 @@ def simplified_common_params(func):
@click.option(
"--language",
default=None,
- type=click.Choice(["python", "nodejs"]),
+ type=click.Choice(["python", "nodejs", "bun"]),
help="Benchmark language",
)
@click.option("--language-version", default=None, type=str, help="Benchmark language version")
diff --git a/sebs/aws/aws.py b/sebs/aws/aws.py
index 243a6f0f9..849cc5b54 100644
--- a/sebs/aws/aws.py
+++ b/sebs/aws/aws.py
@@ -137,11 +137,12 @@ def package_code(
CONFIG_FILES = {
"python": ["handler.py", "requirements.txt", ".python_packages"],
"nodejs": ["handler.js", "package.json", "node_modules"],
+ "bun": ["bootstrap", "bun", "runtime.js", "handler.js", "package.json", "node_modules"],
}
package_config = CONFIG_FILES[language_name]
function_dir = os.path.join(directory, "function")
os.makedirs(function_dir)
- # move all files to 'function' except handler.py
+ # move all files to 'function' except config files like handler.py
for file in os.listdir(directory):
if file not in package_config:
file = os.path.join(directory, file)
@@ -152,7 +153,7 @@ def package_code(
benchmark_archive = "{}.zip".format(os.path.join(directory, benchmark))
self.logging.info("Created {} archive".format(benchmark_archive))
- bytes_size = os.path.getsize(os.path.join(directory, benchmark_archive))
+ bytes_size = os.path.getsize(benchmark_archive)
mbytes = bytes_size / 1024.0 / 1024.0
self.logging.info("Zip archive size {:2f} MB".format(mbytes))
@@ -174,6 +175,8 @@ def _map_language_runtime(self, language: str, runtime: str):
# For example, it's 12.x instead of 12.
if language == "nodejs":
return f"{runtime}.x"
+ elif language == "bun":
+ return "provided.al2023"
return runtime
def create_function(
@@ -251,9 +254,7 @@ def create_function(
"S3Key": code_prefix,
}
- create_function_params["Runtime"] = "{}{}".format(
- language, self._map_language_runtime(language, language_runtime)
- )
+ create_function_params["Runtime"] = self._map_language_runtime(language, language_runtime)
create_function_params["Handler"] = "handler.handler"
create_function_params = {
diff --git a/sebs/benchmark.py b/sebs/benchmark.py
index f159e820c..1aa204d26 100644
--- a/sebs/benchmark.py
+++ b/sebs/benchmark.py
@@ -252,8 +252,9 @@ def hash_directory(directory: str, deployment: str, language: str):
FILES = {
"python": ["*.py", "requirements.txt*"],
"nodejs": ["*.js", "package.json"],
+ "bun": ["*.js", "package.json"],
}
- WRAPPERS = {"python": "*.py", "nodejs": "*.js"}
+ WRAPPERS = {"python": "*.py", "nodejs": "*.js", "bun": "*.js"}
NON_LANG_FILES = ["*.sh", "*.json"]
selected_files = FILES[language] + NON_LANG_FILES
for file_type in selected_files:
@@ -316,6 +317,7 @@ def copy_code(self, output_dir):
FILES = {
"python": ["*.py", "requirements.txt*"],
"nodejs": ["*.js", "package.json"],
+ "bun": ["*.js", "package.json"],
}
path = os.path.join(self.benchmark_path, self.language_name)
for file_type in FILES[self.language_name]:
@@ -406,6 +408,8 @@ def add_deployment_package(self, output_dir):
self.add_deployment_package_python(output_dir)
elif self.language == Language.NODEJS:
self.add_deployment_package_nodejs(output_dir)
+ elif self.language == Language.BUN:
+ self.add_deployment_package_nodejs(output_dir)
else:
raise NotImplementedError
@@ -483,7 +487,7 @@ def ensure_image(name: str) -> None:
}
# run Docker container to install packages
- PACKAGE_FILES = {"python": "requirements.txt", "nodejs": "package.json"}
+ PACKAGE_FILES = {"python": "requirements.txt", "nodejs": "package.json", "bun": "package.json"}
file = os.path.join(output_dir, PACKAGE_FILES[self.language_name])
if os.path.exists(file):
try:
diff --git a/sebs/faas/function.py b/sebs/faas/function.py
index 0fab7bcf4..a967027da 100644
--- a/sebs/faas/function.py
+++ b/sebs/faas/function.py
@@ -263,6 +263,7 @@ def deserialize(cached_config: dict) -> "Trigger":
class Language(Enum):
PYTHON = "python"
NODEJS = "nodejs"
+ BUN = "bun"
# FIXME: 3.7+ python with future annotations
@staticmethod
@@ -270,7 +271,7 @@ def deserialize(val: str) -> Language:
for member in Language:
if member.value == val:
return member
- raise Exception(f"Unknown language type {member}")
+ raise Exception(f"Unknown language type {val}")
class Architecture(Enum):
@@ -285,7 +286,7 @@ def deserialize(val: str) -> Architecture:
for member in Architecture:
if member.value == val:
return member
- raise Exception(f"Unknown architecture type {member}")
+ raise Exception(f"Unknown architecture type {val}")
@dataclass
@@ -299,7 +300,7 @@ def serialize(self) -> dict:
@staticmethod
def deserialize(config: dict) -> Runtime:
- languages = {"python": Language.PYTHON, "nodejs": Language.NODEJS}
+ languages = {"python": Language.PYTHON, "nodejs": Language.NODEJS, "bun": Language.BUN}
return Runtime(language=languages[config["language"]], version=config["version"])
diff --git a/sebs/local/local.py b/sebs/local/local.py
index 32b9f9ffb..b7f49717f 100644
--- a/sebs/local/local.py
+++ b/sebs/local/local.py
@@ -124,6 +124,7 @@ def package_code(
CONFIG_FILES = {
"python": ["handler.py", "requirements.txt", ".python_packages"],
"nodejs": ["handler.js", "package.json", "node_modules"],
+ "bun": ["handler.js", "package.json", "node_modules"],
}
package_config = CONFIG_FILES[language_name]
function_dir = os.path.join(directory, "function")
@@ -144,10 +145,11 @@ def _start_container(
self, code_package: Benchmark, func_name: str, func: Optional[LocalFunction]
) -> LocalFunction:
- container_name = "{}:run.local.{}.{}".format(
+ container_name = "{}:run.local.{}.{}-{}".format(
self._system_config.docker_repository(),
code_package.language_name,
code_package.language_version,
+ self._system_config.version(),
)
environment = {
diff --git a/tools/build_docker_images.py b/tools/build_docker_images.py
index 5336fb485..10e830c13 100755
--- a/tools/build_docker_images.py
+++ b/tools/build_docker_images.py
@@ -13,7 +13,7 @@
"--deployment", default=None, choices=["local", "aws", "azure", "gcp"], action="store"
)
parser.add_argument("--type", default=None, choices=["build", "run", "manage"], action="store")
-parser.add_argument("--language", default=None, choices=["python", "nodejs"], action="store")
+parser.add_argument("--language", default=None, choices=["python", "nodejs", "bun"], action="store")
parser.add_argument("--language-version", default=None, type=str, action="store")
args = parser.parse_args()
config = json.load(open(os.path.join(PROJECT_DIR, "config", "systems.json"), "r"))