diff --git a/examples/FileStreaming.js b/examples/FileStreaming.js
new file mode 100644
index 00000000..95f9f355
--- /dev/null
+++ b/examples/FileStreaming.js
@@ -0,0 +1,105 @@
+/* This is an example of streaming files */
+
+const uWS = require('../dist/uws.js');
+const fs = require('fs');
+const port = 9001;
+
+/* Small file is cached in memory */
+const smallFileType = 'application/json';
+const smallFileName = 'absolutePathTo/smallFile.json';
+const smallFileCachedBuffer = fs.readFileSync(smallFileName);
+console.log('Small file size is: ' + smallFileCachedBuffer.length + ' bytes');
+
+/* Big file is streamed from storage */
+const bigFileType = 'audio/mpeg';
+const bigFileName = 'absolutePathTo/bigFile.mp3';
+const bigFileSize = fs.statSync(bigFileName).size;
+console.log('Big file size is: ' + bigFileSize + ' bytes');
+
+let lastStreamIndex = 0;
+let openStreams = 0;
+
+/* Helper function to stream data */
+/** @param {import('node:stream').Readable} readStream */
+const streamData = (res, readStream, totalSize, onSucceed) => {
+  let chunkBuffer; /* Chunk currently being streamed */
+  let totalOffset = 0; /* Offset of the current chunk within the response */
+
+  /* Function to send the current chunk */
+  const sendChunkBuffer = () => {
+    const [ok, done] = res.tryEnd(chunkBuffer, totalSize);
+    if (done) {
+      /* Streaming finished */
+      readStream.destroy();
+      onSucceed();
+    } else if (ok) {
+      /* Chunk was sent successfully */
+      totalOffset += chunkBuffer.length;
+    } else {
+      /* Chunk send failed (client backpressure).
+       * onWritable will be called once the client is ready to receive more data,
+       * so pause the stream while we wait for the client */
+      readStream.pause();
+    }
+    return ok;
+  };
+
+  /* Register onWritable callback;
+   * it will be called to drain client backpressure */
+  res.onWritable((offset) => {
+    if (offset !== totalOffset) {
+      /* The start of the chunk was already sent successfully,
+       * so only send the missing part */
+      chunkBuffer = chunkBuffer.subarray(offset - totalOffset);
+      totalOffset = offset;
+    }
+    if (sendChunkBuffer()) {
+      /* Resume the stream if the resend succeeded */
+      readStream.resume();
+      return true;
+    }
+    return false;
+  });
+
+  /* Register callbacks for stream events */
+  readStream.on('error', (err) => {
+    console.log('Error reading file: ' + err);
+    /* res.close() calls the onAborted callback */
+    res.close();
+  }).on('data', (newChunkBuffer) => {
+    chunkBuffer = newChunkBuffer;
+    /* Cork before sending the new chunk */
+    res.cork(sendChunkBuffer);
+  });
+};
+
+const app = uWS./*SSL*/App({
+  key_file_name: 'misc/key.pem',
+  cert_file_name: 'misc/cert.pem',
+  passphrase: '1234'
+}).get('/smallFile', (res, req) => {
+  res.writeHeader('Content-Type', smallFileType);
+  /* !! Use this only for small-sized bodies !!
+   * It may cause server backpressure and poor performance.
+   * For large bodies you must use the streaming approach */
+  res.end(smallFileCachedBuffer);
+}).get('/bigFile', (res, req) => {
+  const streamIndex = ++lastStreamIndex;
+  console.log('Stream (' + streamIndex + ') was opened, openStreams: ' + (++openStreams));
+  const readStream = fs.createReadStream(bigFileName);
+  /* Attach onAborted handler because streaming is async */
+  res.onAborted(() => {
+    readStream.destroy();
+    console.log('Stream (' + streamIndex + ') failed, openStreams: ' + (--openStreams));
+  });
+  res.writeHeader('Content-Type', bigFileType);
+  streamData(res, readStream, bigFileSize, () => {
+    console.log('Stream (' + streamIndex + ') succeeded, openStreams: ' + (--openStreams));
+  });
+}).listen(port, (token) => {
+  if (token) {
+    console.log('Listening to port ' + port);
+  } else {
+    console.log('Failed to listen to port ' + port);
+  }
+});
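A quick way to exercise the streaming route above is a plain Node.js client such as the sketch below. It is not part of the patch: it assumes the server runs without SSL on port 9001 (the /*SSL*/ comment leaves plain uWS.App in effect) and that the placeholder file paths were replaced with real ones; the byte and chunk counting is purely illustrative.

/* Hypothetical client for FileStreaming.js: fetch /bigFile and count
 * the streamed bytes to observe that the body arrives in many chunks */
const http = require('http');

http.get('http://localhost:9001/bigFile', (clientRes) => {
  let receivedBytes = 0;
  let chunks = 0;
  clientRes.on('data', (chunk) => {
    receivedBytes += chunk.length;
    chunks++;
  });
  clientRes.on('end', () => {
    console.log('Received ' + receivedBytes + ' bytes in ' + chunks + ' chunks');
  });
});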
diff --git a/examples/JsonPost.js b/examples/JsonPost.js
deleted file mode 100644
index abaeea1b..00000000
--- a/examples/JsonPost.js
+++ /dev/null
@@ -1,72 +0,0 @@
-/* Simple example of getting JSON from a POST */
-
-const uWS = require('../dist/uws.js');
-const port = 9001;
-
-const app = uWS./*SSL*/App({
-  key_file_name: 'misc/key.pem',
-  cert_file_name: 'misc/cert.pem',
-  passphrase: '1234'
-}).post('/*', (res, req) => {
-
-  /* Note that you cannot read from req after returning from here */
-  let url = req.getUrl();
-
-  /* Read the body until done or error */
-  readJson(res, (obj) => {
-    console.log('Posted to ' + url + ': ');
-    console.log(obj);
-
-    res.end('Thanks for this json!');
-  }, () => {
-    /* Request was prematurely aborted or invalid or missing, stop reading */
-    console.log('Invalid JSON or no data at all!');
-  });
-
-}).listen(port, (token) => {
-  if (token) {
-    console.log('Listening to port ' + port);
-  } else {
-    console.log('Failed to listen to port ' + port);
-  }
-});
-
-/* Helper function for reading a posted JSON body */
-function readJson(res, cb, err) {
-  let buffer;
-  /* Register data cb */
-  res.onData((ab, isLast) => {
-    let chunk = Buffer.from(ab);
-    if (isLast) {
-      let json;
-      if (buffer) {
-        try {
-          json = JSON.parse(Buffer.concat([buffer, chunk]));
-        } catch (e) {
-          /* res.close calls onAborted */
-          res.close();
-          return;
-        }
-        cb(json);
-      } else {
-        try {
-          json = JSON.parse(chunk);
-        } catch (e) {
-          /* res.close calls onAborted */
-          res.close();
-          return;
-        }
-        cb(json);
-      }
-    } else {
-      if (buffer) {
-        buffer = Buffer.concat([buffer, chunk]);
-      } else {
-        buffer = Buffer.concat([chunk]);
-      }
-    }
-  });
-
-  /* Register error cb */
-  res.onAborted(err);
-}
diff --git a/examples/ParseRequestBody.js b/examples/ParseRequestBody.js
new file mode 100644
index 00000000..60247625
--- /dev/null
+++ b/examples/ParseRequestBody.js
@@ -0,0 +1,56 @@
+/* This is an example of parsing a request body */
+
+const uWS = require('../dist/uws.js');
+const port = 9001;
+
+/* Helper function to parse a JSON body */
+const parseJSONBody = (res, callback) => {
+  let buffer;
+  /* Register data callback */
+  res.onData((ab, isLast) => {
+    const chunk = Buffer.from(ab);
+    if (isLast) {
+      let json;
+      try {
+        json = JSON.parse(buffer ? Buffer.concat([buffer, chunk]) : chunk);
+      } catch (e) {
+        json = undefined;
+      }
+      callback(json);
+    } else if (buffer) {
+      buffer = Buffer.concat([buffer, chunk]);
+    } else {
+      buffer = Buffer.concat([chunk]);
+    }
+  });
+};
+
+const app = uWS./*SSL*/App({
+  key_file_name: 'misc/key.pem',
+  cert_file_name: 'misc/cert.pem',
+  passphrase: '1234'
+}).post('/jsonAPI', (res, req) => {
+  /* Attach onAborted handler because body parsing is async */
+  res.onAborted(() => {
+    console.log('Request aborted!');
+  });
+  parseJSONBody(res, (parsedJson) => {
+    if (parsedJson !== undefined) {
+      console.log('Valid JSON:', parsedJson);
+      res.cork(() => {
+        res.end('Thanks for your data!');
+      });
+    } else {
+      console.log('Invalid JSON or no data at all!');
+      res.cork(() => {
+        res.writeStatus('400 Bad Request').end();
+      });
+    }
+  });
+}).listen(port, (token) => {
+  if (token) {
+    console.log('Listening to port ' + port);
+  } else {
+    console.log('Failed to listen to port ' + port);
+  }
+});
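Likewise, the /jsonAPI endpoint can be tried with a minimal client sketch like the following, again assuming the non-SSL uWS.App on port 9001; the host and payload are arbitrary. A valid body should print 200 with the thank-you text, while malformed JSON should print the 400 status.

/* Hypothetical client for ParseRequestBody.js: POST a JSON body
 * to /jsonAPI and print the response status plus body text */
const http = require('http');

const body = JSON.stringify({ hello: 'world' });
const req = http.request({
  host: 'localhost',
  port: 9001,
  path: '/jsonAPI',
  method: 'POST',
  headers: { 'Content-Type': 'application/json' }
}, (res) => {
  let text = '';
  res.on('data', (chunk) => { text += chunk; });
  res.on('end', () => console.log(res.statusCode + ': ' + text));
});
req.end(body);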
diff --git a/examples/VideoStreamer.js b/examples/VideoStreamer.js
deleted file mode 100644
index fd3e3c9b..00000000
--- a/examples/VideoStreamer.js
+++ /dev/null
@@ -1,115 +0,0 @@
-/* This is an example of async streaming of large files.
- * Try navigating to the adderss with Chrome and see the video
- * in real time. */
-
-const uWS = require('../dist/uws.js');
-const fs = require('fs');
-
-const port = 9001;
-const fileName = 'C:\\Users\\Alex\\Downloads\\Sintel.2010.720p.mkv';
-const totalSize = fs.statSync(fileName).size;
-
-let openStreams = 0;
-let streamIndex = 0;
-
-console.log('Video size is: ' + totalSize + ' bytes');
-
-/* Helper function converting Node.js buffer to ArrayBuffer */
-function toArrayBuffer(buffer) {
-  return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);
-}
-
-/* Either onAborted or simply finished request */
-function onAbortedOrFinishedResponse(res, readStream) {
-
-  if (res.id == -1) {
-    console.log("ERROR! onAbortedOrFinishedResponse called twice for the same res!");
-  } else {
-    console.log('Stream was closed, openStreams: ' + --openStreams);
-    console.timeEnd(res.id);
-    readStream.destroy();
-  }
-
-  /* Mark this response already accounted for */
-  res.id = -1;
-}
-
-/* Helper function to pipe the ReadaleStream over an Http responses */
-function pipeStreamOverResponse(res, readStream, totalSize) {
-  /* Careful! If Node.js would emit error before the first res.tryEnd, res will hang and never time out */
-  /* For this demo, I skipped checking for Node.js errors, you are free to PR fixes to this example */
-  readStream.on('data', (chunk) => {
-    /* We only take standard V8 units of data */
-    const ab = toArrayBuffer(chunk);
-
-    /* Store where we are, globally, in our response */
-    let lastOffset = res.getWriteOffset();
-
-    /* Streaming a chunk returns whether that chunk was sent, and if that chunk was last */
-    let [ok, done] = res.tryEnd(ab, totalSize);
-
-    /* Did we successfully send last chunk? */
-    if (done) {
-      onAbortedOrFinishedResponse(res, readStream);
-    } else if (!ok) {
-      /* If we could not send this chunk, pause */
-      readStream.pause();
-
-      /* Save unsent chunk for when we can send it */
-      res.ab = ab;
-      res.abOffset = lastOffset;
-
-      /* Register async handlers for drainage */
-      res.onWritable((offset) => {
-        /* Here the timeout is off, we can spend as much time before calling tryEnd we want to */
-
-        /* On failure the timeout will start */
-        let [ok, done] = res.tryEnd(res.ab.slice(offset - res.abOffset), totalSize);
-        if (done) {
-          onAbortedOrFinishedResponse(res, readStream);
-        } else if (ok) {
-          /* We sent a chunk and it was not the last one, so let's resume reading.
-           * Timeout is still disabled, so we can spend any amount of time waiting
-           * for more chunks to send. */
-          readStream.resume();
-        }
-
-        /* We always have to return true/false in onWritable.
-         * If you did not send anything, return true for success. */
-        return ok;
-      });
-    }
-
-  }).on('error', () => {
-    /* Todo: handle errors of the stream, probably good to simply close the response */
-    console.log('Unhandled read error from Node.js, you need to handle this!');
-  });
-
-  /* If you plan to asyncronously respond later on, you MUST listen to onAborted BEFORE returning */
-  res.onAborted(() => {
-    onAbortedOrFinishedResponse(res, readStream);
-  });
-}
-
-/* Yes, you can easily swap to SSL streaming by uncommenting here */
-const app = uWS./*SSL*/App({
-  key_file_name: 'misc/key.pem',
-  cert_file_name: 'misc/cert.pem',
-  passphrase: '1234'
-}).get('/sintel.mkv', (res, req) => {
-  /* Log */
-  console.time(res.id = ++streamIndex);
-  console.log('Stream was opened, openStreams: ' + ++openStreams);
-  /* Create read stream with Node.js and start streaming over Http */
-  const readStream = fs.createReadStream(fileName);
-  pipeStreamOverResponse(res, readStream, totalSize);
-}).get('/*', (res, req) => {
-  /* Make sure to always handle every route */
-  res.end('Nothing to see here!');
-}).listen(port, (token) => {
-  if (token) {
-    console.log('Listening to port ' + port);
-  } else {
-    console.log('Failed to listen to port ' + port);
-  }
-});
diff --git a/examples/VideoStreamerSync.js b/examples/VideoStreamerSync.js
deleted file mode 100644
index dd5066e4..00000000
--- a/examples/VideoStreamerSync.js
+++ /dev/null
@@ -1,42 +0,0 @@
-/* This is an example of sync copying of large files.
- * NEVER DO THIS; ONLY FOR TESTING PURPOSES. WILL CAUSE
- * SEVERE BACKPRESSURE AND HORRIBLE PERFORMANCE.
- * Try navigating to the adderss with Chrome and see the video
- * in real time. */
-
-const uWS = require('../dist/uws.js');
-const fs = require('fs');
-
-const port = 9001;
-const fileName = '/home/alexhultman/Downloads/Sintel.2010.720p.mkv';
-const videoFile = toArrayBuffer(fs.readFileSync(fileName));
-const totalSize = videoFile.byteLength;
-
-console.log('WARNING: NEVER DO LIKE THIS; WILL CAUSE HORRIBLE BACKPRESSURE!');
-console.log('Video size is: ' + totalSize + ' bytes');
-
-/* Helper function converting Node.js buffer to ArrayBuffer */
-function toArrayBuffer(buffer) {
-  return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);
-}
-
-/* Yes, you can easily swap to SSL streaming by uncommenting here */
-const app = uWS.SSLApp({
-  key_file_name: 'misc/key.pem',
-  cert_file_name: 'misc/cert.pem',
-  passphrase: '1234'
-}).get('/sintel.mkv', (res, req) => {
-  /* Log */
-  console.log("Copying Sintel video...");
-  /* Copy the entire video to backpressure */
-  res.end(videoFile);
-}).get('/*', (res, req) => {
-  /* Make sure to always handle every route */
-  res.end('Nothing to see here!');
-}).listen(port, (token) => {
-  if (token) {
-    console.log('Listening to port ' + port);
-  } else {
-    console.log('Failed to listen to port ' + port);
-  }
-});