first commit
This commit is contained in:
3
libraries/stream-utils/.gitignore
vendored
Normal file
3
libraries/stream-utils/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
|
||||
# managed by monorepo$ bin/update_build_scripts
|
||||
.npmrc
|
5
libraries/stream-utils/.mocharc.json
Normal file
5
libraries/stream-utils/.mocharc.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"ui": "bdd",
|
||||
"recursive": true,
|
||||
"reporter": "spec"
|
||||
}
|
1
libraries/stream-utils/.nvmrc
Normal file
1
libraries/stream-utils/.nvmrc
Normal file
@@ -0,0 +1 @@
|
||||
20.18.2
|
10
libraries/stream-utils/buildscript.txt
Normal file
10
libraries/stream-utils/buildscript.txt
Normal file
@@ -0,0 +1,10 @@
|
||||
stream-utils
|
||||
--dependencies=None
|
||||
--docker-repos=gcr.io/overleaf-ops
|
||||
--env-add=
|
||||
--env-pass-through=
|
||||
--esmock-loader=False
|
||||
--is-library=True
|
||||
--node-version=20.18.2
|
||||
--public-repo=False
|
||||
--script-version=4.7.0
|
158
libraries/stream-utils/index.js
Normal file
158
libraries/stream-utils/index.js
Normal file
@@ -0,0 +1,158 @@
|
||||
const { Writable, Readable, PassThrough, Transform } = require('node:stream')
|
||||
|
||||
/**
 * A writable stream that stores all data written to it in a node Buffer.
 * @extends Writable
 * @example
 * const { WritableBuffer } = require('@overleaf/stream-utils')
 * const bufferStream = new WritableBuffer()
 * bufferStream.write('hello')
 * bufferStream.write('world')
 * bufferStream.end()
 * bufferStream.contents().toString() // 'helloworld'
 */
class WritableBuffer extends Writable {
  constructor(options) {
    super(options)
    this._buffers = []
    this._size = 0
  }

  // Collect each chunk and track the running byte count. Chunks arrive as
  // Buffers because the stream is not in object mode.
  _write(chunk, encoding, callback) {
    this._buffers.push(chunk)
    this._size += chunk.length
    callback()
  }

  _final(callback) {
    callback()
  }

  /**
   * Total number of bytes written so far.
   * @returns {number}
   */
  size() {
    return this._size
  }

  /**
   * Concatenate all written chunks into a single Buffer.
   * @returns {Buffer}
   */
  getContents() {
    return Buffer.concat(this._buffers)
  }

  /**
   * Alias of getContents(), kept so both accessor names keep working.
   * @returns {Buffer}
   */
  contents() {
    return this.getContents()
  }
}
|
||||
|
||||
/**
 * A readable stream that yields the supplied string as a single chunk and
 * then signals end-of-stream.
 * @extends Readable
 * @example
 * const { ReadableString } = require('@overleaf/stream-utils')
 * const stringStream = new ReadableString('hello world')
 * stringStream.on('data', chunk => console.log(chunk.toString()))
 * stringStream.on('end', () => console.log('done'))
 */
class ReadableString extends Readable {
  constructor(string, options) {
    super(options)
    this._string = string
  }

  // Push the entire string in one go (the requested read size is ignored),
  // then push null to mark EOF.
  _read() {
    const payload = this._string
    this.push(payload)
    this.push(null)
  }
}
|
||||
|
||||
class SizeExceededError extends Error {}

/**
 * Limited size stream which will emit a SizeExceededError if the size is exceeded.
 * Data is passed through unchanged until the cumulative byte count crosses
 * maxSize, at which point the stream errors.
 * @extends Transform
 */
class LimitedStream extends Transform {
  /**
   * @param {number} maxSize maximum number of bytes allowed through the stream
   * @param {Object} [options] optional options for the Transform stream
   *   (new optional parameter; omitting it behaves exactly as before)
   */
  constructor(maxSize, options) {
    super(options)
    this.maxSize = maxSize
    this.size = 0
  }

  _transform(chunk, encoding, callback) {
    this.size += chunk.byteLength
    if (this.size > this.maxSize) {
      // Fail the stream; callers observe this as an 'error' event.
      callback(
        new SizeExceededError(
          `exceeded stream size limit of ${this.maxSize}: ${this.size}`
        )
      )
    } else {
      callback(null, chunk)
    }
  }
}
|
||||
|
||||
class AbortError extends Error {}

/**
 * TimeoutStream which will emit an AbortError if it exceeds a user specified timeout.
 * @extends PassThrough
 */
class TimeoutStream extends PassThrough {
  /**
   * @param {number} timeout milliseconds before the stream is aborted
   */
  constructor(timeout) {
    super()
    this.t = setTimeout(() => {
      this.destroy(new AbortError('stream timed out'))
    }, timeout)
  }

  _final(callback) {
    clearTimeout(this.t)
    callback()
  }

  // Also clear the timer when the stream is destroyed before finishing
  // (including the self-destroy above); otherwise the pending timeout
  // keeps the event loop alive until it fires.
  _destroy(err, callback) {
    clearTimeout(this.t)
    callback(err)
  }
}
|
||||
|
||||
/**
 * LoggerStream which will call the provided logger function when the stream
 * exceeds a user specified limit. It will call the provided function again
 * when flushing the stream and it exceeded the user specified limit before.
 * @extends Transform
 */
class LoggerStream extends Transform {
  /**
   * Constructor.
   * @param {number} maxSize
   * @param {function(currentSizeOfStream: number, isFlush: boolean)} fn
   * @param {Object?} options optional options for the Transform stream
   */
  constructor(maxSize, fn, options) {
    super(options)
    this.fn = fn
    this.size = 0
    this.maxSize = maxSize
    this.logged = false
  }

  // Pass the chunk through untouched; fire fn exactly once, the first time
  // the cumulative size crosses maxSize.
  _transform(chunk, encoding, callback) {
    this.size += chunk.byteLength
    const overLimit = this.size > this.maxSize
    if (overLimit && !this.logged) {
      this.fn(this.size)
      this.logged = true
    }
    callback(null, chunk)
  }

  // On flush, report the final size once more (isFlush=true) if the limit
  // was exceeded at any point.
  _flush(callback) {
    const exceeded = this.size > this.maxSize
    if (exceeded) {
      this.fn(this.size, true)
    }
    callback()
  }
}
|
||||
|
||||
// Export our classes
|
||||
|
||||
module.exports = {
|
||||
WritableBuffer,
|
||||
ReadableString,
|
||||
LoggerStream,
|
||||
LimitedStream,
|
||||
TimeoutStream,
|
||||
SizeExceededError,
|
||||
AbortError,
|
||||
}
|
24
libraries/stream-utils/package.json
Normal file
24
libraries/stream-utils/package.json
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"name": "@overleaf/stream-utils",
|
||||
"version": "0.1.0",
|
||||
"description": "stream handling utilities",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "npm run lint && npm run format && npm run types:check && npm run test:unit",
|
||||
"test:unit": "mocha --exit test/**/*.{js,cjs}",
|
||||
"lint": "eslint --ext .js --ext .cjs --ext .ts --max-warnings 0 --format unix .",
|
||||
"lint:fix": "eslint --fix --ext .js --ext .cjs --ext .ts .",
|
||||
"format": "prettier --list-different $PWD/'**/*.{js,cjs,ts}'",
|
||||
"format:fix": "prettier --write $PWD/'**/*.{js,cjs,ts}'",
|
||||
"test:ci": "npm run test:unit",
|
||||
"types:check": "tsc --noEmit"
|
||||
},
|
||||
"author": "Overleaf (https://www.overleaf.com)",
|
||||
"license": "AGPL-3.0-only",
|
||||
"devDependencies": {
|
||||
"chai": "^4.3.6",
|
||||
"chai-as-promised": "^7.1.1",
|
||||
"mocha": "^11.1.0",
|
||||
"typescript": "^5.0.4"
|
||||
}
|
||||
}
|
30
libraries/stream-utils/test/unit/LimitedStreamTests.js
Normal file
30
libraries/stream-utils/test/unit/LimitedStreamTests.js
Normal file
@@ -0,0 +1,30 @@
|
||||
const { expect } = require('chai')
const { LimitedStream, SizeExceededError } = require('../../index')

describe('LimitedStream', function () {
  it('should emit an error if the stream size exceeds the limit', function (done) {
    const limit = 10
    const stream = new LimitedStream(limit)
    stream.on('error', error => {
      expect(error).to.be.an.instanceOf(SizeExceededError)
      done()
    })
    // One byte over the limit triggers the error on the first write.
    stream.write(Buffer.alloc(limit + 1))
  })

  it('should pass through data if the stream size does not exceed the limit', function (done) {
    const limit = 15
    const stream = new LimitedStream(limit)
    let received = ''
    stream.on('data', chunk => {
      received += chunk.toString()
    })
    stream.on('end', () => {
      expect(received).to.equal('hello world')
      done()
    })
    stream.write('hello')
    stream.write(' world')
    stream.end()
  })
})
|
36
libraries/stream-utils/test/unit/LoggerStreamTests.js
Normal file
36
libraries/stream-utils/test/unit/LoggerStreamTests.js
Normal file
@@ -0,0 +1,36 @@
|
||||
const { expect } = require('chai')
const { LoggerStream } = require('../../index')

describe('LoggerStream', function () {
  it('should log the size of the stream when it exceeds the limit', function (done) {
    const limit = 10
    const observed = []
    const stream = new LoggerStream(limit, (size, isFlush) => {
      observed.push([size, isFlush])
      if (isFlush) {
        // Expect one call when the limit is crossed and one on flush.
        expect(observed).to.deep.equal([
          [11, undefined],
          [11, true],
        ])
        done()
      }
    })
    stream.write(Buffer.alloc(limit))
    stream.write(Buffer.alloc(1))
    stream.end()
  })

  it('should not log the size of the stream if it does not exceed the limit', function (done) {
    const limit = 10
    const observed = []
    const stream = new LoggerStream(limit, (size, isFlush) => {
      observed.push(size)
    })
    stream.write(Buffer.alloc(limit))
    stream.end()
    stream.on('finish', () => {
      expect(observed).to.deep.equal([])
      done()
    })
  })
})
|
16
libraries/stream-utils/test/unit/ReadableStringTests.js
Normal file
16
libraries/stream-utils/test/unit/ReadableStringTests.js
Normal file
@@ -0,0 +1,16 @@
|
||||
const { expect } = require('chai')
const { ReadableString } = require('../../index')

describe('ReadableString', function () {
  it('should emit the string passed to it', function (done) {
    const stream = new ReadableString('hello world')
    let received = ''
    stream.on('data', chunk => {
      received += chunk.toString()
    })
    stream.on('end', () => {
      expect(received).to.equal('hello world')
      done()
    })
  })
})
|
22
libraries/stream-utils/test/unit/TimeoutStreamTests.js
Normal file
22
libraries/stream-utils/test/unit/TimeoutStreamTests.js
Normal file
@@ -0,0 +1,22 @@
|
||||
const { expect } = require('chai')
const { TimeoutStream, AbortError } = require('../../index')

describe('TimeoutStream', function () {
  it('should emit an error if the stream times out', function (done) {
    const timeoutMs = 10
    const stream = new TimeoutStream(timeoutMs)
    stream.on('error', error => {
      expect(error).to.be.an.instanceOf(AbortError)
      done()
    })
  })

  it('should not emit an error if the stream does not time out', function (done) {
    const timeoutMs = 100
    const stream = new TimeoutStream(timeoutMs)
    // Finish the stream well before the timeout fires.
    setTimeout(() => {
      stream.end()
      done()
    }, 1)
  })
})
|
20
libraries/stream-utils/test/unit/WritableBufferTests.js
Normal file
20
libraries/stream-utils/test/unit/WritableBufferTests.js
Normal file
@@ -0,0 +1,20 @@
|
||||
const { expect } = require('chai')
const { WritableBuffer } = require('../../index')

describe('WritableBuffer', function () {
  it('should store all data written to it in a node Buffer', function () {
    const stream = new WritableBuffer()
    stream.write('hello')
    stream.write('world')
    stream.end()
    expect(stream.contents().toString()).to.equal('helloworld')
  })

  it('should return the size of the data written to it', function () {
    const stream = new WritableBuffer()
    stream.write('hello')
    stream.write('world')
    stream.end()
    expect(stream.size()).to.equal(10)
  })
})
|
7
libraries/stream-utils/tsconfig.json
Normal file
7
libraries/stream-utils/tsconfig.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"extends": "../../tsconfig.backend.json",
|
||||
"include": [
|
||||
"**/*.js",
|
||||
"**/*.cjs"
|
||||
]
|
||||
}
|
Reference in New Issue
Block a user