first commit

2025-04-24 13:11:28 +08:00
commit ff9c54d5e4
5960 changed files with 834111 additions and 0 deletions

@@ -0,0 +1,30 @@
const { expect } = require('chai')
const { LimitedStream, SizeExceededError } = require('../../index')

describe('LimitedStream', function () {
  it('should emit an error if the stream size exceeds the limit', function (done) {
    const maxSize = 10
    const limitedStream = new LimitedStream(maxSize)
    limitedStream.on('error', err => {
      expect(err).to.be.an.instanceOf(SizeExceededError)
      done()
    })
    limitedStream.write(Buffer.alloc(maxSize + 1))
  })

  it('should pass through data if the stream size does not exceed the limit', function (done) {
    const maxSize = 15
    const limitedStream = new LimitedStream(maxSize)
    let data = ''
    limitedStream.on('data', chunk => {
      data += chunk.toString()
    })
    limitedStream.on('end', () => {
      expect(data).to.equal('hello world')
      done()
    })
    limitedStream.write('hello')
    limitedStream.write(' world')
    limitedStream.end()
  })
})
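
The LimitedStream implementation itself is not part of this diff excerpt. A minimal sketch that would satisfy the assertions above, assuming LimitedStream is a Transform that counts the bytes written and fails once the limit is crossed (the class body below is an illustration, not code from this commit):

// Hypothetical sketch of an implementation compatible with the tests above.
const { Transform } = require('stream')

class SizeExceededError extends Error {}

class LimitedStream extends Transform {
  constructor(maxSize) {
    super()
    this.maxSize = maxSize
    this.size = 0
  }

  _transform(chunk, encoding, callback) {
    this.size += chunk.byteLength
    if (this.size > this.maxSize) {
      // Fail the stream as soon as the accumulated size passes the limit.
      callback(new SizeExceededError('exceeded stream size limit'))
    } else {
      callback(null, chunk)
    }
  }
}

module.exports = { LimitedStream, SizeExceededError }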

@@ -0,0 +1,36 @@
const { expect } = require('chai')
const { LoggerStream } = require('../../index')

describe('LoggerStream', function () {
  it('should log the size of the stream when it exceeds the limit', function (done) {
    const maxSize = 10
    const loggedSizes = []
    const loggerStream = new LoggerStream(maxSize, (size, isFlush) => {
      loggedSizes.push([size, isFlush])
      if (isFlush) {
        expect(loggedSizes).to.deep.equal([
          [11, undefined],
          [11, true],
        ])
        done()
      }
    })
    loggerStream.write(Buffer.alloc(maxSize))
    loggerStream.write(Buffer.alloc(1))
    loggerStream.end()
  })

  it('should not log the size of the stream if it does not exceed the limit', function (done) {
    const maxSize = 10
    const loggedSizes = []
    const loggerStream = new LoggerStream(maxSize, (size, isFlush) => {
      loggedSizes.push(size)
    })
    loggerStream.write(Buffer.alloc(maxSize))
    loggerStream.end()
    loggerStream.on('finish', () => {
      expect(loggedSizes).to.deep.equal([])
      done()
    })
  })
})
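
Again, only the tests are shown here. One possible LoggerStream that matches the expected [size, isFlush] pairs is a Transform that calls the supplied logging function whenever the accumulated size exceeds maxSize, and once more (flagged as the flush call) when the stream flushes; this is an assumed shape, not the committed source:

// Hypothetical sketch of a LoggerStream compatible with the tests above.
const { Transform } = require('stream')

class LoggerStream extends Transform {
  constructor(maxSize, loggingFunction) {
    super()
    this.maxSize = maxSize
    this.loggingFunction = loggingFunction
    this.size = 0
  }

  _transform(chunk, encoding, callback) {
    this.size += chunk.byteLength
    if (this.size > this.maxSize) {
      // Report the running size on every chunk past the limit.
      this.loggingFunction(this.size)
    }
    callback(null, chunk)
  }

  _flush(callback) {
    if (this.size > this.maxSize) {
      // Report the final size once more, flagged as the flush call.
      this.loggingFunction(this.size, true)
    }
    callback()
  }
}

module.exports = { LoggerStream }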

@@ -0,0 +1,16 @@
const { expect } = require('chai')
const { ReadableString } = require('../../index')

describe('ReadableString', function () {
  it('should emit the string passed to it', function (done) {
    const stringStream = new ReadableString('hello world')
    let data = ''
    stringStream.on('data', chunk => {
      data += chunk.toString()
    })
    stringStream.on('end', () => {
      expect(data).to.equal('hello world')
      done()
    })
  })
})
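
A ReadableString that behaves as this test expects could be as small as a Readable that pushes its string once and then ends; again an assumed sketch, not the file from this commit:

// Hypothetical sketch of a ReadableString compatible with the test above.
const { Readable } = require('stream')

class ReadableString extends Readable {
  constructor(string) {
    super()
    this.string = string
  }

  _read() {
    // Emit the whole string as a single chunk, then signal end-of-stream.
    this.push(this.string)
    this.push(null)
  }
}

module.exports = { ReadableString }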

@@ -0,0 +1,22 @@
const { expect } = require('chai')
const { TimeoutStream, AbortError } = require('../../index')

describe('TimeoutStream', function () {
  it('should emit an error if the stream times out', function (done) {
    const timeout = 10
    const timeoutStream = new TimeoutStream(timeout)
    timeoutStream.on('error', err => {
      expect(err).to.be.an.instanceOf(AbortError)
      done()
    })
  })

  it('should not emit an error if the stream does not time out', function (done) {
    const timeout = 100
    const timeoutStream = new TimeoutStream(timeout)
    setTimeout(() => {
      timeoutStream.end()
      done()
    }, 1)
  })
})
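
The tests constrain TimeoutStream to destroy itself with an AbortError after the given number of milliseconds unless the stream is ended first. A pass-through sketch under those assumptions (not the committed implementation):

// Hypothetical sketch of a TimeoutStream compatible with the tests above.
const { PassThrough } = require('stream')

class AbortError extends Error {}

class TimeoutStream extends PassThrough {
  constructor(timeout) {
    super()
    // Destroy the stream with an AbortError if it is not ended in time.
    this.timer = setTimeout(() => {
      this.destroy(new AbortError('stream timed out'))
    }, timeout)
    this.once('close', () => clearTimeout(this.timer))
  }

  end(...args) {
    // Ending the stream before the deadline cancels the timeout.
    clearTimeout(this.timer)
    return super.end(...args)
  }
}

module.exports = { TimeoutStream, AbortError }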

@@ -0,0 +1,20 @@
const { expect } = require('chai')
const { WritableBuffer } = require('../../index')

describe('WritableBuffer', function () {
  it('should store all data written to it in a node Buffer', function () {
    const bufferStream = new WritableBuffer()
    bufferStream.write('hello')
    bufferStream.write('world')
    bufferStream.end()
    expect(bufferStream.contents().toString()).to.equal('helloworld')
  })

  it('should return the size of the data written to it', function () {
    const bufferStream = new WritableBuffer()
    bufferStream.write('hello')
    bufferStream.write('world')
    bufferStream.end()
    expect(bufferStream.size()).to.equal(10)
  })
})
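
Finally, the WritableBuffer tests only require a Writable that accumulates chunks and exposes contents() and size(); a sketch along those lines (assumed, not the committed file):

// Hypothetical sketch of a WritableBuffer compatible with the tests above.
const { Writable } = require('stream')

class WritableBuffer extends Writable {
  constructor() {
    super()
    this.chunks = []
    this.totalSize = 0
  }

  _write(chunk, encoding, callback) {
    // Collect every chunk and keep a running byte count.
    this.chunks.push(chunk)
    this.totalSize += chunk.byteLength
    callback()
  }

  contents() {
    return Buffer.concat(this.chunks)
  }

  size() {
    return this.totalSize
  }
}

module.exports = { WritableBuffer }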