first commit

This commit is contained in:
2025-04-24 13:11:28 +08:00
commit ff9c54d5e4
5960 changed files with 834111 additions and 0 deletions

View File

@@ -0,0 +1,580 @@
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../app/js/DocArchiveManager.js'
const SandboxedModule = require('sandboxed-module')
const { ObjectId } = require('mongodb-legacy')
const Errors = require('../../../app/js/Errors')
const StreamToBuffer = require('../../../app/js/StreamToBuffer').promises
describe('DocArchiveManager', function () {
let DocArchiveManager,
PersistorManager,
MongoManager,
RangeManager,
Settings,
Crypto,
StreamUtils,
HashDigest,
HashUpdate,
archivedDocs,
mongoDocs,
archivedDoc,
archivedDocJson,
md5Sum,
projectId,
readStream,
stream,
streamToBuffer
// Rebuild every stub and fixture from scratch before each test so that no
// state leaks between cases.
beforeEach(function () {
md5Sum = 'decafbad'
// RangeManager stub: converting JSON ranges always yields this marker object.
RangeManager = {
jsonRangesToMongo: sinon.stub().returns({ mongo: 'ranges' }),
}
Settings = {
docstore: {
backend: 'gcs',
bucket: 'wombat',
},
parallelArchiveJobs: 3,
}
// Fake crypto: createHash().update(...).digest(...) always returns md5Sum.
HashDigest = sinon.stub().returns(md5Sum)
HashUpdate = sinon.stub().returns({ digest: HashDigest })
Crypto = {
createHash: sinon.stub().returns({ update: HashUpdate }),
}
StreamUtils = {
ReadableString: sinon.stub().returns({ stream: 'readStream' }),
}
projectId = new ObjectId()
// Docs that are already archived in the object store (inS3: true).
archivedDocs = [
{
_id: new ObjectId(),
inS3: true,
rev: 2,
},
{
_id: new ObjectId(),
inS3: true,
rev: 4,
},
{
_id: new ObjectId(),
inS3: true,
rev: 6,
},
]
// Mixed fixture set: some docs live in Mongo (with lines), some are
// flagged as being in S3.
mongoDocs = [
{
_id: new ObjectId(),
lines: ['one', 'two', 'three'],
rev: 2,
},
{
_id: new ObjectId(),
lines: ['aaa', 'bbb', 'ccc'],
rev: 4,
},
{
_id: new ObjectId(),
inS3: true,
rev: 6,
},
{
_id: new ObjectId(),
inS3: true,
rev: 6,
},
{
_id: new ObjectId(),
lines: ['111', '222', '333'],
rev: 6,
},
]
// The payload we expect to round-trip through the object store.
archivedDoc = {
lines: mongoDocs[0].lines,
rev: mongoDocs[0].rev,
}
archivedDocJson = JSON.stringify({ ...archivedDoc, schema_v: 1 })
// Stubbed read stream that emits archivedDocJson and then ends.
stream = {
on: sinon.stub(),
resume: sinon.stub(),
}
stream.on.withArgs('data').yields(Buffer.from(archivedDocJson, 'utf8'))
stream.on.withArgs('end').yields()
readStream = {
stream: 'readStream',
}
PersistorManager = {
getObjectStream: sinon.stub().resolves(stream),
sendStream: sinon.stub().resolves(),
getObjectMd5Hash: sinon.stub().resolves(md5Sum),
deleteObject: sinon.stub().resolves(),
deleteDirectory: sinon.stub().resolves(),
}
// Paged queries: the first call returns the relevant docs, the second an
// empty page so the archive/unarchive loops terminate.
const getNonArchivedProjectDocIds = sinon.stub()
getNonArchivedProjectDocIds
.onCall(0)
.resolves(mongoDocs.filter(doc => !doc.inS3).map(doc => doc._id))
getNonArchivedProjectDocIds.onCall(1).resolves([])
const getArchivedProjectDocs = sinon.stub()
getArchivedProjectDocs.onCall(0).resolves(archivedDocs)
getArchivedProjectDocs.onCall(1).resolves([])
// Doc lookup honouring both fixture lists; throws NotFoundError for
// unknown ids/projects, mirroring the real MongoManager contract.
const fakeGetDoc = async (_projectId, _docId) => {
if (_projectId.equals(projectId)) {
for (const mongoDoc of mongoDocs.concat(archivedDocs)) {
if (mongoDoc._id.equals(_docId)) {
return mongoDoc
}
}
}
throw new Errors.NotFoundError()
}
MongoManager = {
promises: {
markDocAsArchived: sinon.stub().resolves(),
restoreArchivedDoc: sinon.stub().resolves(),
upsertIntoDocCollection: sinon.stub().resolves(),
getProjectsDocs: sinon.stub().resolves(mongoDocs),
getNonDeletedArchivedProjectDocs: getArchivedProjectDocs,
getNonArchivedProjectDocIds,
getArchivedProjectDocs,
findDoc: sinon.stub().callsFake(fakeGetDoc),
getDocForArchiving: sinon.stub().callsFake(fakeGetDoc),
destroyProject: sinon.stub().resolves(),
},
}
// Wrap streamToBuffer so that we can pass in something that it expects (in
// this case, a Promise) rather than a stubbed stream object
streamToBuffer = {
promises: {
streamToBuffer: async () => {
const inputStream = new Promise(resolve => {
stream.on('data', data => resolve(data))
})
const value = await StreamToBuffer.streamToBuffer(
'testProjectId',
'testDocId',
inputStream
)
return value
},
},
}
// Load the module under test with all collaborators replaced by the stubs
// built above.
DocArchiveManager = SandboxedModule.require(modulePath, {
requires: {
'@overleaf/settings': Settings,
crypto: Crypto,
'@overleaf/stream-utils': StreamUtils,
'./MongoManager': MongoManager,
'./RangeManager': RangeManager,
'./PersistorManager': PersistorManager,
'./Errors': Errors,
'./StreamToBuffer': streamToBuffer,
},
})
})
// Archiving a single doc: serialise to JSON, md5-check, upload, then mark
// the Mongo record as archived.
describe('archiveDoc', function () {
it('should resolve when passed a valid document', async function () {
await expect(
DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
).to.eventually.be.fulfilled
})
it('should throw an error if the doc has no lines', async function () {
// Mutating the fixture is safe here: beforeEach rebuilds mongoDocs.
const doc = mongoDocs[0]
doc.lines = null
await expect(
DocArchiveManager.promises.archiveDoc(projectId, doc._id)
).to.eventually.be.rejectedWith('doc has no lines')
})
it('should add the schema version', async function () {
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[1]._id)
expect(StreamUtils.ReadableString).to.have.been.calledWith(
sinon.match(/"schema_v":1/)
)
})
it('should calculate the hex md5 sum of the content', async function () {
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(Crypto.createHash).to.have.been.calledWith('md5')
expect(HashUpdate).to.have.been.calledWith(archivedDocJson)
expect(HashDigest).to.have.been.calledWith('hex')
})
it('should pass the md5 hash to the object persistor for verification', async function () {
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(PersistorManager.sendStream).to.have.been.calledWith(
sinon.match.any,
sinon.match.any,
sinon.match.any,
{ sourceMd5: md5Sum }
)
})
it('should pass the correct bucket and key to the persistor', async function () {
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(PersistorManager.sendStream).to.have.been.calledWith(
Settings.docstore.bucket,
`${projectId}/${mongoDocs[0]._id}`
)
})
it('should create a stream from the encoded json and send it', async function () {
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(StreamUtils.ReadableString).to.have.been.calledWith(
archivedDocJson
)
expect(PersistorManager.sendStream).to.have.been.calledWith(
sinon.match.any,
sinon.match.any,
readStream
)
})
it('should mark the doc as archived', async function () {
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
projectId,
mongoDocs[0]._id,
mongoDocs[0].rev
)
})
describe('when archiving is not configured', function () {
beforeEach(function () {
Settings.docstore.backend = undefined
})
it('should bail out early', async function () {
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.promises.getDocForArchiving).to.not.have.been.called
})
})
describe('with null bytes in the result', function () {
// Replace the global JSON.stringify for these tests and restore it in
// afterEach so other tests are unaffected.
const _stringify = JSON.stringify
beforeEach(function () {
JSON.stringify = sinon.stub().returns('{"bad": "\u0000"}')
})
afterEach(function () {
JSON.stringify = _stringify
})
it('should return an error', async function () {
await expect(
DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
).to.eventually.be.rejectedWith('null bytes detected')
})
})
})
describe('unarchiveDoc', function () {
let docId, lines, rev
describe('when the doc is in S3', function () {
beforeEach(function () {
  // Assign the fixture values *before* creating the findDoc stub: the
  // object passed to `resolves()` captures `rev` at stub-creation time.
  // The original assigned `rev = 123` last, so the stub resolved
  // `{ rev: undefined }` on the first run and only appeared to work via
  // state left over from a previous execution of this hook.
  docId = mongoDocs[0]._id
  lines = ['doc', 'lines']
  rev = 123
  MongoManager.promises.findDoc = sinon.stub().resolves({ inS3: true, rev })
})
it('should resolve when passed a valid document', async function () {
await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId))
.to.eventually.be.fulfilled
})
// The md5 of the downloaded content must be computed on the raw bytes,
// not on a decoded string.
it('should test md5 validity with the raw buffer', async function () {
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(HashUpdate).to.have.been.calledWith(
sinon.match.instanceOf(Buffer)
)
})
it('should throw an error if the md5 does not match', async function () {
PersistorManager.getObjectMd5Hash.resolves('badf00d')
await expect(
DocArchiveManager.promises.unarchiveDoc(projectId, docId)
).to.eventually.be.rejected.and.be.instanceof(Errors.Md5MismatchError)
})
it('should restore the doc in Mongo', async function () {
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(
MongoManager.promises.restoreArchivedDoc
).to.have.been.calledWith(projectId, docId, archivedDoc)
})
describe('when archiving is not configured', function () {
beforeEach(function () {
Settings.docstore.backend = undefined
})
// An archived doc cannot be restored without a backend: hard error.
it('should error out on archived doc', async function () {
await expect(
DocArchiveManager.promises.unarchiveDoc(projectId, docId)
).to.eventually.be.rejected.and.match(
/found archived doc, but archiving backend is not configured/
)
})
// A doc that is not in S3 needs no backend: quiet no-op.
it('should return early on non-archived doc', async function () {
MongoManager.promises.findDoc = sinon.stub().resolves({ rev })
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(PersistorManager.getObjectMd5Hash).to.not.have.been.called
})
})
// Decoding of the different archive payload schemas found in the store.
describe('doc contents', function () {
let archivedDoc // deliberately shadows the outer fixture
describe('when the doc has the old schema', function () {
beforeEach(function () {
// Old schema: the payload is just the bare array of lines.
archivedDoc = lines
archivedDocJson = JSON.stringify(archivedDoc)
stream.on
.withArgs('data')
.yields(Buffer.from(archivedDocJson, 'utf8'))
})
it('should return the docs lines', async function () {
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(
MongoManager.promises.restoreArchivedDoc
).to.have.been.calledWith(projectId, docId, { lines, rev })
})
})
describe('with the new schema and ranges', function () {
beforeEach(function () {
archivedDoc = {
lines,
ranges: { json: 'ranges' },
rev: 456,
schema_v: 1,
}
archivedDocJson = JSON.stringify(archivedDoc)
stream.on
.withArgs('data')
.yields(Buffer.from(archivedDocJson, 'utf8'))
})
it('should return the doc lines and ranges', async function () {
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
// Ranges pass through RangeManager.jsonRangesToMongo (stubbed).
expect(
MongoManager.promises.restoreArchivedDoc
).to.have.been.calledWith(projectId, docId, {
lines,
ranges: { mongo: 'ranges' },
rev: 456,
})
})
})
describe('with the new schema and no ranges', function () {
beforeEach(function () {
archivedDoc = { lines, rev: 456, schema_v: 1 }
archivedDocJson = JSON.stringify(archivedDoc)
stream.on
.withArgs('data')
.yields(Buffer.from(archivedDocJson, 'utf8'))
})
it('should return only the doc lines', async function () {
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(
MongoManager.promises.restoreArchivedDoc
).to.have.been.calledWith(projectId, docId, { lines, rev: 456 })
})
})
describe('with the new schema and no rev', function () {
beforeEach(function () {
archivedDoc = { lines, schema_v: 1 }
archivedDocJson = JSON.stringify(archivedDoc)
stream.on
.withArgs('data')
.yields(Buffer.from(archivedDocJson, 'utf8'))
})
it('should use the rev obtained from Mongo', async function () {
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(
MongoManager.promises.restoreArchivedDoc
).to.have.been.calledWith(projectId, docId, { lines, rev })
})
})
describe('with an unrecognised schema', function () {
beforeEach(function () {
archivedDoc = { lines, schema_v: 2 }
archivedDocJson = JSON.stringify(archivedDoc)
stream.on
.withArgs('data')
.yields(Buffer.from(archivedDocJson, 'utf8'))
})
it('should throw an error', async function () {
await expect(
DocArchiveManager.promises.unarchiveDoc(projectId, docId)
).to.eventually.be.rejectedWith(
"I don't understand the doc format in s3"
)
})
})
})
})
it('should not do anything if the file is already unarchived', async function () {
// NOTE(review): `docId` here relies on state left behind by the sibling
// "when the doc is in S3" beforeEach — consider assigning it locally.
MongoManager.promises.findDoc.resolves({ inS3: false })
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(PersistorManager.getObjectStream).not.to.have.been.called
})
it('should throw an error if the file is not found', async function () {
// The persistor's NotFoundError should propagate to the caller.
PersistorManager.getObjectStream = sinon
.stub()
.rejects(new Errors.NotFoundError())
await expect(
DocArchiveManager.promises.unarchiveDoc(projectId, docId)
).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError)
})
})
// Destroying a project removes its docs from Mongo and, when an archiving
// backend is configured, from the object store as well.
describe('destroyProject', function () {
  describe('when archiving is enabled', function () {
    beforeEach(async function () {
      await DocArchiveManager.promises.destroyProject(projectId)
    })

    it('should delete the project in Mongo', function () {
      sinon.assert.calledWith(
        MongoManager.promises.destroyProject,
        projectId
      )
    })

    it('should delete the project in the persistor', function () {
      sinon.assert.calledWith(
        PersistorManager.deleteDirectory,
        Settings.docstore.bucket,
        projectId
      )
    })
  })

  describe('when archiving is disabled', function () {
    beforeEach(async function () {
      Settings.docstore.backend = ''
      await DocArchiveManager.promises.destroyProject(projectId)
    })

    it('should delete the project in Mongo', function () {
      sinon.assert.calledWith(
        MongoManager.promises.destroyProject,
        projectId
      )
    })

    it('should not delete the project in the persistor', function () {
      sinon.assert.notCalled(PersistorManager.deleteDirectory)
    })
  })
})
// Bulk archiving: every doc still living in Mongo gets archived; docs
// already in S3 are skipped.
describe('archiveAllDocs', function () {
  it('should resolve with valid arguments', async function () {
    await expect(DocArchiveManager.promises.archiveAllDocs(projectId)).to
      .eventually.be.fulfilled
  })

  it('should archive all project docs which are not in s3', async function () {
    await DocArchiveManager.promises.archiveAllDocs(projectId)
    // Docs with lines in Mongo (not inS3) must be archived...
    expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
      projectId,
      mongoDocs[0]._id
    )
    expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
      projectId,
      mongoDocs[1]._id
    )
    expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
      projectId,
      mongoDocs[4]._id
    )
    // ...while docs already in S3 must be left alone.
    expect(
      MongoManager.promises.markDocAsArchived
    ).not.to.have.been.calledWith(projectId, mongoDocs[2]._id)
    expect(
      MongoManager.promises.markDocAsArchived
    ).not.to.have.been.calledWith(projectId, mongoDocs[3]._id)
  })

  describe('when archiving is not configured', function () {
    beforeEach(function () {
      Settings.docstore.backend = undefined
    })

    it('should bail out early', async function () {
      // Fix: call archiveAllDocs, the method under test. The original
      // called archiveDoc, which never queries
      // getNonArchivedProjectDocIds, so the assertion passed vacuously.
      await DocArchiveManager.promises.archiveAllDocs(projectId)
      expect(MongoManager.promises.getNonArchivedProjectDocIds).to.not.have
        .been.called
    })
  })
})
// Bulk unarchiving: every archived doc is fetched back from the store.
describe('unArchiveAllDocs', function () {
  it('should resolve with valid arguments', async function () {
    await expect(DocArchiveManager.promises.unArchiveAllDocs(projectId)).to
      .eventually.be.fulfilled
  })

  it('should unarchive all inS3 docs', async function () {
    await DocArchiveManager.promises.unArchiveAllDocs(projectId)
    for (const doc of archivedDocs) {
      expect(PersistorManager.getObjectStream).to.have.been.calledWith(
        Settings.docstore.bucket,
        `${projectId}/${doc._id}`
      )
    }
  })

  describe('when archiving is not configured', function () {
    beforeEach(function () {
      Settings.docstore.backend = undefined
    })

    it('should bail out early', async function () {
      // Fix: call unArchiveAllDocs, the method under test. The original
      // called archiveDoc, which never touches
      // getNonDeletedArchivedProjectDocs, so the assertion passed
      // vacuously.
      await DocArchiveManager.promises.unArchiveAllDocs(projectId)
      expect(MongoManager.promises.getNonDeletedArchivedProjectDocs).to.not
        .have.been.called
    })
  })
})
})

View File

@@ -0,0 +1,755 @@
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = require('node:path').join(
__dirname,
'../../../app/js/DocManager'
)
const { ObjectId } = require('mongodb-legacy')
const Errors = require('../../../app/js/Errors')
describe('DocManager', function () {
// Fresh ids and stubbed collaborators for every DocManager test.
beforeEach(function () {
  this.doc_id = new ObjectId().toString()
  this.project_id = new ObjectId().toString()
  this.another_project_id = new ObjectId().toString()
  this.stubbedError = new Error('blew up')
  this.version = 42

  this.MongoManager = {
    promises: {
      findDoc: sinon.stub(),
      getProjectsDocs: sinon.stub(),
      patchDoc: sinon.stub().resolves(),
      upsertIntoDocCollection: sinon.stub().resolves(),
    },
  }
  this.DocArchiveManager = {
    promises: {
      unarchiveDoc: sinon.stub(),
      unArchiveAllDocs: sinon.stub(),
      archiveDoc: sinon.stub().resolves(),
    },
  }
  // Identity conversion for ranges; range-change detection is stubbed off
  // by default and switched on per test where needed.
  this.RangeManager = {
    jsonRangesToMongo: ranges => ranges,
    shouldUpdateRanges: sinon.stub().returns(false),
  }
  this.settings = { docstore: {} }

  // Load the module under test with all collaborators replaced by stubs.
  this.DocManager = SandboxedModule.require(modulePath, {
    requires: {
      './MongoManager': this.MongoManager,
      './DocArchiveManager': this.DocArchiveManager,
      './RangeManager': this.RangeManager,
      '@overleaf/settings': this.settings,
      './Errors': Errors,
    },
  })
})
describe('getFullDoc', function () {
  beforeEach(function () {
    this.DocManager.promises._getDoc = sinon.stub()
    this.doc = { _id: this.doc_id, lines: ['2134'] }
  })

  it('should call get doc with a quick filter', async function () {
    this.DocManager.promises._getDoc.resolves(this.doc)
    const result = await this.DocManager.promises.getFullDoc(
      this.project_id,
      this.doc_id
    )
    expect(result).to.equal(this.doc)
    // The full-doc fetch requests every field of interest.
    sinon.assert.calledWith(
      this.DocManager.promises._getDoc,
      this.project_id,
      this.doc_id,
      {
        lines: true,
        rev: true,
        deleted: true,
        version: true,
        ranges: true,
        inS3: true,
      }
    )
  })

  it('should return error when get doc errors', async function () {
    this.DocManager.promises._getDoc.rejects(this.stubbedError)
    await expect(
      this.DocManager.promises.getFullDoc(this.project_id, this.doc_id)
    ).to.be.rejectedWith(this.stubbedError)
  })
})
describe('getRawDoc', function () {
  beforeEach(function () {
    this.DocManager.promises._getDoc = sinon.stub()
    this.doc = { lines: ['2134'] }
  })

  it('should call get doc with a quick filter', async function () {
    this.DocManager.promises._getDoc.resolves(this.doc)
    const result = await this.DocManager.promises.getDocLines(
      this.project_id,
      this.doc_id
    )
    expect(result).to.equal(this.doc)
    // Raw fetch only needs the lines (plus the inS3 marker).
    sinon.assert.calledWith(
      this.DocManager.promises._getDoc,
      this.project_id,
      this.doc_id,
      { lines: true, inS3: true }
    )
  })

  it('should return error when get doc errors', async function () {
    this.DocManager.promises._getDoc.rejects(this.stubbedError)
    await expect(
      this.DocManager.promises.getDocLines(this.project_id, this.doc_id)
    ).to.be.rejectedWith(this.stubbedError)
  })
})
// _getDoc: the internal fetch that enforces the inS3 filter and performs
// transparent unarchiving.
describe('getDoc', function () {
beforeEach(function () {
this.project = { name: 'mock-project' }
this.doc = {
_id: this.doc_id,
project_id: this.project_id,
lines: ['mock-lines'],
version: this.version,
}
})
describe('when using a filter', function () {
beforeEach(function () {
this.MongoManager.promises.findDoc.resolves(this.doc)
})
// Callers must always request inS3 so archived docs can be detected.
it('should error if inS3 is not set to true', async function () {
await expect(
this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
inS3: false,
})
).to.be.rejected
})
it('should always get inS3 even when no filter is passed', async function () {
// The rejection must happen before any Mongo query is made.
await expect(
this.DocManager.promises._getDoc(this.project_id, this.doc_id)
).to.be.rejected
this.MongoManager.promises.findDoc.called.should.equal(false)
})
it('should not error if inS3 is set to true', async function () {
await this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
inS3: true,
})
})
})
describe('when the doc is in the doc collection', function () {
beforeEach(async function () {
this.MongoManager.promises.findDoc.resolves(this.doc)
this.result = await this.DocManager.promises._getDoc(
this.project_id,
this.doc_id,
{ version: true, inS3: true }
)
})
it('should get the doc from the doc collection', function () {
this.MongoManager.promises.findDoc
.calledWith(this.project_id, this.doc_id)
.should.equal(true)
})
it('should return the doc with the version', function () {
this.result.lines.should.equal(this.doc.lines)
this.result.version.should.equal(this.version)
})
})
describe('when MongoManager.findDoc errors', function () {
it('should return the error', async function () {
this.MongoManager.promises.findDoc.rejects(this.stubbedError)
await expect(
this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
version: true,
inS3: true,
})
).to.be.rejectedWith(this.stubbedError)
})
})
describe('when the doc is archived', function () {
beforeEach(async function () {
this.doc = {
_id: this.doc_id,
project_id: this.project_id,
version: 2,
inS3: true,
}
this.unarchivedDoc = {
_id: this.doc_id,
project_id: this.project_id,
lines: ['mock-lines'],
version: 2,
inS3: false,
}
this.MongoManager.promises.findDoc.resolves(this.doc)
// Simulate unarchiving: after unarchiveDoc runs, subsequent findDoc
// calls return the restored doc.
this.DocArchiveManager.promises.unarchiveDoc.callsFake(
async (projectId, docId) => {
this.MongoManager.promises.findDoc.resolves({
...this.unarchivedDoc,
})
}
)
this.result = await this.DocManager.promises._getDoc(
this.project_id,
this.doc_id,
{
version: true,
inS3: true,
}
)
})
it('should call the DocArchive to unarchive the doc', function () {
this.DocArchiveManager.promises.unarchiveDoc
.calledWith(this.project_id, this.doc_id)
.should.equal(true)
})
// Once before unarchiving, once after.
it('should look up the doc twice', function () {
this.MongoManager.promises.findDoc.calledTwice.should.equal(true)
})
it('should return the doc', function () {
expect(this.result).to.deep.equal({
...this.unarchivedDoc,
})
})
})
describe('when the doc does not exist in the docs collection', function () {
it('should return a NotFoundError', async function () {
this.MongoManager.promises.findDoc.resolves(null)
await expect(
this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
version: true,
inS3: true,
})
).to.be.rejectedWith(
`No such doc: ${this.doc_id} in project ${this.project_id}`
)
})
})
})
describe('getAllNonDeletedDocs', function () {
  describe('when the project exists', function () {
    beforeEach(async function () {
      this.docs = [
        {
          _id: this.doc_id,
          project_id: this.project_id,
          lines: ['mock-lines'],
        },
      ]
      this.MongoManager.promises.getProjectsDocs.resolves(this.docs)
      this.DocArchiveManager.promises.unArchiveAllDocs.resolves(this.docs)
      this.filter = { lines: true }
      this.result = await this.DocManager.promises.getAllNonDeletedDocs(
        this.project_id,
        this.filter
      )
    })

    it('should get the project from the database', function () {
      // Deleted docs must be excluded and the filter forwarded as-is.
      sinon.assert.calledWith(
        this.MongoManager.promises.getProjectsDocs,
        this.project_id,
        { include_deleted: false },
        this.filter
      )
    })

    it('should return the docs', function () {
      expect(this.result).to.deep.equal(this.docs)
    })
  })

  describe('when there are no docs for the project', function () {
    it('should return a NotFoundError', async function () {
      this.MongoManager.promises.getProjectsDocs.resolves(null)
      this.DocArchiveManager.promises.unArchiveAllDocs.resolves(null)
      await expect(
        this.DocManager.promises.getAllNonDeletedDocs(
          this.project_id,
          this.filter
        )
      ).to.be.rejectedWith(`No docs for project ${this.project_id}`)
    })
  })
})
describe('patchDoc', function () {
describe('when the doc exists', function () {
beforeEach(function () {
// patchDoc only needs the doc to exist in Mongo; lines/rev are set up
// for completeness.
this.lines = ['mock', 'doc', 'lines']
this.rev = 77
this.MongoManager.promises.findDoc.resolves({
_id: new ObjectId(this.doc_id),
})
this.meta = {}
})
describe('standard path', function () {
beforeEach(async function () {
await this.DocManager.promises.patchDoc(
this.project_id,
this.doc_id,
this.meta
)
})
// The doc must be looked up first so a missing doc can be rejected.
it('should get the doc', function () {
expect(this.MongoManager.promises.findDoc).to.have.been.calledWith(
this.project_id,
this.doc_id
)
})
it('should persist the meta', function () {
expect(this.MongoManager.promises.patchDoc).to.have.been.calledWith(
this.project_id,
this.doc_id,
this.meta
)
})
})
describe('background flush disabled and deleting a doc', function () {
beforeEach(async function () {
// With archiveOnSoftDelete off, a soft delete must not trigger a
// background archive.
this.settings.docstore.archiveOnSoftDelete = false
this.meta.deleted = true
await this.DocManager.promises.patchDoc(
this.project_id,
this.doc_id,
this.meta
)
})
it('should not flush the doc out of mongo', function () {
expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been
.called
})
})
describe('background flush enabled and not deleting a doc', function () {
  beforeEach(async function () {
    // Fix: this case is meant to exercise archiveOnSoftDelete *enabled*
    // (see the describe title). The original set it to false, which made
    // this block an exact duplicate of the "disabled" case above and the
    // assertion vacuous.
    this.settings.docstore.archiveOnSoftDelete = true
    this.meta.deleted = false
    await this.DocManager.promises.patchDoc(
      this.project_id,
      this.doc_id,
      this.meta
    )
  })

  it('should not flush the doc out of mongo', function () {
    // Even with the flag on, a patch that is not a delete must not
    // trigger a background archive.
    expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been
      .called
  })
})
describe('background flush enabled and deleting a doc', function () {
beforeEach(function () {
this.settings.docstore.archiveOnSoftDelete = true
this.meta.deleted = true
})
describe('when the background flush succeeds', function () {
beforeEach(async function () {
await this.DocManager.promises.patchDoc(
this.project_id,
this.doc_id,
this.meta
)
})
it('should not log a warning', function () {
// NOTE(review): `this.logger` is not defined in this file; it is
// presumably injected by a shared SandboxedModule/mocha setup —
// verify in the global test configuration.
expect(this.logger.warn).to.not.have.been.called
})
it('should flush the doc out of mongo', function () {
expect(
this.DocArchiveManager.promises.archiveDoc
).to.have.been.calledWith(this.project_id, this.doc_id)
})
})
describe('when the background flush fails', function () {
beforeEach(async function () {
this.err = new Error('foo')
this.DocArchiveManager.promises.archiveDoc.rejects(this.err)
// The archiving failure must not reject patchDoc (best effort).
await this.DocManager.promises.patchDoc(
this.project_id,
this.doc_id,
this.meta
)
})
it('should log a warning', function () {
expect(this.logger.warn).to.have.been.calledWith(
sinon.match({
projectId: this.project_id,
docId: this.doc_id,
err: this.err,
}),
'archiving a single doc in the background failed'
)
})
})
})
})
describe('when the doc does not exist', function () {
it('should return a NotFoundError', async function () {
this.MongoManager.promises.findDoc.resolves(null)
await expect(
this.DocManager.promises.patchDoc(this.project_id, this.doc_id, {})
).to.be.rejectedWith(
`No such project/doc to delete: ${this.project_id}/${this.doc_id}`
)
})
})
})
// updateDoc: writes lines/ranges/version changes, bumping rev only when
// content (lines or ranges) actually changed.
describe('updateDoc', function () {
beforeEach(function () {
this.oldDocLines = ['old', 'doc', 'lines']
this.newDocLines = ['new', 'doc', 'lines']
this.originalRanges = {
changes: [
{
id: new ObjectId().toString(),
op: { i: 'foo', p: 3 },
meta: {
user_id: new ObjectId().toString(),
ts: new Date().toString(),
},
},
],
}
this.newRanges = {
changes: [
{
id: new ObjectId().toString(),
op: { i: 'bar', p: 6 },
meta: {
user_id: new ObjectId().toString(),
ts: new Date().toString(),
},
},
],
}
this.version = 42
// The existing doc as _getDoc would return it (rev also kept on `this`).
this.doc = {
_id: this.doc_id,
project_id: this.project_id,
lines: this.oldDocLines,
rev: (this.rev = 5),
version: this.version,
ranges: this.originalRanges,
}
this.DocManager.promises._getDoc = sinon.stub()
})
describe('when only the doc lines have changed', function () {
beforeEach(async function () {
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
this.version,
this.originalRanges
)
})
it('should get the existing doc', function () {
this.DocManager.promises._getDoc
.calledWith(this.project_id, this.doc_id, {
version: true,
rev: true,
lines: true,
ranges: true,
inS3: true,
})
.should.equal(true)
})
// Only the changed field (lines) is written.
it('should upsert the document to the doc collection', function () {
this.MongoManager.promises.upsertIntoDocCollection
.calledWith(this.project_id, this.doc_id, this.rev, {
lines: this.newDocLines,
})
.should.equal(true)
})
it('should return the new rev', function () {
expect(this.result).to.deep.equal({ modified: true, rev: this.rev + 1 })
})
})
describe('when the doc ranges have changed', function () {
beforeEach(async function () {
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
// Force the range-change detection on for this case.
this.RangeManager.shouldUpdateRanges.returns(true)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.oldDocLines,
this.version,
this.newRanges
)
})
it('should upsert the ranges', function () {
this.MongoManager.promises.upsertIntoDocCollection
.calledWith(this.project_id, this.doc_id, this.rev, {
ranges: this.newRanges,
})
.should.equal(true)
})
it('should return the new rev', function () {
expect(this.result).to.deep.equal({ modified: true, rev: this.rev + 1 })
})
})
describe('when only the version has changed', function () {
beforeEach(async function () {
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.oldDocLines,
this.version + 1,
this.originalRanges
)
})
it('should update the version', function () {
this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
this.project_id,
this.doc_id,
this.rev,
{ version: this.version + 1 }
)
})
// A version-only change does not bump the rev.
it('should return the old rev', function () {
expect(this.result).to.deep.equal({ modified: true, rev: this.rev })
})
})
describe('when the doc has not changed at all', function () {
beforeEach(async function () {
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.oldDocLines,
this.version,
this.originalRanges
)
})
it('should not update the ranges or lines or version', function () {
this.MongoManager.promises.upsertIntoDocCollection.called.should.equal(
false
)
})
it('should return the old rev and modified == false', function () {
expect(this.result).to.deep.equal({ modified: false, rev: this.rev })
})
})
// lines, version and ranges are all mandatory arguments.
describe('when the version is null', function () {
it('should return an error', async function () {
await expect(
this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
null,
this.originalRanges
)
).to.be.rejectedWith('no lines, version or ranges provided')
})
})
describe('when the lines are null', function () {
it('should return an error', async function () {
await expect(
this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
null,
this.version,
this.originalRanges
)
).to.be.rejectedWith('no lines, version or ranges provided')
})
})
describe('when the ranges are null', function () {
it('should return an error', async function () {
await expect(
this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
this.version,
null
)
).to.be.rejectedWith('no lines, version or ranges provided')
})
})
describe('when there is a generic error getting the doc', function () {
beforeEach(async function () {
this.error = new Error('doc could not be found')
this.DocManager.promises._getDoc = sinon.stub().rejects(this.error)
await expect(
this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
this.version,
this.originalRanges
)
).to.be.rejectedWith(this.error)
})
it('should not upsert the document to the doc collection', function () {
this.MongoManager.promises.upsertIntoDocCollection.should.not.have.been
.called
})
})
describe('when the version was decremented', function () {
it('should return an error', async function () {
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
await expect(
this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
this.version - 1,
this.originalRanges
)
).to.be.rejectedWith(Errors.DocVersionDecrementedError)
})
})
describe('when the doc lines have not changed', function () {
beforeEach(async function () {
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
// slice() passes an equal-but-distinct array: equality must be by
// content, not identity.
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.oldDocLines.slice(),
this.version,
this.originalRanges
)
})
it('should not update the doc', function () {
this.MongoManager.promises.upsertIntoDocCollection.called.should.equal(
false
)
})
it('should return the existing rev', function () {
expect(this.result).to.deep.equal({ modified: false, rev: this.rev })
})
})
describe('when the doc does not exist', function () {
beforeEach(async function () {
this.DocManager.promises._getDoc = sinon.stub().resolves(null)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
this.version,
this.originalRanges
)
})
// A missing doc is created: no prior rev is passed to the upsert.
it('should upsert the document to the doc collection', function () {
this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
this.project_id,
this.doc_id,
undefined,
{
lines: this.newDocLines,
ranges: this.originalRanges,
version: this.version,
}
)
})
it('should return the new rev', function () {
expect(this.result).to.deep.equal({ modified: true, rev: 1 })
})
})
describe('when another update is racing', function () {
beforeEach(async function () {
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
// First upsert loses the race (DocRevValueError); updateDoc should
// re-read and retry.
this.MongoManager.promises.upsertIntoDocCollection
.onFirstCall()
.rejects(new Errors.DocRevValueError())
this.RangeManager.shouldUpdateRanges.returns(true)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
this.version + 1,
this.newRanges
)
})
it('should upsert the doc twice', function () {
this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
this.project_id,
this.doc_id,
this.rev,
{
ranges: this.newRanges,
lines: this.newDocLines,
version: this.version + 1,
}
)
this.MongoManager.promises.upsertIntoDocCollection.should.have.been
.calledTwice
})
it('should return the new rev', function () {
expect(this.result).to.deep.equal({ modified: true, rev: this.rev + 1 })
})
})
})
})

View File

@@ -0,0 +1,578 @@
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const { assert, expect } = require('chai')
const modulePath = require('node:path').join(
__dirname,
'../../../app/js/HttpController'
)
const { ObjectId } = require('mongodb-legacy')
const Errors = require('../../../app/js/Errors')
describe('HttpController', function () {
// Shared fixture: build an HttpController with stubbed collaborators
// (DocManager, DocArchiveManager) via SandboxedModule, plus fake req/res.
beforeEach(function () {
  const settings = {
    max_doc_length: 2 * 1024 * 1024, // 2 MB cap exercised by the updateDoc 413 test
  }
  this.DocArchiveManager = {
    unArchiveAllDocs: sinon.stub().yields(),
  }
  this.DocManager = {}
  this.HttpController = SandboxedModule.require(modulePath, {
    requires: {
      './DocManager': this.DocManager,
      './DocArchiveManager': this.DocArchiveManager,
      '@overleaf/settings': settings,
      './HealthChecker': {},
      './Errors': Errors,
    },
  })
  // Minimal Express-style response double; status() chains like the real one.
  this.res = {
    send: sinon.stub(),
    sendStatus: sinon.stub(),
    json: sinon.stub(),
    setHeader: sinon.stub(),
  }
  this.res.status = sinon.stub().returns(this.res)
  this.req = { query: {} }
  this.next = sinon.stub()
  this.projectId = 'mock-project-id'
  this.docId = 'mock-doc-id'
  this.doc = {
    _id: this.docId,
    lines: ['mock', 'lines', ' here', '', '', ' spaces '],
    version: 42,
    rev: 5,
  }
  // Same content as this.doc but flagged as deleted, for include_deleted tests.
  this.deletedDoc = {
    deleted: true,
    _id: this.docId,
    lines: ['mock', 'lines', ' here', '', '', ' spaces '],
    version: 42,
    rev: 5,
  }
})
// getDoc returns a single doc (lines + version + rev) as JSON.
describe('getDoc', function () {
  describe('without deleted docs', function () {
    beforeEach(function () {
      this.req.params = {
        project_id: this.projectId,
        doc_id: this.docId,
      }
      this.DocManager.getFullDoc = sinon
        .stub()
        .callsArgWith(2, null, this.doc)
      this.HttpController.getDoc(this.req, this.res, this.next)
    })
    it('should get the document with the version (including deleted)', function () {
      this.DocManager.getFullDoc
        .calledWith(this.projectId, this.docId)
        .should.equal(true)
    })
    it('should return the doc as JSON', function () {
      this.res.json
        .calledWith({
          _id: this.docId,
          lines: this.doc.lines,
          rev: this.doc.rev,
          version: this.doc.version,
        })
        .should.equal(true)
    })
  })
  describe('which is deleted', function () {
    beforeEach(function () {
      this.req.params = {
        project_id: this.projectId,
        doc_id: this.docId,
      }
      // DocManager yields the deleted variant of the doc.
      this.DocManager.getFullDoc = sinon
        .stub()
        .callsArgWith(2, null, this.deletedDoc)
    })
    it('should get the doc from the doc manager', function () {
      this.HttpController.getDoc(this.req, this.res, this.next)
      this.DocManager.getFullDoc
        .calledWith(this.projectId, this.docId)
        .should.equal(true)
    })
    it('should return 404 if the query string delete is not set ', function () {
      this.HttpController.getDoc(this.req, this.res, this.next)
      this.res.sendStatus.calledWith(404).should.equal(true)
    })
    it('should return the doc as JSON if include_deleted is set to true', function () {
      this.req.query.include_deleted = 'true'
      this.HttpController.getDoc(this.req, this.res, this.next)
      // deletedDoc has the same lines/rev/version as this.doc, plus deleted: true.
      this.res.json
        .calledWith({
          _id: this.docId,
          lines: this.doc.lines,
          rev: this.doc.rev,
          deleted: true,
          version: this.doc.version,
        })
        .should.equal(true)
    })
  })
})
// getRawDoc returns the doc body as plain text (lines joined with newlines).
describe('getRawDoc', function () {
  beforeEach(function () {
    this.req.params = {
      project_id: this.projectId,
      doc_id: this.docId,
    }
    this.DocManager.getDocLines = sinon.stub().callsArgWith(2, null, this.doc)
    this.HttpController.getRawDoc(this.req, this.res, this.next)
  })

  it('should get the document without the version', function () {
    sinon.assert.calledWith(
      this.DocManager.getDocLines,
      this.projectId,
      this.docId
    )
  })

  it('should set the content type header', function () {
    sinon.assert.calledWith(this.res.setHeader, 'content-type', 'text/plain')
  })

  it('should send the raw version of the doc', function () {
    // The raw body is simply every line joined by "\n".
    expect(this.res.send.args[0][0]).to.equal(this.doc.lines.join('\n'))
  })
})
// getAllDocs returns lines + rev for every non-deleted doc in a project.
describe('getAllDocs', function () {
  describe('normally', function () {
    beforeEach(function () {
      this.req.params = { project_id: this.projectId }
      this.docs = [
        {
          _id: new ObjectId(),
          lines: ['mock', 'lines', 'one'],
          rev: 2,
        },
        {
          _id: new ObjectId(),
          lines: ['mock', 'lines', 'two'],
          rev: 4,
        },
      ]
      this.DocManager.getAllNonDeletedDocs = sinon
        .stub()
        .callsArgWith(2, null, this.docs)
      this.HttpController.getAllDocs(this.req, this.res, this.next)
    })
    it('should get all the (non-deleted) docs', function () {
      this.DocManager.getAllNonDeletedDocs
        .calledWith(this.projectId, { lines: true, rev: true })
        .should.equal(true)
    })
    it('should return the doc as JSON', function () {
      // ObjectIds are serialised to strings in the JSON payload.
      this.res.json
        .calledWith([
          {
            _id: this.docs[0]._id.toString(),
            lines: this.docs[0].lines,
            rev: this.docs[0].rev,
          },
          {
            _id: this.docs[1]._id.toString(),
            lines: this.docs[1].lines,
            rev: this.docs[1].rev,
          },
        ])
        .should.equal(true)
    })
  })
  describe('with null lines', function () {
    beforeEach(function () {
      this.req.params = { project_id: this.projectId }
      this.docs = [
        {
          _id: new ObjectId(),
          lines: null,
          rev: 2,
        },
        {
          _id: new ObjectId(),
          lines: ['mock', 'lines', 'two'],
          rev: 4,
        },
      ]
      this.DocManager.getAllNonDeletedDocs = sinon
        .stub()
        .callsArgWith(2, null, this.docs)
      this.HttpController.getAllDocs(this.req, this.res, this.next)
    })
    it('should return the doc with fallback lines', function () {
      // null lines are replaced with an empty array in the response.
      this.res.json
        .calledWith([
          {
            _id: this.docs[0]._id.toString(),
            lines: [],
            rev: this.docs[0].rev,
          },
          {
            _id: this.docs[1]._id.toString(),
            lines: this.docs[1].lines,
            rev: this.docs[1].rev,
          },
        ])
        .should.equal(true)
    })
  })
  describe('with a null doc', function () {
    beforeEach(function () {
      this.req.params = { project_id: this.projectId }
      this.docs = [
        {
          _id: new ObjectId(),
          lines: ['mock', 'lines', 'one'],
          rev: 2,
        },
        null, // simulate a corrupt/missing entry in the result set
        {
          _id: new ObjectId(),
          lines: ['mock', 'lines', 'two'],
          rev: 4,
        },
      ]
      this.DocManager.getAllNonDeletedDocs = sinon
        .stub()
        .callsArgWith(2, null, this.docs)
      this.HttpController.getAllDocs(this.req, this.res, this.next)
    })
    it('should return the non null docs as JSON', function () {
      this.res.json
        .calledWith([
          {
            _id: this.docs[0]._id.toString(),
            lines: this.docs[0].lines,
            rev: this.docs[0].rev,
          },
          {
            _id: this.docs[2]._id.toString(),
            lines: this.docs[2].lines,
            rev: this.docs[2].rev,
          },
        ])
        .should.equal(true)
    })
    it('should log out an error', function () {
      // NOTE(review): this.logger is not defined in this file; it is
      // presumably injected by the shared test bootstrap (SandboxedModule
      // globals) — confirm against the test setup.
      this.logger.error
        .calledWith(
          {
            err: sinon.match.has('message', 'null doc'),
            projectId: this.projectId,
          },
          'encountered null doc'
        )
        .should.equal(true)
    })
  })
})
// getAllRanges returns the ranges of every non-deleted doc in a project.
describe('getAllRanges', function () {
  describe('normally', function () {
    beforeEach(function () {
      this.req.params = { project_id: this.projectId }
      this.docs = [
        { _id: new ObjectId(), ranges: { mock_ranges: 'one' } },
        { _id: new ObjectId(), ranges: { mock_ranges: 'two' } },
      ]
      this.DocManager.getAllNonDeletedDocs = sinon
        .stub()
        .callsArgWith(2, null, this.docs)
      this.HttpController.getAllRanges(this.req, this.res, this.next)
    })

    it('should get all the (non-deleted) doc ranges', function () {
      sinon.assert.calledWith(this.DocManager.getAllNonDeletedDocs, this.projectId, {
        ranges: true,
      })
    })

    it('should return the doc as JSON', function () {
      // ObjectIds are stringified; ranges are passed through as-is.
      const expectedBody = this.docs.map(doc => ({
        _id: doc._id.toString(),
        ranges: doc.ranges,
      }))
      sinon.assert.calledWith(this.res.json, expectedBody)
    })
  })
})
// updateDoc validates the request body (lines, version, ranges, size)
// and forwards valid updates to DocManager.updateDoc.
describe('updateDoc', function () {
  beforeEach(function () {
    this.req.params = {
      project_id: this.projectId,
      doc_id: this.docId,
    }
  })
  describe('when the doc lines exist and were updated', function () {
    beforeEach(function () {
      this.req.body = {
        lines: (this.lines = ['hello', 'world']),
        version: (this.version = 42),
        ranges: (this.ranges = { changes: 'mock' }),
      }
      this.DocManager.updateDoc = sinon
        .stub()
        .yields(null, true, (this.rev = 5))
      this.HttpController.updateDoc(this.req, this.res, this.next)
    })
    it('should update the document', function () {
      this.DocManager.updateDoc
        .calledWith(
          this.projectId,
          this.docId,
          this.lines,
          this.version,
          this.ranges
        )
        .should.equal(true)
    })
    it('should return a modified status', function () {
      this.res.json
        .calledWith({ modified: true, rev: this.rev })
        .should.equal(true)
    })
  })
  describe('when the doc lines exist and were not updated', function () {
    beforeEach(function () {
      this.req.body = {
        lines: (this.lines = ['hello', 'world']),
        version: (this.version = 42),
        ranges: {},
      }
      this.DocManager.updateDoc = sinon
        .stub()
        .yields(null, false, (this.rev = 5))
      this.HttpController.updateDoc(this.req, this.res, this.next)
    })
    it('should return a modified status', function () {
      this.res.json
        .calledWith({ modified: false, rev: this.rev })
        .should.equal(true)
    })
  })
  describe('when the doc lines are not provided', function () {
    beforeEach(function () {
      this.req.body = { version: 42, ranges: {} }
      this.DocManager.updateDoc = sinon.stub().yields(null, false)
      this.HttpController.updateDoc(this.req, this.res, this.next)
    })
    it('should not update the document', function () {
      this.DocManager.updateDoc.called.should.equal(false)
    })
    it('should return a 400 (bad request) response', function () {
      this.res.sendStatus.calledWith(400).should.equal(true)
    })
  })
  describe('when the doc version is not provided', function () {
    beforeEach(function () {
      // Fixed: the body previously *included* version (and omitted ranges),
      // which duplicated the "ranges not provided" case below instead of
      // exercising a missing version. Omit version here.
      this.req.body = { lines: ['hello world'], ranges: {} }
      this.DocManager.updateDoc = sinon.stub().yields(null, false)
      this.HttpController.updateDoc(this.req, this.res, this.next)
    })
    it('should not update the document', function () {
      this.DocManager.updateDoc.called.should.equal(false)
    })
    it('should return a 400 (bad request) response', function () {
      this.res.sendStatus.calledWith(400).should.equal(true)
    })
  })
  describe('when the doc ranges is not provided', function () {
    beforeEach(function () {
      this.req.body = { lines: ['foo'], version: 42 }
      this.DocManager.updateDoc = sinon.stub().yields(null, false)
      this.HttpController.updateDoc(this.req, this.res, this.next)
    })
    it('should not update the document', function () {
      this.DocManager.updateDoc.called.should.equal(false)
    })
    it('should return a 400 (bad request) response', function () {
      this.res.sendStatus.calledWith(400).should.equal(true)
    })
  })
  describe('when the doc body is too large', function () {
    beforeEach(function () {
      // 2049 KB of content, just over the 2 MB max_doc_length setting.
      this.req.body = {
        lines: (this.lines = Array(2049).fill('a'.repeat(1024))),
        version: (this.version = 42),
        ranges: (this.ranges = { changes: 'mock' }),
      }
      this.HttpController.updateDoc(this.req, this.res, this.next)
    })
    it('should return a 413 (too large) response', function () {
      sinon.assert.calledWith(this.res.status, 413)
    })
    it('should report that the document body is too large', function () {
      sinon.assert.calledWith(this.res.send, 'document body too large')
    })
  })
})
// patchDoc applies metadata changes (e.g. rename/delete flags) to a doc.
describe('patchDoc', function () {
  beforeEach(function () {
    this.req.params = {
      project_id: this.projectId,
      doc_id: this.docId,
    }
    this.req.body = { name: 'foo.tex' }
    this.DocManager.patchDoc = sinon.stub().yields(null)
    this.HttpController.patchDoc(this.req, this.res, this.next)
  })
  it('should delete the document', function () {
    expect(this.DocManager.patchDoc).to.have.been.calledWith(
      this.projectId,
      this.docId
    )
  })
  it('should return a 204 (No Content)', function () {
    expect(this.res.sendStatus).to.have.been.calledWith(204)
  })
  describe('with an invalid payload', function () {
    beforeEach(function () {
      this.req.body = { cannot: 'happen' }
      this.DocManager.patchDoc = sinon.stub().yields(null)
      this.HttpController.patchDoc(this.req, this.res, this.next)
    })
    it('should log a message', function () {
      // NOTE(review): 'pathDoc' (sic) matches the message the implementation
      // logs; if that typo is ever fixed, fix it here too.
      expect(this.logger.fatal).to.have.been.calledWith(
        { field: 'cannot' },
        'joi validation for pathDoc is broken'
      )
    })
    it('should not pass the invalid field along', function () {
      // Unknown fields are stripped before reaching DocManager.
      expect(this.DocManager.patchDoc).to.have.been.calledWith(
        this.projectId,
        this.docId,
        {}
      )
    })
  })
})
// archiveAllDocs pushes every doc in a project to the archive store.
describe('archiveAllDocs', function () {
  beforeEach(function () {
    this.req.params = { project_id: this.projectId }
    this.DocArchiveManager.archiveAllDocs = sinon.stub().callsArg(1)
    this.HttpController.archiveAllDocs(this.req, this.res, this.next)
  })

  it('should archive the project', function () {
    sinon.assert.calledWith(this.DocArchiveManager.archiveAllDocs, this.projectId)
  })

  it('should return a 204 (No Content)', function () {
    sinon.assert.calledWith(this.res.sendStatus, 204)
  })
})
// unArchiveAllDocs pulls archived docs back from object storage into Mongo.
describe('unArchiveAllDocs', function () {
  beforeEach(function () {
    this.req.params = { project_id: this.projectId }
  })
  describe('on success', function () {
    beforeEach(function (done) {
      // The controller responds asynchronously; finish the mocha test only
      // once sendStatus has actually been invoked.
      this.res.sendStatus.callsFake(() => done())
      this.HttpController.unArchiveAllDocs(this.req, this.res, this.next)
    })
    it('returns a 200', function () {
      expect(this.res.sendStatus).to.have.been.calledWith(200)
    })
  })
  describe("when the archived rev doesn't match", function () {
    beforeEach(function (done) {
      this.res.sendStatus.callsFake(() => done())
      // Simulate a rev conflict surfaced by the archive layer.
      this.DocArchiveManager.unArchiveAllDocs.yields(
        new Errors.DocRevValueError('bad rev')
      )
      this.HttpController.unArchiveAllDocs(this.req, this.res, this.next)
    })
    it('returns a 409', function () {
      expect(this.res.sendStatus).to.have.been.calledWith(409)
    })
  })
})
// destroyProject permanently removes every doc belonging to a project.
describe('destroyProject', function () {
  beforeEach(function () {
    this.req.params = { project_id: this.projectId }
    this.DocArchiveManager.destroyProject = sinon.stub().callsArg(1)
    this.HttpController.destroyProject(this.req, this.res, this.next)
  })

  it('should destroy the docs', function () {
    this.DocArchiveManager.destroyProject
      .calledWith(this.projectId)
      .should.equal(true)
  })

  it('should return 204', function () {
    this.res.sendStatus.calledWith(204).should.equal(true)
  })
})
})

View File

@@ -0,0 +1,407 @@
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const modulePath = require('node:path').join(
__dirname,
'../../../app/js/MongoManager'
)
const { ObjectId } = require('mongodb-legacy')
const { assert, expect } = require('chai')
const Errors = require('../../../app/js/Errors')
describe('MongoManager', function () {
// Shared fixture: MongoManager loaded with a stubbed db handle and settings.
beforeEach(function () {
  this.db = {
    docs: {
      updateOne: sinon.stub().resolves({ matchedCount: 1 }),
      insertOne: sinon.stub().resolves(),
    },
  }
  this.MongoManager = SandboxedModule.require(modulePath, {
    requires: {
      './mongodb': {
        db: this.db,
        ObjectId,
      },
      '@overleaf/settings': {
        max_deleted_docs: 42, // asserted as the find() limit in the deleted-docs suite
        docstore: { archivingLockDurationMs: 5000 },
      },
      './Errors': Errors,
    },
  })
  this.projectId = new ObjectId().toString()
  this.docId = new ObjectId().toString()
  this.rev = 42
  this.stubbedErr = new Error('hello world')
  this.lines = ['Three French hens', 'Two turtle doves']
})
// findDoc looks a doc up by project + doc id with a field projection.
describe('findDoc', function () {
  beforeEach(async function () {
    this.doc = { name: 'mock-doc' }
    this.db.docs.findOne = sinon.stub().resolves(this.doc)
    this.filter = { lines: true }
    this.result = await this.MongoManager.promises.findDoc(
      this.projectId,
      this.docId,
      this.filter
    )
  })
  it('should find the doc', function () {
    this.db.docs.findOne
      .calledWith(
        {
          _id: new ObjectId(this.docId),
          project_id: new ObjectId(this.projectId),
        },
        {
          projection: this.filter,
        }
      )
      .should.equal(true)
  })
  it('should return the doc', function () {
    // Fixed: this previously compared this.doc with itself
    // (expect(this.doc).to.deep.equal(this.doc)), which could never fail.
    expect(this.result).to.deep.equal(this.doc)
  })
})
// patchDoc forwards metadata updates to a $set on the docs collection.
describe('patchDoc', function () {
  beforeEach(async function () {
    this.meta = { name: 'foo.tex' }
    await this.MongoManager.promises.patchDoc(
      this.projectId,
      this.docId,
      this.meta
    )
  })

  it('should pass the parameter along', function () {
    sinon.assert.calledWith(
      this.db.docs.updateOne,
      {
        _id: new ObjectId(this.docId),
        project_id: new ObjectId(this.projectId),
      },
      { $set: this.meta }
    )
  })
})
// getProjectsDocs fetches a project's docs, optionally including deleted ones.
describe('getProjectsDocs', function () {
  beforeEach(function () {
    this.filter = { lines: true }
    // Fixed: the find() stub previously resolved [this.doc, ...] where
    // this.doc is undefined in this suite (it is only assigned in findDoc's
    // own beforeEach), so the tests were asserting against `undefined`.
    // Use this.doc1 so real documents flow through the assertions.
    this.doc1 = { name: 'mock-doc1' }
    this.doc2 = { name: 'mock-doc2' }
    this.doc3 = { name: 'mock-doc3' }
    this.doc4 = { name: 'mock-doc4' }
    this.db.docs.find = sinon.stub().returns({
      toArray: sinon.stub().resolves([this.doc1, this.doc3, this.doc4]),
    })
  })
  describe('with included_deleted = false', function () {
    beforeEach(async function () {
      this.result = await this.MongoManager.promises.getProjectsDocs(
        this.projectId,
        { include_deleted: false },
        this.filter
      )
    })
    it('should find the non-deleted docs via the project_id', function () {
      this.db.docs.find
        .calledWith(
          {
            project_id: new ObjectId(this.projectId),
            deleted: { $ne: true },
          },
          {
            projection: this.filter,
          }
        )
        .should.equal(true)
    })
    it('should return the docs', function () {
      expect(this.result).to.deep.equal([this.doc1, this.doc3, this.doc4])
    })
  })
  describe('with included_deleted = true', function () {
    beforeEach(async function () {
      this.result = await this.MongoManager.promises.getProjectsDocs(
        this.projectId,
        { include_deleted: true },
        this.filter
      )
    })
    it('should find all via the project_id', function () {
      this.db.docs.find
        .calledWith(
          {
            project_id: new ObjectId(this.projectId),
          },
          {
            projection: this.filter,
          }
        )
        .should.equal(true)
    })
    it('should return the docs', function () {
      expect(this.result).to.deep.equal([this.doc1, this.doc3, this.doc4])
    })
  })
})
// getProjectsDeletedDocs lists deleted docs, newest first, capped by
// the max_deleted_docs setting (42 in this fixture).
describe('getProjectsDeletedDocs', function () {
  beforeEach(async function () {
    this.filter = { name: true }
    this.doc1 = { _id: '1', name: 'mock-doc1.tex' }
    this.doc2 = { _id: '2', name: 'mock-doc2.tex' }
    this.doc3 = { _id: '3', name: 'mock-doc3.tex' }
    this.db.docs.find = sinon.stub().returns({
      toArray: sinon.stub().resolves([this.doc1, this.doc2, this.doc3]),
    })
    this.result = await this.MongoManager.promises.getProjectsDeletedDocs(
      this.projectId,
      this.filter
    )
  })
  it('should find the deleted docs via the project_id', function () {
    this.db.docs.find
      .calledWith({
        project_id: new ObjectId(this.projectId),
        deleted: true,
      })
      .should.equal(true)
  })
  it('should filter, sort by deletedAt and limit', function () {
    // limit: 42 comes from the max_deleted_docs setting stubbed above.
    this.db.docs.find
      .calledWith(sinon.match.any, {
        projection: this.filter,
        sort: { deletedAt: -1 },
        limit: 42,
      })
      .should.equal(true)
  })
  it('should return the docs', function () {
    expect(this.result).to.deep.equal([this.doc1, this.doc2, this.doc3])
  })
})
// upsertIntoDocCollection updates an existing doc (guarded by the previous
// rev) or inserts a fresh one at rev 1 when no previous rev is given.
describe('upsertIntoDocCollection', function () {
  beforeEach(function () {
    this.oldRev = 77
    // Fixed: this.ranges was previously undefined in this suite, so the
    // insert assertions below compared `ranges: undefined` on both sides
    // and never checked a real value.
    this.ranges = { mock: 'ranges' }
  })
  it('should upsert the document', async function () {
    await this.MongoManager.promises.upsertIntoDocCollection(
      this.projectId,
      this.docId,
      this.oldRev,
      { lines: this.lines }
    )
    const args = this.db.docs.updateOne.args[0]
    // The filter pins the previous rev to detect concurrent writers.
    assert.deepEqual(args[0], {
      _id: new ObjectId(this.docId),
      project_id: new ObjectId(this.projectId),
      rev: this.oldRev,
    })
    assert.equal(args[1].$set.lines, this.lines)
    assert.equal(args[1].$inc.rev, 1)
  })
  it('should handle update error', async function () {
    this.db.docs.updateOne.rejects(this.stubbedErr)
    await expect(
      this.MongoManager.promises.upsertIntoDocCollection(
        this.projectId,
        this.docId,
        this.rev,
        {
          lines: this.lines,
        }
      )
    ).to.be.rejectedWith(this.stubbedErr)
  })
  it('should insert without a previous rev', async function () {
    await this.MongoManager.promises.upsertIntoDocCollection(
      this.projectId,
      this.docId,
      null,
      { lines: this.lines, ranges: this.ranges }
    )
    expect(this.db.docs.insertOne).to.have.been.calledWith({
      _id: new ObjectId(this.docId),
      project_id: new ObjectId(this.projectId),
      rev: 1,
      lines: this.lines,
      ranges: this.ranges,
    })
  })
  it('should handle generic insert error', async function () {
    this.db.docs.insertOne.rejects(this.stubbedErr)
    await expect(
      this.MongoManager.promises.upsertIntoDocCollection(
        this.projectId,
        this.docId,
        null,
        { lines: this.lines, ranges: this.ranges }
      )
    ).to.be.rejectedWith(this.stubbedErr)
  })
  it('should handle duplicate insert error', async function () {
    // Mongo duplicate-key errors (code 11000) are translated to
    // DocRevValueError by the implementation.
    this.db.docs.insertOne.rejects({ code: 11000 })
    await expect(
      this.MongoManager.promises.upsertIntoDocCollection(
        this.projectId,
        this.docId,
        null,
        { lines: this.lines, ranges: this.ranges }
      )
    ).to.be.rejectedWith(Errors.DocRevValueError)
  })
})
// destroyProject deletes every doc row belonging to the project.
describe('destroyProject', function () {
  beforeEach(async function () {
    this.projectId = new ObjectId()
    this.db.docs.deleteMany = sinon.stub().resolves()
    await this.MongoManager.promises.destroyProject(this.projectId)
  })

  it('should destroy all docs', function () {
    this.db.docs.deleteMany
      .calledWith({ project_id: this.projectId })
      .should.equal(true)
  })
})
// checkRevUnchanged re-reads a doc and rejects when its rev moved or
// when either rev is not a valid number.
describe('checkRevUnchanged', function () {
  // Fixed: was `this.beforeEach(...)` (calling the hook off the suite
  // context object); use the standard mocha global like the rest of the file.
  beforeEach(function () {
    this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: 1 }
  })
  it('should not error when the rev has not changed', async function () {
    this.db.docs.findOne = sinon.stub().resolves({ rev: 1 })
    await this.MongoManager.promises.checkRevUnchanged(this.doc)
  })
  it('should return an error when the rev has changed', async function () {
    this.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
    await expect(
      this.MongoManager.promises.checkRevUnchanged(this.doc)
    ).to.be.rejectedWith(Errors.DocModifiedError)
  })
  it('should return a value error if incoming rev is NaN', async function () {
    this.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
    this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: NaN }
    await expect(
      this.MongoManager.promises.checkRevUnchanged(this.doc)
    ).to.be.rejectedWith(Errors.DocRevValueError)
  })
  it('should return a value error if checked doc rev is NaN', async function () {
    this.db.docs.findOne = sinon.stub().resolves({ rev: NaN })
    await expect(
      this.MongoManager.promises.checkRevUnchanged(this.doc)
    ).to.be.rejectedWith(Errors.DocRevValueError)
  })
})
// restoreArchivedDoc writes an unarchived doc's content back to Mongo,
// clearing the inS3 flag; the update is guarded by the archived rev.
describe('restoreArchivedDoc', function () {
  beforeEach(function () {
    this.archivedDoc = {
      lines: ['a', 'b', 'c'],
      ranges: { some: 'ranges' },
      rev: 2,
    }
  })
  describe('complete doc', function () {
    beforeEach(async function () {
      await this.MongoManager.promises.restoreArchivedDoc(
        this.projectId,
        this.docId,
        this.archivedDoc
      )
    })
    it('updates Mongo', function () {
      expect(this.db.docs.updateOne).to.have.been.calledWith(
        {
          _id: new ObjectId(this.docId),
          project_id: new ObjectId(this.projectId),
          rev: this.archivedDoc.rev,
        },
        {
          $set: {
            lines: this.archivedDoc.lines,
            ranges: this.archivedDoc.ranges,
          },
          $unset: {
            inS3: true,
          },
        }
      )
    })
  })
  describe('without ranges', function () {
    beforeEach(async function () {
      delete this.archivedDoc.ranges
      await this.MongoManager.promises.restoreArchivedDoc(
        this.projectId,
        this.docId,
        this.archivedDoc
      )
    })
    it('sets ranges to an empty object', function () {
      expect(this.db.docs.updateOne).to.have.been.calledWith(
        {
          _id: new ObjectId(this.docId),
          project_id: new ObjectId(this.projectId),
          rev: this.archivedDoc.rev,
        },
        {
          $set: {
            lines: this.archivedDoc.lines,
            ranges: {},
          },
          $unset: {
            inS3: true,
          },
        }
      )
    })
  })
  describe("when the update doesn't succeed", function () {
    it('throws a DocRevValueError', async function () {
      // matchedCount 0 means the rev guard did not match any document.
      this.db.docs.updateOne.resolves({ matchedCount: 0 })
      await expect(
        this.MongoManager.promises.restoreArchivedDoc(
          this.projectId,
          this.docId,
          this.archivedDoc
        )
      ).to.be.rejectedWith(Errors.DocRevValueError)
    })
  })
})
})

View File

@@ -0,0 +1,55 @@
const { expect } = require('chai')
const modulePath = '../../../app/js/PersistorManager.js'
const SandboxedModule = require('sandboxed-module')
// PersistorManager should hand back the configured persistor, or an
// AbstractPersistor (whose methods reject) when no backend is configured.
describe('PersistorManager', function () {
  class FakePersistor {
    async sendStream() {
      return 'sent'
    }
  }

  describe('configured', function () {
    it('should return fake persistor', async function () {
      const Settings = {
        docstore: {
          backend: 'gcs',
          bucket: 'wombat',
        },
      }
      const PersistorManager = SandboxedModule.require(modulePath, {
        requires: {
          '@overleaf/settings': Settings,
          '@overleaf/object-persistor': () => new FakePersistor(),
          '@overleaf/metrics': {},
        },
      })
      expect(PersistorManager).to.be.instanceof(FakePersistor)
      // Fixed: the `eventually` assertion was not awaited, so a failure
      // would have been a silently-dropped floating promise.
      await expect(PersistorManager.sendStream()).to.eventually.equal('sent')
    })
  })

  describe('not configured', function () {
    it('should return abstract persistor', async function () {
      const Settings = {
        docstore: {
          backend: undefined,
          bucket: 'wombat',
        },
      }
      const PersistorManager = SandboxedModule.require(modulePath, {
        requires: {
          '@overleaf/settings': Settings,
          '@overleaf/object-persistor': () => new FakePersistor(),
          '@overleaf/metrics': {},
        },
      })
      expect(PersistorManager.constructor.name).to.equal('AbstractPersistor')
      // Fixed: await the rejection assertion (was a floating promise).
      await expect(PersistorManager.sendStream()).to.eventually.be.rejectedWith(
        /method not implemented in persistor/
      )
    })
  })
})

View File

@@ -0,0 +1,253 @@
/* eslint-disable
no-return-assign,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const { assert, expect } = require('chai')
const modulePath = require('node:path').join(
__dirname,
'../../../app/js/RangeManager'
)
const { ObjectId } = require('mongodb-legacy')
describe('RangeManager', function () {
// Load RangeManager with a stubbed mongodb module so no DB is needed.
// Fixed: removed the decaffeinate `return (this.x = ...)` artifact flagged
// by the file's TODO; mocha ignores a non-promise return from a hook.
beforeEach(function () {
  this.RangeManager = SandboxedModule.require(modulePath, {
    requires: {
      './mongodb': {
        ObjectId,
      },
    },
  })
})
// jsonRangesToMongo converts id strings to ObjectIds and timestamp strings
// to Dates inside a ranges structure (changes + comments).
// Cleanup: removed the unnecessary `return` statements left behind by
// decaffeinate (see the TODO at the top of this file); behavior unchanged.
describe('jsonRangesToMongo', function () {
  it('should convert ObjectIds and dates to proper objects', function () {
    const changeId = new ObjectId().toString()
    const commentId = new ObjectId().toString()
    const userId = new ObjectId().toString()
    const threadId = new ObjectId().toString()
    const ts = new Date().toJSON()
    this.RangeManager.jsonRangesToMongo({
      changes: [
        {
          id: changeId,
          op: { i: 'foo', p: 3 },
          metadata: {
            user_id: userId,
            ts,
          },
        },
      ],
      comments: [
        {
          id: commentId,
          op: { c: 'foo', p: 3, t: threadId },
        },
      ],
    }).should.deep.equal({
      changes: [
        {
          id: new ObjectId(changeId),
          op: { i: 'foo', p: 3 },
          metadata: {
            user_id: new ObjectId(userId),
            ts: new Date(ts),
          },
        },
      ],
      comments: [
        {
          id: new ObjectId(commentId),
          op: { c: 'foo', p: 3, t: new ObjectId(threadId) },
        },
      ],
    })
  })
  it('should leave malformed ObjectIds as they are', function () {
    const changeId = 'foo'
    const commentId = 'bar'
    const userId = 'baz'
    this.RangeManager.jsonRangesToMongo({
      changes: [
        {
          id: changeId,
          metadata: {
            user_id: userId,
          },
        },
      ],
      comments: [
        {
          id: commentId,
        },
      ],
    }).should.deep.equal({
      changes: [
        {
          id: changeId,
          metadata: {
            user_id: userId,
          },
        },
      ],
      comments: [
        {
          id: commentId,
        },
      ],
    })
  })
  it('should be consistent when transformed through json -> mongo -> json', function () {
    const changeId = new ObjectId().toString()
    const commentId = new ObjectId().toString()
    const userId = new ObjectId().toString()
    const threadId = new ObjectId().toString()
    const ts = new Date().toJSON()
    const ranges1 = {
      changes: [
        {
          id: changeId,
          op: { i: 'foo', p: 3 },
          metadata: {
            user_id: userId,
            ts,
          },
        },
      ],
      comments: [
        {
          id: commentId,
          op: { c: 'foo', p: 3, t: threadId },
        },
      ],
    }
    const ranges1Copy = JSON.parse(JSON.stringify(ranges1)) // jsonRangesToMongo modifies in place
    const ranges2 = JSON.parse(
      JSON.stringify(this.RangeManager.jsonRangesToMongo(ranges1Copy))
    )
    ranges1.should.deep.equal(ranges2)
  })
})
// shouldUpdateRanges compares an existing ranges object with an incoming
// one and reports whether anything meaningful changed.
// Cleanup: removed the decaffeinate `return` statements and return-assign
// (see the TODO at the top of this file); behavior unchanged.
describe('shouldUpdateRanges', function () {
  beforeEach(function () {
    this.ranges = {
      changes: [
        {
          id: new ObjectId(),
          op: { i: 'foo', p: 3 },
          metadata: {
            user_id: new ObjectId(),
            ts: new Date(),
          },
        },
      ],
      comments: [
        {
          id: new ObjectId(),
          op: { c: 'foo', p: 3, t: new ObjectId() },
        },
      ],
    }
    // A deep copy normalised through jsonRangesToMongo, equivalent to
    // this.ranges, used as the "no change" baseline.
    this.ranges_copy = this.RangeManager.jsonRangesToMongo(
      JSON.parse(JSON.stringify(this.ranges))
    )
  })
  describe('with a blank new range', function () {
    it('should throw an error', function () {
      expect(() => {
        this.RangeManager.shouldUpdateRanges(this.ranges, null)
      }).to.throw(Error)
    })
  })
  describe('with a blank old range', function () {
    it('should treat it like {}', function () {
      this.RangeManager.shouldUpdateRanges(null, {}).should.equal(false)
      this.RangeManager.shouldUpdateRanges(null, this.ranges).should.equal(true)
    })
  })
  describe('with no changes', function () {
    it('should return false', function () {
      this.RangeManager.shouldUpdateRanges(
        this.ranges,
        this.ranges_copy
      ).should.equal(false)
    })
  })
  describe('with changes', function () {
    it('should return true when the change id changes', function () {
      this.ranges_copy.changes[0].id = new ObjectId()
      this.RangeManager.shouldUpdateRanges(
        this.ranges,
        this.ranges_copy
      ).should.equal(true)
    })
    it('should return true when the change user id changes', function () {
      this.ranges_copy.changes[0].metadata.user_id = new ObjectId()
      this.RangeManager.shouldUpdateRanges(
        this.ranges,
        this.ranges_copy
      ).should.equal(true)
    })
    it('should return true when the change ts changes', function () {
      this.ranges_copy.changes[0].metadata.ts = new Date(Date.now() + 1000)
      this.RangeManager.shouldUpdateRanges(
        this.ranges,
        this.ranges_copy
      ).should.equal(true)
    })
    it('should return true when the change op changes', function () {
      this.ranges_copy.changes[0].op.i = 'bar'
      this.RangeManager.shouldUpdateRanges(
        this.ranges,
        this.ranges_copy
      ).should.equal(true)
    })
    it('should return true when the comment id changes', function () {
      this.ranges_copy.comments[0].id = new ObjectId()
      this.RangeManager.shouldUpdateRanges(
        this.ranges,
        this.ranges_copy
      ).should.equal(true)
    })
    it('should return true when the comment offset changes', function () {
      this.ranges_copy.comments[0].op.p = 17
      this.RangeManager.shouldUpdateRanges(
        this.ranges,
        this.ranges_copy
      ).should.equal(true)
    })
    it('should return true when the comment content changes', function () {
      this.ranges_copy.comments[0].op.c = 'bar'
      this.RangeManager.shouldUpdateRanges(
        this.ranges,
        this.ranges_copy
      ).should.equal(true)
    })
  })
})
})