first commit
@@ -0,0 +1,5 @@
FROM fsouza/fake-gcs-server:latest
RUN apk add --update --no-cache curl
COPY healthcheck.sh /healthcheck.sh
HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090
CMD ["--port=9090", "--scheme=http"]
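# With --interval=1s and --retries=30 the emulator gets roughly 30 seconds of failing checks before Docker marks the container unhealthy.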
9
services/docstore/test/acceptance/deps/healthcheck.sh
Normal file
@@ -0,0 +1,9 @@
#!/bin/sh

# health check to allow 404 status code as valid
STATUSCODE=$(curl --silent --output /dev/null --write-out "%{http_code}" "$1")
# will be 000 on non-http error (e.g. connection failure)
if test "$STATUSCODE" -ge 500 || test "$STATUSCODE" -lt 200; then
  exit 1
fi
exit 0
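The script treats any status from 200 to 499 as healthy (matching the comment above about allowing 404s); only 5xx responses and non-HTTP failures, where curl writes 000, fail the check. Hypothetical invocations:

./healthcheck.sh http://localhost:9090   # emulator up, e.g. 404 -> exit 0 (healthy)
./healthcheck.sh http://localhost:1      # connection refused -> 000 -> exit 1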
1239
services/docstore/test/acceptance/js/ArchiveDocsTests.js
Normal file
File diff suppressed because it is too large
511
services/docstore/test/acceptance/js/DeletingDocsTests.js
Normal file
@@ -0,0 +1,511 @@
|
||||
const { db, ObjectId } = require('../../../app/js/mongodb')
|
||||
const { expect } = require('chai')
|
||||
const DocstoreApp = require('./helpers/DocstoreApp')
|
||||
const Errors = require('../../../app/js/Errors')
|
||||
const Settings = require('@overleaf/settings')
|
||||
const { Storage } = require('@google-cloud/storage')
|
||||
|
||||
const DocstoreClient = require('./helpers/DocstoreClient')
|
||||
|
||||
function deleteTestSuite(deleteDoc) {
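// The suite below is parameterised over the delete implementation, so the same
// assertions can be reused for different delete entry points; in this file it is
// invoked with DocstoreClient.deleteDoc (see 'Delete via PATCH' further down).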
|
||||
before(async function () {
|
||||
// Create buckets needed by the archiving part of these tests
|
||||
const storage = new Storage(Settings.docstore.gcs.endpoint)
|
||||
await storage.createBucket(Settings.docstore.bucket)
|
||||
await storage.createBucket(`${Settings.docstore.bucket}-deleted`)
|
||||
})
|
||||
|
||||
after(async function () {
|
||||
// Tear down the buckets created above
|
||||
const storage = new Storage(Settings.docstore.gcs.endpoint)
|
||||
await storage.bucket(Settings.docstore.bucket).deleteFiles()
|
||||
await storage.bucket(Settings.docstore.bucket).delete()
|
||||
await storage.bucket(`${Settings.docstore.bucket}-deleted`).deleteFiles()
|
||||
await storage.bucket(`${Settings.docstore.bucket}-deleted`).delete()
|
||||
})
|
||||
|
||||
beforeEach(function (done) {
|
||||
this.project_id = new ObjectId()
|
||||
this.doc_id = new ObjectId()
|
||||
this.lines = ['original', 'lines']
|
||||
this.version = 42
|
||||
this.ranges = []
|
||||
DocstoreApp.ensureRunning(() => {
|
||||
DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ranges,
|
||||
error => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it('should show as not deleted on /deleted', function (done) {
|
||||
DocstoreClient.isDocDeleted(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
expect(res.statusCode).to.equal(200)
|
||||
expect(body).to.have.property('deleted').to.equal(false)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
describe('when the doc exists', function () {
|
||||
beforeEach(function (done) {
|
||||
deleteDoc(this.project_id, this.doc_id, (error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(function (done) {
|
||||
db.docs.deleteOne({ _id: this.doc_id }, done)
|
||||
})
|
||||
|
||||
it('should mark the doc as deleted on /deleted', function (done) {
|
||||
DocstoreClient.isDocDeleted(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
expect(res.statusCode).to.equal(200)
|
||||
expect(body).to.have.property('deleted').to.equal(true)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should insert a deleted doc into the docs collection', function (done) {
|
||||
db.docs.find({ _id: this.doc_id }).toArray((error, docs) => {
|
||||
if (error) return done(error)
|
||||
docs[0]._id.should.deep.equal(this.doc_id)
|
||||
docs[0].lines.should.deep.equal(this.lines)
|
||||
docs[0].deleted.should.equal(true)
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
it('should not export the doc to s3', function (done) {
|
||||
setTimeout(() => {
|
||||
DocstoreClient.getS3Doc(this.project_id, this.doc_id, error => {
|
||||
expect(error).to.be.instanceOf(Errors.NotFoundError)
|
||||
done()
|
||||
})
|
||||
}, 1000)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when archiveOnSoftDelete is enabled', function () {
|
||||
let archiveOnSoftDelete
|
||||
beforeEach('overwrite settings', function () {
|
||||
archiveOnSoftDelete = Settings.docstore.archiveOnSoftDelete
|
||||
Settings.docstore.archiveOnSoftDelete = true
|
||||
})
|
||||
afterEach('restore settings', function () {
|
||||
Settings.docstore.archiveOnSoftDelete = archiveOnSoftDelete
|
||||
})
|
||||
|
||||
beforeEach('delete Doc', function (done) {
|
||||
deleteDoc(this.project_id, this.doc_id, (error, res) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
beforeEach(function waitForBackgroundFlush(done) {
|
||||
setTimeout(done, 500)
|
||||
})
|
||||
|
||||
afterEach(function cleanupDoc(done) {
|
||||
db.docs.deleteOne({ _id: this.doc_id }, done)
|
||||
})
|
||||
|
||||
it('should set the deleted flag in the doc', function (done) {
|
||||
db.docs.findOne({ _id: this.doc_id }, (error, doc) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
expect(doc.deleted).to.equal(true)
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
it('should set inS3 and unset lines and ranges in the doc', function (done) {
|
||||
db.docs.findOne({ _id: this.doc_id }, (error, doc) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
expect(doc.lines).to.not.exist
|
||||
expect(doc.ranges).to.not.exist
|
||||
expect(doc.inS3).to.equal(true)
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
it('should set the doc in s3 correctly', function (done) {
|
||||
DocstoreClient.getS3Doc(this.project_id, this.doc_id, (error, s3doc) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
expect(s3doc.lines).to.deep.equal(this.lines)
|
||||
expect(s3doc.ranges).to.deep.equal(this.ranges)
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc exists in another project', function () {
|
||||
const otherProjectId = new ObjectId()
|
||||
|
||||
it('should show as not existing on /deleted', function (done) {
|
||||
DocstoreClient.isDocDeleted(otherProjectId, this.doc_id, (error, res) => {
|
||||
if (error) return done(error)
|
||||
expect(res.statusCode).to.equal(404)
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
it('should return a 404 when trying to delete', function (done) {
|
||||
deleteDoc(otherProjectId, this.doc_id, (error, res) => {
|
||||
if (error) return done(error)
|
||||
expect(res.statusCode).to.equal(404)
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc does not exist', function () {
|
||||
it('should show as not existing on /deleted', function (done) {
|
||||
const missingDocId = new ObjectId()
|
||||
DocstoreClient.isDocDeleted(
|
||||
this.project_id,
|
||||
missingDocId,
|
||||
(error, res) => {
|
||||
if (error) return done(error)
|
||||
expect(res.statusCode).to.equal(404)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return a 404', function (done) {
|
||||
const missingDocId = new ObjectId()
|
||||
deleteDoc(this.project_id, missingDocId, (error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
res.statusCode.should.equal(404)
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
describe('Delete via PATCH', function () {
|
||||
deleteTestSuite(DocstoreClient.deleteDoc)
|
||||
|
||||
describe('when providing a custom doc name in the delete request', function () {
|
||||
beforeEach(function (done) {
|
||||
DocstoreClient.deleteDocWithName(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
'wombat.tex',
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should insert the doc name into the docs collection', function (done) {
|
||||
db.docs.find({ _id: this.doc_id }).toArray((error, docs) => {
|
||||
if (error) return done(error)
|
||||
expect(docs[0].name).to.equal('wombat.tex')
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when providing a custom deletedAt date in the delete request', function () {
|
||||
beforeEach('record date and delay', function (done) {
|
||||
this.deletedAt = new Date()
|
||||
setTimeout(done, 5)
|
||||
})
|
||||
|
||||
beforeEach('perform deletion with past date', function (done) {
|
||||
DocstoreClient.deleteDocWithDate(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.deletedAt,
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should insert the date into the docs collection', function (done) {
|
||||
db.docs.find({ _id: this.doc_id }).toArray((error, docs) => {
|
||||
if (error) return done(error)
|
||||
expect(docs[0].deletedAt.toISOString()).to.equal(
|
||||
this.deletedAt.toISOString()
|
||||
)
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when providing no doc name in the delete request', function () {
|
||||
beforeEach(function (done) {
|
||||
DocstoreClient.deleteDocWithName(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
'',
|
||||
(error, res) => {
|
||||
this.res = res
|
||||
done(error)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should reject the request', function () {
|
||||
expect(this.res.statusCode).to.equal(400)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when providing no date in the delete request', function () {
|
||||
beforeEach(function (done) {
|
||||
DocstoreClient.deleteDocWithDate(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
'',
|
||||
(error, res) => {
|
||||
this.res = res
|
||||
done(error)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should reject the request', function () {
|
||||
expect(this.res.statusCode).to.equal(400)
|
||||
})
|
||||
})
|
||||
|
||||
describe('before deleting anything', function () {
|
||||
it('should show nothing in deleted docs response', function (done) {
|
||||
DocstoreClient.getAllDeletedDocs(
|
||||
this.project_id,
|
||||
(error, deletedDocs) => {
|
||||
if (error) return done(error)
|
||||
expect(deletedDocs).to.deep.equal([])
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc gets a name on delete', function () {
|
||||
beforeEach(function (done) {
|
||||
this.deletedAt = new Date()
|
||||
DocstoreClient.deleteDocWithDate(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.deletedAt,
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should show the doc in deleted docs response', function (done) {
|
||||
DocstoreClient.getAllDeletedDocs(
|
||||
this.project_id,
|
||||
(error, deletedDocs) => {
|
||||
if (error) return done(error)
|
||||
expect(deletedDocs).to.deep.equal([
|
||||
{
|
||||
_id: this.doc_id.toString(),
|
||||
name: 'main.tex',
|
||||
deletedAt: this.deletedAt.toISOString(),
|
||||
},
|
||||
])
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
describe('after deleting multiple docs', function () {
|
||||
beforeEach('create doc2', function (done) {
|
||||
this.doc_id2 = new ObjectId()
|
||||
DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.doc_id2,
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ranges,
|
||||
done
|
||||
)
|
||||
})
|
||||
beforeEach('delete doc2', function (done) {
|
||||
this.deletedAt2 = new Date()
|
||||
DocstoreClient.deleteDocWithDateAndName(
|
||||
this.project_id,
|
||||
this.doc_id2,
|
||||
this.deletedAt2,
|
||||
'two.tex',
|
||||
done
|
||||
)
|
||||
})
|
||||
beforeEach('create doc3', function (done) {
|
||||
this.doc_id3 = new ObjectId()
|
||||
DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.doc_id3,
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ranges,
|
||||
done
|
||||
)
|
||||
})
|
||||
beforeEach('delete doc3', function (done) {
|
||||
this.deletedAt3 = new Date()
|
||||
DocstoreClient.deleteDocWithDateAndName(
|
||||
this.project_id,
|
||||
this.doc_id3,
|
||||
this.deletedAt3,
|
||||
'three.tex',
|
||||
done
|
||||
)
|
||||
})
|
||||
it('should show all the docs as deleted', function (done) {
|
||||
DocstoreClient.getAllDeletedDocs(
|
||||
this.project_id,
|
||||
(error, deletedDocs) => {
|
||||
if (error) return done(error)
|
||||
|
||||
expect(deletedDocs).to.deep.equal([
|
||||
{
|
||||
_id: this.doc_id3.toString(),
|
||||
name: 'three.tex',
|
||||
deletedAt: this.deletedAt3.toISOString(),
|
||||
},
|
||||
{
|
||||
_id: this.doc_id2.toString(),
|
||||
name: 'two.tex',
|
||||
deletedAt: this.deletedAt2.toISOString(),
|
||||
},
|
||||
{
|
||||
_id: this.doc_id.toString(),
|
||||
name: 'main.tex',
|
||||
deletedAt: this.deletedAt.toISOString(),
|
||||
},
|
||||
])
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
describe('with one more than max_deleted_docs permits', function () {
|
||||
let maxDeletedDocsBefore
|
||||
beforeEach(function () {
|
||||
maxDeletedDocsBefore = Settings.max_deleted_docs
|
||||
Settings.max_deleted_docs = 2
|
||||
})
|
||||
afterEach(function () {
|
||||
Settings.max_deleted_docs = maxDeletedDocsBefore
|
||||
})
|
||||
|
||||
it('should omit the first deleted doc', function (done) {
|
||||
DocstoreClient.getAllDeletedDocs(
|
||||
this.project_id,
|
||||
(error, deletedDocs) => {
|
||||
if (error) return done(error)
|
||||
|
||||
expect(deletedDocs).to.deep.equal([
|
||||
{
|
||||
_id: this.doc_id3.toString(),
|
||||
name: 'three.tex',
|
||||
deletedAt: this.deletedAt3.toISOString(),
|
||||
},
|
||||
{
|
||||
_id: this.doc_id2.toString(),
|
||||
name: 'two.tex',
|
||||
deletedAt: this.deletedAt2.toISOString(),
|
||||
},
|
||||
// dropped main.tex
|
||||
])
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("Destroying a project's documents", function () {
|
||||
beforeEach(function (done) {
|
||||
this.project_id = new ObjectId()
|
||||
this.doc_id = new ObjectId()
|
||||
this.lines = ['original', 'lines']
|
||||
this.version = 42
|
||||
this.ranges = []
|
||||
DocstoreApp.ensureRunning(() => {
|
||||
DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ranges,
|
||||
error => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc exists', function () {
|
||||
beforeEach(function (done) {
|
||||
DocstoreClient.destroyAllDoc(this.project_id, done)
|
||||
})
|
||||
|
||||
it('should remove the doc from the docs collection', function (done) {
|
||||
db.docs.find({ _id: this.doc_id }).toArray((err, docs) => {
|
||||
expect(err).not.to.exist
|
||||
expect(docs).to.deep.equal([])
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc is archived', function () {
|
||||
beforeEach(function (done) {
|
||||
DocstoreClient.archiveAllDoc(this.project_id, err => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
DocstoreClient.destroyAllDoc(this.project_id, done)
|
||||
})
|
||||
})
|
||||
|
||||
it('should remove the doc from the docs collection', function (done) {
|
||||
db.docs.find({ _id: this.doc_id }).toArray((err, docs) => {
|
||||
expect(err).not.to.exist
|
||||
expect(docs).to.deep.equal([])
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
it('should remove the doc contents from s3', function (done) {
|
||||
DocstoreClient.getS3Doc(this.project_id, this.doc_id, error => {
|
||||
expect(error).to.be.instanceOf(Errors.NotFoundError)
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
112
services/docstore/test/acceptance/js/GettingAllDocsTests.js
Normal file
@@ -0,0 +1,112 @@
|
||||
/* eslint-disable
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const sinon = require('sinon')
|
||||
const { ObjectId } = require('mongodb-legacy')
|
||||
const async = require('async')
|
||||
const DocstoreApp = require('./helpers/DocstoreApp')
|
||||
|
||||
const DocstoreClient = require('./helpers/DocstoreClient')
|
||||
|
||||
describe('Getting all docs', function () {
|
||||
beforeEach(function (done) {
|
||||
this.project_id = new ObjectId()
|
||||
this.docs = [
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
lines: ['one', 'two', 'three'],
|
||||
ranges: { mock: 'one' },
|
||||
rev: 2,
|
||||
},
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
lines: ['aaa', 'bbb', 'ccc'],
|
||||
ranges: { mock: 'two' },
|
||||
rev: 4,
|
||||
},
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
lines: ['111', '222', '333'],
|
||||
ranges: { mock: 'three' },
|
||||
rev: 6,
|
||||
},
|
||||
]
|
||||
this.deleted_doc = {
|
||||
_id: new ObjectId(),
|
||||
lines: ['deleted'],
|
||||
ranges: { mock: 'four' },
|
||||
rev: 8,
|
||||
}
|
||||
const version = 42
|
||||
const jobs = Array.from(this.docs).map(doc =>
|
||||
(doc => {
|
||||
return callback => {
|
||||
return DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
doc._id,
|
||||
doc.lines,
|
||||
version,
|
||||
doc.ranges,
|
||||
callback
|
||||
)
|
||||
}
|
||||
})(doc)
|
||||
)
|
||||
jobs.push(cb => {
|
||||
return DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.deleted_doc._id,
|
||||
this.deleted_doc.lines,
|
||||
version,
|
||||
this.deleted_doc.ranges,
|
||||
err => {
|
||||
if (err) return done(err)
|
||||
return DocstoreClient.deleteDoc(
|
||||
this.project_id,
|
||||
this.deleted_doc._id,
|
||||
cb
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
jobs.unshift(cb => DocstoreApp.ensureRunning(cb))
|
||||
return async.series(jobs, done)
|
||||
})
|
||||
|
||||
it('getAllDocs should return all the (non-deleted) docs', function (done) {
|
||||
return DocstoreClient.getAllDocs(this.project_id, (error, res, docs) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
docs.length.should.equal(this.docs.length)
|
||||
for (let i = 0; i < docs.length; i++) {
|
||||
const doc = docs[i]
|
||||
doc.lines.should.deep.equal(this.docs[i].lines)
|
||||
}
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
return it('getAllRanges should return all the (non-deleted) doc ranges', function (done) {
|
||||
return DocstoreClient.getAllRanges(this.project_id, (error, res, docs) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
docs.length.should.equal(this.docs.length)
|
||||
for (let i = 0; i < docs.length; i++) {
|
||||
const doc = docs[i]
|
||||
doc.ranges.should.deep.equal(this.docs[i].ranges)
|
||||
}
|
||||
return done()
|
||||
})
|
||||
})
|
||||
})
|
@@ -0,0 +1,139 @@
|
||||
const Settings = require('@overleaf/settings')
|
||||
const { ObjectId } = require('../../../app/js/mongodb')
|
||||
const DocstoreApp = require('./helpers/DocstoreApp')
|
||||
const DocstoreClient = require('./helpers/DocstoreClient')
|
||||
const { Storage } = require('@google-cloud/storage')
|
||||
|
||||
describe('Getting A Doc from Archive', function () {
|
||||
before(function (done) {
|
||||
return DocstoreApp.ensureRunning(done)
|
||||
})
|
||||
|
||||
before(async function () {
|
||||
const storage = new Storage(Settings.docstore.gcs.endpoint)
|
||||
await storage.createBucket(Settings.docstore.bucket)
|
||||
await storage.createBucket(`${Settings.docstore.bucket}-deleted`)
|
||||
})
|
||||
|
||||
after(async function () {
|
||||
// Tear down the buckets created above
|
||||
const storage = new Storage(Settings.docstore.gcs.endpoint)
|
||||
await storage.bucket(Settings.docstore.bucket).deleteFiles()
|
||||
await storage.bucket(Settings.docstore.bucket).delete()
|
||||
await storage.bucket(`${Settings.docstore.bucket}-deleted`).deleteFiles()
|
||||
await storage.bucket(`${Settings.docstore.bucket}-deleted`).delete()
|
||||
})
|
||||
|
||||
describe('for an archived doc', function () {
|
||||
before(function (done) {
|
||||
this.project_id = new ObjectId()
|
||||
this.timeout(1000 * 30)
|
||||
this.doc = {
|
||||
_id: new ObjectId(),
|
||||
lines: ['foo', 'bar'],
|
||||
ranges: {},
|
||||
version: 2,
|
||||
}
|
||||
DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.doc._id,
|
||||
this.doc.lines,
|
||||
this.doc.version,
|
||||
this.doc.ranges,
|
||||
error => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
DocstoreClient.archiveDoc(
|
||||
this.project_id,
|
||||
this.doc._id,
|
||||
(error, res) => {
|
||||
this.res = res
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
done()
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should successfully archive the doc', function (done) {
|
||||
this.res.statusCode.should.equal(204)
|
||||
done()
|
||||
})
|
||||
|
||||
it('should return the doc lines and version from persistent storage', function (done) {
|
||||
return DocstoreClient.peekDoc(
|
||||
this.project_id,
|
||||
this.doc._id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
res.statusCode.should.equal(200)
|
||||
res.headers['x-doc-status'].should.equal('archived')
|
||||
doc.lines.should.deep.equal(this.doc.lines)
|
||||
doc.version.should.equal(this.doc.version)
|
||||
doc.ranges.should.deep.equal(this.doc.ranges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return the doc lines and version from persistent storage on subsequent requests', function (done) {
|
||||
return DocstoreClient.peekDoc(
|
||||
this.project_id,
|
||||
this.doc._id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
res.statusCode.should.equal(200)
|
||||
res.headers['x-doc-status'].should.equal('archived')
|
||||
doc.lines.should.deep.equal(this.doc.lines)
|
||||
doc.version.should.equal(this.doc.version)
|
||||
doc.ranges.should.deep.equal(this.doc.ranges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
describe('for a non-archived doc', function () {
|
||||
before(function (done) {
|
||||
this.project_id = new ObjectId()
|
||||
this.timeout(1000 * 30)
|
||||
this.doc = {
|
||||
_id: new ObjectId(),
|
||||
lines: ['foo', 'bar'],
|
||||
ranges: {},
|
||||
version: 2,
|
||||
}
|
||||
DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.doc._id,
|
||||
this.doc.lines,
|
||||
this.doc.version,
|
||||
this.doc.ranges,
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should return the doc lines and version from mongo', function (done) {
|
||||
return DocstoreClient.peekDoc(
|
||||
this.project_id,
|
||||
this.doc._id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
res.statusCode.should.equal(200)
|
||||
res.headers['x-doc-status'].should.equal('active')
|
||||
doc.lines.should.deep.equal(this.doc.lines)
|
||||
doc.version.should.equal(this.doc.version)
|
||||
doc.ranges.should.deep.equal(this.doc.ranges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
137
services/docstore/test/acceptance/js/GettingDocsTests.js
Normal file
@@ -0,0 +1,137 @@
|
||||
/* eslint-disable
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const sinon = require('sinon')
|
||||
const { ObjectId } = require('mongodb-legacy')
|
||||
const DocstoreApp = require('./helpers/DocstoreApp')
|
||||
|
||||
const DocstoreClient = require('./helpers/DocstoreClient')
|
||||
|
||||
describe('Getting a doc', function () {
|
||||
beforeEach(function (done) {
|
||||
this.project_id = new ObjectId()
|
||||
this.doc_id = new ObjectId()
|
||||
this.lines = ['original', 'lines']
|
||||
this.version = 42
|
||||
this.ranges = {
|
||||
changes: [
|
||||
{
|
||||
id: new ObjectId().toString(),
|
||||
op: { i: 'foo', p: 3 },
|
||||
meta: {
|
||||
user_id: new ObjectId().toString(),
|
||||
ts: new Date().toString(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
return DocstoreApp.ensureRunning(() => {
|
||||
return DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ranges,
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc exists', function () {
|
||||
return it('should get the doc lines and version', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.lines)
|
||||
doc.version.should.equal(this.version)
|
||||
doc.ranges.should.deep.equal(this.ranges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc does not exist', function () {
|
||||
return it('should return a 404', function (done) {
|
||||
const missingDocId = new ObjectId()
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
missingDocId,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
res.statusCode.should.equal(404)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when the doc is a deleted doc', function () {
|
||||
beforeEach(function (done) {
|
||||
this.deleted_doc_id = new ObjectId()
|
||||
return DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.deleted_doc_id,
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ranges,
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return DocstoreClient.deleteDoc(
|
||||
this.project_id,
|
||||
this.deleted_doc_id,
|
||||
done
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return the doc', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.deleted_doc_id,
|
||||
{ include_deleted: true },
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.lines)
|
||||
doc.version.should.equal(this.version)
|
||||
doc.ranges.should.deep.equal(this.ranges)
|
||||
doc.deleted.should.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
return it('should return a 404 when the query string is not set', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.deleted_doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
res.statusCode.should.equal(404)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
557
services/docstore/test/acceptance/js/UpdatingDocsTests.js
Normal file
@@ -0,0 +1,557 @@
|
||||
/* eslint-disable
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const sinon = require('sinon')
|
||||
const { ObjectId } = require('mongodb-legacy')
|
||||
const DocstoreApp = require('./helpers/DocstoreApp')
|
||||
|
||||
const DocstoreClient = require('./helpers/DocstoreClient')
|
||||
|
||||
describe('Applying updates to a doc', function () {
|
||||
beforeEach(function (done) {
|
||||
this.project_id = new ObjectId()
|
||||
this.doc_id = new ObjectId()
|
||||
this.originalLines = ['original', 'lines']
|
||||
this.newLines = ['new', 'lines']
|
||||
this.originalRanges = {
|
||||
changes: [
|
||||
{
|
||||
id: new ObjectId().toString(),
|
||||
op: { i: 'foo', p: 3 },
|
||||
meta: {
|
||||
user_id: new ObjectId().toString(),
|
||||
ts: new Date().toString(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
this.newRanges = {
|
||||
changes: [
|
||||
{
|
||||
id: new ObjectId().toString(),
|
||||
op: { i: 'bar', p: 6 },
|
||||
meta: {
|
||||
user_id: new ObjectId().toString(),
|
||||
ts: new Date().toString(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
this.version = 42
|
||||
return DocstoreApp.ensureRunning(() => {
|
||||
return DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.originalLines,
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when nothing has been updated', function () {
|
||||
beforeEach(function (done) {
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.originalLines,
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return modified = false', function () {
|
||||
return this.body.modified.should.equal(false)
|
||||
})
|
||||
|
||||
return it('should not update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
doc.version.should.equal(this.version)
|
||||
doc.ranges.should.deep.equal(this.originalRanges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the lines have changed', function () {
|
||||
beforeEach(function (done) {
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.newLines,
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return modified = true', function () {
|
||||
return this.body.modified.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the rev', function () {
|
||||
return this.body.rev.should.equal(2)
|
||||
})
|
||||
|
||||
return it('should update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.newLines)
|
||||
doc.version.should.equal(this.version)
|
||||
doc.ranges.should.deep.equal(this.originalRanges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the version has changed', function () {
|
||||
beforeEach(function (done) {
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.originalLines,
|
||||
this.version + 1,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return modified = true', function () {
|
||||
return this.body.modified.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the rev', function () {
|
||||
return this.body.rev.should.equal(1)
|
||||
})
|
||||
|
||||
return it('should update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
doc.version.should.equal(this.version + 1)
|
||||
doc.ranges.should.deep.equal(this.originalRanges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the version was decremented', function () {
|
||||
beforeEach(function (done) {
|
||||
DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.newLines,
|
||||
this.version - 1,
|
||||
this.newRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
this.body = body
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return 409', function () {
|
||||
this.res.statusCode.should.equal(409)
|
||||
})
|
||||
|
||||
it('should not update the doc in the API', function (done) {
|
||||
DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
doc.version.should.equal(this.version)
|
||||
doc.ranges.should.deep.equal(this.originalRanges)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the ranges have changed', function () {
|
||||
beforeEach(function (done) {
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.originalLines,
|
||||
this.version,
|
||||
this.newRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return modified = true', function () {
|
||||
return this.body.modified.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the rev', function () {
|
||||
return this.body.rev.should.equal(2)
|
||||
})
|
||||
|
||||
return it('should update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
doc.version.should.equal(this.version)
|
||||
doc.ranges.should.deep.equal(this.newRanges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc does not exist', function () {
|
||||
beforeEach(function (done) {
|
||||
this.missing_doc_id = new ObjectId()
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.missing_doc_id,
|
||||
this.originalLines,
|
||||
0,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should create the doc', function () {
|
||||
return this.body.rev.should.equal(1)
|
||||
})
|
||||
|
||||
return it('should be retrievable', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.missing_doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
doc.version.should.equal(0)
|
||||
doc.ranges.should.deep.equal(this.originalRanges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when malformed doc lines are provided', function () {
|
||||
describe('when the lines are not an array', function () {
|
||||
beforeEach(function (done) {
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{ foo: 'bar' },
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return 400', function () {
|
||||
return this.res.statusCode.should.equal(400)
|
||||
})
|
||||
|
||||
return it('should not update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when the lines are not present', function () {
|
||||
beforeEach(function (done) {
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
null,
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return 400', function () {
|
||||
return this.res.statusCode.should.equal(400)
|
||||
})
|
||||
|
||||
return it('should not update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when no version is provided', function () {
|
||||
beforeEach(function (done) {
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.originalLines,
|
||||
null,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return 400', function () {
|
||||
return this.res.statusCode.should.equal(400)
|
||||
})
|
||||
|
||||
return it('should not update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
doc.version.should.equal(this.version)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the content is large', function () {
|
||||
beforeEach(function (done) {
|
||||
const line = new Array(1025).join('x') // 1kb
|
||||
this.largeLines = Array.apply(null, Array(1024)).map(() => line) // 1mb
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.largeLines,
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return modified = true', function () {
|
||||
return this.body.modified.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.largeLines)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when there is a large json payload', function () {
|
||||
beforeEach(function (done) {
|
||||
const line = new Array(1025).join('x') // 1kb
|
||||
this.largeLines = Array.apply(null, Array(1024)).map(() => line) // 1mb
|
||||
this.originalRanges.padding = Array.apply(null, Array(2049)).map(
|
||||
() => line
|
||||
) // 2mb + 1kb
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.largeLines,
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return modified = true', function () {
|
||||
return this.body.modified.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.largeLines)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the document body is too large', function () {
|
||||
beforeEach(function (done) {
|
||||
const line = new Array(1025).join('x') // 1kb
|
||||
this.largeLines = Array.apply(null, Array(2049)).map(() => line) // 2mb + 1kb
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.largeLines,
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return 413', function () {
|
||||
return this.res.statusCode.should.equal(413)
|
||||
})
|
||||
|
||||
it('should report body too large', function () {
|
||||
return this.res.body.should.equal('document body too large')
|
||||
})
|
||||
|
||||
return it('should not update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when the json payload is too large', function () {
|
||||
beforeEach(function (done) {
|
||||
const line = new Array(1025).join('x') // 1kb
|
||||
this.largeLines = Array.apply(null, Array(1024)).map(() => line) // 1mb
|
||||
this.originalRanges.padding = Array.apply(null, Array(6144)).map(
|
||||
() => line
|
||||
) // 6mb
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.largeLines,
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
return it('should not update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
26
services/docstore/test/acceptance/js/helpers/DocstoreApp.js
Normal file
@@ -0,0 +1,26 @@
const app = require('../../../../app')
const settings = require('@overleaf/settings')

module.exports = {
  running: false,
  initing: false,
  callbacks: [],
  ensureRunning(callback) {
    if (this.running) {
      return callback()
    } else if (this.initing) {
      return this.callbacks.push(callback)
    }
    this.initing = true
    this.callbacks.push(callback)
    app.listen(settings.internal.docstore.port, '127.0.0.1', error => {
      if (error != null) {
        throw error
      }
      this.running = true
      for (const callback of Array.from(this.callbacks)) {
        callback()
      }
    })
  },
}
195
services/docstore/test/acceptance/js/helpers/DocstoreClient.js
Normal file
@@ -0,0 +1,195 @@
|
||||
let DocstoreClient
|
||||
const request = require('request').defaults({ jar: false })
|
||||
const settings = require('@overleaf/settings')
|
||||
const Persistor = require('../../../../app/js/PersistorManager')
|
||||
|
||||
async function streamToString(stream) {
|
||||
const chunks = []
|
||||
return await new Promise((resolve, reject) => {
|
||||
stream.on('data', chunk => chunks.push(chunk))
|
||||
stream.on('error', reject)
|
||||
stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')))
|
||||
})
|
||||
}
|
||||
|
||||
async function getStringFromPersistor(persistor, bucket, key) {
|
||||
const stream = await persistor.getObjectStream(bucket, key, {})
|
||||
stream.resume()
|
||||
return await streamToString(stream)
|
||||
}
|
||||
|
||||
module.exports = DocstoreClient = {
|
||||
createDoc(projectId, docId, lines, version, ranges, callback) {
|
||||
return DocstoreClient.updateDoc(
|
||||
projectId,
|
||||
docId,
|
||||
lines,
|
||||
version,
|
||||
ranges,
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
getDoc(projectId, docId, qs, callback) {
|
||||
request.get(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
|
||||
json: true,
|
||||
qs,
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
peekDoc(projectId, docId, qs, callback) {
|
||||
request.get(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/peek`,
|
||||
json: true,
|
||||
qs,
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
isDocDeleted(projectId, docId, callback) {
|
||||
request.get(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/deleted`,
|
||||
json: true,
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
getAllDocs(projectId, callback) {
|
||||
request.get(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc`,
|
||||
json: true,
|
||||
},
|
||||
(req, res, body) => {
|
||||
callback(req, res, body)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
getAllDeletedDocs(projectId, callback) {
|
||||
request.get(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc-deleted`,
|
||||
json: true,
|
||||
},
|
||||
(error, res, body) => {
|
||||
if (error) return callback(error)
|
||||
if (res.statusCode !== 200) {
|
||||
return callback(new Error('unexpected statusCode'))
|
||||
}
|
||||
callback(null, body)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
getAllRanges(projectId, callback) {
|
||||
request.get(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/ranges`,
|
||||
json: true,
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
updateDoc(projectId, docId, lines, version, ranges, callback) {
|
||||
return request.post(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
|
||||
json: {
|
||||
lines,
|
||||
version,
|
||||
ranges,
|
||||
},
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
deleteDoc(projectId, docId, callback) {
|
||||
DocstoreClient.deleteDocWithDateAndName(
|
||||
projectId,
|
||||
docId,
|
||||
new Date(),
|
||||
'main.tex',
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
deleteDocWithDate(projectId, docId, date, callback) {
|
||||
DocstoreClient.deleteDocWithDateAndName(
|
||||
projectId,
|
||||
docId,
|
||||
date,
|
||||
'main.tex',
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
deleteDocWithName(projectId, docId, name, callback) {
|
||||
DocstoreClient.deleteDocWithDateAndName(
|
||||
projectId,
|
||||
docId,
|
||||
new Date(),
|
||||
name,
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
deleteDocWithDateAndName(projectId, docId, deletedAt, name, callback) {
|
||||
request.patch(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
|
||||
json: { name, deleted: true, deletedAt },
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
archiveAllDoc(projectId, callback) {
|
||||
request.post(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/archive`,
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
archiveDoc(projectId, docId, callback) {
|
||||
request.post(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/archive`,
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
destroyAllDoc(projectId, callback) {
|
||||
request.post(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/destroy`,
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
getS3Doc(projectId, docId, callback) {
|
||||
getStringFromPersistor(
|
||||
Persistor,
|
||||
settings.docstore.bucket,
|
||||
`${projectId}/${docId}`
|
||||
)
|
||||
.then(data => {
|
||||
callback(null, JSON.parse(data))
|
||||
})
|
||||
.catch(callback)
|
||||
},
|
||||
}
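A minimal round trip with this helper, in the style of the acceptance tests above (ids and lines are illustrative, and the app is assumed to be running via DocstoreApp.ensureRunning):

const { ObjectId } = require('mongodb-legacy')
const DocstoreClient = require('./helpers/DocstoreClient')

const projectId = new ObjectId()
const docId = new ObjectId()
DocstoreClient.createDoc(projectId, docId, ['hello'], 1, {}, error => {
  if (error) throw error
  DocstoreClient.getDoc(projectId, docId, {}, (error, res, doc) => {
    if (error) throw error
    console.log(res.statusCode, doc.lines) // 200 [ 'hello' ]
  })
})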
|
55
services/docstore/test/setup.js
Normal file
@@ -0,0 +1,55 @@
|
||||
const chai = require('chai')
|
||||
const sinon = require('sinon')
|
||||
const sinonChai = require('sinon-chai')
|
||||
const chaiAsPromised = require('chai-as-promised')
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
const timersPromises = require('node:timers/promises')
|
||||
|
||||
// ensure every ObjectId has the id string as a property for correct comparisons
|
||||
require('mongodb-legacy').ObjectId.cacheHexString = true
|
||||
|
||||
process.env.BACKEND = 'gcs'
|
||||
|
||||
// Chai configuration
|
||||
chai.should()
|
||||
chai.use(sinonChai)
|
||||
chai.use(chaiAsPromised)
|
||||
|
||||
// Global stubs
|
||||
const sandbox = sinon.createSandbox()
|
||||
const stubs = {
|
||||
logger: {
|
||||
debug: sandbox.stub(),
|
||||
log: sandbox.stub(),
|
||||
info: sandbox.stub(),
|
||||
warn: sandbox.stub(),
|
||||
err: sandbox.stub(),
|
||||
error: sandbox.stub(),
|
||||
fatal: sandbox.stub(),
|
||||
},
|
||||
}
|
||||
|
||||
// SandboxedModule configuration
|
||||
SandboxedModule.configure({
|
||||
requires: {
|
||||
'@overleaf/logger': stubs.logger,
|
||||
'timers/promises': timersPromises,
|
||||
'mongodb-legacy': require('mongodb-legacy'),
|
||||
},
|
||||
globals: { Buffer, JSON, Math, console, process },
|
||||
sourceTransformers: {
|
||||
removeNodePrefix: function (source) {
|
||||
return source.replace(/require\(['"]node:/g, "require('")
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
exports.mochaHooks = {
|
||||
beforeEach() {
|
||||
this.logger = stubs.logger
|
||||
},
|
||||
|
||||
afterEach() {
|
||||
sandbox.reset()
|
||||
},
|
||||
}
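The exported mochaHooks and global stubs suggest this file is loaded as a Mocha root-hook plugin; the wiring is not part of this commit, but it would typically be something along these lines (file name and contents are an assumption):

// .mocharc.js (hypothetical)
module.exports = {
  require: 'test/setup.js', // registers the stubs and the beforeEach/afterEach hooks above
}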
|
580
services/docstore/test/unit/js/DocArchiveManagerTests.js
Normal file
@@ -0,0 +1,580 @@
|
||||
const sinon = require('sinon')
|
||||
const { expect } = require('chai')
|
||||
const modulePath = '../../../app/js/DocArchiveManager.js'
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
const { ObjectId } = require('mongodb-legacy')
|
||||
const Errors = require('../../../app/js/Errors')
|
||||
const StreamToBuffer = require('../../../app/js/StreamToBuffer').promises
|
||||
|
||||
describe('DocArchiveManager', function () {
|
||||
let DocArchiveManager,
|
||||
PersistorManager,
|
||||
MongoManager,
|
||||
RangeManager,
|
||||
Settings,
|
||||
Crypto,
|
||||
StreamUtils,
|
||||
HashDigest,
|
||||
HashUpdate,
|
||||
archivedDocs,
|
||||
mongoDocs,
|
||||
archivedDoc,
|
||||
archivedDocJson,
|
||||
md5Sum,
|
||||
projectId,
|
||||
readStream,
|
||||
stream,
|
||||
streamToBuffer
|
||||
|
||||
beforeEach(function () {
|
||||
md5Sum = 'decafbad'
|
||||
|
||||
RangeManager = {
|
||||
jsonRangesToMongo: sinon.stub().returns({ mongo: 'ranges' }),
|
||||
}
|
||||
Settings = {
|
||||
docstore: {
|
||||
backend: 'gcs',
|
||||
bucket: 'wombat',
|
||||
},
|
||||
parallelArchiveJobs: 3,
|
||||
}
|
||||
HashDigest = sinon.stub().returns(md5Sum)
|
||||
HashUpdate = sinon.stub().returns({ digest: HashDigest })
|
||||
Crypto = {
|
||||
createHash: sinon.stub().returns({ update: HashUpdate }),
|
||||
}
|
||||
StreamUtils = {
|
||||
ReadableString: sinon.stub().returns({ stream: 'readStream' }),
|
||||
}
|
||||
|
||||
projectId = new ObjectId()
|
||||
archivedDocs = [
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
inS3: true,
|
||||
rev: 2,
|
||||
},
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
inS3: true,
|
||||
rev: 4,
|
||||
},
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
inS3: true,
|
||||
rev: 6,
|
||||
},
|
||||
]
|
||||
mongoDocs = [
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
lines: ['one', 'two', 'three'],
|
||||
rev: 2,
|
||||
},
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
lines: ['aaa', 'bbb', 'ccc'],
|
||||
rev: 4,
|
||||
},
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
inS3: true,
|
||||
rev: 6,
|
||||
},
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
inS3: true,
|
||||
rev: 6,
|
||||
},
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
lines: ['111', '222', '333'],
|
||||
rev: 6,
|
||||
},
|
||||
]
|
||||
|
||||
archivedDoc = {
|
||||
lines: mongoDocs[0].lines,
|
||||
rev: mongoDocs[0].rev,
|
||||
}
|
||||
|
||||
archivedDocJson = JSON.stringify({ ...archivedDoc, schema_v: 1 })
|
||||
|
||||
stream = {
|
||||
on: sinon.stub(),
|
||||
resume: sinon.stub(),
|
||||
}
|
||||
stream.on.withArgs('data').yields(Buffer.from(archivedDocJson, 'utf8'))
|
||||
stream.on.withArgs('end').yields()
|
||||
|
||||
readStream = {
|
||||
stream: 'readStream',
|
||||
}
|
||||
|
||||
PersistorManager = {
|
||||
getObjectStream: sinon.stub().resolves(stream),
|
||||
sendStream: sinon.stub().resolves(),
|
||||
getObjectMd5Hash: sinon.stub().resolves(md5Sum),
|
||||
deleteObject: sinon.stub().resolves(),
|
||||
deleteDirectory: sinon.stub().resolves(),
|
||||
}
|
||||
|
||||
const getNonArchivedProjectDocIds = sinon.stub()
|
||||
getNonArchivedProjectDocIds
|
||||
.onCall(0)
|
||||
.resolves(mongoDocs.filter(doc => !doc.inS3).map(doc => doc._id))
|
||||
getNonArchivedProjectDocIds.onCall(1).resolves([])
|
||||
|
||||
const getArchivedProjectDocs = sinon.stub()
|
||||
getArchivedProjectDocs.onCall(0).resolves(archivedDocs)
|
||||
getArchivedProjectDocs.onCall(1).resolves([])
|
||||
|
||||
const fakeGetDoc = async (_projectId, _docId) => {
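// Stand-in for MongoManager's doc lookups: return the matching fixture doc for this
// project, otherwise reject with NotFoundError.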
|
||||
if (_projectId.equals(projectId)) {
|
||||
for (const mongoDoc of mongoDocs.concat(archivedDocs)) {
|
||||
if (mongoDoc._id.equals(_docId)) {
|
||||
return mongoDoc
|
||||
}
|
||||
}
|
||||
}
|
||||
throw new Errors.NotFoundError()
|
||||
}
|
||||
|
||||
MongoManager = {
|
||||
promises: {
|
||||
markDocAsArchived: sinon.stub().resolves(),
|
||||
restoreArchivedDoc: sinon.stub().resolves(),
|
||||
upsertIntoDocCollection: sinon.stub().resolves(),
|
||||
getProjectsDocs: sinon.stub().resolves(mongoDocs),
|
||||
getNonDeletedArchivedProjectDocs: getArchivedProjectDocs,
|
||||
getNonArchivedProjectDocIds,
|
||||
getArchivedProjectDocs,
|
||||
findDoc: sinon.stub().callsFake(fakeGetDoc),
|
||||
getDocForArchiving: sinon.stub().callsFake(fakeGetDoc),
|
||||
destroyProject: sinon.stub().resolves(),
|
||||
},
|
||||
}
|
||||
|
||||
// Wrap streamToBuffer so that we can pass in something that it expects (in
|
||||
// this case, a Promise) rather than a stubbed stream object
|
||||
streamToBuffer = {
|
||||
promises: {
|
||||
streamToBuffer: async () => {
|
||||
const inputStream = new Promise(resolve => {
|
||||
stream.on('data', data => resolve(data))
|
||||
})
|
||||
|
||||
const value = await StreamToBuffer.streamToBuffer(
|
||||
'testProjectId',
|
||||
'testDocId',
|
||||
inputStream
|
||||
)
|
||||
|
||||
return value
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
DocArchiveManager = SandboxedModule.require(modulePath, {
|
||||
requires: {
|
||||
'@overleaf/settings': Settings,
|
||||
crypto: Crypto,
|
||||
'@overleaf/stream-utils': StreamUtils,
|
||||
'./MongoManager': MongoManager,
|
||||
'./RangeManager': RangeManager,
|
||||
'./PersistorManager': PersistorManager,
|
||||
'./Errors': Errors,
|
||||
'./StreamToBuffer': streamToBuffer,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
describe('archiveDoc', function () {
|
||||
it('should resolve when passed a valid document', async function () {
|
||||
await expect(
|
||||
DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
).to.eventually.be.fulfilled
|
||||
})
|
||||
|
||||
it('should throw an error if the doc has no lines', async function () {
|
||||
const doc = mongoDocs[0]
|
||||
doc.lines = null
|
||||
|
||||
await expect(
|
||||
DocArchiveManager.promises.archiveDoc(projectId, doc._id)
|
||||
).to.eventually.be.rejectedWith('doc has no lines')
|
||||
})
|
||||
|
||||
it('should add the schema version', async function () {
|
||||
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[1]._id)
|
||||
expect(StreamUtils.ReadableString).to.have.been.calledWith(
|
||||
sinon.match(/"schema_v":1/)
|
||||
)
|
||||
})
|
||||
|
||||
it('should calculate the hex md5 sum of the content', async function () {
|
||||
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
expect(Crypto.createHash).to.have.been.calledWith('md5')
|
||||
expect(HashUpdate).to.have.been.calledWith(archivedDocJson)
|
||||
expect(HashDigest).to.have.been.calledWith('hex')
|
||||
})
|
||||
|
||||
it('should pass the md5 hash to the object persistor for verification', async function () {
|
||||
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
|
||||
expect(PersistorManager.sendStream).to.have.been.calledWith(
|
||||
sinon.match.any,
|
||||
sinon.match.any,
|
||||
sinon.match.any,
|
||||
{ sourceMd5: md5Sum }
|
||||
)
|
||||
})
|
||||
|
||||
it('should pass the correct bucket and key to the persistor', async function () {
|
||||
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
|
||||
expect(PersistorManager.sendStream).to.have.been.calledWith(
|
||||
Settings.docstore.bucket,
|
||||
`${projectId}/${mongoDocs[0]._id}`
|
||||
)
|
||||
})
|
||||
|
||||
it('should create a stream from the encoded json and send it', async function () {
|
||||
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
expect(StreamUtils.ReadableString).to.have.been.calledWith(
|
||||
archivedDocJson
|
||||
)
|
||||
expect(PersistorManager.sendStream).to.have.been.calledWith(
|
||||
sinon.match.any,
|
||||
sinon.match.any,
|
||||
readStream
|
||||
)
|
||||
})
|
||||
|
||||
it('should mark the doc as archived', async function () {
|
||||
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
|
||||
projectId,
|
||||
mongoDocs[0]._id,
|
||||
mongoDocs[0].rev
|
||||
)
|
||||
})
|
||||
|
||||
describe('when archiving is not configured', function () {
|
||||
beforeEach(function () {
|
||||
Settings.docstore.backend = undefined
|
||||
})
|
||||
|
||||
it('should bail out early', async function () {
|
||||
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
expect(MongoManager.promises.getDocForArchiving).to.not.have.been.called
|
||||
})
|
||||
})
|
||||
|
||||
describe('with null bytes in the result', function () {
|
||||
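// JSON.stringify is swapped for a stub that injects a null byte into the
// serialised doc; afterEach restores the real implementation.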
const _stringify = JSON.stringify

beforeEach(function () {
JSON.stringify = sinon.stub().returns('{"bad": "\u0000"}')
})

afterEach(function () {
JSON.stringify = _stringify
})

it('should return an error', async function () {
await expect(
DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
).to.eventually.be.rejectedWith('null bytes detected')
})
})
})

describe('unarchiveDoc', function () {
let docId, lines, rev

describe('when the doc is in S3', function () {
beforeEach(function () {
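// findDoc reports the doc as archived (inS3: true), so unarchiveDoc has to
// fetch the content from the persistor and restore it in Mongo.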
MongoManager.promises.findDoc = sinon
.stub()
.resolves({ inS3: true, rev })
docId = mongoDocs[0]._id
lines = ['doc', 'lines']
rev = 123
})

it('should resolve when passed a valid document', async function () {
await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId))
.to.eventually.be.fulfilled
})

it('should test md5 validity with the raw buffer', async function () {
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(HashUpdate).to.have.been.calledWith(
sinon.match.instanceOf(Buffer)
)
})

it('should throw an error if the md5 does not match', async function () {
PersistorManager.getObjectMd5Hash.resolves('badf00d')
await expect(
DocArchiveManager.promises.unarchiveDoc(projectId, docId)
).to.eventually.be.rejected.and.be.instanceof(Errors.Md5MismatchError)
})

it('should restore the doc in Mongo', async function () {
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(
MongoManager.promises.restoreArchivedDoc
).to.have.been.calledWith(projectId, docId, archivedDoc)
})

describe('when archiving is not configured', function () {
beforeEach(function () {
Settings.docstore.backend = undefined
})

it('should error out on archived doc', async function () {
await expect(
DocArchiveManager.promises.unarchiveDoc(projectId, docId)
).to.eventually.be.rejected.and.match(
/found archived doc, but archiving backend is not configured/
)
})

it('should return early on non-archived doc', async function () {
MongoManager.promises.findDoc = sinon.stub().resolves({ rev })
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(PersistorManager.getObjectMd5Hash).to.not.have.been.called
})
})

describe('doc contents', function () {
let archivedDoc

describe('when the doc has the old schema', function () {
beforeEach(function () {
archivedDoc = lines
archivedDocJson = JSON.stringify(archivedDoc)
stream.on
.withArgs('data')
.yields(Buffer.from(archivedDocJson, 'utf8'))
})

it('should return the docs lines', async function () {
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(
MongoManager.promises.restoreArchivedDoc
).to.have.been.calledWith(projectId, docId, { lines, rev })
})
})

describe('with the new schema and ranges', function () {
beforeEach(function () {
archivedDoc = {
lines,
ranges: { json: 'ranges' },
rev: 456,
schema_v: 1,
}
archivedDocJson = JSON.stringify(archivedDoc)
stream.on
.withArgs('data')
.yields(Buffer.from(archivedDocJson, 'utf8'))
})

it('should return the doc lines and ranges', async function () {
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(
MongoManager.promises.restoreArchivedDoc
).to.have.been.calledWith(projectId, docId, {
lines,
ranges: { mongo: 'ranges' },
rev: 456,
})
})
})

describe('with the new schema and no ranges', function () {
beforeEach(function () {
archivedDoc = { lines, rev: 456, schema_v: 1 }
archivedDocJson = JSON.stringify(archivedDoc)
stream.on
.withArgs('data')
.yields(Buffer.from(archivedDocJson, 'utf8'))
})

it('should return only the doc lines', async function () {
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(
MongoManager.promises.restoreArchivedDoc
).to.have.been.calledWith(projectId, docId, { lines, rev: 456 })
})
})

describe('with the new schema and no rev', function () {
beforeEach(function () {
archivedDoc = { lines, schema_v: 1 }
archivedDocJson = JSON.stringify(archivedDoc)
stream.on
.withArgs('data')
.yields(Buffer.from(archivedDocJson, 'utf8'))
})

it('should use the rev obtained from Mongo', async function () {
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(
MongoManager.promises.restoreArchivedDoc
).to.have.been.calledWith(projectId, docId, { lines, rev })
})
})

describe('with an unrecognised schema', function () {
beforeEach(function () {
archivedDoc = { lines, schema_v: 2 }
archivedDocJson = JSON.stringify(archivedDoc)
stream.on
.withArgs('data')
.yields(Buffer.from(archivedDocJson, 'utf8'))
})

it('should throw an error', async function () {
await expect(
DocArchiveManager.promises.unarchiveDoc(projectId, docId)
).to.eventually.be.rejectedWith(
"I don't understand the doc format in s3"
)
})
})
})
})

it('should not do anything if the file is already unarchived', async function () {
MongoManager.promises.findDoc.resolves({ inS3: false })
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
expect(PersistorManager.getObjectStream).not.to.have.been.called
})

it('should throw an error if the file is not found', async function () {
PersistorManager.getObjectStream = sinon
.stub()
.rejects(new Errors.NotFoundError())
await expect(
DocArchiveManager.promises.unarchiveDoc(projectId, docId)
).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError)
})
})

describe('destroyProject', function () {
describe('when archiving is enabled', function () {
beforeEach(async function () {
await DocArchiveManager.promises.destroyProject(projectId)
})

it('should delete the project in Mongo', function () {
expect(MongoManager.promises.destroyProject).to.have.been.calledWith(
projectId
)
})

it('should delete the project in the persistor', function () {
expect(PersistorManager.deleteDirectory).to.have.been.calledWith(
Settings.docstore.bucket,
projectId
)
})
})

describe('when archiving is disabled', function () {
beforeEach(async function () {
Settings.docstore.backend = ''
await DocArchiveManager.promises.destroyProject(projectId)
})

it('should delete the project in Mongo', function () {
expect(MongoManager.promises.destroyProject).to.have.been.calledWith(
projectId
)
})

it('should not delete the project in the persistor', function () {
expect(PersistorManager.deleteDirectory).not.to.have.been.called
})
})
})

describe('archiveAllDocs', function () {
it('should resolve with valid arguments', async function () {
await expect(DocArchiveManager.promises.archiveAllDocs(projectId)).to
.eventually.be.fulfilled
})

it('should archive all project docs which are not in s3', async function () {
await DocArchiveManager.promises.archiveAllDocs(projectId)
// not inS3
expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
projectId,
mongoDocs[0]._id
)
expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
projectId,
mongoDocs[1]._id
)
expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
projectId,
mongoDocs[4]._id
)

// inS3
expect(
MongoManager.promises.markDocAsArchived
).not.to.have.been.calledWith(projectId, mongoDocs[2]._id)
expect(
MongoManager.promises.markDocAsArchived
).not.to.have.been.calledWith(projectId, mongoDocs[3]._id)
})

describe('when archiving is not configured', function () {
beforeEach(function () {
Settings.docstore.backend = undefined
})

it('should bail out early', async function () {
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.promises.getNonArchivedProjectDocIds).to.not.have
.been.called
})
})
})

describe('unArchiveAllDocs', function () {
it('should resolve with valid arguments', async function () {
await expect(DocArchiveManager.promises.unArchiveAllDocs(projectId)).to
.eventually.be.fulfilled
})

it('should unarchive all inS3 docs', async function () {
await DocArchiveManager.promises.unArchiveAllDocs(projectId)

for (const doc of archivedDocs) {
expect(PersistorManager.getObjectStream).to.have.been.calledWith(
Settings.docstore.bucket,
`${projectId}/${doc._id}`
)
}
})

describe('when archiving is not configured', function () {
beforeEach(function () {
Settings.docstore.backend = undefined
})

it('should bail out early', async function () {
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
expect(MongoManager.promises.getNonDeletedArchivedProjectDocs).to.not
.have.been.called
})
})
})
})
755
services/docstore/test/unit/js/DocManagerTests.js
Normal file
@@ -0,0 +1,755 @@
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = require('node:path').join(
__dirname,
'../../../app/js/DocManager'
)
const { ObjectId } = require('mongodb-legacy')
const Errors = require('../../../app/js/Errors')

describe('DocManager', function () {
beforeEach(function () {
this.doc_id = new ObjectId().toString()
this.project_id = new ObjectId().toString()
this.another_project_id = new ObjectId().toString()
this.stubbedError = new Error('blew up')
this.version = 42

this.MongoManager = {
promises: {
findDoc: sinon.stub(),
getProjectsDocs: sinon.stub(),
patchDoc: sinon.stub().resolves(),
upsertIntoDocCollection: sinon.stub().resolves(),
},
}
this.DocArchiveManager = {
promises: {
unarchiveDoc: sinon.stub(),
unArchiveAllDocs: sinon.stub(),
archiveDoc: sinon.stub().resolves(),
},
}
this.RangeManager = {
jsonRangesToMongo(r) {
return r
},
shouldUpdateRanges: sinon.stub().returns(false),
}
this.settings = { docstore: {} }

this.DocManager = SandboxedModule.require(modulePath, {
requires: {
'./MongoManager': this.MongoManager,
'./DocArchiveManager': this.DocArchiveManager,
'./RangeManager': this.RangeManager,
'@overleaf/settings': this.settings,
'./Errors': Errors,
},
})
})

describe('getFullDoc', function () {
beforeEach(function () {
this.DocManager.promises._getDoc = sinon.stub()
this.doc = {
_id: this.doc_id,
lines: ['2134'],
}
})

it('should call get doc with a quick filter', async function () {
this.DocManager.promises._getDoc.resolves(this.doc)
const doc = await this.DocManager.promises.getFullDoc(
this.project_id,
this.doc_id
)
doc.should.equal(this.doc)
this.DocManager.promises._getDoc
.calledWith(this.project_id, this.doc_id, {
lines: true,
rev: true,
deleted: true,
version: true,
ranges: true,
inS3: true,
})
.should.equal(true)
})

it('should return error when get doc errors', async function () {
this.DocManager.promises._getDoc.rejects(this.stubbedError)
await expect(
this.DocManager.promises.getFullDoc(this.project_id, this.doc_id)
).to.be.rejectedWith(this.stubbedError)
})
})

describe('getRawDoc', function () {
beforeEach(function () {
this.DocManager.promises._getDoc = sinon.stub()
this.doc = { lines: ['2134'] }
})

it('should call get doc with a quick filter', async function () {
this.DocManager.promises._getDoc.resolves(this.doc)
const doc = await this.DocManager.promises.getDocLines(
this.project_id,
this.doc_id
)
doc.should.equal(this.doc)
this.DocManager.promises._getDoc
.calledWith(this.project_id, this.doc_id, {
lines: true,
inS3: true,
})
.should.equal(true)
})

it('should return error when get doc errors', async function () {
this.DocManager.promises._getDoc.rejects(this.stubbedError)
await expect(
this.DocManager.promises.getDocLines(this.project_id, this.doc_id)
).to.be.rejectedWith(this.stubbedError)
})
})

describe('getDoc', function () {
beforeEach(function () {
this.project = { name: 'mock-project' }
this.doc = {
_id: this.doc_id,
project_id: this.project_id,
lines: ['mock-lines'],
version: this.version,
}
})

describe('when using a filter', function () {
beforeEach(function () {
this.MongoManager.promises.findDoc.resolves(this.doc)
})

it('should error if inS3 is not set to true', async function () {
await expect(
this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
inS3: false,
})
).to.be.rejected
})

it('should always get inS3 even when no filter is passed', async function () {
await expect(
this.DocManager.promises._getDoc(this.project_id, this.doc_id)
).to.be.rejected
this.MongoManager.promises.findDoc.called.should.equal(false)
})

it('should not error if inS3 is set to true', async function () {
await this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
inS3: true,
})
})
})

describe('when the doc is in the doc collection', function () {
beforeEach(async function () {
this.MongoManager.promises.findDoc.resolves(this.doc)
this.result = await this.DocManager.promises._getDoc(
this.project_id,
this.doc_id,
{ version: true, inS3: true }
)
})

it('should get the doc from the doc collection', function () {
this.MongoManager.promises.findDoc
.calledWith(this.project_id, this.doc_id)
.should.equal(true)
})

it('should return the doc with the version', function () {
this.result.lines.should.equal(this.doc.lines)
this.result.version.should.equal(this.version)
})
})

describe('when MongoManager.findDoc errors', function () {
it('should return the error', async function () {
this.MongoManager.promises.findDoc.rejects(this.stubbedError)
await expect(
this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
version: true,
inS3: true,
})
).to.be.rejectedWith(this.stubbedError)
})
})

describe('when the doc is archived', function () {
beforeEach(async function () {
this.doc = {
_id: this.doc_id,
project_id: this.project_id,
version: 2,
inS3: true,
}
this.unarchivedDoc = {
_id: this.doc_id,
project_id: this.project_id,
lines: ['mock-lines'],
version: 2,
inS3: false,
}
this.MongoManager.promises.findDoc.resolves(this.doc)
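// The unarchiveDoc stub re-points findDoc at the unarchived copy, simulating
// the doc having been restored to Mongo before the second lookup.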
this.DocArchiveManager.promises.unarchiveDoc.callsFake(
async (projectId, docId) => {
this.MongoManager.promises.findDoc.resolves({
...this.unarchivedDoc,
})
}
)
this.result = await this.DocManager.promises._getDoc(
this.project_id,
this.doc_id,
{
version: true,
inS3: true,
}
)
})

it('should call the DocArchive to unarchive the doc', function () {
this.DocArchiveManager.promises.unarchiveDoc
.calledWith(this.project_id, this.doc_id)
.should.equal(true)
})

it('should look up the doc twice', function () {
this.MongoManager.promises.findDoc.calledTwice.should.equal(true)
})

it('should return the doc', function () {
expect(this.result).to.deep.equal({
...this.unarchivedDoc,
})
})
})

describe('when the doc does not exist in the docs collection', function () {
it('should return a NotFoundError', async function () {
this.MongoManager.promises.findDoc.resolves(null)
await expect(
this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
version: true,
inS3: true,
})
).to.be.rejectedWith(
`No such doc: ${this.doc_id} in project ${this.project_id}`
)
})
})
})

describe('getAllNonDeletedDocs', function () {
describe('when the project exists', function () {
beforeEach(async function () {
this.docs = [
{
_id: this.doc_id,
project_id: this.project_id,
lines: ['mock-lines'],
},
]
this.MongoManager.promises.getProjectsDocs.resolves(this.docs)
this.DocArchiveManager.promises.unArchiveAllDocs.resolves(this.docs)
this.filter = { lines: true }
this.result = await this.DocManager.promises.getAllNonDeletedDocs(
this.project_id,
this.filter
)
})

it('should get the project from the database', function () {
this.MongoManager.promises.getProjectsDocs.should.have.been.calledWith(
this.project_id,
{ include_deleted: false },
this.filter
)
})

it('should return the docs', function () {
expect(this.result).to.deep.equal(this.docs)
})
})

describe('when there are no docs for the project', function () {
it('should return a NotFoundError', async function () {
this.MongoManager.promises.getProjectsDocs.resolves(null)
this.DocArchiveManager.promises.unArchiveAllDocs.resolves(null)
await expect(
this.DocManager.promises.getAllNonDeletedDocs(
this.project_id,
this.filter
)
).to.be.rejectedWith(`No docs for project ${this.project_id}`)
})
})
})

describe('patchDoc', function () {
describe('when the doc exists', function () {
beforeEach(function () {
this.lines = ['mock', 'doc', 'lines']
this.rev = 77
this.MongoManager.promises.findDoc.resolves({
_id: new ObjectId(this.doc_id),
})
this.meta = {}
})

describe('standard path', function () {
beforeEach(async function () {
await this.DocManager.promises.patchDoc(
this.project_id,
this.doc_id,
this.meta
)
})

it('should get the doc', function () {
expect(this.MongoManager.promises.findDoc).to.have.been.calledWith(
this.project_id,
this.doc_id
)
})

it('should persist the meta', function () {
expect(this.MongoManager.promises.patchDoc).to.have.been.calledWith(
this.project_id,
this.doc_id,
this.meta
)
})
})

describe('background flush disabled and deleting a doc', function () {
beforeEach(async function () {
this.settings.docstore.archiveOnSoftDelete = false
this.meta.deleted = true

await this.DocManager.promises.patchDoc(
this.project_id,
this.doc_id,
this.meta
)
})

it('should not flush the doc out of mongo', function () {
expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been
.called
})
})

describe('background flush enabled and not deleting a doc', function () {
beforeEach(async function () {
this.settings.docstore.archiveOnSoftDelete = false
this.meta.deleted = false
await this.DocManager.promises.patchDoc(
this.project_id,
this.doc_id,
this.meta
)
})

it('should not flush the doc out of mongo', function () {
expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been
.called
})
})

describe('background flush enabled and deleting a doc', function () {
beforeEach(function () {
this.settings.docstore.archiveOnSoftDelete = true
this.meta.deleted = true
})

describe('when the background flush succeeds', function () {
beforeEach(async function () {
await this.DocManager.promises.patchDoc(
this.project_id,
this.doc_id,
this.meta
)
})

it('should not log a warning', function () {
expect(this.logger.warn).to.not.have.been.called
})

it('should flush the doc out of mongo', function () {
expect(
this.DocArchiveManager.promises.archiveDoc
).to.have.been.calledWith(this.project_id, this.doc_id)
})
})

describe('when the background flush fails', function () {
beforeEach(async function () {
this.err = new Error('foo')
this.DocArchiveManager.promises.archiveDoc.rejects(this.err)
await this.DocManager.promises.patchDoc(
this.project_id,
this.doc_id,
this.meta
)
})

it('should log a warning', function () {
expect(this.logger.warn).to.have.been.calledWith(
sinon.match({
projectId: this.project_id,
docId: this.doc_id,
err: this.err,
}),
'archiving a single doc in the background failed'
)
})
})
})
})

describe('when the doc does not exist', function () {
it('should return a NotFoundError', async function () {
this.MongoManager.promises.findDoc.resolves(null)
await expect(
this.DocManager.promises.patchDoc(this.project_id, this.doc_id, {})
).to.be.rejectedWith(
`No such project/doc to delete: ${this.project_id}/${this.doc_id}`
)
})
})
})

describe('updateDoc', function () {
beforeEach(function () {
this.oldDocLines = ['old', 'doc', 'lines']
this.newDocLines = ['new', 'doc', 'lines']
this.originalRanges = {
changes: [
{
id: new ObjectId().toString(),
op: { i: 'foo', p: 3 },
meta: {
user_id: new ObjectId().toString(),
ts: new Date().toString(),
},
},
],
}
this.newRanges = {
changes: [
{
id: new ObjectId().toString(),
op: { i: 'bar', p: 6 },
meta: {
user_id: new ObjectId().toString(),
ts: new Date().toString(),
},
},
],
}
this.version = 42
this.doc = {
_id: this.doc_id,
project_id: this.project_id,
lines: this.oldDocLines,
rev: (this.rev = 5),
version: this.version,
ranges: this.originalRanges,
}

this.DocManager.promises._getDoc = sinon.stub()
})

describe('when only the doc lines have changed', function () {
beforeEach(async function () {
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
this.version,
this.originalRanges
)
})

it('should get the existing doc', function () {
this.DocManager.promises._getDoc
.calledWith(this.project_id, this.doc_id, {
version: true,
rev: true,
lines: true,
ranges: true,
inS3: true,
})
.should.equal(true)
})

it('should upsert the document to the doc collection', function () {
this.MongoManager.promises.upsertIntoDocCollection
.calledWith(this.project_id, this.doc_id, this.rev, {
lines: this.newDocLines,
})
.should.equal(true)
})

it('should return the new rev', function () {
expect(this.result).to.deep.equal({ modified: true, rev: this.rev + 1 })
})
})

describe('when the doc ranges have changed', function () {
beforeEach(async function () {
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.RangeManager.shouldUpdateRanges.returns(true)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.oldDocLines,
this.version,
this.newRanges
)
})

it('should upsert the ranges', function () {
this.MongoManager.promises.upsertIntoDocCollection
.calledWith(this.project_id, this.doc_id, this.rev, {
ranges: this.newRanges,
})
.should.equal(true)
})

it('should return the new rev', function () {
expect(this.result).to.deep.equal({ modified: true, rev: this.rev + 1 })
})
})

describe('when only the version has changed', function () {
beforeEach(async function () {
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.oldDocLines,
this.version + 1,
this.originalRanges
)
})

it('should update the version', function () {
this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
this.project_id,
this.doc_id,
this.rev,
{ version: this.version + 1 }
)
})

it('should return the old rev', function () {
expect(this.result).to.deep.equal({ modified: true, rev: this.rev })
})
})

describe('when the doc has not changed at all', function () {
beforeEach(async function () {
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.oldDocLines,
this.version,
this.originalRanges
)
})

it('should not update the ranges or lines or version', function () {
this.MongoManager.promises.upsertIntoDocCollection.called.should.equal(
false
)
})

it('should return the old rev and modified == false', function () {
expect(this.result).to.deep.equal({ modified: false, rev: this.rev })
})
})

describe('when the version is null', function () {
it('should return an error', async function () {
await expect(
this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
null,
this.originalRanges
)
).to.be.rejectedWith('no lines, version or ranges provided')
})
})

describe('when the lines are null', function () {
it('should return an error', async function () {
await expect(
this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
null,
this.version,
this.originalRanges
)
).to.be.rejectedWith('no lines, version or ranges provided')
})
})

describe('when the ranges are null', function () {
it('should return an error', async function () {
await expect(
this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
this.version,
null
)
).to.be.rejectedWith('no lines, version or ranges provided')
})
})

describe('when there is a generic error getting the doc', function () {
beforeEach(async function () {
this.error = new Error('doc could not be found')
this.DocManager.promises._getDoc = sinon.stub().rejects(this.error)
await expect(
this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
this.version,
this.originalRanges
)
).to.be.rejectedWith(this.error)
})

it('should not upsert the document to the doc collection', function () {
this.MongoManager.promises.upsertIntoDocCollection.should.not.have.been
.called
})
})

describe('when the version was decremented', function () {
it('should return an error', async function () {
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
await expect(
this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
this.version - 1,
this.originalRanges
)
).to.be.rejectedWith(Errors.DocVersionDecrementedError)
})
})

describe('when the doc lines have not changed', function () {
beforeEach(async function () {
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.oldDocLines.slice(),
this.version,
this.originalRanges
)
})

it('should not update the doc', function () {
this.MongoManager.promises.upsertIntoDocCollection.called.should.equal(
false
)
})

it('should return the existing rev', function () {
expect(this.result).to.deep.equal({ modified: false, rev: this.rev })
})
})

describe('when the doc does not exist', function () {
beforeEach(async function () {
this.DocManager.promises._getDoc = sinon.stub().resolves(null)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
this.version,
this.originalRanges
)
})

it('should upsert the document to the doc collection', function () {
this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
this.project_id,
this.doc_id,
undefined,
{
lines: this.newDocLines,
ranges: this.originalRanges,
version: this.version,
}
)
})

it('should return the new rev', function () {
expect(this.result).to.deep.equal({ modified: true, rev: 1 })
})
})

describe('when another update is racing', function () {
beforeEach(async function () {
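// The first upsert rejects with DocRevValueError to simulate a concurrent
// write; updateDoc is expected to re-read the doc and retry the upsert.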
this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
this.MongoManager.promises.upsertIntoDocCollection
.onFirstCall()
.rejects(new Errors.DocRevValueError())
this.RangeManager.shouldUpdateRanges.returns(true)
this.result = await this.DocManager.promises.updateDoc(
this.project_id,
this.doc_id,
this.newDocLines,
this.version + 1,
this.newRanges
)
})

it('should upsert the doc twice', function () {
this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
this.project_id,
this.doc_id,
this.rev,
{
ranges: this.newRanges,
lines: this.newDocLines,
version: this.version + 1,
}
)
this.MongoManager.promises.upsertIntoDocCollection.should.have.been
.calledTwice
})

it('should return the new rev', function () {
expect(this.result).to.deep.equal({ modified: true, rev: this.rev + 1 })
})
})
})
})
578
services/docstore/test/unit/js/HttpControllerTests.js
Normal file
@@ -0,0 +1,578 @@
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const { assert, expect } = require('chai')
const modulePath = require('node:path').join(
__dirname,
'../../../app/js/HttpController'
)
const { ObjectId } = require('mongodb-legacy')
const Errors = require('../../../app/js/Errors')

describe('HttpController', function () {
beforeEach(function () {
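// max_doc_length below is 2 MB; the 'doc body is too large' tests rely on it.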
const settings = {
max_doc_length: 2 * 1024 * 1024,
}
this.DocArchiveManager = {
unArchiveAllDocs: sinon.stub().yields(),
}
this.DocManager = {}
this.HttpController = SandboxedModule.require(modulePath, {
requires: {
'./DocManager': this.DocManager,
'./DocArchiveManager': this.DocArchiveManager,
'@overleaf/settings': settings,
'./HealthChecker': {},
'./Errors': Errors,
},
})
this.res = {
send: sinon.stub(),
sendStatus: sinon.stub(),
json: sinon.stub(),
setHeader: sinon.stub(),
}
this.res.status = sinon.stub().returns(this.res)
this.req = { query: {} }
this.next = sinon.stub()
this.projectId = 'mock-project-id'
this.docId = 'mock-doc-id'
this.doc = {
_id: this.docId,
lines: ['mock', 'lines', ' here', '', '', ' spaces '],
version: 42,
rev: 5,
}
this.deletedDoc = {
deleted: true,
_id: this.docId,
lines: ['mock', 'lines', ' here', '', '', ' spaces '],
version: 42,
rev: 5,
}
})

describe('getDoc', function () {
describe('without deleted docs', function () {
beforeEach(function () {
this.req.params = {
project_id: this.projectId,
doc_id: this.docId,
}
this.DocManager.getFullDoc = sinon
.stub()
.callsArgWith(2, null, this.doc)
this.HttpController.getDoc(this.req, this.res, this.next)
})

it('should get the document with the version (including deleted)', function () {
this.DocManager.getFullDoc
.calledWith(this.projectId, this.docId)
.should.equal(true)
})

it('should return the doc as JSON', function () {
this.res.json
.calledWith({
_id: this.docId,
lines: this.doc.lines,
rev: this.doc.rev,
version: this.doc.version,
})
.should.equal(true)
})
})

describe('which is deleted', function () {
beforeEach(function () {
this.req.params = {
project_id: this.projectId,
doc_id: this.docId,
}
this.DocManager.getFullDoc = sinon
.stub()
.callsArgWith(2, null, this.deletedDoc)
})

it('should get the doc from the doc manager', function () {
this.HttpController.getDoc(this.req, this.res, this.next)
this.DocManager.getFullDoc
.calledWith(this.projectId, this.docId)
.should.equal(true)
})

it('should return 404 if the query string delete is not set ', function () {
this.HttpController.getDoc(this.req, this.res, this.next)
this.res.sendStatus.calledWith(404).should.equal(true)
})

it('should return the doc as JSON if include_deleted is set to true', function () {
this.req.query.include_deleted = 'true'
this.HttpController.getDoc(this.req, this.res, this.next)
this.res.json
.calledWith({
_id: this.docId,
lines: this.doc.lines,
rev: this.doc.rev,
deleted: true,
version: this.doc.version,
})
.should.equal(true)
})
})
})

describe('getRawDoc', function () {
beforeEach(function () {
this.req.params = {
project_id: this.projectId,
doc_id: this.docId,
}
this.DocManager.getDocLines = sinon.stub().callsArgWith(2, null, this.doc)
this.HttpController.getRawDoc(this.req, this.res, this.next)
})

it('should get the document without the version', function () {
this.DocManager.getDocLines
.calledWith(this.projectId, this.docId)
.should.equal(true)
})

it('should set the content type header', function () {
this.res.setHeader
.calledWith('content-type', 'text/plain')
.should.equal(true)
})

it('should send the raw version of the doc', function () {
assert.deepEqual(
this.res.send.args[0][0],
`${this.doc.lines[0]}\n${this.doc.lines[1]}\n${this.doc.lines[2]}\n${this.doc.lines[3]}\n${this.doc.lines[4]}\n${this.doc.lines[5]}`
)
})
})

describe('getAllDocs', function () {
describe('normally', function () {
beforeEach(function () {
this.req.params = { project_id: this.projectId }
this.docs = [
{
_id: new ObjectId(),
lines: ['mock', 'lines', 'one'],
rev: 2,
},
{
_id: new ObjectId(),
lines: ['mock', 'lines', 'two'],
rev: 4,
},
]
this.DocManager.getAllNonDeletedDocs = sinon
.stub()
.callsArgWith(2, null, this.docs)
this.HttpController.getAllDocs(this.req, this.res, this.next)
})

it('should get all the (non-deleted) docs', function () {
this.DocManager.getAllNonDeletedDocs
.calledWith(this.projectId, { lines: true, rev: true })
.should.equal(true)
})

it('should return the doc as JSON', function () {
this.res.json
.calledWith([
{
_id: this.docs[0]._id.toString(),
lines: this.docs[0].lines,
rev: this.docs[0].rev,
},
{
_id: this.docs[1]._id.toString(),
lines: this.docs[1].lines,
rev: this.docs[1].rev,
},
])
.should.equal(true)
})
})

describe('with null lines', function () {
beforeEach(function () {
this.req.params = { project_id: this.projectId }
this.docs = [
{
_id: new ObjectId(),
lines: null,
rev: 2,
},
{
_id: new ObjectId(),
lines: ['mock', 'lines', 'two'],
rev: 4,
},
]
this.DocManager.getAllNonDeletedDocs = sinon
.stub()
.callsArgWith(2, null, this.docs)
this.HttpController.getAllDocs(this.req, this.res, this.next)
})

it('should return the doc with fallback lines', function () {
this.res.json
.calledWith([
{
_id: this.docs[0]._id.toString(),
lines: [],
rev: this.docs[0].rev,
},
{
_id: this.docs[1]._id.toString(),
lines: this.docs[1].lines,
rev: this.docs[1].rev,
},
])
.should.equal(true)
})
})

describe('with a null doc', function () {
beforeEach(function () {
this.req.params = { project_id: this.projectId }
this.docs = [
{
_id: new ObjectId(),
lines: ['mock', 'lines', 'one'],
rev: 2,
},
null,
{
_id: new ObjectId(),
lines: ['mock', 'lines', 'two'],
rev: 4,
},
]
this.DocManager.getAllNonDeletedDocs = sinon
.stub()
.callsArgWith(2, null, this.docs)
this.HttpController.getAllDocs(this.req, this.res, this.next)
})

it('should return the non null docs as JSON', function () {
this.res.json
.calledWith([
{
_id: this.docs[0]._id.toString(),
lines: this.docs[0].lines,
rev: this.docs[0].rev,
},
{
_id: this.docs[2]._id.toString(),
lines: this.docs[2].lines,
rev: this.docs[2].rev,
},
])
.should.equal(true)
})

it('should log out an error', function () {
this.logger.error
.calledWith(
{
err: sinon.match.has('message', 'null doc'),
projectId: this.projectId,
},
'encountered null doc'
)
.should.equal(true)
})
})
})

describe('getAllRanges', function () {
describe('normally', function () {
beforeEach(function () {
this.req.params = { project_id: this.projectId }
this.docs = [
{
_id: new ObjectId(),
ranges: { mock_ranges: 'one' },
},
{
_id: new ObjectId(),
ranges: { mock_ranges: 'two' },
},
]
this.DocManager.getAllNonDeletedDocs = sinon
.stub()
.callsArgWith(2, null, this.docs)
this.HttpController.getAllRanges(this.req, this.res, this.next)
})

it('should get all the (non-deleted) doc ranges', function () {
this.DocManager.getAllNonDeletedDocs
.calledWith(this.projectId, { ranges: true })
.should.equal(true)
})

it('should return the doc as JSON', function () {
this.res.json
.calledWith([
{
_id: this.docs[0]._id.toString(),
ranges: this.docs[0].ranges,
},
{
_id: this.docs[1]._id.toString(),
ranges: this.docs[1].ranges,
},
])
.should.equal(true)
})
})
})

describe('updateDoc', function () {
beforeEach(function () {
this.req.params = {
project_id: this.projectId,
doc_id: this.docId,
}
})

describe('when the doc lines exist and were updated', function () {
beforeEach(function () {
this.req.body = {
lines: (this.lines = ['hello', 'world']),
version: (this.version = 42),
ranges: (this.ranges = { changes: 'mock' }),
}
this.DocManager.updateDoc = sinon
.stub()
.yields(null, true, (this.rev = 5))
this.HttpController.updateDoc(this.req, this.res, this.next)
})

it('should update the document', function () {
this.DocManager.updateDoc
.calledWith(
this.projectId,
this.docId,
this.lines,
this.version,
this.ranges
)
.should.equal(true)
})

it('should return a modified status', function () {
this.res.json
.calledWith({ modified: true, rev: this.rev })
.should.equal(true)
})
})

describe('when the doc lines exist and were not updated', function () {
beforeEach(function () {
this.req.body = {
lines: (this.lines = ['hello', 'world']),
version: (this.version = 42),
ranges: {},
}
this.DocManager.updateDoc = sinon
.stub()
.yields(null, false, (this.rev = 5))
this.HttpController.updateDoc(this.req, this.res, this.next)
})

it('should return a modified status', function () {
this.res.json
.calledWith({ modified: false, rev: this.rev })
.should.equal(true)
})
})

describe('when the doc lines are not provided', function () {
beforeEach(function () {
this.req.body = { version: 42, ranges: {} }
this.DocManager.updateDoc = sinon.stub().yields(null, false)
this.HttpController.updateDoc(this.req, this.res, this.next)
})

it('should not update the document', function () {
this.DocManager.updateDoc.called.should.equal(false)
})

it('should return a 400 (bad request) response', function () {
this.res.sendStatus.calledWith(400).should.equal(true)
})
})

describe('when the doc version is not provided', function () {
beforeEach(function () {
this.req.body = { version: 42, lines: ['hello world'] }
this.DocManager.updateDoc = sinon.stub().yields(null, false)
this.HttpController.updateDoc(this.req, this.res, this.next)
})

it('should not update the document', function () {
this.DocManager.updateDoc.called.should.equal(false)
})

it('should return a 400 (bad request) response', function () {
this.res.sendStatus.calledWith(400).should.equal(true)
})
})

describe('when the doc ranges is not provided', function () {
beforeEach(function () {
this.req.body = { lines: ['foo'], version: 42 }
this.DocManager.updateDoc = sinon.stub().yields(null, false)
this.HttpController.updateDoc(this.req, this.res, this.next)
})

it('should not update the document', function () {
this.DocManager.updateDoc.called.should.equal(false)
})

it('should return a 400 (bad request) response', function () {
this.res.sendStatus.calledWith(400).should.equal(true)
})
})

describe('when the doc body is too large', function () {
beforeEach(function () {
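// 2049 lines of 1024 characters each is just over the 2 MB max_doc_length
// stubbed in the settings above.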
this.req.body = {
lines: (this.lines = Array(2049).fill('a'.repeat(1024))),
version: (this.version = 42),
ranges: (this.ranges = { changes: 'mock' }),
}
this.HttpController.updateDoc(this.req, this.res, this.next)
})

it('should return a 413 (too large) response', function () {
sinon.assert.calledWith(this.res.status, 413)
})

it('should report that the document body is too large', function () {
sinon.assert.calledWith(this.res.send, 'document body too large')
})
})
})

describe('patchDoc', function () {
beforeEach(function () {
this.req.params = {
project_id: this.projectId,
doc_id: this.docId,
}
this.req.body = { name: 'foo.tex' }
this.DocManager.patchDoc = sinon.stub().yields(null)
this.HttpController.patchDoc(this.req, this.res, this.next)
})

it('should delete the document', function () {
expect(this.DocManager.patchDoc).to.have.been.calledWith(
this.projectId,
this.docId
)
})

it('should return a 204 (No Content)', function () {
expect(this.res.sendStatus).to.have.been.calledWith(204)
})

describe('with an invalid payload', function () {
beforeEach(function () {
this.req.body = { cannot: 'happen' }

this.DocManager.patchDoc = sinon.stub().yields(null)
this.HttpController.patchDoc(this.req, this.res, this.next)
})

it('should log a message', function () {
expect(this.logger.fatal).to.have.been.calledWith(
{ field: 'cannot' },
'joi validation for pathDoc is broken'
)
})

it('should not pass the invalid field along', function () {
expect(this.DocManager.patchDoc).to.have.been.calledWith(
this.projectId,
this.docId,
{}
)
})
})
})

describe('archiveAllDocs', function () {
beforeEach(function () {
this.req.params = { project_id: this.projectId }
this.DocArchiveManager.archiveAllDocs = sinon.stub().callsArg(1)
this.HttpController.archiveAllDocs(this.req, this.res, this.next)
})

it('should archive the project', function () {
this.DocArchiveManager.archiveAllDocs
.calledWith(this.projectId)
.should.equal(true)
})

it('should return a 204 (No Content)', function () {
this.res.sendStatus.calledWith(204).should.equal(true)
})
})

describe('unArchiveAllDocs', function () {
beforeEach(function () {
this.req.params = { project_id: this.projectId }
})

describe('on success', function () {
beforeEach(function (done) {
this.res.sendStatus.callsFake(() => done())
this.HttpController.unArchiveAllDocs(this.req, this.res, this.next)
})

it('returns a 200', function () {
expect(this.res.sendStatus).to.have.been.calledWith(200)
})
})

describe("when the archived rev doesn't match", function () {
beforeEach(function (done) {
this.res.sendStatus.callsFake(() => done())
this.DocArchiveManager.unArchiveAllDocs.yields(
new Errors.DocRevValueError('bad rev')
)
this.HttpController.unArchiveAllDocs(this.req, this.res, this.next)
})

it('returns a 409', function () {
expect(this.res.sendStatus).to.have.been.calledWith(409)
})
})
})

describe('destroyProject', function () {
beforeEach(function () {
this.req.params = { project_id: this.projectId }
this.DocArchiveManager.destroyProject = sinon.stub().callsArg(1)
this.HttpController.destroyProject(this.req, this.res, this.next)
})

it('should destroy the docs', function () {
sinon.assert.calledWith(
this.DocArchiveManager.destroyProject,
this.projectId
)
})

it('should return 204', function () {
sinon.assert.calledWith(this.res.sendStatus, 204)
})
})
})
407
services/docstore/test/unit/js/MongoManagerTests.js
Normal file
@@ -0,0 +1,407 @@
const SandboxedModule = require('sandboxed-module')
|
||||
const sinon = require('sinon')
|
||||
const modulePath = require('node:path').join(
|
||||
__dirname,
|
||||
'../../../app/js/MongoManager'
|
||||
)
|
||||
const { ObjectId } = require('mongodb-legacy')
|
||||
const { assert, expect } = require('chai')
|
||||
const Errors = require('../../../app/js/Errors')
|
||||
|
||||
describe('MongoManager', function () {
|
||||
beforeEach(function () {
|
||||
this.db = {
|
||||
docs: {
|
||||
updateOne: sinon.stub().resolves({ matchedCount: 1 }),
|
||||
insertOne: sinon.stub().resolves(),
|
||||
},
|
||||
}
|
||||
this.MongoManager = SandboxedModule.require(modulePath, {
|
||||
requires: {
|
||||
'./mongodb': {
|
||||
db: this.db,
|
||||
ObjectId,
|
||||
},
|
||||
'@overleaf/settings': {
|
||||
max_deleted_docs: 42,
|
||||
docstore: { archivingLockDurationMs: 5000 },
|
||||
},
|
||||
'./Errors': Errors,
|
||||
},
|
||||
})
|
||||
this.projectId = new ObjectId().toString()
|
||||
this.docId = new ObjectId().toString()
|
||||
this.rev = 42
|
||||
this.stubbedErr = new Error('hello world')
|
||||
this.lines = ['Three French hens', 'Two turtle doves']
|
||||
})
|
||||
|
||||
describe('findDoc', function () {
|
||||
beforeEach(async function () {
|
||||
this.doc = { name: 'mock-doc' }
|
||||
this.db.docs.findOne = sinon.stub().resolves(this.doc)
|
||||
this.filter = { lines: true }
|
||||
this.result = await this.MongoManager.promises.findDoc(
|
||||
this.projectId,
|
||||
this.docId,
|
||||
this.filter
|
||||
)
|
||||
})
|
||||
|
||||
it('should find the doc', function () {
|
||||
this.db.docs.findOne
|
||||
.calledWith(
|
||||
{
|
||||
_id: new ObjectId(this.docId),
|
||||
project_id: new ObjectId(this.projectId),
|
||||
},
|
||||
{
|
||||
projection: this.filter,
|
||||
}
|
||||
)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the doc', function () {
|
||||
expect(this.doc).to.deep.equal(this.doc)
|
||||
})
|
||||
})
|
||||
|
||||
describe('patchDoc', function () {
|
||||
beforeEach(async function () {
|
||||
this.meta = { name: 'foo.tex' }
|
||||
await this.MongoManager.promises.patchDoc(
|
||||
this.projectId,
|
||||
this.docId,
|
||||
this.meta
|
||||
)
|
||||
})
|
||||
|
||||
it('should pass the parameter along', function () {
|
||||
this.db.docs.updateOne.should.have.been.calledWith(
|
||||
{
|
||||
_id: new ObjectId(this.docId),
|
||||
project_id: new ObjectId(this.projectId),
|
||||
},
|
||||
{
|
||||
$set: this.meta,
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('getProjectsDocs', function () {
|
||||
beforeEach(function () {
|
||||
this.filter = { lines: true }
|
||||
this.doc1 = { name: 'mock-doc1' }
|
||||
this.doc2 = { name: 'mock-doc2' }
|
||||
this.doc3 = { name: 'mock-doc3' }
|
||||
this.doc4 = { name: 'mock-doc4' }
|
||||
this.db.docs.find = sinon.stub().returns({
|
||||
toArray: sinon.stub().resolves([this.doc, this.doc3, this.doc4]),
|
||||
})
|
||||
})

    describe('with included_deleted = false', function () {
      beforeEach(async function () {
        this.result = await this.MongoManager.promises.getProjectsDocs(
          this.projectId,
          { include_deleted: false },
          this.filter
        )
      })

      it('should find the non-deleted docs via the project_id', function () {
        this.db.docs.find
          .calledWith(
            {
              project_id: new ObjectId(this.projectId),
              deleted: { $ne: true },
            },
            {
              projection: this.filter,
            }
          )
          .should.equal(true)
      })

      it('should return the docs', function () {
        expect(this.result).to.deep.equal([this.doc1, this.doc3, this.doc4])
      })
    })

    describe('with included_deleted = true', function () {
      beforeEach(async function () {
        this.result = await this.MongoManager.promises.getProjectsDocs(
          this.projectId,
          { include_deleted: true },
          this.filter
        )
      })

      it('should find all via the project_id', function () {
        this.db.docs.find
          .calledWith(
            {
              project_id: new ObjectId(this.projectId),
            },
            {
              projection: this.filter,
            }
          )
          .should.equal(true)
      })

      it('should return the docs', function () {
        expect(this.result).to.deep.equal([this.doc1, this.doc3, this.doc4])
      })
    })
  })

  describe('getProjectsDeletedDocs', function () {
    beforeEach(async function () {
      this.filter = { name: true }
      this.doc1 = { _id: '1', name: 'mock-doc1.tex' }
      this.doc2 = { _id: '2', name: 'mock-doc2.tex' }
      this.doc3 = { _id: '3', name: 'mock-doc3.tex' }
      this.db.docs.find = sinon.stub().returns({
        toArray: sinon.stub().resolves([this.doc1, this.doc2, this.doc3]),
      })
      this.result = await this.MongoManager.promises.getProjectsDeletedDocs(
        this.projectId,
        this.filter
      )
    })

    it('should find the deleted docs via the project_id', function () {
      this.db.docs.find
        .calledWith({
          project_id: new ObjectId(this.projectId),
          deleted: true,
        })
        .should.equal(true)
    })

    it('should filter, sort by deletedAt and limit', function () {
      this.db.docs.find
        .calledWith(sinon.match.any, {
          projection: this.filter,
          sort: { deletedAt: -1 },
          limit: 42,
        })
        .should.equal(true)
    })

    it('should return the docs', function () {
      expect(this.result).to.deep.equal([this.doc1, this.doc2, this.doc3])
    })
  })

  describe('upsertIntoDocCollection', function () {
    beforeEach(function () {
      this.oldRev = 77
    })

    it('should upsert the document', async function () {
      await this.MongoManager.promises.upsertIntoDocCollection(
        this.projectId,
        this.docId,
        this.oldRev,
        { lines: this.lines }
      )

      const args = this.db.docs.updateOne.args[0]
      assert.deepEqual(args[0], {
        _id: new ObjectId(this.docId),
        project_id: new ObjectId(this.projectId),
        rev: this.oldRev,
      })
      assert.equal(args[1].$set.lines, this.lines)
      assert.equal(args[1].$inc.rev, 1)
    })

    it('should handle update error', async function () {
      this.db.docs.updateOne.rejects(this.stubbedErr)
      await expect(
        this.MongoManager.promises.upsertIntoDocCollection(
          this.projectId,
          this.docId,
          this.rev,
          {
            lines: this.lines,
          }
        )
      ).to.be.rejectedWith(this.stubbedErr)
    })

    it('should insert without a previous rev', async function () {
      await this.MongoManager.promises.upsertIntoDocCollection(
        this.projectId,
        this.docId,
        null,
        { lines: this.lines, ranges: this.ranges }
      )

      expect(this.db.docs.insertOne).to.have.been.calledWith({
        _id: new ObjectId(this.docId),
        project_id: new ObjectId(this.projectId),
        rev: 1,
        lines: this.lines,
        ranges: this.ranges,
      })
    })

    it('should handle generic insert error', async function () {
      this.db.docs.insertOne.rejects(this.stubbedErr)
      await expect(
        this.MongoManager.promises.upsertIntoDocCollection(
          this.projectId,
          this.docId,
          null,
          { lines: this.lines, ranges: this.ranges }
        )
      ).to.be.rejectedWith(this.stubbedErr)
    })

    it('should handle duplicate insert error', async function () {
      this.db.docs.insertOne.rejects({ code: 11000 })
      await expect(
        this.MongoManager.promises.upsertIntoDocCollection(
          this.projectId,
          this.docId,
          null,
          { lines: this.lines, ranges: this.ranges }
        )
      ).to.be.rejectedWith(Errors.DocRevValueError)
    })
  })

  describe('destroyProject', function () {
    beforeEach(async function () {
      this.projectId = new ObjectId()
      this.db.docs.deleteMany = sinon.stub().resolves()
      await this.MongoManager.promises.destroyProject(this.projectId)
    })

    it('should destroy all docs', function () {
      sinon.assert.calledWith(this.db.docs.deleteMany, {
        project_id: this.projectId,
      })
    })
  })

  describe('checkRevUnchanged', function () {
    beforeEach(function () {
      this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: 1 }
    })

    it('should not error when the rev has not changed', async function () {
      this.db.docs.findOne = sinon.stub().resolves({ rev: 1 })
      await this.MongoManager.promises.checkRevUnchanged(this.doc)
    })

    it('should return an error when the rev has changed', async function () {
      this.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
      await expect(
        this.MongoManager.promises.checkRevUnchanged(this.doc)
      ).to.be.rejectedWith(Errors.DocModifiedError)
    })

    it('should return a value error if incoming rev is NaN', async function () {
      this.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
      this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: NaN }
      await expect(
        this.MongoManager.promises.checkRevUnchanged(this.doc)
      ).to.be.rejectedWith(Errors.DocRevValueError)
    })

    it('should return a value error if checked doc rev is NaN', async function () {
      this.db.docs.findOne = sinon.stub().resolves({ rev: NaN })
      await expect(
        this.MongoManager.promises.checkRevUnchanged(this.doc)
      ).to.be.rejectedWith(Errors.DocRevValueError)
    })
  })

  describe('restoreArchivedDoc', function () {
    beforeEach(function () {
      this.archivedDoc = {
        lines: ['a', 'b', 'c'],
        ranges: { some: 'ranges' },
        rev: 2,
      }
    })

    describe('complete doc', function () {
      beforeEach(async function () {
        await this.MongoManager.promises.restoreArchivedDoc(
          this.projectId,
          this.docId,
          this.archivedDoc
        )
      })

      it('updates Mongo', function () {
        expect(this.db.docs.updateOne).to.have.been.calledWith(
          {
            _id: new ObjectId(this.docId),
            project_id: new ObjectId(this.projectId),
            rev: this.archivedDoc.rev,
          },
          {
            $set: {
              lines: this.archivedDoc.lines,
              ranges: this.archivedDoc.ranges,
            },
            $unset: {
              inS3: true,
            },
          }
        )
      })
    })

    describe('without ranges', function () {
      beforeEach(async function () {
        delete this.archivedDoc.ranges
        await this.MongoManager.promises.restoreArchivedDoc(
          this.projectId,
          this.docId,
          this.archivedDoc
        )
      })

      it('sets ranges to an empty object', function () {
        expect(this.db.docs.updateOne).to.have.been.calledWith(
          {
            _id: new ObjectId(this.docId),
            project_id: new ObjectId(this.projectId),
            rev: this.archivedDoc.rev,
          },
          {
            $set: {
              lines: this.archivedDoc.lines,
              ranges: {},
            },
            $unset: {
              inS3: true,
            },
          }
        )
      })
    })

    describe("when the update doesn't succeed", function () {
      it('throws a DocRevValueError', async function () {
        this.db.docs.updateOne.resolves({ matchedCount: 0 })
        await expect(
          this.MongoManager.promises.restoreArchivedDoc(
            this.projectId,
            this.docId,
            this.archivedDoc
          )
        ).to.be.rejectedWith(Errors.DocRevValueError)
      })
    })
  })
})
55
services/docstore/test/unit/js/PersistorManagerTests.js
Normal file
@@ -0,0 +1,55 @@
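// Unit tests for PersistorManager, which returns the object-persistor
// instance when Settings.docstore.backend is configured and falls back to an
// AbstractPersistor (whose methods reject) when it is not.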
const { expect } = require('chai')
const modulePath = '../../../app/js/PersistorManager.js'
const SandboxedModule = require('sandboxed-module')

describe('PersistorManager', function () {
  class FakePersistor {
    async sendStream() {
      return 'sent'
    }
  }

  describe('configured', function () {
    it('should return fake persistor', async function () {
      const Settings = {
        docstore: {
          backend: 'gcs',
          bucket: 'wombat',
        },
      }
      const PersistorManager = SandboxedModule.require(modulePath, {
        requires: {
          '@overleaf/settings': Settings,
          '@overleaf/object-persistor': () => new FakePersistor(),
          '@overleaf/metrics': {},
        },
      })

      expect(PersistorManager).to.be.instanceof(FakePersistor)
      await expect(PersistorManager.sendStream()).to.eventually.equal('sent')
    })
  })

  describe('not configured', function () {
    it('should return abstract persistor', async function () {
      const Settings = {
        docstore: {
          backend: undefined,
          bucket: 'wombat',
        },
      }
      const PersistorManager = SandboxedModule.require(modulePath, {
        requires: {
          '@overleaf/settings': Settings,
          '@overleaf/object-persistor': () => new FakePersistor(),
          '@overleaf/metrics': {},
        },
      })

      expect(PersistorManager.constructor.name).to.equal('AbstractPersistor')
      await expect(PersistorManager.sendStream()).to.eventually.be.rejectedWith(
        /method not implemented in persistor/
      )
    })
  })
})
253
services/docstore/test/unit/js/RangeManagerTests.js
Normal file
@@ -0,0 +1,253 @@
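// Unit tests for RangeManager: converting JSON-serialised ranges into Mongo
// types (ObjectIds, Dates) and deciding whether stored ranges need updating.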
const SandboxedModule = require('sandboxed-module')
const { expect } = require('chai')
const modulePath = require('node:path').join(
  __dirname,
  '../../../app/js/RangeManager'
)
const { ObjectId } = require('mongodb-legacy')

describe('RangeManager', function () {
  beforeEach(function () {
    this.RangeManager = SandboxedModule.require(modulePath, {
      requires: {
        './mongodb': {
          ObjectId,
        },
      },
    })
  })

  describe('jsonRangesToMongo', function () {
    it('should convert ObjectIds and dates to proper objects', function () {
      const changeId = new ObjectId().toString()
      const commentId = new ObjectId().toString()
      const userId = new ObjectId().toString()
      const threadId = new ObjectId().toString()
      const ts = new Date().toJSON()
      this.RangeManager.jsonRangesToMongo({
        changes: [
          {
            id: changeId,
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: userId,
              ts,
            },
          },
        ],
        comments: [
          {
            id: commentId,
            op: { c: 'foo', p: 3, t: threadId },
          },
        ],
      }).should.deep.equal({
        changes: [
          {
            id: new ObjectId(changeId),
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: new ObjectId(userId),
              ts: new Date(ts),
            },
          },
        ],
        comments: [
          {
            id: new ObjectId(commentId),
            op: { c: 'foo', p: 3, t: new ObjectId(threadId) },
          },
        ],
      })
    })

    it('should leave malformed ObjectIds as they are', function () {
      const changeId = 'foo'
      const commentId = 'bar'
      const userId = 'baz'
      this.RangeManager.jsonRangesToMongo({
        changes: [
          {
            id: changeId,
            metadata: {
              user_id: userId,
            },
          },
        ],
        comments: [
          {
            id: commentId,
          },
        ],
      }).should.deep.equal({
        changes: [
          {
            id: changeId,
            metadata: {
              user_id: userId,
            },
          },
        ],
        comments: [
          {
            id: commentId,
          },
        ],
      })
    })

    it('should be consistent when transformed through json -> mongo -> json', function () {
      const changeId = new ObjectId().toString()
      const commentId = new ObjectId().toString()
      const userId = new ObjectId().toString()
      const threadId = new ObjectId().toString()
      const ts = new Date().toJSON()
      const ranges1 = {
        changes: [
          {
            id: changeId,
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: userId,
              ts,
            },
          },
        ],
        comments: [
          {
            id: commentId,
            op: { c: 'foo', p: 3, t: threadId },
          },
        ],
      }
      const ranges1Copy = JSON.parse(JSON.stringify(ranges1)) // jsonRangesToMongo modifies in place
      const ranges2 = JSON.parse(
        JSON.stringify(this.RangeManager.jsonRangesToMongo(ranges1Copy))
      )
      ranges1.should.deep.equal(ranges2)
    })
  })

  describe('shouldUpdateRanges', function () {
    beforeEach(function () {
      this.ranges = {
        changes: [
          {
            id: new ObjectId(),
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: new ObjectId(),
              ts: new Date(),
            },
          },
        ],
        comments: [
          {
            id: new ObjectId(),
            op: { c: 'foo', p: 3, t: new ObjectId() },
          },
        ],
      }
      this.ranges_copy = this.RangeManager.jsonRangesToMongo(
        JSON.parse(JSON.stringify(this.ranges))
      )
    })

    describe('with a blank new range', function () {
      it('should throw an error', function () {
        expect(() => {
          this.RangeManager.shouldUpdateRanges(this.ranges, null)
        }).to.throw(Error)
      })
    })

    describe('with a blank old range', function () {
      it('should treat it like {}', function () {
        this.RangeManager.shouldUpdateRanges(null, {}).should.equal(false)
        this.RangeManager.shouldUpdateRanges(null, this.ranges).should.equal(
          true
        )
      })
    })

    describe('with no changes', function () {
      it('should return false', function () {
        this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(false)
      })
    })

    describe('with changes', function () {
      it('should return true when the change id changes', function () {
        this.ranges_copy.changes[0].id = new ObjectId()
        this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the change user id changes', function () {
        this.ranges_copy.changes[0].metadata.user_id = new ObjectId()
        this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the change ts changes', function () {
        this.ranges_copy.changes[0].metadata.ts = new Date(Date.now() + 1000)
        this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the change op changes', function () {
        this.ranges_copy.changes[0].op.i = 'bar'
        this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the comment id changes', function () {
        this.ranges_copy.comments[0].id = new ObjectId()
        this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the comment offset changes', function () {
        this.ranges_copy.comments[0].op.p = 17
        this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the comment content changes', function () {
        this.ranges_copy.comments[0].op.c = 'bar'
        this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })
    })
  })
})