first commit

This commit is contained in:
2025-04-24 13:11:28 +08:00
commit ff9c54d5e4
5960 changed files with 834111 additions and 0 deletions

View File

@@ -0,0 +1,160 @@
import sinon from 'sinon'
import { strict as esmock } from 'esmock'
const MODULE_PATH = '../../../../app/js/BlobManager.js'
// Tests for BlobManager.createBlobsForUpdates, which creates blobs in the
// history store for any "add file" updates before they are pushed upstream.
describe('BlobManager', function () {
  beforeEach(async function () {
    this.callback = sinon.stub()
    // extendLock is invoked between blob creations to keep the project lock alive
    this.extendLock = sinon.stub().yields()
    this.project_id = 'project-1'
    this.historyId = 12345
    this.HistoryStoreManager = {
      createBlobForUpdate: sinon.stub(),
    }
    this.UpdateTranslator = {
      // Default: no update is an "add" update, so no blobs need creating
      isAddUpdate: sinon.stub().returns(false),
    }
    // Load the module under test with its collaborators mocked out
    this.BlobManager = await esmock(MODULE_PATH, {
      '../../../../app/js/HistoryStoreManager.js': this.HistoryStoreManager,
      '../../../../app/js/UpdateTranslator.js': this.UpdateTranslator,
    })
    this.updates = ['update-1', 'update-2']
  })

  describe('createBlobsForUpdates', function () {
    describe('when there are no blobs to create', function () {
      beforeEach(function (done) {
        this.BlobManager.createBlobsForUpdates(
          this.project_id,
          this.historyId,
          this.updates,
          this.extendLock,
          (error, updatesWithBlobs) => {
            // Record the callback arguments so the assertions below can inspect them
            this.callback(error, updatesWithBlobs)
            done()
          }
        )
      })

      it('should not create any blobs', function () {
        this.HistoryStoreManager.createBlobForUpdate.called.should.equal(false)
      })

      it('should call the callback with the updates', function () {
        // Updates pass through untouched, each wrapped in an { update } object
        const updatesWithBlobs = this.updates.map(update => ({
          update,
        }))
        this.callback.calledWith(null, updatesWithBlobs).should.equal(true)
      })
    })

    describe('when there are blobs to create', function () {
      beforeEach(function (done) {
        this.UpdateTranslator.isAddUpdate.returns(true)
        this.blobHash = 'test hash'
        this.HistoryStoreManager.createBlobForUpdate.yields(null, {
          file: this.blobHash,
        })
        this.BlobManager.createBlobsForUpdates(
          this.project_id,
          this.historyId,
          this.updates,
          this.extendLock,
          (error, updatesWithBlobs) => {
            this.callback(error, updatesWithBlobs)
            done()
          }
        )
      })

      it('should create blobs', function () {
        this.HistoryStoreManager.createBlobForUpdate
          .calledWith(this.project_id, this.historyId, this.updates[0])
          .should.equal(true)
      })

      it('should extend the lock', function () {
        this.extendLock.called.should.equal(true)
      })

      it('should call the callback with the updates', function () {
        // Each update is annotated with the hashes of the blobs created for it
        const updatesWithBlobs = this.updates.map(update => ({
          update,
          blobHashes: { file: this.blobHash },
        }))
        this.callback.calledWith(null, updatesWithBlobs).should.equal(true)
      })
    })

    describe('when there are blobs to create and there is a single network error', function () {
      beforeEach(function (done) {
        this.UpdateTranslator.isAddUpdate.returns(true)
        this.blobHash = 'test hash'
        // First attempt fails, later attempts succeed — exercises the retry path
        this.HistoryStoreManager.createBlobForUpdate
          .onFirstCall()
          .yields(new Error('random failure'))
        this.HistoryStoreManager.createBlobForUpdate.yields(null, {
          file: this.blobHash,
        })
        this.BlobManager.createBlobsForUpdates(
          this.project_id,
          this.historyId,
          this.updates,
          this.extendLock,
          (error, updatesWithBlobs) => {
            this.callback(error, updatesWithBlobs)
            done()
          }
        )
      })

      it('should create blobs', function () {
        this.HistoryStoreManager.createBlobForUpdate
          .calledWith(this.project_id, this.historyId, this.updates[0])
          .should.equal(true)
      })

      it('should extend the lock', function () {
        this.extendLock.called.should.equal(true)
      })

      it('should call the callback with the updates', function () {
        // The single failure is retried, so the result matches the success case
        const updatesWithBlobs = this.updates.map(update => ({
          update,
          blobHashes: { file: this.blobHash },
        }))
        this.callback.calledWith(null, updatesWithBlobs).should.equal(true)
      })
    })

    describe('when there are blobs to create and there are multiple network errors', function () {
      beforeEach(function (done) {
        this.UpdateTranslator.isAddUpdate.returns(true)
        this.blobHash = 'test hash'
        this.error = new Error('random failure')
        // Every attempt fails, so retries are exhausted and the error propagates
        this.HistoryStoreManager.createBlobForUpdate.yields(this.error)
        this.BlobManager.createBlobsForUpdates(
          this.project_id,
          this.historyId,
          this.updates,
          this.extendLock,
          (error, updatesWithBlobs) => {
            this.callback(error, updatesWithBlobs)
            done()
          }
        )
      })

      it('should try to create blobs', function () {
        this.HistoryStoreManager.createBlobForUpdate
          .calledWith(this.project_id, this.historyId, this.updates[0])
          .should.equal(true)
      })

      it('should call the callback with an error', function () {
        this.callback.calledWith(this.error).should.equal(true)
      })
    })
  })
})

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,395 @@
import sinon from 'sinon'
import { expect } from 'chai'
import { strict as esmock } from 'esmock'
const MODULE_PATH = '../../../../app/js/DiffGenerator.js'
// Tests for DiffGenerator, which turns a document's initial content plus a
// list of updates into a diff of (u)nchanged / (i)nserted / (d)eleted parts.
//
// Fixes in this revision: corrected typo'd/mislabeled test descriptions
// ("(u)changed" -> "(u)nchanged", "(i)inserted" -> "(i)nserted",
// "(d)deleted" -> "(d)eleted", and the two "deleting inserts" tests that
// were mislabeled as operating on unchanged text). No logic changes.
describe('DiffGenerator', function () {
  beforeEach(async function () {
    this.DiffGenerator = await esmock(MODULE_PATH, {})
    this.ts = Date.now()
    this.user_id = 'mock-user-id'
    this.user_id_2 = 'mock-user-id-2'
    this.meta = {
      start_ts: this.ts,
      end_ts: this.ts,
      user_id: this.user_id,
    }
  })

  describe('buildDiff', function () {
    beforeEach(function () {
      this.diff = [{ u: 'mock-diff' }]
      this.content = 'Hello world'
      this.updates = [
        { i: 'mock-update-1' },
        { i: 'mock-update-2' },
        { i: 'mock-update-3' },
      ]
      // Stub the internals so buildDiff's orchestration can be verified
      this.DiffGenerator._mocks.applyUpdateToDiff = sinon
        .stub()
        .returns(this.diff)
      this.DiffGenerator._mocks.compressDiff = sinon.stub().returns(this.diff)
      this.result = this.DiffGenerator.buildDiff(this.content, this.updates)
    })

    it('should return the diff', function () {
      this.result.should.deep.equal(this.diff)
    })

    it('should build the content into an initial diff', function () {
      // The starting diff is the whole content marked as unchanged
      this.DiffGenerator._mocks.applyUpdateToDiff
        .calledWith(
          [
            {
              u: this.content,
            },
          ],
          this.updates[0]
        )
        .should.equal(true)
    })

    it('should apply each update', function () {
      this.updates.map(update =>
        this.DiffGenerator._mocks.applyUpdateToDiff
          .calledWith(sinon.match.any, update)
          .should.equal(true)
      )
    })

    it('should compress the diff', function () {
      this.DiffGenerator._mocks.compressDiff
        .calledWith(this.diff)
        .should.equal(true)
    })
  })

  describe('compressDiff', function () {
    describe('with adjacent inserts with the same user id', function () {
      it('should create one update with combined meta data and min/max timestamps', function () {
        const diff = this.DiffGenerator.compressDiff([
          {
            i: 'foo',
            meta: { start_ts: 10, end_ts: 20, users: [this.user_id] },
          },
          {
            i: 'bar',
            meta: { start_ts: 5, end_ts: 15, users: [this.user_id] },
          },
        ])
        expect(diff).to.deep.equal([
          {
            i: 'foobar',
            meta: { start_ts: 5, end_ts: 20, users: [this.user_id] },
          },
        ])
      })
    })

    describe('with adjacent inserts with different user ids', function () {
      it('should leave the inserts unchanged', function () {
        const input = [
          {
            i: 'foo',
            meta: { start_ts: 10, end_ts: 20, users: [this.user_id] },
          },
          {
            i: 'bar',
            meta: { start_ts: 5, end_ts: 15, users: [this.user_id_2] },
          },
        ]
        const output = this.DiffGenerator.compressDiff(input)
        expect(output).to.deep.equal(input)
      })
    })

    describe('with adjacent deletes with the same user id', function () {
      it('should create one update with combined meta data and min/max timestamps', function () {
        const diff = this.DiffGenerator.compressDiff([
          {
            d: 'foo',
            meta: { start_ts: 10, end_ts: 20, users: [this.user_id] },
          },
          {
            d: 'bar',
            meta: { start_ts: 5, end_ts: 15, users: [this.user_id] },
          },
        ])
        expect(diff).to.deep.equal([
          {
            d: 'foobar',
            meta: { start_ts: 5, end_ts: 20, users: [this.user_id] },
          },
        ])
      })
    })

    describe('with adjacent deletes with different user ids', function () {
      it('should leave the deletes unchanged', function () {
        const input = [
          {
            d: 'foo',
            meta: { start_ts: 10, end_ts: 20, users: [this.user_id] },
          },
          {
            d: 'bar',
            meta: { start_ts: 5, end_ts: 15, users: [this.user_id_2] },
          },
        ]
        const output = this.DiffGenerator.compressDiff(input)
        expect(output).to.deep.equal(input)
      })
    })

    describe('with history resync updates', function () {
      it('should keep only inserts and mark them as unchanged text', function () {
        // history-resync updates carry no attribution, so inserts become plain
        // unchanged text and deletes are dropped entirely
        const input = [
          { u: 'untracked text' },
          {
            i: 'inserted anonymously',
            meta: { origin: { kind: 'history-resync' } },
          },
          {
            d: 'deleted anonymously',
            meta: { origin: { kind: 'history-resync' } },
          },
        ]
        const output = this.DiffGenerator.compressDiff(input)
        expect(output).to.deep.equal([
          { u: 'untracked text' },
          { u: 'inserted anonymously' },
        ])
      })
    })
  })

  describe('applyUpdateToDiff', function () {
    describe('an insert', function () {
      it('should insert into the middle of (u)nchanged text', function () {
        const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], {
          op: [{ p: 3, i: 'baz' }],
          meta: this.meta,
        })
        expect(diff).to.deep.equal([
          { u: 'foo' },
          { i: 'baz', meta: this.meta },
          { u: 'bar' },
        ])
      })

      it('should insert into the start of (u)nchanged text', function () {
        const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], {
          op: [{ p: 0, i: 'baz' }],
          meta: this.meta,
        })
        expect(diff).to.deep.equal([
          { i: 'baz', meta: this.meta },
          { u: 'foobar' },
        ])
      })

      it('should insert into the end of (u)nchanged text', function () {
        const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], {
          op: [{ p: 6, i: 'baz' }],
          meta: this.meta,
        })
        expect(diff).to.deep.equal([
          { u: 'foobar' },
          { i: 'baz', meta: this.meta },
        ])
      })

      it('should insert into the middle of (i)nserted text', function () {
        const diff = this.DiffGenerator.applyUpdateToDiff(
          [{ i: 'foobar', meta: this.meta }],
          { op: [{ p: 3, i: 'baz' }], meta: this.meta }
        )
        expect(diff).to.deep.equal([
          { i: 'foo', meta: this.meta },
          { i: 'baz', meta: this.meta },
          { i: 'bar', meta: this.meta },
        ])
      })

      it('should not count deletes in the running length total', function () {
        // Deleted parts occupy no space in the current document, so position 3
        // falls inside 'foobar', not inside the preceding delete
        const diff = this.DiffGenerator.applyUpdateToDiff(
          [{ d: 'deleted', meta: this.meta }, { u: 'foobar' }],
          { op: [{ p: 3, i: 'baz' }], meta: this.meta }
        )
        expect(diff).to.deep.equal([
          { d: 'deleted', meta: this.meta },
          { u: 'foo' },
          { i: 'baz', meta: this.meta },
          { u: 'bar' },
        ])
      })
    })

    describe('a delete', function () {
      describe('deleting unchanged text', function () {
        it('should delete from the middle of (u)nchanged text', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ u: 'foobazbar' }],
            { op: [{ p: 3, d: 'baz' }], meta: this.meta }
          )
          expect(diff).to.deep.equal([
            { u: 'foo' },
            { d: 'baz', meta: this.meta },
            { u: 'bar' },
          ])
        })

        it('should delete from the start of (u)nchanged text', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ u: 'foobazbar' }],
            { op: [{ p: 0, d: 'foo' }], meta: this.meta }
          )
          expect(diff).to.deep.equal([
            { d: 'foo', meta: this.meta },
            { u: 'bazbar' },
          ])
        })

        it('should delete from the end of (u)nchanged text', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ u: 'foobazbar' }],
            { op: [{ p: 6, d: 'bar' }], meta: this.meta }
          )
          expect(diff).to.deep.equal([
            { u: 'foobaz' },
            { d: 'bar', meta: this.meta },
          ])
        })

        it('should delete across multiple (u)nchanged text parts', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ u: 'foo' }, { u: 'baz' }, { u: 'bar' }],
            { op: [{ p: 2, d: 'obazb' }], meta: this.meta }
          )
          expect(diff).to.deep.equal([
            { u: 'fo' },
            { d: 'o', meta: this.meta },
            { d: 'baz', meta: this.meta },
            { d: 'b', meta: this.meta },
            { u: 'ar' },
          ])
        })
      })

      describe('deleting inserts', function () {
        // Deleting text that was itself inserted removes it from the diff
        // entirely instead of producing a (d) part
        it('should delete from the middle of (i)nserted text', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ i: 'foobazbar', meta: this.meta }],
            { op: [{ p: 3, d: 'baz' }], meta: this.meta }
          )
          expect(diff).to.deep.equal([
            { i: 'foo', meta: this.meta },
            { i: 'bar', meta: this.meta },
          ])
        })

        it('should delete from the start of (i)nserted text', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ i: 'foobazbar', meta: this.meta }],
            { op: [{ p: 0, d: 'foo' }], meta: this.meta }
          )
          expect(diff).to.deep.equal([{ i: 'bazbar', meta: this.meta }])
        })

        it('should delete from the end of (i)nserted text', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ i: 'foobazbar', meta: this.meta }],
            { op: [{ p: 6, d: 'bar' }], meta: this.meta }
          )
          expect(diff).to.deep.equal([{ i: 'foobaz', meta: this.meta }])
        })

        it('should delete across multiple (u)nchanged and (i)nserted text parts', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ u: 'foo' }, { i: 'baz', meta: this.meta }, { u: 'bar' }],
            { op: [{ p: 2, d: 'obazb' }], meta: this.meta }
          )
          expect(diff).to.deep.equal([
            { u: 'fo' },
            { d: 'o', meta: this.meta },
            { d: 'b', meta: this.meta },
            { u: 'ar' },
          ])
        })
      })

      describe('deleting over existing deletes', function () {
        it('should delete across multiple (u)nchanged and (d)eleted text parts', function () {
          // Existing deletes are skipped over — they take up no document space
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ u: 'foo' }, { d: 'baz', meta: this.meta }, { u: 'bar' }],
            { op: [{ p: 2, d: 'ob' }], meta: this.meta }
          )
          expect(diff).to.deep.equal([
            { u: 'fo' },
            { d: 'o', meta: this.meta },
            { d: 'baz', meta: this.meta },
            { d: 'b', meta: this.meta },
            { u: 'ar' },
          ])
        })
      })

      describe("deleting when the text doesn't match", function () {
        it('should throw an error when deleting from the middle of (u)nchanged text', function () {
          expect(() =>
            this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], {
              op: [{ p: 3, d: 'xxx' }],
              meta: this.meta,
            })
          ).to.throw(this.DiffGenerator.ConsistencyError)
        })

        it('should throw an error when deleting from the start of (u)nchanged text', function () {
          expect(() =>
            this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], {
              op: [{ p: 0, d: 'xxx' }],
              meta: this.meta,
            })
          ).to.throw(this.DiffGenerator.ConsistencyError)
        })

        it('should throw an error when deleting from the end of (u)nchanged text', function () {
          expect(() =>
            this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], {
              op: [{ p: 6, d: 'xxx' }],
              meta: this.meta,
            })
          ).to.throw(this.DiffGenerator.ConsistencyError)
        })
      })

      describe('when the last update in the existing diff is a delete', function () {
        it('should insert the new update before the delete', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ u: 'foo' }, { d: 'bar', meta: this.meta }],
            { op: [{ p: 3, i: 'baz' }], meta: this.meta }
          )
          expect(diff).to.deep.equal([
            { u: 'foo' },
            { i: 'baz', meta: this.meta },
            { d: 'bar', meta: this.meta },
          ])
        })
      })

      describe('when the only update in the existing diff is a delete', function () {
        it('should insert the new update after the delete', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ d: 'bar', meta: this.meta }],
            { op: [{ p: 0, i: 'baz' }], meta: this.meta }
          )
          expect(diff).to.deep.equal([
            { d: 'bar', meta: this.meta },
            { i: 'baz', meta: this.meta },
          ])
        })
      })
    })
  })
})

View File

@@ -0,0 +1,523 @@
import sinon from 'sinon'
import { expect } from 'chai'
import { strict as esmock } from 'esmock'
const MODULE_PATH = '../../../../app/js/DiffManager.js'
// Tests for DiffManager, which fetches history chunks and converts them into
// text diffs between two versions of a file.
//
// Fixes in this revision: corrected typo'd test descriptions ("have been
// process" -> "processed", "should get the concat the chunks" -> "should
// concat the chunks", "should the project's overleaf id" -> "should get the
// project's overleaf id"). No logic changes.
describe('DiffManager', function () {
  beforeEach(async function () {
    this.DocumentUpdaterManager = {}
    this.DiffGenerator = {
      buildDiff: sinon.stub(),
    }
    this.UpdatesProcessor = {
      processUpdatesForProject: sinon.stub(),
    }
    this.HistoryStoreManager = {
      getChunkAtVersion: sinon.stub(),
    }
    this.WebApiManager = {
      getHistoryId: sinon.stub(),
    }
    this.ChunkTranslator = {
      convertToDiffUpdates: sinon.stub(),
    }
    this.FileTreeDiffGenerator = {}
    // Load the module under test with all collaborators mocked out
    this.DiffManager = await esmock(MODULE_PATH, {
      '../../../../app/js/DocumentUpdaterManager.js':
        this.DocumentUpdaterManager,
      '../../../../app/js/DiffGenerator.js': this.DiffGenerator,
      '../../../../app/js/UpdatesProcessor.js': this.UpdatesProcessor,
      '../../../../app/js/HistoryStoreManager.js': this.HistoryStoreManager,
      '../../../../app/js/WebApiManager.js': this.WebApiManager,
      '../../../../app/js/ChunkTranslator.js': this.ChunkTranslator,
      '../../../../app/js/FileTreeDiffGenerator.js': this.FileTreeDiffGenerator,
    })
    this.projectId = 'mock-project-id'
    this.callback = sinon.stub()
  })

  describe('getDiff', function () {
    beforeEach(function () {
      this.pathname = 'main.tex'
      this.fromVersion = 4
      this.toVersion = 8
      this.initialContent = 'foo bar baz'
      this.updates = ['mock-updates']
      this.diff = { mock: 'dif' }
      // Pending updates must be flushed before a diff can be computed
      this.UpdatesProcessor.processUpdatesForProject
        .withArgs(this.projectId)
        .yields()
      this.DiffGenerator.buildDiff
        .withArgs(this.initialContent, this.updates)
        .returns(this.diff)
    })

    describe('with a text file', function () {
      beforeEach(function () {
        this.DiffManager._mocks._getProjectUpdatesBetweenVersions = sinon.stub()
        this.DiffManager._mocks._getProjectUpdatesBetweenVersions
          .withArgs(
            this.projectId,
            this.pathname,
            this.fromVersion,
            this.toVersion
          )
          .yields(null, {
            initialContent: this.initialContent,
            updates: this.updates,
          })
        this.DiffManager.getDiff(
          this.projectId,
          this.pathname,
          this.fromVersion,
          this.toVersion,
          this.callback
        )
      })

      it('should make sure all pending updates have been processed', function () {
        this.UpdatesProcessor.processUpdatesForProject
          .calledWith(this.projectId)
          .should.equal(true)
      })

      it('should get the updates from the history backend', function () {
        this.DiffManager._mocks._getProjectUpdatesBetweenVersions
          .calledWith(
            this.projectId,
            this.pathname,
            this.fromVersion,
            this.toVersion
          )
          .should.equal(true)
      })

      it('should convert the updates to a diff', function () {
        this.DiffGenerator.buildDiff
          .calledWith(this.initialContent, this.updates)
          .should.equal(true)
      })

      it('should return the diff', function () {
        this.callback.calledWith(null, this.diff).should.equal(true)
      })
    })

    describe('with a binary file', function () {
      beforeEach(function () {
        this.DiffManager._mocks._getProjectUpdatesBetweenVersions = sinon.stub()
        this.DiffManager._mocks._getProjectUpdatesBetweenVersions
          .withArgs(
            this.projectId,
            this.pathname,
            this.fromVersion,
            this.toVersion
          )
          .yields(null, { binary: true })
        this.DiffManager.getDiff(
          this.projectId,
          this.pathname,
          this.fromVersion,
          this.toVersion,
          this.callback
        )
      })

      it('should make sure all pending updates have been processed', function () {
        this.UpdatesProcessor.processUpdatesForProject
          .calledWith(this.projectId)
          .should.equal(true)
      })

      it('should get the updates from the history backend', function () {
        this.DiffManager._mocks._getProjectUpdatesBetweenVersions
          .calledWith(
            this.projectId,
            this.pathname,
            this.fromVersion,
            this.toVersion
          )
          .should.equal(true)
      })

      it('should not try convert any updates to a diff', function () {
        this.DiffGenerator.buildDiff.called.should.equal(false)
      })

      it('should return the binary diff', function () {
        this.callback.calledWith(null, { binary: true }).should.equal(true)
      })
    })
  })

  describe('_getProjectUpdatesBetweenVersions', function () {
    beforeEach(function () {
      this.pathname = 'main.tex'
      this.fromVersion = 4
      this.toVersion = 8
      this.chunks = ['mock-chunk-1', 'mock-chunk-2']
      this.concatted_chunk = 'mock-chunk'
      this.DiffManager._mocks._concatChunks = sinon.stub()
      this.DiffManager._mocks._concatChunks
        .withArgs(this.chunks)
        .returns(this.concatted_chunk)
      this.updates = ['mock-updates']
      this.initialContent = 'foo bar baz'
      this.ChunkTranslator.convertToDiffUpdates
        .withArgs(
          this.projectId,
          this.concatted_chunk,
          this.pathname,
          this.fromVersion,
          this.toVersion
        )
        .yields(null, {
          initialContent: this.initialContent,
          updates: this.updates,
        })
    })

    describe('for the normal case', function () {
      beforeEach(function () {
        this.DiffManager._mocks._getChunks = sinon.stub()
        this.DiffManager._mocks._getChunks
          .withArgs(this.projectId, this.fromVersion, this.toVersion)
          .yields(null, this.chunks)
        this.DiffManager._getProjectUpdatesBetweenVersions(
          this.projectId,
          this.pathname,
          this.fromVersion,
          this.toVersion,
          this.callback
        )
      })

      it('should get the relevant chunks', function () {
        this.DiffManager._mocks._getChunks
          .calledWith(this.projectId, this.fromVersion, this.toVersion)
          .should.equal(true)
      })

      it('should concat the chunks', function () {
        this.DiffManager._mocks._concatChunks
          .calledWith(this.chunks)
          .should.equal(true)
      })

      it('should convert the chunks to an initial version and updates', function () {
        this.ChunkTranslator.convertToDiffUpdates
          .calledWith(
            this.projectId,
            this.concatted_chunk,
            this.pathname,
            this.fromVersion,
            this.toVersion
          )
          .should.equal(true)
      })

      it('should return the initialContent and updates', function () {
        this.callback
          .calledWith(null, {
            initialContent: this.initialContent,
            updates: this.updates,
          })
          .should.equal(true)
      })
    })

    describe('for the error case', function () {
      beforeEach(function () {
        this.DiffManager._mocks._getChunks = sinon.stub()
        this.DiffManager._mocks._getChunks
          .withArgs(this.projectId, this.fromVersion, this.toVersion)
          .yields(new Error('failed to load chunk'))
        this.DiffManager._getProjectUpdatesBetweenVersions(
          this.projectId,
          this.pathname,
          this.fromVersion,
          this.toVersion,
          this.callback
        )
      })

      it('should call the callback with an error', function () {
        this.callback
          .calledWith(sinon.match.instanceOf(Error))
          .should.equal(true)
      })
    })
  })

  describe('_getChunks', function () {
    beforeEach(function () {
      this.historyId = 'mock-overleaf-id'
      this.WebApiManager.getHistoryId.yields(null, this.historyId)
    })

    describe('where only one chunk is needed', function () {
      beforeEach(function (done) {
        this.fromVersion = 4
        this.toVersion = 8
        this.chunk = {
          chunk: {
            startVersion: 2,
          }, // before fromVersion
        }
        this.HistoryStoreManager.getChunkAtVersion
          .withArgs(this.projectId, this.historyId, this.toVersion)
          .yields(null, this.chunk)
        this.DiffManager._getChunks(
          this.projectId,
          this.fromVersion,
          this.toVersion,
          (error, chunks) => {
            this.error = error
            this.chunks = chunks
            done()
          }
        )
      })

      it("should get the project's overleaf id", function () {
        this.WebApiManager.getHistoryId
          .calledWith(this.projectId)
          .should.equal(true)
      })

      it('should request the first chunk', function () {
        this.HistoryStoreManager.getChunkAtVersion
          .calledWith(this.projectId, this.historyId, this.toVersion)
          .should.equal(true)
      })

      it('should return an array of chunks', function () {
        expect(this.chunks).to.deep.equal([this.chunk])
      })
    })

    describe('where multiple chunks are needed', function () {
      beforeEach(function (done) {
        this.fromVersion = 4
        this.toVersion = 8
        // chunk1 starts after fromVersion, so chunk2 must be fetched as well
        this.chunk1 = {
          chunk: {
            startVersion: 6,
          },
        }
        this.chunk2 = {
          chunk: {
            startVersion: 2,
          },
        }
        this.HistoryStoreManager.getChunkAtVersion
          .withArgs(this.projectId, this.historyId, this.toVersion)
          .yields(null, this.chunk1)
        this.HistoryStoreManager.getChunkAtVersion
          .withArgs(
            this.projectId,
            this.historyId,
            this.chunk1.chunk.startVersion
          )
          .yields(null, this.chunk2)
        this.DiffManager._mocks._getChunks(
          this.projectId,
          this.fromVersion,
          this.toVersion,
          (error, chunks) => {
            this.error = error
            this.chunks = chunks
            done()
          }
        )
      })

      it('should request the first chunk', function () {
        this.HistoryStoreManager.getChunkAtVersion
          .calledWith(this.projectId, this.historyId, this.toVersion)
          .should.equal(true)
      })

      it('should request the second chunk, from where the first one started', function () {
        this.HistoryStoreManager.getChunkAtVersion
          .calledWith(
            this.projectId,
            this.historyId,
            this.chunk1.chunk.startVersion
          )
          .should.equal(true)
      })

      it('should return an array of chunks', function () {
        expect(this.chunks).to.deep.equal([this.chunk1, this.chunk2])
      })
    })

    describe('where more than MAX_CHUNKS are requested', function () {
      beforeEach(function (done) {
        this.fromVersion = 0
        this.toVersion = 8
        this.chunk1 = {
          chunk: {
            startVersion: 6,
          },
        }
        this.chunk2 = {
          chunk: {
            startVersion: 4,
          },
        }
        this.chunk3 = {
          chunk: {
            startVersion: 2,
          },
        }
        // Cap the walk at two chunks so the third fetch must be refused
        this.DiffManager.setMaxChunkRequests(2)
        this.HistoryStoreManager.getChunkAtVersion
          .withArgs(this.projectId, this.historyId, this.toVersion)
          .yields(null, this.chunk1)
        this.HistoryStoreManager.getChunkAtVersion
          .withArgs(
            this.projectId,
            this.historyId,
            this.chunk1.chunk.startVersion
          )
          .yields(null, this.chunk2)
        this.DiffManager._mocks._getChunks(
          this.projectId,
          this.fromVersion,
          this.toVersion,
          (error, chunks) => {
            this.error = error
            this.chunks = chunks
            done()
          }
        )
      })

      it('should request the first chunk', function () {
        this.HistoryStoreManager.getChunkAtVersion
          .calledWith(this.projectId, this.historyId, this.toVersion)
          .should.equal(true)
      })

      it('should request the second chunk, from where the first one started', function () {
        this.HistoryStoreManager.getChunkAtVersion
          .calledWith(
            this.projectId,
            this.historyId,
            this.chunk1.chunk.startVersion
          )
          .should.equal(true)
      })

      it('should not request the third chunk', function () {
        this.HistoryStoreManager.getChunkAtVersion
          .calledWith(
            this.projectId,
            this.historyId,
            this.chunk2.chunk.startVersion
          )
          .should.equal(false)
      })

      it('should return an error', function () {
        expect(this.error).to.exist
        expect(this.error.message).to.equal('Diff spans too many chunks')
        expect(this.error.name).to.equal('BadRequestError')
      })
    })

    describe('where fromVersion == toVersion', function () {
      beforeEach(function (done) {
        this.fromVersion = 4
        this.toVersion = 4
        this.chunk = {
          chunk: {
            startVersion: 2,
          }, // before fromVersion
        }
        this.HistoryStoreManager.getChunkAtVersion
          .withArgs(this.projectId, this.historyId, this.toVersion)
          .yields(null, this.chunk)
        this.DiffManager._mocks._getChunks(
          this.projectId,
          this.fromVersion,
          this.toVersion,
          (error, chunks) => {
            this.error = error
            this.chunks = chunks
            done()
          }
        )
      })

      it('should still request the first chunk (because we need the file contents)', function () {
        this.HistoryStoreManager.getChunkAtVersion
          .calledWith(this.projectId, this.historyId, this.toVersion)
          .should.equal(true)
      })

      it('should return an array of chunks', function () {
        expect(this.chunks).to.deep.equal([this.chunk])
      })
    })
  })

  describe('_concatChunks', function () {
    it('should concat the chunks in reverse order', function () {
      // Input is newest-first (as fetched); output uses the oldest snapshot
      // with all changes concatenated oldest-to-newest
      const result = this.DiffManager._mocks._concatChunks([
        {
          chunk: {
            history: {
              snapshot: {
                files: {
                  mock: 'files-updated-2',
                },
              },
              changes: [7, 8, 9],
            },
          },
        },
        {
          chunk: {
            history: {
              snapshot: {
                files: {
                  mock: 'files-updated',
                },
              },
              changes: [4, 5, 6],
            },
          },
        },
        {
          chunk: {
            history: {
              snapshot: {
                files: {
                  mock: 'files-original',
                },
              },
              changes: [1, 2, 3],
            },
          },
        },
      ])
      expect(result).to.deep.equal({
        chunk: {
          history: {
            snapshot: {
              files: {
                mock: 'files-original',
              },
            },
            changes: [1, 2, 3, 4, 5, 6, 7, 8, 9],
          },
        },
      })
    })
  })
})

View File

@@ -0,0 +1,184 @@
/* eslint-disable
no-return-assign,
no-undef,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
import sinon from 'sinon'
import { expect } from 'chai'
import { strict as esmock } from 'esmock'
const MODULE_PATH = '../../../../app/js/DocumentUpdaterManager.js'
// Tests for DocumentUpdaterManager, a thin HTTP client around the
// document-updater service's get/set document endpoints.
//
// Fixes in this revision: removed the bulk-decaffeinate artifacts the
// file-level TODO asks to clean up — the useless `return` statements before
// describe/it/assertion calls and the `return (this.x = ...)` assignment
// returns. None of the returned values were promises, so mocha ignored them
// and behavior is unchanged.
describe('DocumentUpdaterManager', function () {
  beforeEach(async function () {
    this.settings = {
      apis: { documentupdater: { url: 'http://example.com' } },
    }
    // Stub the `request` HTTP library the module uses under the hood
    this.request = {
      get: sinon.stub(),
      post: sinon.stub(),
    }
    this.DocumentUpdaterManager = await esmock(MODULE_PATH, {
      request: this.request,
      '@overleaf/settings': this.settings,
    })
    this.callback = sinon.stub()
    this.lines = ['one', 'two', 'three']
    this.version = 42
  })

  describe('getDocument', function () {
    describe('successfully', function () {
      beforeEach(function () {
        this.body = JSON.stringify({
          lines: this.lines,
          version: this.version,
          ops: [],
        })
        this.request.get.yields(null, { statusCode: 200 }, this.body)
        this.DocumentUpdaterManager.getDocument(
          this.project_id,
          this.doc_id,
          this.callback
        )
      })

      it('should get the document from the document updater', function () {
        const url = `${this.settings.apis.documentupdater.url}/project/${this.project_id}/doc/${this.doc_id}`
        this.request.get.calledWith(url).should.equal(true)
      })

      it('should call the callback with the content and version', function () {
        // Lines are joined into a single newline-separated document string
        this.callback
          .calledWith(null, this.lines.join('\n'), this.version)
          .should.equal(true)
      })
    })

    describe('when the document updater API returns an error', function () {
      beforeEach(function () {
        this.error = new Error('something went wrong')
        this.request.get.yields(this.error, null, null)
        this.DocumentUpdaterManager.getDocument(
          this.project_id,
          this.doc_id,
          this.callback
        )
      })

      it('should return an error to the callback', function () {
        this.callback.calledWith(this.error).should.equal(true)
      })
    })

    describe('when the document updater returns a failure error code', function () {
      beforeEach(function () {
        this.request.get.yields(null, { statusCode: 500 }, '')
        this.DocumentUpdaterManager.getDocument(
          this.project_id,
          this.doc_id,
          this.callback
        )
      })

      it('should return the callback with an error', function () {
        this.callback
          .calledWith(
            sinon.match.has(
              'message',
              'doc updater returned a non-success status code: 500'
            )
          )
          .should.equal(true)
      })
    })
  })

  describe('setDocument', function () {
    beforeEach(function () {
      this.content = 'mock content'
      this.user_id = 'user-id-123'
    })

    describe('successfully', function () {
      beforeEach(function () {
        this.request.post.yields(null, { statusCode: 200 })
        this.DocumentUpdaterManager.setDocument(
          this.project_id,
          this.doc_id,
          this.content,
          this.user_id,
          this.callback
        )
      })

      it('should set the document in the document updater', function () {
        const url = `${this.settings.apis.documentupdater.url}/project/${this.project_id}/doc/${this.doc_id}`
        // setDocument is used for restores, hence source/undoing flags
        this.request.post
          .calledWith({
            url,
            json: {
              lines: this.content.split('\n'),
              source: 'restore',
              user_id: this.user_id,
              undoing: true,
            },
          })
          .should.equal(true)
      })

      it('should call the callback', function () {
        this.callback.calledWith(null).should.equal(true)
      })
    })

    describe('when the document updater API returns an error', function () {
      beforeEach(function () {
        this.error = new Error('something went wrong')
        this.request.post.yields(this.error, null, null)
        this.DocumentUpdaterManager.setDocument(
          this.project_id,
          this.doc_id,
          this.content,
          this.user_id,
          this.callback
        )
      })

      it('should return an error to the callback', function () {
        this.callback.calledWith(this.error).should.equal(true)
      })
    })

    describe('when the document updater returns a failure error code', function () {
      beforeEach(function () {
        this.request.post.yields(null, { statusCode: 500 }, '')
        this.DocumentUpdaterManager.setDocument(
          this.project_id,
          this.doc_id,
          this.content,
          this.user_id,
          this.callback
        )
      })

      it('should return the callback with an error', function () {
        this.callback
          .calledWith(
            sinon.match.has(
              'message',
              'doc updater returned a non-success status code: 500'
            )
          )
          .should.equal(true)
      })
    })
  })
})

View File

@@ -0,0 +1,96 @@
import sinon from 'sinon'
import { strict as esmock } from 'esmock'
import tk from 'timekeeper'
const MODULE_PATH = '../../../../app/js/ErrorRecorder.js'
// Tests for ErrorRecorder, which persists per-project failure records in the
// projectHistoryFailures mongo collection.
describe('ErrorRecorder', function () {
  beforeEach(async function () {
    this.now = new Date()
    // Freeze time so the recorded timestamps can be asserted exactly
    tk.freeze(this.now)
    this.db = {
      projectHistoryFailures: {
        deleteOne: sinon.stub().resolves(),
        findOneAndUpdate: sinon
          .stub()
          .resolves({ value: { failure: 'record' } }),
      },
    }
    this.mongodb = { db: this.db }
    this.metrics = { gauge: sinon.stub() }
    this.ErrorRecorder = await esmock(MODULE_PATH, {
      '../../../../app/js/mongodb.js': this.mongodb,
      '@overleaf/metrics': this.metrics,
    })
    this.project_id = 'project-id-123'
    this.queueSize = 445
  })

  afterEach(function () {
    tk.reset()
  })

  describe('record', function () {
    beforeEach(async function () {
      this.error = new Error('something bad')
      await this.ErrorRecorder.promises.record(
        this.project_id,
        this.queueSize,
        this.error
      )
    })

    it('should record the error to mongo', function () {
      this.db.projectHistoryFailures.findOneAndUpdate
        .calledWithMatch(
          {
            project_id: this.project_id,
          },
          {
            // Latest failure details live on the top-level document
            $set: {
              queueSize: this.queueSize,
              error: this.error.toString(),
              stack: this.error.stack,
              ts: this.now,
            },
            $inc: {
              attempts: 1,
            },
            // A capped history of the last 10 failures is kept, newest first
            $push: {
              history: {
                $each: [
                  {
                    queueSize: this.queueSize,
                    error: this.error.toString(),
                    stack: this.error.stack,
                    ts: this.now,
                  },
                ],
                $position: 0,
                $slice: 10,
              },
            },
          },
          {
            // First failure for a project creates the record
            upsert: true,
          }
        )
        .should.equal(true)
    })
  })

  describe('clearError', function () {
    beforeEach(async function () {
      this.result = await this.ErrorRecorder.promises.clearError(
        this.project_id
      )
    })

    it('should remove any error from mongo', function () {
      this.db.projectHistoryFailures.deleteOne
        .calledWithMatch({ project_id: this.project_id })
        .should.equal(true)
    })
  })
})

View File

@@ -0,0 +1,497 @@
import { expect } from 'chai'
import { createRangeBlobDataFromUpdate } from '../../../../app/js/HistoryBlobTranslator.js'
/**
* @import { AddDocUpdate } from "../../../../app/js/types"
*/
/**
 * Build a minimal AddDocUpdate fixture for the translator tests.
 *
 * Only `pathname`, `docLines` and `ranges` vary between tests; the
 * remaining fields are fixed placeholder values.
 *
 * @param {string} pathname - path of the doc within the project
 * @param {string} docLines - full document content
 * @param {AddDocUpdate["ranges"]} ranges - comment/tracked-change ranges
 * @returns {AddDocUpdate}
 */
const update = (pathname, docLines, ranges) => ({
  pathname,
  docLines,
  ranges,
  version: 'version-1',
  projectHistoryId: 'project-id',
  doc: 'doc',
  meta: { user_id: 'user-id', ts: 0 },
})
// Unit tests for createRangeBlobDataFromUpdate, which converts the
// doc-updater style `ranges` object (comment ops with `c`/`p`/`t` and
// tracked-change ops with `i`/`d`/`p`) into the history blob format:
// { comments: [{ id, ranges }], trackedChanges: [{ range, tracking }] }.
// When an op carries `hpos`, that value is used as the range position
// instead of `p` (see the mixed tracked-changes cases below).
describe('HistoryBlobTranslator', function () {
  describe('createBlobDataFromUpdate', function () {
    beforeEach(function () {
      // Positions in the ops below index into this fixture string.
      this.text = 'the quick brown fox jumps over the lazy dog'
    })
    describe('for update with no ranges', function () {
      beforeEach(function () {
        this.result = createRangeBlobDataFromUpdate(
          update('pathname', this.text, undefined)
        )
      })
      it('should not return ranges', function () {
        expect(this.result).to.be.undefined
      })
    })
    describe('for update with empty ranges object', function () {
      beforeEach(function () {
        this.result = createRangeBlobDataFromUpdate(
          update('pathname', this.text, {})
        )
      })
      it('should not return ranges', function () {
        expect(this.result).to.be.undefined
      })
    })
    describe('for update with ranges object with empty lists', function () {
      beforeEach(function () {
        this.result = createRangeBlobDataFromUpdate(
          update('pathname', this.text, { changes: [], comments: [] })
        )
      })
      it('should not return ranges', function () {
        expect(this.result).to.be.undefined
      })
    })
    describe('for update with zero length comments', function () {
      beforeEach(function () {
        // A comment op with an empty `c` string has no extent in the doc.
        this.result = createRangeBlobDataFromUpdate(
          update('pathname', this.text, {
            changes: [],
            comments: [
              { op: { c: '', p: 4, t: 'comment-1', resolved: false } },
            ],
          })
        )
      })
      it('should treat them as detached comments', function () {
        // The comment id survives but with an empty ranges list.
        expect(this.result).to.deep.equal({
          comments: [{ id: 'comment-1', ranges: [] }],
          trackedChanges: [],
        })
      })
    })
    describe('for update with ranges object with only comments', function () {
      it('should return unmoved ranges', function () {
        // 'quick' at index 4, length 5 — mapped 1:1 into { pos, length }.
        const result = createRangeBlobDataFromUpdate(
          update('pathname', this.text, {
            comments: [
              {
                op: { c: 'quick', p: 4, t: 'comment-1', resolved: false },
              },
            ],
          })
        )
        expect(result).to.deep.equal({
          comments: [
            {
              id: 'comment-1',
              ranges: [{ pos: 4, length: 5 }],
            },
          ],
          trackedChanges: [],
        })
      })
      it('should merge comments ranges into a single comment by id', function () {
        // Two ops sharing thread id 'comment-1' collapse into one
        // comment entry holding both ranges.
        const result = createRangeBlobDataFromUpdate(
          update('pathname', this.text, {
            comments: [
              {
                op: { c: 'quick', p: 4, t: 'comment-1', resolved: false },
              },
              {
                op: { c: 'jumps', p: 20, t: 'comment-1', resolved: false },
              },
            ],
          })
        )
        expect(result).to.deep.equal({
          comments: [
            {
              id: 'comment-1',
              ranges: [
                { pos: 4, length: 5 },
                { pos: 20, length: 5 },
              ],
            },
          ],
          trackedChanges: [],
        })
      })
      it('should not merge ranges into a single comment if id differs', function () {
        const result = createRangeBlobDataFromUpdate(
          update('pathname', this.text, {
            comments: [
              {
                op: { c: 'quick', p: 4, t: 'comment-1', resolved: false },
              },
              {
                op: { c: 'jumps', p: 20, t: 'comment-2', resolved: false },
              },
            ],
          })
        )
        expect(result).to.deep.equal({
          comments: [
            {
              id: 'comment-1',
              ranges: [{ pos: 4, length: 5 }],
            },
            {
              id: 'comment-2',
              ranges: [{ pos: 20, length: 5 }],
            },
          ],
          trackedChanges: [],
        })
      })
    })
    describe('for update with ranges object with only tracked insertions', function () {
      it('should translate into history tracked insertions', function () {
        // Each `i` op becomes a trackedChange with tracking.type 'insert'
        // carrying the author and timestamp from the op metadata.
        const result = createRangeBlobDataFromUpdate(
          update('pathname', this.text, {
            changes: [
              {
                op: { p: 4, i: 'quick' },
                metadata: {
                  ts: '2024-01-01T00:00:00.000Z',
                  user_id: 'user-1',
                },
              },
              {
                op: { p: 10, i: 'brown' },
                metadata: {
                  ts: '2023-01-01T00:00:00.000Z',
                  user_id: 'user-2',
                },
              },
            ],
          })
        )
        expect(result).to.deep.equal({
          comments: [],
          trackedChanges: [
            {
              range: { pos: 4, length: 5 },
              tracking: {
                type: 'insert',
                userId: 'user-1',
                ts: '2024-01-01T00:00:00.000Z',
              },
            },
            {
              range: { pos: 10, length: 5 },
              tracking: {
                type: 'insert',
                userId: 'user-2',
                ts: '2023-01-01T00:00:00.000Z',
              },
            },
          ],
        })
      })
    })
    describe('for update with ranges object with mixed tracked changes', function () {
      describe('with tracked deletions before insertions', function () {
        it('should insert tracked deletions before insertions', function () {
          // The fixture text already contains both 'quick' (deleted) and
          // 'rapid' (inserted); `hpos: 9` places the insertion at its
          // history position, after the tracked-deleted 'quick'.
          const text = 'the quickrapid brown fox jumps over the lazy dog'
          const result = createRangeBlobDataFromUpdate(
            update('pathname', text, {
              changes: [
                {
                  op: { p: 4, d: 'quick' },
                  metadata: {
                    ts: '2024-01-01T00:00:00.000Z',
                    user_id: 'user-1',
                  },
                },
                {
                  op: { p: 4, hpos: 9, i: 'rapid' },
                  metadata: {
                    ts: '2023-01-01T00:00:00.000Z',
                    user_id: 'user-2',
                  },
                },
              ],
            })
          )
          expect(result).to.deep.equal({
            comments: [],
            trackedChanges: [
              {
                range: { pos: 4, length: 5 },
                tracking: {
                  type: 'delete',
                  userId: 'user-1',
                  ts: '2024-01-01T00:00:00.000Z',
                },
              },
              {
                range: { pos: 9, length: 5 },
                tracking: {
                  type: 'insert',
                  userId: 'user-2',
                  ts: '2023-01-01T00:00:00.000Z',
                },
              },
            ],
          })
        })
      })
      describe('with tracked insertions before deletions', function () {
        it('should insert tracked deletions before insertions', function () {
          // Same ops as above but supplied in the opposite order; the
          // expected output is identical (delete range first).
          const text = 'the quickrapid brown fox jumps over the lazy dog'
          const result = createRangeBlobDataFromUpdate(
            update('pathname', text, {
              changes: [
                {
                  op: { p: 4, hpos: 9, i: 'rapid' },
                  metadata: {
                    ts: '2023-01-01T00:00:00.000Z',
                    user_id: 'user-2',
                  },
                },
                {
                  op: { p: 4, d: 'quick' },
                  metadata: {
                    ts: '2024-01-01T00:00:00.000Z',
                    user_id: 'user-1',
                  },
                },
              ],
            })
          )
          expect(result).to.deep.equal({
            comments: [],
            trackedChanges: [
              {
                range: { pos: 4, length: 5 },
                tracking: {
                  type: 'delete',
                  userId: 'user-1',
                  ts: '2024-01-01T00:00:00.000Z',
                },
              },
              {
                range: { pos: 9, length: 5 },
                tracking: {
                  type: 'insert',
                  userId: 'user-2',
                  ts: '2023-01-01T00:00:00.000Z',
                },
              },
            ],
          })
        })
      })
      it('should adjust positions', function () {
        // Ops with `hpos` keep their history position (35) rather than
        // their doc position (30).
        const text = 'the quick brown fox jumps over the lazy dog'
        const result = createRangeBlobDataFromUpdate(
          update('pathname', text, {
            changes: [
              {
                op: { p: 4, i: 'quick' },
                metadata: {
                  ts: '2024-01-01T00:00:00.000Z',
                  user_id: 'user-1',
                },
              },
              {
                op: { p: 10, d: 'brown' },
                metadata: {
                  ts: '2023-01-01T00:00:00.000Z',
                  user_id: 'user-2',
                },
              },
              {
                op: { p: 30, hpos: 35, i: 'lazy' },
                metadata: {
                  ts: '2022-01-01T00:00:00.000Z',
                  user_id: 'user-2',
                },
              },
            ],
          })
        )
        expect(result).to.deep.equal({
          comments: [],
          trackedChanges: [
            {
              range: { pos: 4, length: 5 },
              tracking: {
                type: 'insert',
                userId: 'user-1',
                ts: '2024-01-01T00:00:00.000Z',
              },
            },
            {
              range: { pos: 10, length: 5 },
              tracking: {
                type: 'delete',
                userId: 'user-2',
                ts: '2023-01-01T00:00:00.000Z',
              },
            },
            {
              range: { pos: 35, length: 4 },
              tracking: {
                type: 'insert',
                userId: 'user-2',
                ts: '2022-01-01T00:00:00.000Z',
              },
            },
          ],
        })
      })
    })
    describe('for update with ranges object with mixed tracked changes and comments', function () {
      it('should adjust positions', function () {
        // Comments with `hpos` also keep the history position (16 vs 11).
        const text = 'the quick brown fox jumps over the lazy dog'
        const result = createRangeBlobDataFromUpdate(
          update('pathname', text, {
            comments: [
              {
                op: { c: 'quick', p: 4, t: 'comment-1', resolved: false },
              },
              {
                op: {
                  c: 'fox',
                  p: 11,
                  hpos: 16,
                  t: 'comment-2',
                  resolved: false,
                },
              },
            ],
            changes: [
              {
                op: { p: 4, i: 'quick' },
                metadata: {
                  ts: '2024-01-01T00:00:00.000Z',
                  user_id: 'user-1',
                },
              },
              {
                op: { p: 10, d: 'brown' },
                metadata: {
                  ts: '2023-01-01T00:00:00.000Z',
                  user_id: 'user-2',
                },
              },
              {
                op: { p: 30, hpos: 35, i: 'lazy' },
                metadata: {
                  ts: '2022-01-01T00:00:00.000Z',
                  user_id: 'user-2',
                },
              },
            ],
          })
        )
        expect(result).to.deep.equal({
          comments: [
            {
              ranges: [{ pos: 4, length: 5 }],
              id: 'comment-1',
            },
            {
              ranges: [{ pos: 16, length: 3 }],
              id: 'comment-2',
            },
          ],
          trackedChanges: [
            {
              range: { pos: 4, length: 5 },
              tracking: {
                type: 'insert',
                userId: 'user-1',
                ts: '2024-01-01T00:00:00.000Z',
              },
            },
            {
              range: { pos: 10, length: 5 },
              tracking: {
                type: 'delete',
                userId: 'user-2',
                ts: '2023-01-01T00:00:00.000Z',
              },
            },
            {
              range: { pos: 35, length: 4 },
              tracking: {
                type: 'insert',
                userId: 'user-2',
                ts: '2022-01-01T00:00:00.000Z',
              },
            },
          ],
        })
      })
      it('should adjust comment length', function () {
        // The comment covers 'quick fox' (length 9): the tracked-deleted
        // 'brown ' (`hlen`-style expansion) is absorbed into the comment
        // span, so the resulting range length stays 9.
        const text = 'the quick brown fox jumps over the lazy dog'
        const result = createRangeBlobDataFromUpdate(
          update('pathname', text, {
            comments: [
              {
                op: { c: 'quick fox', p: 4, t: 'comment-1', resolved: false },
              },
            ],
            changes: [
              {
                op: { p: 10, d: 'brown ' },
                metadata: {
                  ts: '2023-01-01T00:00:00.000Z',
                  user_id: 'user-2',
                },
              },
            ],
          })
        )
        expect(result).to.deep.equal({
          comments: [
            {
              ranges: [{ pos: 4, length: 9 }],
              id: 'comment-1',
            },
          ],
          trackedChanges: [
            {
              range: { pos: 10, length: 6 },
              tracking: {
                type: 'delete',
                userId: 'user-2',
                ts: '2023-01-01T00:00:00.000Z',
              },
            },
          ],
        })
      })
    })
  })
})

View File

@@ -0,0 +1,727 @@
import sinon from 'sinon'
import { expect } from 'chai'
import { strict as esmock } from 'esmock'
import EventEmitter from 'node:events'
import { RequestFailedError } from '@overleaf/fetch-utils'
import * as Errors from '../../../../app/js/Errors.js'
const MODULE_PATH = '../../../../app/js/HistoryStoreManager.js'
// Unit tests for HistoryStoreManager, the client for the history-v1
// ("overleaf history") HTTP service. Covers chunk/version retrieval,
// blob creation (including filestore fallback paths), blob streaming,
// project initialization and deletion. All network access is stubbed
// via `request` (callback style) and fetch-utils (promise style).
describe('HistoryStoreManager', function () {
  beforeEach(async function () {
    this.projectId = '123456789012345678901234'
    this.historyId = 'mock-ol-project-id'
    // Settings consumed by the module: history-v1 host/credentials and
    // the filestore endpoint used as a blob-content fallback.
    this.settings = {
      overleaf: {
        history: {
          host: 'http://example.com',
          user: 'overleaf',
          pass: 'password',
          requestTimeout: 123,
        },
      },
      apis: {
        filestore: {
          enabled: true,
          url: 'http://filestore.overleaf.production',
        },
      },
    }
    // Matcher for the "latest chunk" GET issued by getMostRecentChunk /
    // getMostRecentVersion, including basic-auth credentials.
    this.latestChunkRequestArgs = sinon.match({
      method: 'GET',
      url: `${this.settings.overleaf.history.host}/projects/${this.historyId}/latest/history`,
      json: true,
      auth: {
        user: this.settings.overleaf.history.user,
        pass: this.settings.overleaf.history.pass,
        sendImmediately: true,
      },
    })
    this.callback = sinon.stub()
    this.LocalFileWriter = {
      bufferOnDisk: sinon.stub(),
    }
    this.WebApiManager = {
      getHistoryId: sinon.stub(),
    }
    this.WebApiManager.getHistoryId
      .withArgs(this.projectId)
      .yields(null, this.historyId)
    this.FetchUtils = {
      fetchStream: sinon.stub(),
      fetchNothing: sinon.stub().resolves(),
      RequestFailedError,
    }
    this.request = sinon.stub()
    this.logger = {
      debug: sinon.stub(),
      warn: sinon.stub(),
    }
    // Load the module under test with all collaborators mocked.
    this.HistoryStoreManager = await esmock(MODULE_PATH, {
      '@overleaf/fetch-utils': this.FetchUtils,
      request: this.request,
      '@overleaf/settings': this.settings,
      '../../../../app/js/LocalFileWriter.js': this.LocalFileWriter,
      '../../../../app/js/WebApiManager.js': this.WebApiManager,
      '../../../../app/js/Errors.js': Errors,
      '@overleaf/logger': this.logger,
    })
  })
  describe('getMostRecentChunk', function () {
    describe('successfully', function () {
      beforeEach(function () {
        this.chunk = {
          chunk: {
            startVersion: 0,
            history: {
              snapshot: {
                files: {},
              },
              changes: [],
            },
          },
        }
        this.request
          .withArgs(this.latestChunkRequestArgs)
          .yields(null, { statusCode: 200 }, this.chunk)
        this.HistoryStoreManager.getMostRecentChunk(
          this.projectId,
          this.historyId,
          this.callback
        )
      })
      it('should call the callback with the chunk', function () {
        expect(this.callback).to.have.been.calledWith(null, this.chunk)
      })
    })
  })
  describe('getMostRecentVersion', function () {
    describe('successfully', function () {
      beforeEach(function () {
        this.chunk = {
          chunk: {
            startVersion: 5,
            history: {
              snapshot: {
                files: {},
              },
              changes: [
                { v2Authors: ['5678'], timestamp: '2017-10-17T10:44:40.227Z' },
                { v2Authors: ['1234'], timestamp: '2017-10-16T10:44:40.227Z' },
              ],
            },
          },
        }
        this.request
          .withArgs(this.latestChunkRequestArgs)
          .yields(null, { statusCode: 200 }, this.chunk)
        this.HistoryStoreManager.getMostRecentVersion(
          this.projectId,
          this.historyId,
          this.callback
        )
      })
      it('should call the callback with the latest version information', function () {
        // startVersion 5 + 2 changes => latest version 7; the last change
        // in the list is passed through as the most recent change.
        expect(this.callback).to.have.been.calledWith(
          null,
          7,
          { project: undefined, docs: {} },
          { v2Authors: ['5678'], timestamp: '2017-10-17T10:44:40.227Z' }
        )
      })
    })
    describe('out of order doc ops', function () {
      beforeEach(function () {
        // Snapshot already has doc version 2, but the change carries
        // version 1 — a regression in doc version order.
        this.chunk = {
          chunk: {
            startVersion: 5,
            history: {
              snapshot: {
                v2DocVersions: {
                  mock_doc_id: {
                    pathname: '/main.tex',
                    v: 2,
                  },
                },
              },
              changes: [
                {
                  operations: [],
                  v2DocVersions: {
                    mock_doc_id: {
                      pathname: '/main.tex',
                      v: 1,
                    },
                  },
                },
              ],
            },
          },
        }
        this.request
          .withArgs(this.latestChunkRequestArgs)
          .yields(null, { statusCode: 200 }, this.chunk)
        this.HistoryStoreManager.getMostRecentVersion(
          this.projectId,
          this.historyId,
          this.callback
        )
      })
      it('should return an error', function () {
        expect(this.callback).to.have.been.calledWith(
          sinon.match
            .instanceOf(Errors.OpsOutOfOrderError)
            .and(sinon.match.has('message', 'doc version out of order'))
        )
      })
      it('should call the callback with the latest version information', function () {
        // Even on error, version info is still reported (error-first but
        // with the computed version/doc-version payload alongside).
        expect(this.callback).to.have.been.calledWith(
          sinon.match.instanceOf(Errors.OpsOutOfOrderError),
          6,
          {
            project: undefined,
            docs: { mock_doc_id: { pathname: '/main.tex', v: 2 } },
          },
          this.chunk.chunk.history.changes[0]
        )
      })
    })
    describe('out of order project structure versions', function () {
      beforeEach(function () {
        // projectVersion goes 2 (snapshot) -> 1 (change): out of order.
        this.chunk = {
          chunk: {
            startVersion: 5,
            history: {
              snapshot: {
                projectVersion: 2,
              },
              changes: [
                {
                  operations: [{ pathname: 'main.tex', newPathname: '' }],
                  projectVersion: 1,
                },
              ],
            },
          },
        }
        this.request
          .withArgs(this.latestChunkRequestArgs)
          .yields(null, { statusCode: 200 }, this.chunk)
        this.HistoryStoreManager.getMostRecentVersion(
          this.projectId,
          this.historyId,
          this.callback
        )
      })
      it('should return an error', function () {
        expect(this.callback).to.have.been.calledWith(
          sinon.match
            .instanceOf(Errors.OpsOutOfOrderError)
            .and(
              sinon.match.has(
                'message',
                'project structure version out of order'
              )
            )
        )
      })
      it('should call the callback with the latest version information', function () {
        expect(this.callback).to.have.been.calledWith(
          sinon.match.instanceOf(Errors.OpsOutOfOrderError),
          6,
          { project: 2, docs: {} },
          this.chunk.chunk.history.changes[0]
        )
      })
    })
    describe('out of order project structure and doc versions', function () {
      beforeEach(function () {
        // Both project structure versions (3 -> 1) and doc versions
        // (2 -> 1) regress within the change list; the structure error
        // takes precedence in the reported message.
        this.chunk = {
          chunk: {
            startVersion: 5,
            history: {
              snapshot: {
                projectVersion: 1,
              },
              changes: [
                {
                  operations: [{ pathname: 'main.tex', newPathname: '' }],
                  projectVersion: 1,
                },
                {
                  operations: [{ pathname: 'main.tex', newPathname: '' }],
                  projectVersion: 2,
                },
                {
                  operations: [{ pathname: 'main.tex', newPathname: '' }],
                  projectVersion: 3,
                },
                {
                  operations: [{ pathname: 'main.tex', newPathname: '' }],
                  projectVersion: 1,
                },
                {
                  operations: [],
                  v2DocVersions: {
                    mock_doc_id: {
                      pathname: '/main.tex',
                      v: 1,
                    },
                  },
                },
                {
                  operations: [],
                  v2DocVersions: {
                    mock_doc_id: {
                      pathname: '/main.tex',
                      v: 2,
                    },
                  },
                },
                {
                  operations: [],
                  v2DocVersions: {
                    mock_doc_id: {
                      pathname: '/main.tex',
                      v: 1,
                    },
                  },
                },
              ],
            },
          },
        }
        this.request
          .withArgs(this.latestChunkRequestArgs)
          .yields(null, { statusCode: 200 }, this.chunk)
        this.HistoryStoreManager.getMostRecentVersion(
          this.projectId,
          this.historyId,
          this.callback
        )
      })
      it('should return an error', function () {
        expect(this.callback).to.have.been.calledWith(
          sinon.match
            .instanceOf(Errors.OpsOutOfOrderError)
            .and(
              sinon.match.has(
                'message',
                'project structure version out of order'
              )
            )
        )
      })
      it('should call the callback with the latest version information', function () {
        // startVersion 5 + 7 changes => version 12; highest seen
        // project/doc versions are retained despite the regressions.
        expect(this.callback).to.have.been.calledWith(
          sinon.match.instanceOf(Errors.OpsOutOfOrderError),
          12,
          {
            project: 3,
            docs: { mock_doc_id: { pathname: '/main.tex', v: 2 } },
          },
          this.chunk.chunk.history.changes[6]
        )
      })
    })
    describe('with an unexpected response', function () {
      beforeEach(function () {
        this.badChunk = {
          chunk: {
            foo: 123, // valid chunk should have startVersion property
            bar: 456,
          },
        }
        this.request
          .withArgs(this.latestChunkRequestArgs)
          .yields(null, { statusCode: 200 }, this.badChunk)
        this.HistoryStoreManager.getMostRecentVersion(
          this.projectId,
          this.historyId,
          this.callback
        )
      })
      it('should return an error', function () {
        expect(this.callback).to.have.been.calledWith(
          sinon.match
            .instanceOf(Error)
            .and(sinon.match.has('message', 'unexpected response'))
        )
      })
    })
  })
  describe('createBlobForUpdate', function () {
    beforeEach(function () {
      this.fileStream = {}
      this.hash = 'random-hash'
      // bufferOnDisk yields the computed blob hash via its 5th argument.
      this.LocalFileWriter.bufferOnDisk.callsArgWith(4, null, this.hash)
      // Default: the HEAD blob-existence check reports "not found".
      this.FetchUtils.fetchNothing.rejects(
        new RequestFailedError('', {}, { status: 404 })
      )
      this.FetchUtils.fetchStream.resolves(this.fileStream)
    })
    describe('for a file update with any filestore location', function () {
      beforeEach(function (done) {
        this.file_id = '012345678901234567890123'
        // The update URL points at a *different* filestore host; the
        // manager should still fetch from the configured filestore.
        this.update = {
          file: true,
          url: `http://filestore.other.cloud.provider/project/${this.projectId}/file/${this.file_id}`,
          hash: this.hash,
        }
        this.HistoryStoreManager.createBlobForUpdate(
          this.projectId,
          this.historyId,
          this.update,
          (err, { file: hash }) => {
            if (err) {
              return done(err)
            }
            this.actualHash = hash
            done()
          }
        )
      })
      it('should not log any warnings', function () {
        expect(this.logger.warn).to.not.have.been.called
      })
      it('should request the file from the filestore in settings', function () {
        expect(this.FetchUtils.fetchStream).to.have.been.calledWithMatch(
          `${this.settings.apis.filestore.url}/project/${this.projectId}/file/${this.file_id}`
        )
      })
      it('should call the callback with the blob', function () {
        expect(this.actualHash).to.equal(this.hash)
      })
    })
    describe('with filestore disabled', function () {
      beforeEach(function (done) {
        this.settings.apis.filestore.enabled = false
        this.file_id = '012345678901234567890123'
        this.update = {
          file: true,
          url: `http://filestore.other.cloud.provider/project/${this.projectId}/file/${this.file_id}`,
          hash: this.hash,
        }
        this.HistoryStoreManager.createBlobForUpdate(
          this.projectId,
          this.historyId,
          this.update,
          err => {
            expect(err).to.match(/blocking filestore read/)
            done()
          }
        )
      })
      it('should not request the file', function () {
        expect(this.FetchUtils.fetchStream).to.not.have.been.called
      })
    })
    describe('for a file update with an invalid filestore location', function () {
      beforeEach(function (done) {
        // URL's embedded project id does not match the request's
        // project id — treated as an invalid location.
        this.invalid_id = '000000000000000000000000'
        this.file_id = '012345678901234567890123'
        this.update = {
          file: true,
          url: `http://filestore.other.cloud.provider/project/${this.invalid_id}/file/${this.file_id}`,
          hash: this.hash,
        }
        this.HistoryStoreManager.createBlobForUpdate(
          this.projectId,
          this.historyId,
          this.update,
          err => {
            expect(err).to.exist
            done()
          }
        )
      })
      it('should not request the file from the filestore', function () {
        expect(this.FetchUtils.fetchStream).to.not.have.been.called
      })
    })
    describe('when the hash mismatches', function () {
      beforeEach(function (done) {
        // web supplies one hash, the locally computed blob hash differs:
        // the mismatch is logged but the computed hash wins.
        this.file_id = '012345678901234567890123'
        this.update = {
          file: true,
          url: `http://filestore.other.cloud.provider/project/${this.projectId}/file/${this.file_id}`,
          hash: 'another-hash-from-web',
        }
        this.HistoryStoreManager.createBlobForUpdate(
          this.projectId,
          this.historyId,
          this.update,
          (err, { file: hash }) => {
            if (err) {
              return done(err)
            }
            this.actualHash = hash
            done()
          }
        )
      })
      it('should log a warning', function () {
        expect(this.logger.warn).to.have.been.calledWith(
          {
            projectId: this.projectId,
            fileId: this.file_id,
            webHash: 'another-hash-from-web',
            fileHash: this.hash,
          },
          'hash mismatch between web and project-history'
        )
      })
      it('should request the file from the filestore in settings', function () {
        expect(this.FetchUtils.fetchStream).to.have.been.calledWithMatch(
          `${this.settings.apis.filestore.url}/project/${this.projectId}/file/${this.file_id}`
        )
      })
      it('should call the callback with the blob', function () {
        expect(this.actualHash).to.equal(this.hash)
      })
    })
    describe('when the createdBlob flag is set on the update', function () {
      beforeEach(function () {
        this.file_id = '012345678901234567890123'
        this.update = {
          file: true,
          createdBlob: true,
          url: `http://filestore.other.cloud.provider/project/${this.projectId}/file/${this.file_id}`,
          hash: this.hash,
        }
      })
      describe('when history-v1 confirms that the blob exists', function () {
        beforeEach(function (done) {
          // HEAD check succeeds => blob creation is skipped entirely.
          this.FetchUtils.fetchNothing.resolves()
          this.HistoryStoreManager.createBlobForUpdate(
            this.projectId,
            this.historyId,
            this.update,
            (err, { file: hash }) => {
              if (err) {
                return done(err)
              }
              this.actualHash = hash
              done()
            }
          )
        })
        it('should call the callback with the existing hash', function () {
          expect(this.actualHash).to.equal(this.hash)
        })
        it('should not request the file from the filestore', function () {
          expect(this.FetchUtils.fetchStream).to.not.have.been.called
        })
        it('should log a debug level message', function () {
          expect(this.logger.debug).to.have.been.calledWith(
            {
              projectId: this.projectId,
              fileId: this.file_id,
              update: this.update,
            },
            'Skipping blob creation as it has already been created'
          )
        })
      })
      describe('when history-v1 does not confirm that the blob exists', function () {
        beforeEach(function (done) {
          // HEAD check 404s => fall back to reading from the filestore.
          this.FetchUtils.fetchNothing.rejects(
            new RequestFailedError(
              `${this.settings.overleaf.history.host}/project/${this.projectId}/file/${this.file_id}`,
              { method: 'HEAD' },
              { status: 404 }
            )
          )
          this.HistoryStoreManager.createBlobForUpdate(
            this.projectId,
            this.historyId,
            this.update,
            (err, { file: hash }) => {
              if (err) {
                return done(err)
              }
              this.actualHash = hash
              done()
            }
          )
        })
        it('should warn that we will use the filestore', function () {
          expect(this.logger.warn).to.have.been.calledWithMatch(
            {
              fileId: this.file_id,
              projectId: this.projectId,
              update: this.update,
            },
            'created blob does not exist, reading from filestore'
          )
        })
        it('should request the file from the filestore in settings', function () {
          expect(this.FetchUtils.fetchStream).to.have.been.calledWithMatch(
            `${this.settings.apis.filestore.url}/project/${this.projectId}/file/${this.file_id}`
          )
        })
        it('should call the callback with the blob', function () {
          expect(this.actualHash).to.equal(this.hash)
        })
      })
    })
  })
  describe('getProjectBlob', function () {
    describe('successfully', function () {
      beforeEach(function () {
        this.blobContent = 'test content'
        this.blobHash = 'test hash'
        this.request.yields(null, { statusCode: 200 }, this.blobContent)
        this.HistoryStoreManager.getProjectBlob(
          this.historyId,
          this.blobHash,
          this.callback
        )
      })
      it('should get the blob from the overleaf history service', function () {
        expect(this.request).to.have.been.calledWithMatch({
          method: 'GET',
          url: `${this.settings.overleaf.history.host}/projects/${this.historyId}/blobs/${this.blobHash}`,
          auth: {
            user: this.settings.overleaf.history.user,
            pass: this.settings.overleaf.history.pass,
            sendImmediately: true,
          },
        })
      })
      it('should call the callback with the blob', function () {
        expect(this.callback).to.have.been.calledWith(null, this.blobContent)
      })
    })
  })
  describe('getProjectBlobStream', function () {
    describe('successfully', function () {
      beforeEach(function (done) {
        // An EventEmitter stands in for the fetched response stream.
        this.historyResponse = new EventEmitter()
        this.blobHash = 'test hash'
        this.FetchUtils.fetchStream.resolves(this.historyResponse)
        this.HistoryStoreManager.getProjectBlobStream(
          this.historyId,
          this.blobHash,
          (err, stream) => {
            if (err) {
              return done(err)
            }
            this.stream = stream
            done()
          }
        )
      })
      it('should get the blob from the overleaf history service', function () {
        expect(this.FetchUtils.fetchStream).to.have.been.calledWithMatch(
          `${this.settings.overleaf.history.host}/projects/${this.historyId}/blobs/${this.blobHash}`
        )
      })
      it('should return a stream of the blob contents', function () {
        expect(this.stream).to.equal(this.historyResponse)
      })
    })
  })
  describe('initializeProject', function () {
    describe('successfully', function () {
      beforeEach(function () {
        this.response_body = { projectId: this.historyId }
        this.request.callsArgWith(
          1,
          null,
          { statusCode: 200 },
          this.response_body
        )
        this.HistoryStoreManager.initializeProject(
          this.historyId,
          this.callback
        )
      })
      it('should send the change to the history store', function () {
        expect(this.request).to.have.been.calledWithMatch({
          method: 'POST',
          url: `${this.settings.overleaf.history.host}/projects`,
          auth: {
            user: this.settings.overleaf.history.user,
            pass: this.settings.overleaf.history.pass,
            sendImmediately: true,
          },
          json: { projectId: this.historyId },
        })
      })
      it('should call the callback with the new overleaf id', function () {
        expect(this.callback).to.have.been.calledWith(null, this.historyId)
      })
    })
  })
  describe('deleteProject', function () {
    beforeEach(function (done) {
      this.request.yields(null, { statusCode: 204 }, '')
      this.HistoryStoreManager.deleteProject(this.historyId, done)
    })
    it('should ask the history store to delete the project', function () {
      expect(this.request).to.have.been.calledWithMatch({
        method: 'DELETE',
        url: `${this.settings.overleaf.history.host}/projects/${this.historyId}`,
      })
    })
  })
})

View File

@@ -0,0 +1,573 @@
import sinon from 'sinon'
import { strict as esmock } from 'esmock'
import mongodb from 'mongodb-legacy'
const { ObjectId } = mongodb
const MODULE_PATH = '../../../../app/js/HttpController.js'
describe('HttpController', function () {
beforeEach(async function () {
this.UpdatesProcessor = {
processUpdatesForProject: sinon.stub().yields(),
}
this.SummarizedUpdatesManager = {
getSummarizedProjectUpdates: sinon.stub(),
}
this.DiffManager = {
getDiff: sinon.stub(),
}
this.HistoryStoreManager = {
deleteProject: sinon.stub().yields(),
getMostRecentVersion: sinon.stub(),
getProjectBlobStream: sinon.stub(),
initializeProject: sinon.stub(),
}
this.SnapshotManager = {
getFileSnapshotStream: sinon.stub(),
getProjectSnapshot: sinon.stub(),
}
this.HealthChecker = {}
this.SyncManager = {
clearResyncState: sinon.stub().yields(),
startResync: sinon.stub().yields(),
}
this.WebApiManager = {
getHistoryId: sinon.stub(),
}
this.RedisManager = {
destroyDocUpdatesQueue: sinon.stub().yields(),
clearFirstOpTimestamp: sinon.stub().yields(),
clearCachedHistoryId: sinon.stub().yields(),
}
this.ErrorRecorder = {
clearError: sinon.stub().yields(),
}
this.LabelsManager = {
createLabel: sinon.stub(),
deleteLabel: sinon.stub().yields(),
deleteLabelForUser: sinon.stub().yields(),
getLabels: sinon.stub(),
}
this.HistoryApiManager = {
shouldUseProjectHistory: sinon.stub(),
}
this.RetryManager = {}
this.FlushManager = {}
this.request = {}
this.pipeline = sinon.stub()
this.HttpController = await esmock(MODULE_PATH, {
request: this.request,
stream: { pipeline: this.pipeline },
'../../../../app/js/UpdatesProcessor.js': this.UpdatesProcessor,
'../../../../app/js/SummarizedUpdatesManager.js':
this.SummarizedUpdatesManager,
'../../../../app/js/DiffManager.js': this.DiffManager,
'../../../../app/js/HistoryStoreManager.js': this.HistoryStoreManager,
'../../../../app/js/SnapshotManager.js': this.SnapshotManager,
'../../../../app/js/HealthChecker.js': this.HealthChecker,
'../../../../app/js/SyncManager.js': this.SyncManager,
'../../../../app/js/WebApiManager.js': this.WebApiManager,
'../../../../app/js/RedisManager.js': this.RedisManager,
'../../../../app/js/ErrorRecorder.js': this.ErrorRecorder,
'../../../../app/js/LabelsManager.js': this.LabelsManager,
'../../../../app/js/HistoryApiManager.js': this.HistoryApiManager,
'../../../../app/js/RetryManager.js': this.RetryManager,
'../../../../app/js/FlushManager.js': this.FlushManager,
})
this.pathname = 'doc-id-123'
this.projectId = new ObjectId().toString()
this.projectOwnerId = new ObjectId().toString()
this.next = sinon.stub()
this.userId = new ObjectId().toString()
this.now = Date.now()
this.res = {
json: sinon.stub(),
send: sinon.stub(),
sendStatus: sinon.stub(),
setHeader: sinon.stub(),
}
})
describe('getProjectBlob', function () {
beforeEach(function () {
this.blobHash = 'abcd'
this.stream = {}
this.historyId = 1337
this.HistoryStoreManager.getProjectBlobStream.yields(null, this.stream)
this.HttpController.getProjectBlob(
{ params: { history_id: this.historyId, hash: this.blobHash } },
this.res,
this.next
)
})
it('should get a blob stream', function () {
this.HistoryStoreManager.getProjectBlobStream
.calledWith(this.historyId, this.blobHash)
.should.equal(true)
this.pipeline.should.have.been.calledWith(this.stream, this.res)
})
it('should set caching header', function () {
this.res.setHeader.should.have.been.calledWith(
'Cache-Control',
'private, max-age=86400'
)
})
})
describe('initializeProject', function () {
beforeEach(function () {
this.historyId = new ObjectId().toString()
this.req = { body: { historyId: this.historyId } }
this.HistoryStoreManager.initializeProject.yields(null, this.historyId)
this.HttpController.initializeProject(this.req, this.res, this.next)
})
it('should initialize the project', function () {
this.HistoryStoreManager.initializeProject.calledWith().should.equal(true)
})
it('should return the new overleaf id', function () {
this.res.json
.calledWith({ project: { id: this.historyId } })
.should.equal(true)
})
})
describe('flushProject', function () {
beforeEach(function () {
this.req = {
params: {
project_id: this.projectId,
},
query: {},
}
this.HttpController.flushProject(this.req, this.res, this.next)
})
it('should process the updates', function () {
this.UpdatesProcessor.processUpdatesForProject
.calledWith(this.projectId)
.should.equal(true)
})
it('should return a success code', function () {
this.res.sendStatus.calledWith(204).should.equal(true)
})
})
describe('getDiff', function () {
beforeEach(function () {
this.from = 42
this.to = 45
this.req = {
params: {
project_id: this.projectId,
},
query: {
pathname: this.pathname,
from: this.from,
to: this.to,
},
}
this.diff = [{ u: 'mock-diff' }]
this.DiffManager.getDiff.yields(null, this.diff)
this.HttpController.getDiff(this.req, this.res, this.next)
})
it('should get the diff', function () {
this.DiffManager.getDiff.should.have.been.calledWith(
this.projectId,
this.pathname,
this.from,
this.to
)
})
it('should return the diff', function () {
this.res.json.calledWith({ diff: this.diff }).should.equal(true)
})
})
describe('getUpdates', function () {
beforeEach(function () {
this.before = Date.now()
this.nextBeforeTimestamp = this.before - 100
this.min_count = 10
this.req = {
params: {
project_id: this.projectId,
},
query: {
before: this.before,
min_count: this.min_count,
},
}
this.updates = [{ i: 'mock-summarized-updates', p: 10 }]
this.SummarizedUpdatesManager.getSummarizedProjectUpdates.yields(
null,
this.updates,
this.nextBeforeTimestamp
)
this.HttpController.getUpdates(this.req, this.res, this.next)
})
it('should get the updates', function () {
this.SummarizedUpdatesManager.getSummarizedProjectUpdates.should.have.been.calledWith(
this.projectId,
{
before: this.before,
min_count: this.min_count,
}
)
})
it('should return the formatted updates', function () {
this.res.json.should.have.been.calledWith({
updates: this.updates,
nextBeforeTimestamp: this.nextBeforeTimestamp,
})
})
})
describe('latestVersion', function () {
beforeEach(function () {
this.historyId = 1234
this.req = {
params: {
project_id: this.projectId,
},
}
this.version = 99
this.lastChange = {
v2Authors: ['1234'],
timestamp: '2016-08-16T10:44:40.227Z',
}
this.versionInfo = {
version: this.version,
v2Authors: ['1234'],
timestamp: '2016-08-16T10:44:40.227Z',
}
this.WebApiManager.getHistoryId.yields(null, this.historyId)
this.HistoryStoreManager.getMostRecentVersion.yields(
null,
this.version,
{},
this.lastChange
)
this.HttpController.latestVersion(this.req, this.res, this.next)
})
it('should process the updates', function () {
this.UpdatesProcessor.processUpdatesForProject
.calledWith(this.projectId)
.should.equal(true)
})
it('should get the ol project id', function () {
this.WebApiManager.getHistoryId
.calledWith(this.projectId)
.should.equal(true)
})
it('should get the latest version', function () {
this.HistoryStoreManager.getMostRecentVersion
.calledWith(this.projectId, this.historyId)
.should.equal(true)
})
it('should return version number', function () {
this.res.json.calledWith(this.versionInfo).should.equal(true)
})
})
describe('resyncProject', function () {
beforeEach(function () {
this.req = {
params: {
project_id: this.projectId,
},
query: {},
body: {},
}
this.HttpController.resyncProject(this.req, this.res, this.next)
})
it('should resync the project', function () {
this.SyncManager.startResync.calledWith(this.projectId).should.equal(true)
})
it('should flush the queue', function () {
this.UpdatesProcessor.processUpdatesForProject
.calledWith(this.projectId)
.should.equal(true)
})
it('should return 204', function () {
this.res.sendStatus.calledWith(204).should.equal(true)
})
})
// HttpController.getFileSnapshot: streams a single file at a given
// version straight into the HTTP response.
describe('getFileSnapshot', function () {
  beforeEach(function () {
    this.version = 42
    this.pathname = 'foo.tex'
    this.req = {
      params: {
        project_id: this.projectId,
        version: this.version,
        pathname: this.pathname,
      },
    }
    this.res = { mock: 'res' }
    this.stream = {}
    this.SnapshotManager.getFileSnapshotStream.yields(null, this.stream)
    this.HttpController.getFileSnapshot(this.req, this.res, this.next)
  })
  it('should get the snapshot', function () {
    this.SnapshotManager.getFileSnapshotStream.should.have.been.calledWith(
      this.projectId,
      this.version,
      this.pathname
    )
  })
  it('should pipe the returned stream into the response', function () {
    // this.pipeline is the stubbed stream.pipeline from the outer setup.
    this.pipeline.should.have.been.calledWith(this.stream, this.res)
  })
})
// HttpController.getProjectSnapshot: fetches the whole-project snapshot
// at a version and returns it as JSON.
describe('getProjectSnapshot', function () {
  beforeEach(function () {
    this.version = 42
    this.req = {
      params: { project_id: this.projectId, version: this.version },
    }
    this.res = { json: sinon.stub() }
    this.snapshotData = { one: 1 }
    this.SnapshotManager.getProjectSnapshot.yields(null, this.snapshotData)
    this.HttpController.getProjectSnapshot(this.req, this.res, this.next)
  })

  it('should get the snapshot', function () {
    this.SnapshotManager.getProjectSnapshot.should.have.been.calledWith(
      this.projectId,
      this.version
    )
  })

  it('should send json response', function () {
    this.res.json.should.have.been.calledWith(this.snapshotData)
  })
})
// HttpController.getLabels: returns the project's labels as JSON, but
// only when project history is enabled; otherwise responds 409 Conflict.
describe('getLabels', function () {
  beforeEach(function () {
    this.req = {
      params: {
        project_id: this.projectId,
      },
    }
    this.labels = ['label-1', 'label-2']
    this.LabelsManager.getLabels.yields(null, this.labels)
  })
  describe('project history is enabled', function () {
    beforeEach(function () {
      this.HistoryApiManager.shouldUseProjectHistory.yields(null, true)
      this.HttpController.getLabels(this.req, this.res, this.next)
    })
    it('should get the labels for a project', function () {
      this.LabelsManager.getLabels
        .calledWith(this.projectId)
        .should.equal(true)
    })
    it('should return the labels', function () {
      this.res.json.calledWith(this.labels).should.equal(true)
    })
  })
  describe('project history is not enabled', function () {
    beforeEach(function () {
      this.HistoryApiManager.shouldUseProjectHistory.yields(null, false)
      this.HttpController.getLabels(this.req, this.res, this.next)
    })
    it('should return 409', function () {
      this.res.sendStatus.calledWith(409).should.equal(true)
    })
  })
})
// HttpController.createLabel: creates a label from the request body and
// returns it as JSON; responds 409 when project history is disabled.
describe('createLabel', function () {
  beforeEach(function () {
    this.req = {
      params: {
        project_id: this.projectId,
      },
      body: {
        version: (this.version = 'label-1'),
        comment: (this.comment = 'a comment'),
        created_at: (this.created_at = Date.now().toString()),
        validate_exists: true,
        user_id: this.userId,
      },
    }
    this.label = { _id: new ObjectId() }
    this.LabelsManager.createLabel.yields(null, this.label)
  })
  describe('project history is enabled', function () {
    beforeEach(function () {
      this.HistoryApiManager.shouldUseProjectHistory.yields(null, true)
      this.HttpController.createLabel(this.req, this.res, this.next)
    })
    it('should create a label for a project', function () {
      // Final `true` is the validate_exists flag forwarded from the body.
      this.LabelsManager.createLabel.should.have.been.calledWith(
        this.projectId,
        this.userId,
        this.version,
        this.comment,
        this.created_at,
        true
      )
    })
    it('should return the label', function () {
      this.res.json.calledWith(this.label).should.equal(true)
    })
  })
  describe('validate_exists = false is passed', function () {
    beforeEach(function () {
      this.req.body.validate_exists = false
      this.HistoryApiManager.shouldUseProjectHistory.yields(null, true)
      this.HttpController.createLabel(this.req, this.res, this.next)
    })
    it('should create a label for a project', function () {
      this.LabelsManager.createLabel
        .calledWith(
          this.projectId,
          this.userId,
          this.version,
          this.comment,
          this.created_at,
          false
        )
        .should.equal(true)
    })
    it('should return the label', function () {
      this.res.json.calledWith(this.label).should.equal(true)
    })
  })
  describe('project history is not enabled', function () {
    beforeEach(function () {
      this.HistoryApiManager.shouldUseProjectHistory.yields(null, false)
      this.HttpController.createLabel(this.req, this.res, this.next)
    })
    it('should return 409', function () {
      this.res.sendStatus.calledWith(409).should.equal(true)
    })
  })
})
// HttpController.deleteLabelForUser: deletes a label scoped to a user
// and responds 204 No Content.
describe('deleteLabelForUser', function () {
  beforeEach(function () {
    this.req = {
      params: {
        project_id: this.projectId,
        user_id: this.userId,
        label_id: (this.label_id = new ObjectId()),
      },
    }
    this.HttpController.deleteLabelForUser(this.req, this.res, this.next)
  })
  it('should delete a label for a project', function () {
    this.LabelsManager.deleteLabelForUser
      .calledWith(this.projectId, this.userId, this.label_id)
      .should.equal(true)
  })
  it('should return 204', function () {
    this.res.sendStatus.calledWith(204).should.equal(true)
  })
})
// HttpController.deleteLabel: deletes a label regardless of owner and
// responds 204 No Content.
describe('deleteLabel', function () {
  beforeEach(function () {
    this.req = {
      params: {
        project_id: this.projectId,
        label_id: (this.label_id = new ObjectId()),
      },
    }
    this.HttpController.deleteLabel(this.req, this.res, this.next)
  })
  it('should delete a label for a project', function () {
    this.LabelsManager.deleteLabel
      .calledWith(this.projectId, this.label_id)
      .should.equal(true)
  })
  it('should return 204', function () {
    this.res.sendStatus.calledWith(204).should.equal(true)
  })
})
// HttpController.deleteProject: tears down all per-project state — the
// Redis update queue, cached timestamps/ids, resync state and failure
// records.
describe('deleteProject', function () {
  beforeEach(function () {
    this.req = {
      params: {
        project_id: this.projectId,
      },
    }
    this.WebApiManager.getHistoryId
      .withArgs(this.projectId)
      .yields(null, this.historyId)
    this.HttpController.deleteProject(this.req, this.res, this.next)
  })
  it('should delete the updates queue', function () {
    this.RedisManager.destroyDocUpdatesQueue.should.have.been.calledWith(
      this.projectId
    )
  })
  it('should clear the first op timestamp', function () {
    this.RedisManager.clearFirstOpTimestamp.should.have.been.calledWith(
      this.projectId
    )
  })
  it('should clear the cached history id', function () {
    this.RedisManager.clearCachedHistoryId.should.have.been.calledWith(
      this.projectId
    )
  })
  it('should clear the resync state', function () {
    this.SyncManager.clearResyncState.should.have.been.calledWith(
      this.projectId
    )
  })
  it('should clear any failure record', function () {
    this.ErrorRecorder.clearError.should.have.been.calledWith(this.projectId)
  })
})
})

View File

@@ -0,0 +1,293 @@
import sinon from 'sinon'
import { expect } from 'chai'
import mongodb from 'mongodb-legacy'
import tk from 'timekeeper'
import { strict as esmock } from 'esmock'
const { ObjectId } = mongodb
const MODULE_PATH = '../../../../app/js/LabelsManager.js'
describe('LabelsManager', function () {
// Shared fixture: stub out mongo, the history store, the updates
// processor and the web API, then load LabelsManager with those mocks
// via esmock. Time is frozen so created_at defaults are deterministic.
beforeEach(async function () {
  this.now = new Date()
  tk.freeze(this.now)
  this.db = {
    projectHistoryLabels: {
      deleteOne: sinon.stub(),
      find: sinon.stub(),
      insertOne: sinon.stub(),
    },
  }
  this.mongodb = {
    ObjectId,
    db: this.db,
  }
  this.HistoryStoreManager = {
    getChunkAtVersion: sinon.stub().yields(),
  }
  this.UpdatesProcessor = {
    processUpdatesForProject: sinon.stub().yields(),
  }
  this.WebApiManager = {
    getHistoryId: sinon.stub(),
  }
  this.LabelsManager = await esmock(MODULE_PATH, {
    '../../../../app/js/mongodb.js': this.mongodb,
    '../../../../app/js/HistoryStoreManager.js': this.HistoryStoreManager,
    '../../../../app/js/UpdatesProcessor.js': this.UpdatesProcessor,
    '../../../../app/js/WebApiManager.js': this.WebApiManager,
  })
  this.project_id = new ObjectId().toString()
  this.historyId = 123
  this.user_id = new ObjectId().toString()
  this.label_id = new ObjectId().toString()
  this.callback = sinon.stub()
})
// Unfreeze the clock after each test.
afterEach(function () {
  tk.reset()
})
// LabelsManager.getLabels: reads labels from mongo by project id and
// maps the mongo documents into the API shape (_id -> id).
describe('getLabels', function () {
  beforeEach(function () {
    this.label = {
      _id: new ObjectId(),
      comment: 'some comment',
      version: 123,
      user_id: new ObjectId(),
      created_at: new Date(),
    }
    this.db.projectHistoryLabels.find.returns({
      toArray: sinon.stub().yields(null, [this.label]),
    })
  })
  describe('with valid project id', function () {
    beforeEach(function () {
      this.LabelsManager.getLabels(this.project_id, this.callback)
    })
    it('gets the labels state from mongo', function () {
      expect(this.db.projectHistoryLabels.find).to.have.been.calledWith({
        project_id: new ObjectId(this.project_id),
      })
    })
    it('returns formatted labels', function () {
      expect(this.callback).to.have.been.calledWith(null, [
        sinon.match({
          id: this.label._id,
          comment: this.label.comment,
          version: this.label.version,
          user_id: this.label.user_id,
          created_at: this.label.created_at,
        }),
      ])
    })
  })
  describe('with invalid project id', function () {
    it('returns an error', function (done) {
      // 'invalid id' is not a valid 24-char hex ObjectId string.
      this.LabelsManager.getLabels('invalid id', error => {
        expect(error).to.exist
        done()
      })
    })
  })
})
// LabelsManager.createLabel: flushes pending updates, resolves the
// history id, optionally validates that a chunk exists at the requested
// version, then inserts the label into mongo.
describe('createLabel', function () {
  beforeEach(function () {
    this.version = 123
    this.comment = 'a comment'
    this.WebApiManager.getHistoryId.yields(null, this.historyId)
  })
  describe('with createdAt', function () {
    beforeEach(function () {
      this.createdAt = new Date(1)
      this.db.projectHistoryLabels.insertOne.yields(null, {
        insertedId: new ObjectId(this.label_id),
      })
      this.LabelsManager.createLabel(
        this.project_id,
        this.user_id,
        this.version,
        this.comment,
        this.createdAt,
        true, // shouldValidateExists
        this.callback
      )
    })
    it('flushes unprocessed updates', function () {
      expect(
        this.UpdatesProcessor.processUpdatesForProject
      ).to.have.been.calledWith(this.project_id)
    })
    it('finds the V1 project id', function () {
      expect(this.WebApiManager.getHistoryId).to.have.been.calledWith(
        this.project_id
      )
    })
    it('checks there is a chunk for the project + version', function () {
      expect(
        this.HistoryStoreManager.getChunkAtVersion
      ).to.have.been.calledWith(this.project_id, this.historyId, this.version)
    })
    it('create the label in mongo', function () {
      expect(this.db.projectHistoryLabels.insertOne).to.have.been.calledWith(
        sinon.match({
          project_id: new ObjectId(this.project_id),
          comment: this.comment,
          version: this.version,
          user_id: new ObjectId(this.user_id),
          created_at: this.createdAt,
        }),
        sinon.match.any
      )
    })
    it('returns the label', function () {
      expect(this.callback).to.have.been.calledWith(null, {
        id: new ObjectId(this.label_id),
        comment: this.comment,
        version: this.version,
        user_id: new ObjectId(this.user_id),
        created_at: this.createdAt,
      })
    })
  })
  describe('without createdAt', function () {
    beforeEach(function () {
      this.db.projectHistoryLabels.insertOne.yields(null, {
        insertedId: new ObjectId(this.label_id),
      })
      this.LabelsManager.createLabel(
        this.project_id,
        this.user_id,
        this.version,
        this.comment,
        undefined,
        true,
        this.callback
      )
    })
    it('create the label with the current date', function () {
      // this.now is the frozen clock set up in the outer beforeEach.
      expect(this.db.projectHistoryLabels.insertOne).to.have.been.calledWith(
        sinon.match({
          project_id: new ObjectId(this.project_id),
          comment: this.comment,
          version: this.version,
          user_id: new ObjectId(this.user_id),
          created_at: this.now,
        })
      )
    })
  })
  describe('with shouldValidateExists = false', function () {
    beforeEach(function () {
      this.createdAt = new Date(1)
      this.db.projectHistoryLabels.insertOne.yields(null, {
        insertedId: new ObjectId(this.label_id),
      })
      this.LabelsManager.createLabel(
        this.project_id,
        this.user_id,
        this.version,
        this.comment,
        this.createdAt,
        false,
        this.callback
      )
    })
    it('checks there is a chunk for the project + version', function () {
      expect(this.HistoryStoreManager.getChunkAtVersion).to.not.have.been
        .called
    })
  })
  describe('with no userId', function () {
    beforeEach(function () {
      this.db.projectHistoryLabels.insertOne.yields(null, {
        insertedId: new ObjectId(this.label_id),
      })
      const userId = undefined
      // NOTE(review): this.createdAt is never set in this describe's hook
      // chain, so createLabel receives undefined here and the assertion
      // below expects the frozen-clock default — confirm this is intended.
      this.LabelsManager.createLabel(
        this.project_id,
        userId,
        this.version,
        this.comment,
        this.createdAt,
        false,
        this.callback
      )
    })
    it('creates the label without user_id', function () {
      expect(this.db.projectHistoryLabels.insertOne).to.have.been.calledWith(
        sinon.match({
          project_id: new ObjectId(this.project_id),
          comment: this.comment,
          version: this.version,
          user_id: undefined,
          created_at: this.now,
        })
      )
    })
  })
})
// LabelsManager.deleteLabelForUser: removes one label scoped by
// project, user and label id.
describe('deleteLabelForUser', function () {
  beforeEach(function () {
    this.db.projectHistoryLabels.deleteOne.yields()
    this.LabelsManager.deleteLabelForUser(
      this.project_id,
      this.user_id,
      this.label_id,
      this.callback
    )
  })

  it('removes the label from the database', function () {
    const expectedQuery = {
      _id: new ObjectId(this.label_id),
      project_id: new ObjectId(this.project_id),
      user_id: new ObjectId(this.user_id),
    }
    expect(this.db.projectHistoryLabels.deleteOne).to.have.been.calledWith(
      expectedQuery,
      this.callback
    )
  })
})
// LabelsManager.deleteLabel: removes one label by project and label id
// (no user scoping).
describe('deleteLabel', function () {
  beforeEach(function () {
    this.db.projectHistoryLabels.deleteOne.yields()
    this.LabelsManager.deleteLabel(
      this.project_id,
      this.label_id,
      this.callback
    )
  })
  it('removes the label from the database', function () {
    expect(this.db.projectHistoryLabels.deleteOne).to.have.been.calledWith(
      {
        _id: new ObjectId(this.label_id),
        project_id: new ObjectId(this.project_id),
      },
      this.callback
    )
  })
})
})

View File

@@ -0,0 +1,422 @@
/* eslint-disable
mocha/no-nested-tests,
no-return-assign,
no-undef,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS206: Consider reworking classes to avoid initClass
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
import async from 'async'
import sinon from 'sinon'
import { expect } from 'chai'
import { strict as esmock } from 'esmock'
const MODULE_PATH = '../../../../app/js/LockManager.js'
describe('LockManager', function () {
// Shared fixture: stub the redis client, settings, metrics and logger,
// then load LockManager via esmock. Sinon fake timers let the lock
// polling/TTL tests drive time deterministically.
beforeEach(async function () {
  let Timer
  this.Settings = {
    redis: {
      lock: {},
    },
  }
  this.rclient = {
    auth: sinon.stub(),
    del: sinon.stub().yields(),
    eval: sinon.stub(),
    exists: sinon.stub(),
    set: sinon.stub(),
  }
  this.RedisWrapper = {
    createClient: sinon.stub().returns(this.rclient),
  }
  this.Metrics = {
    inc: sinon.stub(),
    gauge: sinon.stub(),
    // Decaffeinate residue: a Timer class whose prototype carries a single
    // shared `done` stub, so every Timer instance reports to the same stub.
    Timer: (Timer = (function () {
      Timer = class Timer {
        static initClass() {
          this.prototype.done = sinon.stub()
        }
      }
      Timer.initClass()
      return Timer
    })()),
  }
  this.logger = {
    debug: sinon.stub(),
  }
  this.LockManager = await esmock(MODULE_PATH, {
    '@overleaf/redis-wrapper': this.RedisWrapper,
    '@overleaf/settings': this.Settings,
    '@overleaf/metrics': this.Metrics,
    '@overleaf/logger': this.logger,
  })
  this.key = 'lock-key'
  this.callback = sinon.stub()
  this.clock = sinon.useFakeTimers()
})
// Restore real timers after each test.
afterEach(function () {
  this.clock.restore()
})
// LockManager.checkLock: reports whether the lock key currently exists
// in redis ('1' taken, '0' free).
describe('checkLock', function () {
  describe('when the lock is taken', function () {
    beforeEach(function () {
      this.rclient.exists.yields(null, '1')
      this.LockManager.checkLock(this.key, this.callback)
    })

    it('should check the lock in redis', function () {
      this.rclient.exists.should.have.been.calledWith(this.key)
    })

    it('should return the callback with false', function () {
      this.callback.should.have.been.calledWith(null, false)
    })
  })

  describe('when the lock is free', function () {
    beforeEach(function () {
      this.rclient.exists.yields(null, '0')
      this.LockManager.checkLock(this.key, this.callback)
    })

    it('should return the callback with true', function () {
      this.callback.should.have.been.calledWith(null, true)
    })
  })
})
// LockManager.tryLock: single SET NX EX attempt; succeeds iff redis
// answers 'OK'.
describe('tryLock', function () {
  describe('when the lock is taken', function () {
    beforeEach(function () {
      this.rclient.set.yields(null, null)
      this.LockManager._mocks.randomLock = sinon
        .stub()
        .returns('locked-random-value')
      this.LockManager.tryLock(this.key, this.callback)
    })

    it('should check the lock in redis', function () {
      this.rclient.set.should.have.been.calledWith(
        this.key,
        'locked-random-value',
        'EX',
        this.LockManager.LOCK_TTL,
        'NX'
      )
    })

    it('should return the callback with false', function () {
      this.callback.should.have.been.calledWith(null, false)
    })
  })

  describe('when the lock is free', function () {
    beforeEach(function () {
      this.rclient.set.yields(null, 'OK')
      this.LockManager.tryLock(this.key, this.callback)
    })

    it('should return the callback with true', function () {
      this.callback.should.have.been.calledWith(null, true)
    })
  })
})
// LockManager.deleteLock: unconditionally deletes the lock key.
//
// FIX: the decaffeinated original wrapped a second `beforeEach` and the
// `it` blocks inside an outer `beforeEach`, so neither test was ever
// registered with mocha, and the assertion referenced an undefined bare
// `key` instead of `this.key`. Restructured into a normal suite.
describe('deleteLock', function () {
  beforeEach(function () {
    this.LockManager.deleteLock(this.key, this.callback)
  })
  it('should delete the lock in redis', function () {
    this.rclient.del.calledWith(this.key).should.equal(true)
  })
  it('should call the callback', function () {
    this.callback.called.should.equal(true)
  })
})
// LockManager.getLock: retries tryLock until it succeeds or the overall
// timeout elapses, driven here by sinon fake timers.
//
// FIX: removed the unused `const time = Date.now()` local and the
// decaffeinate residue (`...Array.from(args || [])`, superfluous
// `return`s in mocha callbacks) — no behaviour change.
describe('getLock', function () {
  describe('when the lock is not taken', function () {
    beforeEach(function (done) {
      this.LockManager._mocks.tryLock = sinon.stub().yields(null, true)
      this.LockManager.getLock(this.key, (...args) => {
        this.callback(...args)
        done()
      })
    })
    it('should try to get the lock', function () {
      this.LockManager._mocks.tryLock.calledWith(this.key).should.equal(true)
    })
    it('should only need to try once', function () {
      this.LockManager._mocks.tryLock.callCount.should.equal(1)
    })
    it('should return the callback', function () {
      this.callback.calledWith(null).should.equal(true)
    })
  })
  describe('when the lock is initially set', function () {
    beforeEach(function (done) {
      // Fail the first three attempts, succeed on the fourth.
      this.LockManager._mocks.tryLock = sinon.stub()
      this.LockManager._mocks.tryLock.onCall(0).yields(null, false)
      this.LockManager._mocks.tryLock.onCall(1).yields(null, false)
      this.LockManager._mocks.tryLock.onCall(2).yields(null, false)
      this.LockManager._mocks.tryLock.onCall(3).yields(null, true)
      this.LockManager.getLock(this.key, (...args) => {
        this.callback(...args)
        done()
      })
      // Advance the fake clock so the retry timers fire.
      this.clock.runAll()
    })
    it('should call tryLock multiple times until free', function () {
      this.LockManager._mocks.tryLock.callCount.should.equal(4)
    })
    it('should return the callback', function () {
      this.callback.calledWith(null).should.equal(true)
    })
  })
  describe('when the lock times out', function () {
    beforeEach(function (done) {
      // tryLock never succeeds; running all timers exhausts the timeout.
      this.LockManager._mocks.tryLock = sinon.stub().yields(null, false)
      this.LockManager.getLock(this.key, (...args) => {
        this.callback(...args)
        done()
      })
      this.clock.runAll()
    })
    it('should return the callback with an error', function () {
      this.callback
        .calledWith(sinon.match.instanceOf(Error))
        .should.equal(true)
    })
  })
})
return describe('runWithLock', function () {
// runWithLock happy path: acquires the lock, runs the function, releases
// the lock and forwards the runner's result to the callback.
describe('with successful run', function () {
  beforeEach(function () {
    this.result = 'mock-result'
    // Runner releases the lock immediately with a result.
    this.runner = sinon.stub().callsFake((extendLock, releaseLock) => {
      return releaseLock(null, this.result)
    })
    this.LockManager._mocks.getLock = sinon.stub().yields()
    this.LockManager._mocks.releaseLock = sinon.stub().yields()
    return this.LockManager.runWithLock(
      this.key,
      this.runner,
      this.callback
    )
  })
  it('should get the lock', function () {
    return this.LockManager._mocks.getLock
      .calledWith(this.key)
      .should.equal(true)
  })
  it('should run the passed function', function () {
    return this.runner.called.should.equal(true)
  })
  it('should release the lock', function () {
    return this.LockManager._mocks.releaseLock
      .calledWith(this.key)
      .should.equal(true)
  })
  return it('should call the callback', function () {
    return this.callback.calledWith(null, this.result).should.equal(true)
  })
})
// runWithLock error path: the lock must still be released and the
// runner's error forwarded to the callback.
describe('when the runner function returns an error', function () {
  beforeEach(function () {
    this.error = new Error('oops')
    this.result = 'mock-result'
    this.runner = sinon.stub().callsFake((extendLock, releaseLock) => {
      releaseLock(this.error, this.result)
    })
    this.LockManager._mocks.getLock = sinon.stub().yields()
    this.LockManager._mocks.releaseLock = sinon.stub().yields()
    this.LockManager.runWithLock(this.key, this.runner, this.callback)
  })

  it('should release the lock', function () {
    this.LockManager._mocks.releaseLock
      .calledWith(this.key)
      .should.equal(true)
  })

  it('should call the callback with the error', function () {
    this.callback.calledWith(this.error, this.result).should.equal(true)
  })
})
// runWithLock lock extension: extendLock only hits redis once the
// minimum extension interval has elapsed (fake clock controls time).
describe('extending the lock whilst running', function () {
  beforeEach(function () {
    this.lockValue = 'lock-value'
    this.LockManager._mocks.getLock = sinon
      .stub()
      .yields(null, this.lockValue)
    this.LockManager._mocks.extendLock = sinon.stub().callsArg(2)
    this.LockManager._mocks.releaseLock = sinon.stub().callsArg(2)
  })
  it('should extend the lock if the minimum interval has been passed', function (done) {
    const runner = (extendLock, releaseLock) => {
      this.clock.tick(this.LockManager.MIN_LOCK_EXTENSION_INTERVAL + 1)
      return extendLock(releaseLock)
    }
    return this.LockManager.runWithLock(this.key, runner, () => {
      this.LockManager._mocks.extendLock
        .calledWith(this.key, this.lockValue)
        .should.equal(true)
      return done()
    })
  })
  return it('should not extend the lock if the minimum interval has not been passed', function (done) {
    const runner = (extendLock, releaseLock) => {
      this.clock.tick(this.LockManager.MIN_LOCK_EXTENSION_INTERVAL - 1)
      return extendLock(releaseLock)
    }
    return this.LockManager.runWithLock(this.key, runner, () => {
      this.LockManager._mocks.extendLock.callCount.should.equal(0)
      return done()
    })
  })
})
// runWithLock TTL logging: a debug line is emitted only when execution
// outlives the lock TTL without an extension.
//
// FIX: removed the unused `slowExecutionError` local, fixed the
// "excecution" typo in the test title, and dropped decaffeinate
// `return` residue — no behaviour change.
describe('exceeding the lock ttl', function () {
  beforeEach(function () {
    this.lockValue = 'lock-value'
    this.LockManager._mocks.getLock = sinon
      .stub()
      .yields(null, this.lockValue)
    this.LockManager._mocks.extendLock = sinon.stub().yields()
    this.LockManager._mocks.releaseLock = sinon.stub().yields()
    // LOCK_TTL is in seconds; the fake clock ticks in milliseconds.
    this.LOCK_TTL_MS = this.LockManager.LOCK_TTL * 1000
  })
  it("doesn't log if the ttl wasn't exceeded", function (done) {
    const runner = (extendLock, releaseLock) => {
      this.clock.tick(this.LOCK_TTL_MS - 1)
      releaseLock()
    }
    this.LockManager.runWithLock(this.key, runner, () => {
      this.logger.debug.callCount.should.equal(0)
      done()
    })
  })
  it("doesn't log if the lock was extended", function (done) {
    const runner = (extendLock, releaseLock) => {
      this.clock.tick(this.LOCK_TTL_MS - 1)
      extendLock(() => {
        // The extension resets the deadline, so ticking past the
        // original TTL must not trigger the log line.
        this.clock.tick(2)
        releaseLock()
      })
    }
    this.LockManager.runWithLock(this.key, runner, () => {
      this.logger.debug.callCount.should.equal(0)
      done()
    })
  })
  it('logs that the execution exceeded the lock', function (done) {
    const runner = (extendLock, releaseLock) => {
      this.clock.tick(this.LOCK_TTL_MS + 1)
      releaseLock()
    }
    this.LockManager.runWithLock(this.key, runner, () => {
      this.logger.debug
        .calledWithMatch('exceeded lock timeout', { key: this.key })
        .should.equal(true)
      done()
    })
  })
})
return describe('releaseLock', function () {
describe('when the lock is current', function () {
beforeEach(function () {
this.rclient.eval.yields(null, 1)
return this.LockManager.releaseLock(
this.key,
this.lockValue,
this.callback
)
})
it('should clear the data from redis', function () {
return this.rclient.eval
.calledWith(
this.LockManager.UNLOCK_SCRIPT,
1,
this.key,
this.lockValue
)
.should.equal(true)
})
return it('should call the callback', function () {
return this.callback.called.should.equal(true)
})
})
return describe('when the lock has expired', function () {
beforeEach(function () {
this.rclient.eval.yields(null, 0)
return this.LockManager.releaseLock(
this.key,
this.lockValue,
this.callback
)
})
return it('should return an error if the lock has expired', function () {
return this.callback
.calledWith(
sinon.match.has('message', 'tried to release timed out lock')
)
.should.equal(true)
})
})
})
})
})

View File

@@ -0,0 +1,76 @@
import { expect } from 'chai'
import Core from 'overleaf-editor-core'
import * as OperationsCompressor from '../../../../app/js/OperationsCompressor.js'
// OperationsCompressor.compressOperations: composes consecutive edit
// operations on the same file into one, leaving other operations alone.
//
// FIX: corrected the ungrammatical test titles ("don't collapses",
// "handle empty/single operations") — assertions are unchanged.
describe('OperationsCompressor', function () {
  // Helper: build an EditFile operation for `pathname` from a raw
  // TextOperation JSON array (retain counts, inserted strings, deletes).
  function edit(pathname, textOperationJsonObject) {
    return Core.Operation.editFile(
      pathname,
      Core.TextOperation.fromJSON({ textOperation: textOperationJsonObject })
    )
  }
  it('collapses edit operations', function () {
    const compressedOperations = OperationsCompressor.compressOperations([
      edit('main.tex', [3, 'foo', 17]),
      edit('main.tex', [10, -5, 8]),
    ])
    expect(compressedOperations).to.have.length(1)
    expect(compressedOperations[0]).to.deep.equal(
      edit('main.tex', [3, 'foo', 4, -5, 8])
    )
  })
  it('only collapses consecutive composable edit operations', function () {
    const compressedOperations = OperationsCompressor.compressOperations([
      edit('main.tex', [3, 'foo', 17]),
      edit('main.tex', [10, -5, 8]),
      edit('not-main.tex', [3, 'foo', 17]),
      edit('not-main.tex', [10, -5, 8]),
    ])
    expect(compressedOperations).to.have.length(2)
    expect(compressedOperations[0]).to.deep.equal(
      edit('main.tex', [3, 'foo', 4, -5, 8])
    )
    expect(compressedOperations[1]).to.deep.equal(
      edit('not-main.tex', [3, 'foo', 4, -5, 8])
    )
  })
  it("doesn't collapse text operations around non-composable operations", function () {
    const compressedOperations = OperationsCompressor.compressOperations([
      edit('main.tex', [3, 'foo', 17]),
      Core.Operation.moveFile('main.tex', 'new-main.tex'),
      edit('new-main.tex', [10, -5, 8]),
      edit('new-main.tex', [6, 'bar', 12]),
    ])
    expect(compressedOperations).to.have.length(3)
    expect(compressedOperations[0]).to.deep.equal(
      edit('main.tex', [3, 'foo', 17])
    )
    expect(compressedOperations[1].newPathname).to.deep.equal('new-main.tex')
    expect(compressedOperations[2]).to.deep.equal(
      edit('new-main.tex', [6, 'bar', 4, -5, 8])
    )
  })
  it('handles empty operations', function () {
    const compressedOperations = OperationsCompressor.compressOperations([])
    expect(compressedOperations).to.have.length(0)
  })
  it('handles single operations', function () {
    const compressedOperations = OperationsCompressor.compressOperations([
      edit('main.tex', [3, 'foo', 17]),
    ])
    expect(compressedOperations).to.have.length(1)
    expect(compressedOperations[0]).to.deep.equal(
      edit('main.tex', [3, 'foo', 17])
    )
  })
})

View File

@@ -0,0 +1,556 @@
import { expect } from 'chai'
import sinon from 'sinon'
import { strict as esmock } from 'esmock'
const MODULE_PATH = '../../../../app/js/RedisManager.js'
describe('RedisManager', function () {
// Shared fixture: back RedisManager with an in-memory FakeRedis (defined
// elsewhere in this file), stubbed metrics and the real key schema so
// list keys match production layout.
beforeEach(async function () {
  this.rclient = new FakeRedis()
  this.RedisWrapper = {
    createClient: sinon.stub().returns(this.rclient),
  }
  this.Settings = {
    redis: {
      project_history: {
        key_schema: {
          projectHistoryOps({ project_id: projectId }) {
            return `Project:HistoryOps:{${projectId}}`
          },
          projectHistoryFirstOpTimestamp({ project_id: projectId }) {
            return `ProjectHistory:FirstOpTimestamp:{${projectId}}`
          },
        },
      },
    },
  }
  this.Metrics = {
    timing: sinon.stub(),
    summary: sinon.stub(),
    globalGauge: sinon.stub(),
  }
  this.RedisManager = await esmock(MODULE_PATH, {
    '@overleaf/redis-wrapper': this.RedisWrapper,
    '@overleaf/settings': this.Settings,
    '@overleaf/metrics': this.Metrics,
  })
  this.projectId = 'project-id-123'
  this.batchSize = 100
  this.historyOpsKey = `Project:HistoryOps:{${this.projectId}}`
  this.firstOpTimestampKey = `ProjectHistory:FirstOpTimestamp:{${this.projectId}}`
  // Default update fixtures; individual suites override these below.
  this.updates = [
    { v: 42, op: ['a', 'b', 'c', 'd'] },
    { v: 45, op: ['e', 'f', 'g', 'h'] },
  ]
  this.extraUpdates = [{ v: 100, op: ['i', 'j', 'k'] }]
  this.rawUpdates = this.updates.map(update => JSON.stringify(update))
  this.extraRawUpdates = this.extraUpdates.map(update =>
    JSON.stringify(update)
  )
})
// RedisManager.getRawUpdatesBatch: pops raw (unparsed) updates from the
// project's history-ops list, batching internally and reporting whether
// more remain.
describe('getRawUpdatesBatch', function () {
  it('gets a small number of updates in one batch', async function () {
    const updates = makeUpdates(2)
    const rawUpdates = makeRawUpdates(updates)
    this.rclient.setList(this.historyOpsKey, rawUpdates)
    const result = await this.RedisManager.promises.getRawUpdatesBatch(
      this.projectId,
      100
    )
    expect(result).to.deep.equal({ rawUpdates, hasMore: false })
  })
  it('gets a larger number of updates in several batches', async function () {
    // More than two internal batches' worth forces multiple redis reads.
    const updates = makeUpdates(
      this.RedisManager.RAW_UPDATES_BATCH_SIZE * 2 + 12
    )
    const rawUpdates = makeRawUpdates(updates)
    this.rclient.setList(this.historyOpsKey, rawUpdates)
    const result = await this.RedisManager.promises.getRawUpdatesBatch(
      this.projectId,
      5000
    )
    expect(result).to.deep.equal({ rawUpdates, hasMore: false })
  })
  it("doesn't return more than the number of updates requested", async function () {
    const updates = makeUpdates(100)
    const rawUpdates = makeRawUpdates(updates)
    this.rclient.setList(this.historyOpsKey, rawUpdates)
    const result = await this.RedisManager.promises.getRawUpdatesBatch(
      this.projectId,
      75
    )
    expect(result).to.deep.equal({
      rawUpdates: rawUpdates.slice(0, 75),
      hasMore: true,
    })
  })
})
// RedisManager.parseDocUpdates: JSON-parses each raw update string.
describe('parseDocUpdates', function () {
  it('should return the parsed ops', function () {
    const updates = makeUpdates(12)
    const rawUpdates = makeRawUpdates(updates)
    const parsed = this.RedisManager.parseDocUpdates(rawUpdates)
    expect(parsed).to.deep.equal(updates)
  })
})
describe('getUpdatesInBatches', function () {
beforeEach(function () {
  // Batch-processing callback handed to getUpdatesInBatches; resolves
  // immediately so each suite only inspects how it was called.
  this.runner = sinon.stub().resolves()
})
// Fewer updates than the batch size: one runner call, queue drained,
// first-op timestamp cleared.
describe('single batch smaller than batch size', function () {
  beforeEach(async function () {
    this.updates = makeUpdates(2)
    this.rawUpdates = makeRawUpdates(this.updates)
    this.rclient.setList(this.historyOpsKey, this.rawUpdates)
    await this.RedisManager.promises.getUpdatesInBatches(
      this.projectId,
      3,
      this.runner
    )
  })
  it('calls the runner once', function () {
    this.runner.callCount.should.equal(1)
  })
  it('calls the runner with the updates', function () {
    this.runner.should.have.been.calledWith(this.updates)
  })
  it('deletes the applied updates', function () {
    expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([])
  })
  it('deletes the first op timestamp', function () {
    expect(this.rclient.del).to.have.been.calledWith(
      this.firstOpTimestampKey
    )
  })
})
// Update count exactly equal to the batch size: still a single runner call.
describe('single batch at batch size', function () {
  beforeEach(async function () {
    this.updates = makeUpdates(123)
    this.rawUpdates = makeRawUpdates(this.updates)
    this.rclient.setList(this.historyOpsKey, this.rawUpdates)
    await this.RedisManager.promises.getUpdatesInBatches(
      this.projectId,
      123,
      this.runner
    )
  })
  it('calls the runner once', function () {
    this.runner.callCount.should.equal(1)
  })
  it('calls the runner with the updates', function () {
    this.runner.should.have.been.calledWith(this.updates)
  })
  it('deletes the applied updates', function () {
    expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([])
  })
  it('deletes the first op timestamp', function () {
    expect(this.rclient.del).to.have.been.calledWith(
      this.firstOpTimestampKey
    )
  })
})
// Each update's serialized size exceeds RAW_UPDATE_SIZE_THRESHOLD, so
// the batch is split and the runner is invoked once per update.
describe('single batch exceeding size limit on updates', function () {
  beforeEach(async function () {
    this.updates = makeUpdates(2, [
      'x'.repeat(this.RedisManager.RAW_UPDATE_SIZE_THRESHOLD),
    ])
    this.rawUpdates = makeRawUpdates(this.updates)
    this.rclient.setList(this.historyOpsKey, this.rawUpdates)
    await this.RedisManager.promises.getUpdatesInBatches(
      this.projectId,
      123,
      this.runner
    )
  })
  it('calls the runner twice', function () {
    this.runner.callCount.should.equal(2)
  })
  it('calls the runner with the first update', function () {
    this.runner
      .getCall(0)
      .should.have.been.calledWith(this.updates.slice(0, 1))
  })
  it('calls the runner with the second update', function () {
    this.runner
      .getCall(1)
      .should.have.been.calledWith(this.updates.slice(1))
  })
  it('deletes the applied updates', function () {
    expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([])
  })
})
// Each update is half the size threshold; together they exceed it, so
// the second update starts a new batch.
describe('two batches with first update below and second update above the size limit on updates', function () {
  beforeEach(async function () {
    this.updates = makeUpdates(2, [
      'x'.repeat(this.RedisManager.RAW_UPDATE_SIZE_THRESHOLD / 2),
    ])
    this.rawUpdates = makeRawUpdates(this.updates)
    this.rclient.setList(this.historyOpsKey, this.rawUpdates)
    await this.RedisManager.promises.getUpdatesInBatches(
      this.projectId,
      123,
      this.runner
    )
  })
  it('calls the runner twice', function () {
    this.runner.callCount.should.equal(2)
  })
  it('calls the runner with the first update', function () {
    this.runner
      .getCall(0)
      .should.have.been.calledWith(this.updates.slice(0, 1))
  })
  it('calls the runner with the second update', function () {
    this.runner
      .getCall(1)
      .should.have.been.calledWith(this.updates.slice(1))
  })
  it('deletes the applied updates', function () {
    expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([])
  })
})
// Each update carries more ops than MAX_UPDATE_OP_LENGTH, forcing one
// runner call per update.
describe('single batch exceeding op count limit on updates', function () {
  beforeEach(async function () {
    const ops = Array(this.RedisManager.MAX_UPDATE_OP_LENGTH + 1).fill('op')
    this.updates = makeUpdates(2, { op: ops })
    this.rawUpdates = makeRawUpdates(this.updates)
    this.rclient.setList(this.historyOpsKey, this.rawUpdates)
    await this.RedisManager.promises.getUpdatesInBatches(
      this.projectId,
      123,
      this.runner
    )
  })
  it('calls the runner twice', function () {
    this.runner.callCount.should.equal(2)
  })
  it('calls the runner with the first update', function () {
    this.runner
      .getCall(0)
      .should.have.been.calledWith(this.updates.slice(0, 1))
  })
  it('calls the runner with the second update', function () {
    this.runner
      .getCall(1)
      .should.have.been.calledWith(this.updates.slice(1))
  })
  it('deletes the applied updates', function () {
    expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([])
  })
})
describe('single batch exceeding doc content count', function () {
  beforeEach(async function () {
    // More resync-doc-content updates than MAX_NEW_DOC_CONTENT_COUNT:
    // expect a full batch of MAX_NEW_DOC_CONTENT_COUNT followed by a
    // second batch with the remaining 3.
    this.updates = makeUpdates(
      this.RedisManager.MAX_NEW_DOC_CONTENT_COUNT + 3,
      { resyncDocContent: 123 }
    )
    this.rawUpdates = makeRawUpdates(this.updates)
    this.rclient.setList(this.historyOpsKey, this.rawUpdates)
    await this.RedisManager.promises.getUpdatesInBatches(
      this.projectId,
      123,
      this.runner
    )
  })

  it('calls the runner twice', function () {
    this.runner.callCount.should.equal(2)
  })

  it('calls the runner with the first batch of updates', function () {
    this.runner.should.have.been.calledWith(
      this.updates.slice(0, this.RedisManager.MAX_NEW_DOC_CONTENT_COUNT)
    )
  })

  it('calls the runner with the second batch of updates', function () {
    this.runner.should.have.been.calledWith(
      this.updates.slice(this.RedisManager.MAX_NEW_DOC_CONTENT_COUNT)
    )
  })

  it('deletes the applied updates', function () {
    expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([])
  })
})
describe('two batches with first update below and second update above the ops length limit on updates', function () {
  beforeEach(async function () {
    // The first update has a small op list; the second update's op list
    // alone exceeds MAX_UPDATE_OP_LENGTH, so it must be split into its
    // own batch.
    this.updates = makeUpdates(2, { op: ['op1', 'op2'] })
    this.updates[1].op = Array(
      this.RedisManager.MAX_UPDATE_OP_LENGTH + 2
    ).fill('op')
    this.rawUpdates = makeRawUpdates(this.updates)
    this.rclient.setList(this.historyOpsKey, this.rawUpdates)
    await this.RedisManager.promises.getUpdatesInBatches(
      this.projectId,
      123,
      this.runner
    )
  })

  it('calls the runner twice', function () {
    this.runner.callCount.should.equal(2)
  })

  it('calls the runner with the first update', function () {
    this.runner.should.have.been.calledWith(this.updates.slice(0, 1))
  })

  it('calls the runner with the second update', function () {
    this.runner.should.have.been.calledWith(this.updates.slice(1))
  })

  it('deletes the applied updates', function () {
    expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([])
  })
})
describe('two batches, one partial', function () {
  beforeEach(async function () {
    // 15 updates with a batch size of 10: one full batch of 10 and a
    // partial batch of 5.
    this.updates = makeUpdates(15)
    this.rawUpdates = makeRawUpdates(this.updates)
    this.rclient.setList(this.historyOpsKey, this.rawUpdates)
    await this.RedisManager.promises.getUpdatesInBatches(
      this.projectId,
      10,
      this.runner
    )
  })

  it('calls the runner twice', function () {
    this.runner.callCount.should.equal(2)
  })

  it('calls the runner with the updates', function () {
    this.runner
      .getCall(0)
      .should.have.been.calledWith(this.updates.slice(0, 10))
    this.runner
      .getCall(1)
      .should.have.been.calledWith(this.updates.slice(10))
  })

  it('deletes the applied updates', function () {
    expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([])
  })
})
describe('two full batches', function () {
  beforeEach(async function () {
    // 20 updates with a batch size of 10: exactly two full batches.
    this.updates = makeUpdates(20)
    this.rawUpdates = makeRawUpdates(this.updates)
    this.rclient.setList(this.historyOpsKey, this.rawUpdates)
    await this.RedisManager.promises.getUpdatesInBatches(
      this.projectId,
      10,
      this.runner
    )
  })

  it('calls the runner twice', function () {
    this.runner.callCount.should.equal(2)
  })

  it('calls the runner with the updates', function () {
    this.runner
      .getCall(0)
      .should.have.been.calledWith(this.updates.slice(0, 10))
    this.runner
      .getCall(1)
      .should.have.been.calledWith(this.updates.slice(10))
  })

  it('deletes the applied updates', function () {
    expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([])
  })
})
// Three full batches, where each batch is larger than the number of raw
// updates Redis is read in one go (RAW_UPDATES_BATCH_SIZE), so the
// internal lrange paging is exercised as well.
describe('three full batches, bigger than the Redis read batch size', function () {
  beforeEach(async function () {
    this.batchSize = this.RedisManager.RAW_UPDATES_BATCH_SIZE * 2
    this.updates = makeUpdates(this.batchSize * 3)
    this.rawUpdates = makeRawUpdates(this.updates)
    this.rclient.setList(this.historyOpsKey, this.rawUpdates)
    await this.RedisManager.promises.getUpdatesInBatches(
      this.projectId,
      this.batchSize,
      this.runner
    )
  })

  // Fixed: the test was named 'calls the runner twice' but asserts three
  // calls; the suite name also had a typo ('bathches').
  it('calls the runner three times', function () {
    this.runner.callCount.should.equal(3)
  })

  it('calls the runner with the updates', function () {
    this.runner
      .getCall(0)
      .should.have.been.calledWith(this.updates.slice(0, this.batchSize))
    this.runner
      .getCall(1)
      .should.have.been.calledWith(
        this.updates.slice(this.batchSize, this.batchSize * 2)
      )
    this.runner
      .getCall(2)
      .should.have.been.calledWith(this.updates.slice(this.batchSize * 2))
  })

  it('deletes the applied updates', function () {
    expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([])
  })
})
describe('error when first reading updates', function () {
  beforeEach(async function () {
    this.updates = makeUpdates(10)
    this.rawUpdates = makeRawUpdates(this.updates)
    this.rclient.setList(this.historyOpsKey, this.rawUpdates)
    // Fail the very first LRANGE: nothing should be processed or deleted.
    this.rclient.throwErrorOnLrangeCall(0)
    await expect(
      this.RedisManager.promises.getUpdatesInBatches(
        this.projectId,
        2,
        this.runner
      )
    ).to.be.rejected
  })

  it('does not delete any updates', function () {
    expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal(
      this.rawUpdates
    )
  })
})
describe('error when reading updates for a second batch', function () {
  beforeEach(async function () {
    this.batchSize = this.RedisManager.RAW_UPDATES_BATCH_SIZE - 1
    this.updates = makeUpdates(this.RedisManager.RAW_UPDATES_BATCH_SIZE * 2)
    this.rawUpdates = makeRawUpdates(this.updates)
    this.rclient.setList(this.historyOpsKey, this.rawUpdates)
    // Fail the second LRANGE: only the first batch should have been
    // processed and removed from the list before the error surfaces.
    this.rclient.throwErrorOnLrangeCall(1)
    await expect(
      this.RedisManager.promises.getUpdatesInBatches(
        this.projectId,
        this.batchSize,
        this.runner
      )
    ).to.be.rejected
  })

  it('calls the runner with the first batch of updates', function () {
    this.runner.should.have.been.calledOnce
    this.runner
      .getCall(0)
      .should.have.been.calledWith(this.updates.slice(0, this.batchSize))
  })

  it('deletes only the first batch of applied updates', function () {
    expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal(
      this.rawUpdates.slice(this.batchSize)
    )
  })
})
})
})
/**
 * In-memory stand-in for the subset of the Redis client API that these
 * tests exercise: list reads (lrange), element removal (lrem), key
 * deletion (a sinon stub), and a pass-through multi/exec pipeline.
 * A specific lrange call can be made to fail via throwErrorOnLrangeCall.
 */
class FakeRedis {
  constructor() {
    this.data = new Map()
    this.del = sinon.stub()
    this.lrangeCallCount = -1
  }

  // Seed a Redis list under the given key.
  setList(key, list) {
    this.data.set(key, list)
  }

  // Read back a list (used by test assertions).
  getList(key) {
    return this.data.get(key)
  }

  // Make the callNum-th lrange invocation (0-based) throw.
  throwErrorOnLrangeCall(callNum) {
    this.lrangeCallThrowingError = callNum
  }

  async lrange(key, start, stop) {
    this.lrangeCallCount += 1
    // lrangeCallCount is always a number, so comparing against an unset
    // (undefined) lrangeCallThrowingError is harmlessly false.
    if (this.lrangeCallCount === this.lrangeCallThrowingError) {
      throw new Error('LRANGE failed!')
    }
    const list = this.data.get(key) ?? []
    // Redis LRANGE bounds are inclusive, hence stop + 1.
    return list.slice(start, stop + 1)
  }

  async lrem(key, count, elementToRemove) {
    // Only the "remove from head, positive count" form is supported.
    expect(count).to.be.greaterThan(0)
    let remaining = count
    const kept = []
    for (const element of this.data.get(key) ?? []) {
      if (remaining > 0 && element === elementToRemove) {
        remaining -= 1
      } else {
        kept.push(element)
      }
    }
    this.data.set(key, kept)
  }

  async exec() {
    // Nothing to do
  }

  // The fake pipeline is just the client itself.
  multi() {
    return this
  }
}
/**
 * Build a list of minimal update objects with consecutive versions,
 * starting at v: 0. Any extraFields are spread into every update
 * (and may therefore override v).
 */
function makeUpdates(updateCount, extraFields = {}) {
  return Array.from({ length: updateCount }, (_, v) => ({
    v,
    ...extraFields,
  }))
}
/**
 * Serialize updates the way they are stored in the Redis list.
 *
 * The callback is wrapped in an arrow function instead of passing
 * JSON.stringify directly to map(): map() also supplies the element index
 * and the whole array, which JSON.stringify would receive as its
 * `replacer` and `space` parameters. Today both happen to be ignored
 * (neither is a function/array replacer or a string/number space), but
 * relying on that is fragile.
 * @param {Array<object>} updates - parsed update objects
 * @returns {Array<string>} one JSON string per update
 */
function makeRawUpdates(updates) {
  return updates.map(update => JSON.stringify(update))
}

View File

@@ -0,0 +1,145 @@
import sinon from 'sinon'
import { expect } from 'chai'
import mongodb from 'mongodb-legacy'
import { strict as esmock } from 'esmock'
const { ObjectId } = mongodb
const MODULE_PATH = '../../../../app/js/RetryManager.js'
describe('RetryManager', function () {
  beforeEach(async function () {
    this.projectId1 = new ObjectId().toString()
    this.projectId2 = new ObjectId().toString()
    this.projectId3 = new ObjectId().toString()
    this.projectId4 = new ObjectId().toString()
    this.historyId = 12345
    this.WebApiManager = {
      promises: {
        getHistoryId: sinon.stub().resolves(this.historyId),
      },
    }
    this.RedisManager = {
      promises: {
        countUnprocessedUpdates: sinon.stub().resolves(0),
      },
    }
    this.ErrorRecorder = {
      promises: {
        // Four canned failure records covering the retry decision matrix
        // exercised by the tests below. The classification mechanism
        // (presumably based on attempts/resyncAttempts) lives in
        // RetryManager — confirm thresholds there.
        getFailedProjects: sinon.stub().resolves([
          {
            // retried as a soft failure (queue flush) in the tests below
            project_id: this.projectId1,
            error: 'Error: Timeout',
            attempts: 1,
          },
          {
            // hard failure with no resync attempted yet -> soft resync
            project_id: this.projectId2,
            error: 'Error: Timeout',
            attempts: 25,
          },
          {
            // one failed resync already -> escalate to a hard resync
            project_id: this.projectId3,
            error: 'sync ongoing',
            attempts: 10,
            resyncAttempts: 1,
          },
          {
            // hard resync already failed -> no further resync expected
            project_id: this.projectId4,
            error: 'sync ongoing',
            attempts: 10,
            resyncAttempts: 2,
          },
        ]),
        getFailureRecord: sinon.stub().resolves(),
      },
    }
    this.SyncManager = {
      promises: {
        startResync: sinon.stub().resolves(),
        startHardResync: sinon.stub().resolves(),
      },
    }
    this.UpdatesProcessor = {
      promises: {
        processUpdatesForProject: sinon.stub().resolves(),
      },
    }
    this.settings = {
      redis: {
        lock: {
          key_schema: {
            projectHistoryLock({ projectId }) {
              return `ProjectHistoryLock:${projectId}`
            },
          },
        },
      },
    }
    this.request = {}
    // Load the module under test with all collaborators mocked out.
    this.RetryManager = await esmock(MODULE_PATH, {
      '../../../../app/js/WebApiManager.js': this.WebApiManager,
      '../../../../app/js/RedisManager.js': this.RedisManager,
      '../../../../app/js/ErrorRecorder.js': this.ErrorRecorder,
      '../../../../app/js/SyncManager.js': this.SyncManager,
      '../../../../app/js/UpdatesProcessor.js': this.UpdatesProcessor,
      '@overleaf/settings': this.settings,
      request: this.request,
    })
  })

  // NOTE(review): this nested suite repeats the outer suite's name; a
  // name like 'retryFailures' would be more descriptive — confirm.
  describe('RetryManager', function () {
    describe('for a soft failure', function () {
      beforeEach(async function () {
        await this.RetryManager.promises.retryFailures({ failureType: 'soft' })
      })

      it('should flush the queue', function () {
        expect(
          this.UpdatesProcessor.promises.processUpdatesForProject
        ).to.have.been.calledWith(this.projectId1)
      })
    })

    describe('for a hard failure', function () {
      beforeEach(async function () {
        await this.RetryManager.promises.retryFailures({ failureType: 'hard' })
      })

      it('should check the overleaf project id', function () {
        expect(
          this.WebApiManager.promises.getHistoryId
        ).to.have.been.calledWith(this.projectId2)
      })

      it("should start a soft resync when a resync hasn't been tried yet", function () {
        expect(this.SyncManager.promises.startResync).to.have.been.calledWith(
          this.projectId2
        )
      })

      it('should start a hard resync when a resync has already been tried', function () {
        expect(
          this.SyncManager.promises.startHardResync
        ).to.have.been.calledWith(this.projectId3)
      })

      it("shouldn't try a resync after a hard resync attempt failed", function () {
        expect(
          this.SyncManager.promises.startHardResync
        ).not.to.have.been.calledWith(this.projectId4)
      })

      it('should count the unprocessed updates', function () {
        expect(
          this.RedisManager.promises.countUnprocessedUpdates
        ).to.have.been.calledWith(this.projectId2)
      })

      it('should check the failure record', function () {
        expect(
          this.ErrorRecorder.promises.getFailureRecord
        ).to.have.been.calledWith(this.projectId2)
      })
    })
  })
})

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,874 @@
import sinon from 'sinon'
import { expect } from 'chai'
import { strict as esmock } from 'esmock'
const MODULE_PATH = '../../../../app/js/SummarizedUpdatesManager.js'
// A sufficiently large amount of time to make the algorithm process updates
// separately
const LATER = 1000000
describe('SummarizedUpdatesManager', function () {
beforeEach(async function () {
  this.historyId = 'history-id-123'
  this.projectId = 'project-id-123'
  // NOTE(review): firstChunk/secondChunk appear unused in this file —
  // setupChunks() builds its own chunk objects. Candidates for removal;
  // confirm nothing else references them.
  this.firstChunk = { chunk: { startVersion: 0 } }
  this.secondChunk = { chunk: { startVersion: 1 } }

  this.ChunkTranslator = {
    convertToSummarizedUpdates: sinon.stub(),
  }
  this.HistoryApiManager = {
    shouldUseProjectHistory: sinon.stub().yields(null, true),
  }
  this.HistoryStoreManager = {
    getMostRecentChunk: sinon.stub(),
    getChunkAtVersion: sinon.stub(),
  }
  this.UpdatesProcessor = {
    processUpdatesForProject: sinon.stub().withArgs(this.projectId).yields(),
  }
  this.WebApiManager = {
    getHistoryId: sinon.stub().yields(null, this.historyId),
  }
  this.LabelsManager = {
    getLabels: sinon.stub().yields(null, []),
  }
  // Load the module under test with all collaborators mocked out.
  this.SummarizedUpdatesManager = await esmock(MODULE_PATH, {
    '../../../../app/js/ChunkTranslator.js': this.ChunkTranslator,
    '../../../../app/js/HistoryApiManager.js': this.HistoryApiManager,
    '../../../../app/js/HistoryStoreManager.js': this.HistoryStoreManager,
    '../../../../app/js/UpdatesProcessor.js': this.UpdatesProcessor,
    '../../../../app/js/WebApiManager.js': this.WebApiManager,
    '../../../../app/js/LabelsManager.js': this.LabelsManager,
  })
  this.callback = sinon.stub()
})
describe('getSummarizedProjectUpdates', function () {
describe('chunk management', function () {
  describe('when there is a single empty chunk', function () {
    setupChunks([[]])
    expectSummaries('returns an empty list of updates', {}, [])
  })

  describe('when there is a single non-empty chunk', function () {
    setupChunks([[makeUpdate()]])
    expectSummaries('returns summarized updates', {}, [makeSummary()])
  })

  describe('when there are multiple chunks', function () {
    // LATER puts the second chunk's update far enough in the future that
    // it cannot be merged with the first into one summary.
    setupChunks([
      [makeUpdate({ startTs: 0, v: 1 })],
      [makeUpdate({ startTs: LATER, v: 2 })],
    ])

    describe('and requesting many summaries', function () {
      // Summaries are returned newest-first.
      expectSummaries('returns many update summaries', {}, [
        makeSummary({ startTs: LATER, fromV: 2 }),
        makeSummary({ startTs: 0, fromV: 1 }),
      ])
    })

    describe('and requesting a single summary', function () {
      expectSummaries('returns a single update summary', { min_count: 1 }, [
        makeSummary({ startTs: LATER, fromV: 2 }),
      ])
    })
  })

  describe('when there are too many chunks', function () {
    // Set up 10 chunks
    const chunks = []
    for (let v = 1; v <= 10; v++) {
      chunks.push([
        makeUpdate({
          startTs: v * 100, // values: 100 - 1000
          v, // values: 1 - 10
        }),
      ])
    }
    setupChunks(chunks)

    // Verify that we stop summarizing after 5 chunks
    expectSummaries('summarizes the 5 latest chunks', {}, [
      makeSummary({ startTs: 600, endTs: 1010, fromV: 6, toV: 11 }),
    ])
  })

  describe('when requesting updates before a specific version', function () {
    // Chunk 1 contains 5 updates that were made close to each other and 5
    // other updates that were made later.
    const chunk1 = []
    for (let v = 1; v <= 5; v++) {
      chunk1.push(
        makeUpdate({
          startTs: v * 100, // values: 100 - 500
          v, // values: 1 - 5
        })
      )
    }
    for (let v = 6; v <= 10; v++) {
      chunk1.push(
        makeUpdate({
          startTs: LATER + v * 100, // values: 1000600 - 1001000
          v, // values: 6 - 10
        })
      )
    }

    // Chunk 2 contains 5 updates that were made close to the latest updates in
    // chunk 1.
    const chunk2 = []
    for (let v = 11; v <= 15; v++) {
      chunk2.push(
        makeUpdate({
          startTs: LATER + v * 100, // values: 1001100 - 1001500
          v, // values: 11 - 15
        })
      )
    }
    setupChunks([chunk1, chunk2])

    expectSummaries(
      'summarizes the updates in a single chunk if the chunk is sufficient',
      { before: 14, min_count: 1 },
      [
        makeSummary({
          startTs: LATER + 1100,
          endTs: LATER + 1310,
          fromV: 11,
          toV: 14,
        }),
      ]
    )

    expectSummaries(
      'summarizes the updates in many chunks otherwise',
      { before: 14, min_count: 2 },
      [
        makeSummary({
          startTs: LATER + 600,
          endTs: LATER + 1310,
          fromV: 6,
          toV: 14,
        }),
        makeSummary({
          startTs: 100,
          endTs: 510,
          fromV: 1,
          toV: 6,
        }),
      ]
    )
  })
})
describe('update summarization', function () {
describe('updates that are close in time', function () {
  setupChunks([
    [
      makeUpdate({
        users: ['user1'],
        startTs: 0,
        v: 4,
      }),
      makeUpdate({
        users: ['user2'],
        startTs: 20,
        v: 5,
      }),
    ],
  ])

  // 20ms apart: the two updates are expected to collapse into a single
  // summary spanning both versions, with both users listed.
  expectSummaries('should merge the updates', {}, [
    makeSummary({
      users: ['user1', 'user2'],
      startTs: 0,
      endTs: 30,
      fromV: 4,
      toV: 6,
    }),
  ])
})
describe('updates that are far apart in time', function () {
  setupChunks([
    [
      makeUpdate({
        users: ['user1'],
        startTs: 100,
        v: 4,
      }),
      makeUpdate({
        users: ['user2'],
        startTs: LATER,
        v: 5,
      }),
    ],
  ])

  // A LATER-sized gap keeps the updates in separate summaries,
  // returned newest-first.
  expectSummaries('should not merge the updates', {}, [
    makeSummary({
      users: ['user2'],
      startTs: LATER,
      endTs: LATER + 10,
      fromV: 5,
      toV: 6,
    }),
    makeSummary({
      users: ['user1'],
      startTs: 100,
      endTs: 110,
      fromV: 4,
      toV: 5,
    }),
  ])
})
describe('mergeable updates in different chunks', function () {
  setupChunks([
    [
      makeUpdate({
        pathnames: ['main.tex'],
        users: ['user1'],
        startTs: 10,
        v: 4,
      }),
      makeUpdate({
        pathnames: ['main.tex'],
        users: ['user2'],
        startTs: 30,
        v: 5,
      }),
    ],
    [
      makeUpdate({
        pathnames: ['chapter.tex'],
        users: ['user1'],
        startTs: 40,
        v: 6,
      }),
      makeUpdate({
        pathnames: ['chapter.tex'],
        users: ['user1'],
        startTs: 50,
        v: 7,
      }),
    ],
  ])

  // Merging is expected to cross the chunk boundary: one summary
  // accumulating the pathnames and users from both chunks.
  expectSummaries('should merge the updates', {}, [
    makeSummary({
      pathnames: ['main.tex', 'chapter.tex'],
      users: ['user1', 'user2'],
      startTs: 10,
      endTs: 60,
      fromV: 4,
      toV: 8,
    }),
  ])
})
describe('null user values after regular users', function () {
setupChunks([
[
makeUpdate({
users: ['user1'],
startTs: 0,
v: 4,
}),
makeUpdate({
users: [null],
startTs: 20,
v: 5,
}),
],
])
expectSummaries('should include the null values', {}, [
makeSummary({
users: [null, 'user1'],
startTs: 0,
endTs: 30,
fromV: 4,
toV: 6,
}),
])
})
describe('null user values before regular users', function () {
setupChunks([
[
makeUpdate({
users: [null],
startTs: 0,
v: 4,
}),
makeUpdate({
users: ['user1'],
startTs: 20,
v: 5,
}),
],
])
expectSummaries('should include the null values', {}, [
makeSummary({
users: [null, 'user1'],
startTs: 0,
endTs: 30,
fromV: 4,
toV: 6,
}),
])
})
describe('multiple null user values', function () {
setupChunks([
[
makeUpdate({
users: ['user1'],
startTs: 10,
v: 4,
}),
makeUpdate({
users: [null],
startTs: 20,
v: 5,
}),
makeUpdate({
users: [null],
startTs: 70,
v: 6,
}),
],
])
expectSummaries('should merge the null values', {}, [
makeSummary({
users: [null, 'user1'],
startTs: 10,
endTs: 80,
fromV: 4,
toV: 7,
}),
])
})
describe('multiple users', function () {
setupChunks([
[
makeUpdate({
users: ['user1'],
startTs: 0,
v: 4,
}),
makeUpdate({
users: ['user2'],
startTs: 20,
v: 5,
}),
],
])
expectSummaries('should merge the users', {}, [
makeSummary({
users: ['user1', 'user2'],
startTs: 0,
endTs: 30,
fromV: 4,
toV: 6,
}),
])
})
describe('duplicate updates with the same v1 user', function () {
setupChunks([
[
makeUpdate({
users: [{ id: 'user1' }],
startTs: 0,
v: 4,
}),
makeUpdate({
users: [{ id: 'user1' }],
startTs: 20,
v: 5,
}),
],
])
expectSummaries('should deduplicate the users', {}, [
makeSummary({
users: [{ id: 'user1' }],
startTs: 0,
endTs: 30,
fromV: 4,
toV: 6,
}),
])
})
describe('duplicate updates with the same v2 user', function () {
setupChunks([
[
makeUpdate({
users: ['user1'],
startTs: 0,
v: 4,
}),
makeUpdate({
users: ['user1'],
startTs: 20,
v: 5,
}),
],
])
expectSummaries('should deduplicate the users', {}, [
makeSummary({
users: ['user1'],
startTs: 0,
endTs: 30,
fromV: 4,
toV: 6,
}),
])
})
describe('mixed v1 and v2 users with the same id', function () {
setupChunks([
[
makeUpdate({
users: ['user1'],
startTs: 0,
v: 4,
}),
makeUpdate({
users: [{ id: 'user1' }],
startTs: 20,
v: 5,
}),
],
])
expectSummaries('should deduplicate the users', {}, [
makeSummary({
users: [{ id: 'user1' }],
startTs: 0,
endTs: 30,
fromV: 4,
toV: 6,
}),
])
})
describe('project ops in mergeable updates', function () {
setupChunks([
[
makeUpdate({
pathnames: [],
projectOps: [
{ rename: { pathname: 'C.tex', newPathname: 'D.tex' } },
],
users: ['user2'],
startTs: 0,
v: 4,
}),
makeUpdate({
pathnames: [],
projectOps: [
{ rename: { pathname: 'A.tex', newPathname: 'B.tex' } },
],
users: ['user1'],
startTs: 20,
v: 5,
}),
],
])
expectSummaries('should merge project ops', {}, [
makeSummary({
pathnames: [],
projectOps: [
{
atV: 5,
rename: {
pathname: 'A.tex',
newPathname: 'B.tex',
},
},
{
atV: 4,
rename: {
pathname: 'C.tex',
newPathname: 'D.tex',
},
},
],
users: ['user1', 'user2'],
startTs: 0,
endTs: 30,
fromV: 4,
toV: 6,
}),
])
})
describe('mergable updates with a mix of project ops and doc ops', function () {
setupChunks([
[
makeUpdate({
pathnames: ['main.tex'],
users: ['user1'],
startTs: 0,
v: 4,
}),
makeUpdate({
pathnames: [],
users: ['user2'],
projectOps: [
{ rename: { pathname: 'A.tex', newPathname: 'B.tex' } },
],
startTs: 20,
v: 5,
}),
makeUpdate({
pathnames: ['chapter.tex'],
users: ['user2'],
startTs: 40,
v: 6,
}),
],
])
expectSummaries('should keep updates separate', {}, [
makeSummary({
pathnames: ['chapter.tex'],
users: ['user2'],
startTs: 40,
fromV: 6,
}),
makeSummary({
pathnames: [],
users: ['user2'],
projectOps: [
{ atV: 5, rename: { pathname: 'A.tex', newPathname: 'B.tex' } },
],
startTs: 20,
fromV: 5,
}),
makeSummary({
pathnames: ['main.tex'],
users: ['user1'],
startTs: 0,
fromV: 4,
}),
])
})
describe('label on an update', function () {
  const label = {
    id: 'mock-id',
    comment: 'an example comment',
    version: 5,
  }

  setupChunks([
    [
      makeUpdate({ startTs: 0, v: 3 }),
      makeUpdate({ startTs: 20, v: 4 }),
      makeUpdate({ startTs: 40, v: 5 }),
      makeUpdate({ startTs: 60, v: 6 }),
    ],
  ])
  setupLabels([label])

  // The label at version 5 forces a summary boundary there; the label is
  // attached to the earlier summary (the one with toV === label.version).
  expectSummaries('should split the updates at the label', {}, [
    makeSummary({ startTs: 40, endTs: 70, fromV: 5, toV: 7 }),
    makeSummary({
      startTs: 0,
      endTs: 30,
      fromV: 3,
      toV: 5,
      labels: [label],
    }),
  ])
})
describe('updates with origin', function () {
  setupChunks([
    [
      makeUpdate({ startTs: 0, v: 1 }),
      makeUpdate({ startTs: 10, v: 2 }),
      makeUpdate({
        startTs: 20,
        v: 3,
        origin: { kind: 'history-resync' },
      }),
      makeUpdate({
        startTs: 30,
        v: 4,
        origin: { kind: 'history-resync' },
      }),
      makeUpdate({ startTs: 40, v: 5 }),
      makeUpdate({ startTs: 50, v: 6 }),
    ],
  ])

  // An origin appearing or disappearing is expected to force a summary
  // boundary even though the updates are close in time.
  expectSummaries(
    'should split the updates where the origin appears or disappears',
    {},
    [
      makeSummary({ startTs: 40, endTs: 60, fromV: 5, toV: 7 }),
      makeSummary({
        startTs: 20,
        endTs: 40,
        fromV: 3,
        toV: 5,
        origin: { kind: 'history-resync' },
      }),
      makeSummary({ startTs: 0, endTs: 20, fromV: 1, toV: 3 }),
    ]
  )
})
describe('updates with different origins', function () {
  setupChunks([
    [
      makeUpdate({ startTs: 0, v: 1, origin: { kind: 'origin-a' } }),
      makeUpdate({ startTs: 10, v: 2, origin: { kind: 'origin-a' } }),
      makeUpdate({ startTs: 20, v: 3, origin: { kind: 'origin-b' } }),
      makeUpdate({ startTs: 30, v: 4, origin: { kind: 'origin-b' } }),
    ],
  ])

  // Updates with different origin kinds must not be merged together.
  expectSummaries(
    'should split the updates when the origin kind changes',
    {},
    [
      makeSummary({
        startTs: 20,
        endTs: 40,
        fromV: 3,
        toV: 5,
        origin: { kind: 'origin-b' },
      }),
      makeSummary({
        startTs: 0,
        endTs: 20,
        fromV: 1,
        toV: 3,
        origin: { kind: 'origin-a' },
      }),
    ]
  )
})
describe('empty updates', function () {
  setupChunks([
    [
      makeUpdate({ startTs: 0, v: 1, pathnames: ['main.tex'] }),
      makeUpdate({ startTs: 10, v: 2, pathnames: [] }),
      makeUpdate({ startTs: 20, v: 3, pathnames: ['main.tex'] }),
      makeUpdate({ startTs: 30, v: 4, pathnames: [] }),
      makeUpdate({ startTs: 40, v: 5, pathnames: [] }),
    ],
    [
      makeUpdate({ startTs: 50, v: 6, pathnames: [] }),
      makeUpdate({ startTs: LATER, v: 7, pathnames: [] }),
      makeUpdate({ startTs: LATER + 10, v: 8, pathnames: ['main.tex'] }),
      makeUpdate({ startTs: LATER + 20, v: 9, pathnames: ['main.tex'] }),
      makeUpdate({ startTs: LATER + 30, v: 10, pathnames: [] }),
    ],
  ])

  // Updates without pathnames or project ops should not produce their own
  // summaries; their version ranges are absorbed into the neighbouring
  // non-empty summaries (hence toV 8 / toV 11 below).
  expectSummaries('should skip empty updates', {}, [
    makeSummary({
      startTs: LATER + 10,
      endTs: LATER + 30,
      fromV: 8,
      toV: 11,
    }),
    makeSummary({ startTs: 0, endTs: 30, fromV: 1, toV: 8 }),
  ])
})
describe('history resync updates', function () {
  setupChunks([
    [
      makeUpdate({
        startTs: 0,
        v: 1,
        origin: { kind: 'history-resync' },
        projectOps: [{ add: { pathname: 'file1.tex' } }],
        pathnames: [],
      }),
      makeUpdate({
        startTs: 20,
        v: 2,
        origin: { kind: 'history-resync' },
        projectOps: [
          { add: { pathname: 'file2.tex' } },
          { add: { pathname: 'file3.tex' } },
        ],
        pathnames: [],
      }),
      makeUpdate({
        startTs: 40,
        v: 3,
        origin: { kind: 'history-resync' },
        projectOps: [{ add: { pathname: 'file4.tex' } }],
        pathnames: [],
      }),
      makeUpdate({
        startTs: 60,
        v: 4,
        origin: { kind: 'history-resync' },
        projectOps: [],
        pathnames: ['file1.tex', 'file2.tex', 'file5.tex'],
      }),
      makeUpdate({
        startTs: 80,
        v: 5,
        origin: { kind: 'history-resync' },
        projectOps: [],
        pathnames: ['file4.tex'],
      }),
      makeUpdate({ startTs: 100, v: 6, pathnames: ['file1.tex'] }),
    ],
  ])

  // Within a resync, edits to files created by the same resync collapse
  // into the add ops (file1/file2/file4 do not reappear as pathnames);
  // only file5.tex, which was not created here, remains an edit. The
  // final non-resync edit stays in its own summary.
  expectSummaries('should merge creates and edits', {}, [
    makeSummary({
      startTs: 100,
      endTs: 110,
      fromV: 6,
      toV: 7,
      pathnames: ['file1.tex'],
    }),
    makeSummary({
      startTs: 0,
      endTs: 90,
      fromV: 1,
      toV: 6,
      origin: { kind: 'history-resync' },
      pathnames: ['file5.tex'],
      projectOps: [
        { add: { pathname: 'file4.tex' }, atV: 3 },
        { add: { pathname: 'file2.tex' }, atV: 2 },
        { add: { pathname: 'file3.tex' }, atV: 2 },
        { add: { pathname: 'file1.tex' }, atV: 1 },
      ],
    }),
  ])
})
})
})
})
/**
* Set up mocks as if the project had a number of chunks.
*
* Each parameter represents a chunk and the value of the parameter is the list
* of updates in that chunk.
*/
/**
 * Set up mocks as if the project had a number of chunks.
 *
 * Each element of updatesByChunk is the list of summarized updates in one
 * chunk. For every chunk this stubs getChunkAtVersion (for each update
 * version it contains), convertToSummarizedUpdates, and — for the final
 * chunk only — getMostRecentChunk.
 */
function setupChunks(updatesByChunk) {
  beforeEach('set up chunks', function () {
    const lastIndex = updatesByChunk.length - 1
    let startVersion = 0
    updatesByChunk.forEach((updates, index) => {
      const chunk = { chunk: { startVersion } }
      for (const update of updates) {
        // Looking up any version contained in the chunk yields the chunk.
        this.HistoryStoreManager.getChunkAtVersion
          .withArgs(this.projectId, this.historyId, update.v)
          .yields(null, chunk)
        // The next chunk starts at the last version seen in this one.
        startVersion = update.v
      }
      if (index === lastIndex) {
        this.HistoryStoreManager.getMostRecentChunk
          .withArgs(this.projectId, this.historyId)
          .yields(null, chunk)
      }
      this.ChunkTranslator.convertToSummarizedUpdates
        .withArgs(chunk)
        .yields(null, updates)
    })
  })
}
// Stub LabelsManager so that the project under test resolves to the
// given labels.
function setupLabels(labels) {
  beforeEach('set up labels', function () {
    this.LabelsManager.getLabels.withArgs(this.projectId).yields(null, labels)
  })
}
/**
 * Declare a test case that fetches summarized updates with the given
 * options and compares them against expectedSummaries.
 */
function expectSummaries(description, options, expectedSummaries) {
  it(description, function (done) {
    this.SummarizedUpdatesManager.getSummarizedProjectUpdates(
      this.projectId,
      options,
      (err, summaries) => {
        if (err) {
          return done(err)
        }
        // The order of each summary's users array is not significant, so
        // normalize both sides before the deep comparison.
        for (const list of [summaries, expectedSummaries]) {
          for (const summary of list) {
            summary.meta.users.sort()
          }
        }
        expect(summaries).to.deep.equal(expectedSummaries)
        done()
      }
    )
  })
}
/**
 * Build a raw summarized update in the shape produced by ChunkTranslator.
 * All fields can be overridden via options; endTs defaults to ten
 * milliseconds after startTs, and origin is only attached when given.
 */
function makeUpdate(options = {}) {
  const {
    pathnames = ['main.tex'],
    users = ['user1'],
    projectOps = [],
    startTs = 0,
    endTs = startTs + 10,
    v = 1,
    origin,
  } = options
  const meta = { users, start_ts: startTs, end_ts: endTs }
  if (origin) {
    meta.origin = origin
  }
  return {
    pathnames,
    project_ops: projectOps,
    meta,
    v,
  }
}
/**
 * Build an expected summary in the shape returned by
 * getSummarizedProjectUpdates. Pathnames are collected into a Set;
 * endTs defaults to startTs + 10 and toV to fromV + 1; origin is only
 * attached when given.
 */
function makeSummary(options = {}) {
  const {
    pathnames = ['main.tex'],
    users = ['user1'],
    startTs = 0,
    endTs = startTs + 10,
    fromV = 1,
    toV = fromV + 1,
    labels = [],
    projectOps = [],
    origin,
  } = options
  const meta = { users, start_ts: startTs, end_ts: endTs }
  if (origin) {
    meta.origin = origin
  }
  return {
    pathnames: new Set(pathnames),
    meta,
    fromV,
    toV,
    labels,
    project_ops: projectOps,
  }
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,552 @@
import sinon from 'sinon'
import { expect } from 'chai'
import { strict as esmock } from 'esmock'
import * as Errors from '../../../../app/js/Errors.js'
const MODULE_PATH = '../../../../app/js/UpdatesProcessor.js'
describe('UpdatesProcessor', function () {
// One-time setup: build sinon stubs for every collaborator of
// UpdatesProcessor, then load the module under test via esmock so the
// stubs are injected in place of the real modules.
before(async function () {
  this.extendLock = sinon.stub()
  this.BlobManager = {
    createBlobsForUpdates: sinon.stub(),
  }
  this.HistoryStoreManager = {
    getMostRecentVersion: sinon.stub(),
    sendChanges: sinon.stub().yields(),
  }
  // The fake lock runs its critical section immediately, passing our
  // extendLock stub and the caller's callback straight through.
  this.LockManager = {
    runWithLock: sinon.spy((key, runner, callback) =>
      runner(this.extendLock, callback)
    ),
  }
  this.RedisManager = {}
  this.UpdateCompressor = {
    compressRawUpdates: sinon.stub(),
  }
  this.UpdateTranslator = {
    convertToChanges: sinon.stub(),
    isProjectStructureUpdate: sinon.stub(),
    isTextUpdate: sinon.stub(),
  }
  this.WebApiManager = {
    getHistoryId: sinon.stub(),
  }
  this.SyncManager = {
    expandSyncUpdates: sinon.stub(),
    setResyncState: sinon.stub().yields(),
    skipUpdatesDuringSync: sinon.stub(),
  }
  this.ErrorRecorder = {
    getLastFailure: sinon.stub(),
    record: sinon.stub().yields(null, { attempts: 1 }),
  }
  this.RetryManager = {
    isFirstFailure: sinon.stub().returns(true),
    isHardFailure: sinon.stub().returns(false),
  }
  // No-op profiler so timing instrumentation in the module has no
  // side effects during the tests.
  this.Profiler = {
    Profiler: class {
      log() {
        return this
      }

      wrap(label, cb) {
        return cb
      }

      getTimeDelta() {
        return 0
      }

      end() {
        return 0
      }
    },
  }
  this.Metrics = {
    gauge: sinon.stub(),
    inc: sinon.stub(),
    timing: sinon.stub(),
  }
  // Minimal settings: only the lock key schema is read by the module.
  this.Settings = {
    redis: {
      lock: {
        key_schema: {
          projectHistoryLock({ project_id: projectId }) {
            return `ProjectHistoryLock:${projectId}`
          },
        },
      },
    },
  }
  // Load the module under test with all dependencies replaced by the
  // stubs built above.
  this.UpdatesProcessor = await esmock(MODULE_PATH, {
    '../../../../app/js/BlobManager.js': this.BlobManager,
    '../../../../app/js/HistoryStoreManager.js': this.HistoryStoreManager,
    '../../../../app/js/LockManager.js': this.LockManager,
    '../../../../app/js/RedisManager.js': this.RedisManager,
    '../../../../app/js/UpdateCompressor.js': this.UpdateCompressor,
    '../../../../app/js/UpdateTranslator.js': this.UpdateTranslator,
    '../../../../app/js/WebApiManager.js': this.WebApiManager,
    '../../../../app/js/SyncManager.js': this.SyncManager,
    '../../../../app/js/ErrorRecorder.js': this.ErrorRecorder,
    '../../../../app/js/Profiler.js': this.Profiler,
    '../../../../app/js/RetryManager.js': this.RetryManager,
    '../../../../app/js/Errors.js': Errors,
    '@overleaf/metrics': this.Metrics,
    '@overleaf/settings': this.Settings,
  })
  // Common fixture ids shared by the tests below.
  this.doc_id = 'doc-id-123'
  this.project_id = 'project-id-123'
  this.ol_project_id = 'ol-project-id-234'
  this.callback = sinon.stub()
  this.temporary = 'temp-mock'
})
describe('processUpdatesForProject', function () {
  beforeEach(function () {
    this.error = new Error('error')
    this.queueSize = 445
    // Replace the internal worker so we can assert it is invoked and
    // control the (error, queueSize) pair it reports back.
    this.UpdatesProcessor._mocks._countAndProcessUpdates = sinon
      .stub()
      .callsArgWith(3, this.error, this.queueSize)
  })

  describe('when there is no existing error', function () {
    beforeEach(function (done) {
      // No previously recorded failure for this project.
      this.ErrorRecorder.getLastFailure.yields()
      this.UpdatesProcessor.processUpdatesForProject(this.project_id, err => {
        // The worker's error should propagate to the caller.
        expect(err).to.equal(this.error)
        done()
      })
    })

    it('processes updates', function () {
      this.UpdatesProcessor._mocks._countAndProcessUpdates
        .calledWith(this.project_id)
        .should.equal(true)
    })

    it('records errors', function () {
      // Failures are recorded along with the remaining queue size.
      this.ErrorRecorder.record
        .calledWith(this.project_id, this.queueSize, this.error)
        .should.equal(true)
    })
  })
})
// Tests for _getHistoryId: resolving the v1 history id from the
// updates themselves and/or the web API, including consistency checks.
describe('_getHistoryId', function () {
  describe('projectHistoryId is not present', function () {
    beforeEach(function () {
      this.updates = [
        { p: 0, i: 'a' },
        { p: 1, i: 's' },
      ]
      // Web has no history id for this project either.
      this.WebApiManager.getHistoryId.yields(null)
    })

    it('returns null', function (done) {
      this.UpdatesProcessor._getHistoryId(
        this.project_id,
        this.updates,
        (error, projectHistoryId) => {
          expect(error).to.be.null
          expect(projectHistoryId).to.be.null
          done()
        }
      )
    })
  })

  describe('projectHistoryId is not present in updates', function () {
    beforeEach(function () {
      this.updates = [
        { p: 0, i: 'a' },
        { p: 1, i: 's' },
      ]
    })

    it('returns the id from web', function (done) {
      this.projectHistoryId = '1234'
      this.WebApiManager.getHistoryId.yields(null, this.projectHistoryId)
      this.UpdatesProcessor._getHistoryId(
        this.project_id,
        this.updates,
        (error, projectHistoryId) => {
          expect(error).to.be.null
          expect(projectHistoryId).equal(this.projectHistoryId)
          done()
        }
      )
    })

    it('returns errors from web', function (done) {
      this.error = new Error('oh no!')
      this.WebApiManager.getHistoryId.yields(this.error)
      this.UpdatesProcessor._getHistoryId(
        this.project_id,
        this.updates,
        error => {
          expect(error).to.equal(this.error)
          done()
        }
      )
    })
  })

  describe('projectHistoryId is present in some updates', function () {
    beforeEach(function () {
      this.projectHistoryId = '1234'
      this.updates = [
        { p: 0, i: 'a' },
        { p: 1, i: 's', projectHistoryId: this.projectHistoryId },
        { p: 2, i: 'd', projectHistoryId: this.projectHistoryId },
      ]
    })

    it('returns an error if the id is inconsistent between updates', function (done) {
      this.updates[1].projectHistoryId = 2345
      this.UpdatesProcessor._getHistoryId(
        this.project_id,
        this.updates,
        error => {
          expect(error.message).to.equal(
            'inconsistent project history id between updates'
          )
          done()
        }
      )
    })

    it('returns an error if the id is inconsistent between updates and web', function (done) {
      this.WebApiManager.getHistoryId.yields(null, 2345)
      this.UpdatesProcessor._getHistoryId(
        this.project_id,
        this.updates,
        error => {
          expect(error.message).to.equal(
            'inconsistent project history id between updates and web'
          )
          done()
        }
      )
    })

    it('returns the id if it is consistent between updates and web', function (done) {
      this.WebApiManager.getHistoryId.yields(null, this.projectHistoryId)
      this.UpdatesProcessor._getHistoryId(
        this.project_id,
        this.updates,
        (error, projectHistoryId) => {
          expect(error).to.be.null
          expect(projectHistoryId).equal(this.projectHistoryId)
          done()
        }
      )
    })

    // A web failure is tolerated when the updates agree on an id.
    // (Fixed typo in the test title: 'unavaiable' -> 'unavailable'.)
    it('returns the id if it is consistent between updates but unavailable in web', function (done) {
      this.WebApiManager.getHistoryId.yields(new Error('oh no!'))
      this.UpdatesProcessor._getHistoryId(
        this.project_id,
        this.updates,
        (error, projectHistoryId) => {
          expect(error).to.be.null
          expect(projectHistoryId).equal(this.projectHistoryId)
          done()
        }
      )
    })
  })
})
// Tests for the main _processUpdates pipeline: fetch latest version,
// skip/expand resync updates, compress, create blobs, convert to
// changes, send to the history store, and persist the sync state.
describe('_processUpdates', function () {
  beforeEach(function () {
    // Fixture values threaded through each pipeline stage's stub.
    this.mostRecentVersionInfo = { version: 1 }
    this.rawUpdates = ['raw updates']
    this.expandedUpdates = ['expanded updates']
    this.filteredUpdates = ['filtered updates']
    this.compressedUpdates = ['compressed updates']
    this.updatesWithBlobs = ['updates with blob']
    this.changes = [
      {
        toRaw() {
          return 'change'
        },
      },
    ]
    this.newSyncState = { resyncProjectStructure: false }
    this.extendLock = sinon.stub().yields()
    this.mostRecentChunk = 'fake-chunk'
    // Wire each stage's stub to hand its fixture to the next stage.
    this.HistoryStoreManager.getMostRecentVersion.yields(
      null,
      this.mostRecentVersionInfo,
      null,
      '_lastChange',
      this.mostRecentChunk
    )
    this.SyncManager.skipUpdatesDuringSync.yields(
      null,
      this.filteredUpdates,
      this.newSyncState
    )
    this.SyncManager.expandSyncUpdates.callsArgWith(
      5,
      null,
      this.expandedUpdates
    )
    this.UpdateCompressor.compressRawUpdates.returns(this.compressedUpdates)
    this.BlobManager.createBlobsForUpdates.callsArgWith(
      4,
      null,
      this.updatesWithBlobs
    )
    this.UpdateTranslator.convertToChanges.returns(this.changes)
  })

  describe('happy path', function () {
    beforeEach(function (done) {
      this.UpdatesProcessor._processUpdates(
        this.project_id,
        this.ol_project_id,
        this.rawUpdates,
        this.extendLock,
        err => {
          this.callback(err)
          done()
        }
      )
    })

    it('should get the latest version id', function () {
      this.HistoryStoreManager.getMostRecentVersion.should.have.been.calledWith(
        this.project_id,
        this.ol_project_id
      )
    })

    it('should skip updates when resyncing', function () {
      this.SyncManager.skipUpdatesDuringSync.should.have.been.calledWith(
        this.project_id,
        this.rawUpdates
      )
    })

    it('should expand sync updates', function () {
      this.SyncManager.expandSyncUpdates.should.have.been.calledWith(
        this.project_id,
        this.ol_project_id,
        this.mostRecentChunk,
        this.filteredUpdates,
        this.extendLock
      )
    })

    it('should compress updates', function () {
      this.UpdateCompressor.compressRawUpdates.should.have.been.calledWith(
        this.expandedUpdates
      )
    })

    it('should create any blobs for the updates', function () {
      this.BlobManager.createBlobsForUpdates.should.have.been.calledWith(
        this.project_id,
        this.ol_project_id,
        this.compressedUpdates
      )
    })

    it('should convert the updates into a change requests', function () {
      this.UpdateTranslator.convertToChanges.should.have.been.calledWith(
        this.project_id,
        this.updatesWithBlobs
      )
    })

    it('should send the change request to the history store', function () {
      // 'change' is the raw form produced by the fixture's toRaw().
      this.HistoryStoreManager.sendChanges.should.have.been.calledWith(
        this.project_id,
        this.ol_project_id,
        ['change']
      )
    })

    it('should set the sync state', function () {
      this.SyncManager.setResyncState.should.have.been.calledWith(
        this.project_id,
        this.newSyncState
      )
    })

    it('should call the callback with no error', function () {
      this.callback.should.have.been.called
    })
  })

  describe('with an error converting changes', function () {
    beforeEach(function (done) {
      this.err = new Error()
      this.UpdateTranslator.convertToChanges.throws(this.err)
      this.callback = sinon.stub()
      this.UpdatesProcessor._processUpdates(
        this.project_id,
        this.ol_project_id,
        this.rawUpdates,
        this.extendLock,
        err => {
          this.callback(err)
          done()
        }
      )
    })

    it('should call the callback with the error', function () {
      this.callback.should.have.been.calledWith(this.err)
    })
  })
})
// Tests for _skipAlreadyAppliedUpdates: in-order updates pass through
// untouched; out-of-order versions raise an OpsOutOfOrderError.
describe('_skipAlreadyAppliedUpdates', function () {
  before(function () {
    // Classify updates by shape: project-structure updates carry
    // `version`, text (doc) updates carry `v`.
    this.UpdateTranslator.isProjectStructureUpdate.callsFake(
      update => update.version != null
    )
    this.UpdateTranslator.isTextUpdate.callsFake(update => update.v != null)
  })

  describe('with all doc ops in order', function () {
    before(function () {
      this.updates = [
        { doc: 'id', v: 1 },
        { doc: 'id', v: 2 },
        { doc: 'id', v: 3 },
        { doc: 'id', v: 4 },
      ]
      this.updatesToApply = this.UpdatesProcessor._skipAlreadyAppliedUpdates(
        this.project_id,
        this.updates,
        { docs: {} }
      )
    })

    it('should return the original updates', function () {
      expect(this.updatesToApply).to.eql(this.updates)
    })
  })

  describe('with all project ops in order', function () {
    before(function () {
      this.updates = [
        { version: 1 },
        { version: 2 },
        { version: 3 },
        { version: 4 },
      ]
      this.updatesToApply = this.UpdatesProcessor._skipAlreadyAppliedUpdates(
        this.project_id,
        this.updates,
        { docs: {} }
      )
    })

    it('should return the original updates', function () {
      expect(this.updatesToApply).to.eql(this.updates)
    })
  })

  describe('with all multiple doc and ops in order', function () {
    before(function () {
      // Two independent docs plus project ops, each sequence in order.
      this.updates = [
        { doc: 'id1', v: 1 },
        { doc: 'id1', v: 2 },
        { doc: 'id1', v: 3 },
        { doc: 'id1', v: 4 },
        { doc: 'id2', v: 1 },
        { doc: 'id2', v: 2 },
        { doc: 'id2', v: 3 },
        { doc: 'id2', v: 4 },
        { version: 1 },
        { version: 2 },
        { version: 3 },
        { version: 4 },
      ]
      this.updatesToApply = this.UpdatesProcessor._skipAlreadyAppliedUpdates(
        this.project_id,
        this.updates,
        { docs: {} }
      )
    })

    it('should return the original updates', function () {
      expect(this.updatesToApply).to.eql(this.updates)
    })
  })

  describe('with doc ops out of order', function () {
    before(function () {
      this.updates = [
        { doc: 'id', v: 1 },
        { doc: 'id', v: 2 },
        { doc: 'id', v: 4 },
        { doc: 'id', v: 3 },
      ]
      // Spy on the implementation so the thrown error type can be
      // inspected; the throw itself is swallowed here.
      this.skipFn = sinon.spy(
        this.UpdatesProcessor._mocks,
        '_skipAlreadyAppliedUpdates'
      )
      try {
        this.updatesToApply =
          this.UpdatesProcessor._skipAlreadyAppliedUpdates(
            this.project_id,
            this.updates,
            { docs: {} }
          )
      } catch (error) {}
    })

    after(function () {
      this.skipFn.restore()
    })

    it('should throw an exception', function () {
      this.skipFn.threw('OpsOutOfOrderError').should.equal(true)
    })
  })

  describe('with project ops out of order', function () {
    before(function () {
      this.updates = [
        { version: 1 },
        { version: 2 },
        { version: 4 },
        { version: 3 },
      ]
      // Spy on the implementation so the thrown error type can be
      // inspected; the throw itself is swallowed here.
      this.skipFn = sinon.spy(
        this.UpdatesProcessor._mocks,
        '_skipAlreadyAppliedUpdates'
      )
      try {
        this.updatesToApply =
          this.UpdatesProcessor._skipAlreadyAppliedUpdates(
            this.project_id,
            this.updates,
            { docs: {} }
          )
      } catch (error) {}
    })

    after(function () {
      this.skipFn.restore()
    })

    it('should throw an exception', function () {
      this.skipFn.threw('OpsOutOfOrderError').should.equal(true)
    })
  })
})
})

View File

@@ -0,0 +1,170 @@
/* eslint-disable
no-return-assign,
no-undef,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
import { expect } from 'chai'
import { strict as esmock } from 'esmock'
const MODULE_PATH = '../../../../app/js/Versions.js'
// Tests for version-string comparison helpers (compare/gt/gte/lt/lte).
// Cleaned up bulk-decaffeinate artifacts per the file's TODO: removed
// the redundant `return` before every describe/it/assertion and the
// return-assign in beforeEach; fixed the mislabelled non-lexical
// lesser-minor test (it asserts -1, not +1). Registered tests and
// assertions are otherwise unchanged.
describe('Versions', function () {
  beforeEach(async function () {
    this.Versions = await esmock(MODULE_PATH)
  })

  describe('compare', function () {
    describe('for greater major version', function () {
      it('should return +1', function () {
        this.Versions.compare('2.1', '1.1').should.equal(+1)
      })
    })

    describe('for lesser major version', function () {
      it('should return -1', function () {
        this.Versions.compare('1.1', '2.1').should.equal(-1)
      })
    })

    describe('for equal major versions with no minor version', function () {
      it('should return 0', function () {
        this.Versions.compare('2', '2').should.equal(0)
      })
    })

    describe('for equal major versions with greater minor version', function () {
      it('should return +1', function () {
        this.Versions.compare('2.3', '2.1').should.equal(+1)
      })
    })

    describe('for equal major versions with lesser minor version', function () {
      it('should return -1', function () {
        this.Versions.compare('2.1', '2.3').should.equal(-1)
      })
    })

    describe('for equal major versions with greater minor version (non lexical)', function () {
      it('should return +1', function () {
        this.Versions.compare('2.10', '2.9').should.equal(+1)
      })
    })

    describe('for equal major versions with lesser minor version (non lexical)', function () {
      // Title corrected: this case asserts -1, but was labelled '+1'.
      it('should return -1', function () {
        this.Versions.compare('2.9', '2.10').should.equal(-1)
      })
    })

    describe('for a single major version vs a major+minor version', function () {
      it('should return +1', function () {
        this.Versions.compare('2.1', '1').should.equal(+1)
      })
    })

    describe('for a major+minor version vs a single major version', function () {
      it('should return -1', function () {
        this.Versions.compare('1', '2.1').should.equal(-1)
      })
    })

    describe('for equal major versions with greater minor version vs zero', function () {
      it('should return +1', function () {
        this.Versions.compare('2.3', '2.0').should.equal(+1)
      })
    })

    describe('for equal major versions with lesser minor version of zero', function () {
      it('should return -1', function () {
        this.Versions.compare('2.0', '2.3').should.equal(-1)
      })
    })
  })

  describe('gt', function () {
    describe('for greater major version', function () {
      it('should return true', function () {
        this.Versions.gt('2.1', '1.1').should.equal(true)
      })
    })

    describe('for lesser major version', function () {
      it('should return false', function () {
        this.Versions.gt('1.1', '2.1').should.equal(false)
      })
    })

    describe('for equal major versions with no minor version', function () {
      it('should return false', function () {
        this.Versions.gt('2', '2').should.equal(false)
      })
    })
  })

  describe('gte', function () {
    describe('for greater major version', function () {
      it('should return true', function () {
        this.Versions.gte('2.1', '1.1').should.equal(true)
      })
    })

    describe('for lesser major version', function () {
      it('should return false', function () {
        this.Versions.gte('1.1', '2.1').should.equal(false)
      })
    })

    describe('for equal major versions with no minor version', function () {
      it('should return true', function () {
        this.Versions.gte('2', '2').should.equal(true)
      })
    })
  })

  describe('lt', function () {
    describe('for greater major version', function () {
      it('should return false', function () {
        this.Versions.lt('2.1', '1.1').should.equal(false)
      })
    })

    describe('for lesser major version', function () {
      it('should return true', function () {
        this.Versions.lt('1.1', '2.1').should.equal(true)
      })
    })

    describe('for equal major versions with no minor version', function () {
      it('should return false', function () {
        this.Versions.lt('2', '2').should.equal(false)
      })
    })
  })

  describe('lte', function () {
    describe('for greater major version', function () {
      it('should return false', function () {
        this.Versions.lte('2.1', '1.1').should.equal(false)
      })
    })

    describe('for lesser major version', function () {
      it('should return true', function () {
        this.Versions.lte('1.1', '2.1').should.equal(true)
      })
    })

    describe('for equal major versions with no minor version', function () {
      it('should return true', function () {
        this.Versions.lte('2', '2').should.equal(true)
      })
    })
  })
})

View File

@@ -0,0 +1,153 @@
import sinon from 'sinon'
import { expect } from 'chai'
import { strict as esmock } from 'esmock'
import { RequestFailedError } from '@overleaf/fetch-utils'
const MODULE_PATH = '../../../../app/js/WebApiManager.js'
// Tests for WebApiManager.getHistoryId: Redis-cached lookup of a
// project's v1 history id, falling back to the web API, with error,
// 404, and 5xx handling.
describe('WebApiManager', function () {
  beforeEach(async function () {
    // Minimal settings: web API base URL plus basic-auth credentials.
    this.settings = {
      apis: {
        web: {
          url: 'http://example.com',
          user: 'overleaf',
          pass: 'password',
        },
      },
    }
    this.userId = 'mock-user-id'
    this.projectId = 'mock-project-id'
    this.project = { features: 'mock-features' }
    this.olProjectId = 12345
    this.Metrics = { inc: sinon.stub() }
    this.RedisManager = {
      promises: {
        getCachedHistoryId: sinon.stub(),
        setCachedHistoryId: sinon.stub().resolves(),
      },
    }
    // Keep the real RequestFailedError so instanceof checks in the
    // module under test still work against our stubbed fetch-utils.
    this.FetchUtils = {
      fetchNothing: sinon.stub().resolves(),
      fetchJson: sinon.stub(),
      RequestFailedError,
    }
    this.WebApiManager = await esmock(MODULE_PATH, {
      '@overleaf/fetch-utils': this.FetchUtils,
      '@overleaf/settings': this.settings,
      '@overleaf/metrics': this.Metrics,
      '../../../../app/js/RedisManager.js': this.RedisManager,
    })
    // Shorten the retry delay so the failure tests run quickly.
    this.WebApiManager.setRetryTimeoutMs(100)
  })

  describe('getHistoryId', function () {
    describe('when there is no cached value and the web request is successful', function () {
      beforeEach(function () {
        this.RedisManager.promises.getCachedHistoryId
          .withArgs(this.projectId) // first call, no cached value returned
          .onCall(0)
          .resolves(null)
        this.RedisManager.promises.getCachedHistoryId
          .withArgs(this.projectId) // subsequent calls, return cached value
          .resolves(this.olProjectId)
        this.RedisManager.promises.getCachedHistoryId
          .withArgs('mock-project-id-2') // no cached value for other project
          .resolves(null)
        // Web responds with the project's history id.
        this.FetchUtils.fetchJson.resolves({
          overleaf: { history: { id: this.olProjectId } },
        })
      })

      it('should only request project details once per project', async function () {
        for (let i = 0; i < 5; i++) {
          await this.WebApiManager.promises.getHistoryId(this.projectId)
        }
        this.FetchUtils.fetchJson.should.have.been.calledOnce
        await this.WebApiManager.promises.getHistoryId('mock-project-id-2')
        this.FetchUtils.fetchJson.should.have.been.calledTwice
      })

      it('should cache the history id', async function () {
        const olProjectId = await this.WebApiManager.promises.getHistoryId(
          this.projectId
        )
        this.RedisManager.promises.setCachedHistoryId
          .calledWith(this.projectId, olProjectId)
          .should.equal(true)
      })

      it("should return the project's history id", async function () {
        const olProjectId = await this.WebApiManager.promises.getHistoryId(
          this.projectId
        )
        // The request must hit the project details endpoint with the
        // configured basic-auth credentials.
        expect(this.FetchUtils.fetchJson).to.have.been.calledWithMatch(
          `${this.settings.apis.web.url}/project/${this.projectId}/details`,
          {
            basicAuth: {
              user: this.settings.apis.web.user,
              password: this.settings.apis.web.pass,
            },
          }
        )
        expect(olProjectId).to.equal(this.olProjectId)
      })
    })

    describe('when the web API returns an error', function () {
      beforeEach(function () {
        this.error = new Error('something went wrong')
        this.FetchUtils.fetchJson.rejects(this.error)
        this.RedisManager.promises.getCachedHistoryId.resolves(null)
      })

      it('should throw an error', async function () {
        await expect(
          this.WebApiManager.promises.getHistoryId(this.projectId)
        ).to.be.rejectedWith(this.error)
      })
    })

    describe('when web returns a 404', function () {
      beforeEach(function () {
        // A 404 is translated into a specific error, not retried.
        this.FetchUtils.fetchJson.rejects(
          new RequestFailedError(
            'http://some-url',
            {},
            { status: 404 },
            'Not found'
          )
        )
        this.RedisManager.promises.getCachedHistoryId.resolves(null)
      })

      it('should throw an error', async function () {
        await expect(
          this.WebApiManager.promises.getHistoryId(this.projectId)
        ).to.be.rejectedWith('got a 404 from web api')
      })
    })

    describe('when web returns a failure error code', function () {
      beforeEach(function () {
        this.RedisManager.promises.getCachedHistoryId.resolves(null)
        this.FetchUtils.fetchJson.rejects(
          new RequestFailedError(
            'http://some-url',
            {},
            { status: 500 },
            'Error'
          )
        )
      })

      it('should throw an error', async function () {
        await expect(
          this.WebApiManager.promises.getHistoryId(this.projectId)
        ).to.be.rejectedWith(RequestFailedError)
      })
    })
  })
})