first commit
This commit is contained in:
5
libraries/overleaf-editor-core/.gitignore
vendored
Normal file
5
libraries/overleaf-editor-core/.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
/coverage
|
||||
/node_modules
|
||||
|
||||
# managed by monorepo$ bin/update_build_scripts
|
||||
.npmrc
|
||||
1
libraries/overleaf-editor-core/.nvmrc
Normal file
1
libraries/overleaf-editor-core/.nvmrc
Normal file
@@ -0,0 +1 @@
|
||||
20.18.2
|
||||
10
libraries/overleaf-editor-core/buildscript.txt
Normal file
10
libraries/overleaf-editor-core/buildscript.txt
Normal file
@@ -0,0 +1,10 @@
|
||||
overleaf-editor-core
|
||||
--dependencies=None
|
||||
--docker-repos=gcr.io/overleaf-ops
|
||||
--env-add=
|
||||
--env-pass-through=
|
||||
--esmock-loader=False
|
||||
--is-library=True
|
||||
--node-version=20.18.2
|
||||
--public-repo=False
|
||||
--script-version=4.7.0
|
||||
89
libraries/overleaf-editor-core/index.js
Normal file
89
libraries/overleaf-editor-core/index.js
Normal file
@@ -0,0 +1,89 @@
|
||||
// overleaf-editor-core public entry point.
//
// Requires every model, operation and helper module exactly once, then
// publishes them all on the existing `exports` object in one
// Object.assign call. We mutate `exports` rather than replacing
// `module.exports` so that circular requires observe the same object.

const AddCommentOperation = require('./lib/operation/add_comment_operation')
const Author = require('./lib/author')
const AuthorList = require('./lib/author_list')
const Blob = require('./lib/blob')
const Change = require('./lib/change')
const ChangeRequest = require('./lib/change_request')
const ChangeNote = require('./lib/change_note')
const Chunk = require('./lib/chunk')
const ChunkResponse = require('./lib/chunk_response')
const Comment = require('./lib/comment')
const DeleteCommentOperation = require('./lib/operation/delete_comment_operation')
const File = require('./lib/file')
const FileMap = require('./lib/file_map')
const History = require('./lib/history')
const Label = require('./lib/label')
const AddFileOperation = require('./lib/operation/add_file_operation')
const MoveFileOperation = require('./lib/operation/move_file_operation')
const SetCommentStateOperation = require('./lib/operation/set_comment_state_operation')
const EditFileOperation = require('./lib/operation/edit_file_operation')
const EditNoOperation = require('./lib/operation/edit_no_operation')
const SetFileMetadataOperation = require('./lib/operation/set_file_metadata_operation')
const NoOperation = require('./lib/operation/no_operation')
const Operation = require('./lib/operation')
const RestoreOrigin = require('./lib/origin/restore_origin')
const RestoreFileOrigin = require('./lib/origin/restore_file_origin')
const Origin = require('./lib/origin')
const OtClient = require('./lib/ot_client')
const TextOperation = require('./lib/operation/text_operation')
const EditOperation = require('./lib/operation/edit_operation')
const safePathname = require('./lib/safe_pathname')
const Snapshot = require('./lib/snapshot')
const util = require('./lib/util')
const V2DocVersions = require('./lib/v2_doc_versions')
const {
  InsertOp,
  RemoveOp,
  RetainOp,
  ScanOp,
} = require('./lib/operation/scan_op')
const TrackedChange = require('./lib/file_data/tracked_change')
const TrackedChangeList = require('./lib/file_data/tracked_change_list')
const TrackingProps = require('./lib/file_data/tracking_props')
const Range = require('./lib/range')
const CommentList = require('./lib/file_data/comment_list')
const LazyStringFileData = require('./lib/file_data/lazy_string_file_data')

Object.assign(exports, {
  AddCommentOperation,
  Author,
  AuthorList,
  Blob,
  Change,
  ChangeRequest,
  ChangeNote,
  Chunk,
  ChunkResponse,
  Comment,
  DeleteCommentOperation,
  File,
  FileMap,
  LazyStringFileData,
  History,
  Label,
  AddFileOperation,
  MoveFileOperation,
  SetCommentStateOperation,
  EditFileOperation,
  EditNoOperation,
  SetFileMetadataOperation,
  NoOperation,
  Operation,
  RestoreOrigin,
  RestoreFileOrigin,
  Origin,
  OtClient,
  TextOperation,
  EditOperation,
  safePathname,
  Snapshot,
  util,
  V2DocVersions,
  ScanOp,
  InsertOp,
  RetainOp,
  RemoveOp,
  TrackedChangeList,
  TrackedChange,
  Range,
  CommentList,
  TrackingProps,
})
|
||||
72
libraries/overleaf-editor-core/lib/author.js
Normal file
72
libraries/overleaf-editor-core/lib/author.js
Normal file
@@ -0,0 +1,72 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
|
||||
/**
 * An author of a {@link Change}. Only the user ID is stored durably; the
 * remaining properties (which a user can change over time) are filled in
 * when changes are loaded.
 *
 * At present every author is assumed to have a user ID. If we ever need to
 * model authors known only by name and email (e.g. imported from git), this
 * will have to be generalised; for now this covers what we need.
 */
class Author {
  /**
   * @param {number} id
   * @param {string} email
   * @param {string} name
   */
  constructor(id, email, name) {
    // Validate eagerly so a malformed author fails at construction time.
    assert.number(id, 'bad id')
    assert.string(email, 'bad email')
    assert.string(name, 'bad name')

    this.id = id
    this.email = email
    this.name = name
  }

  /**
   * Build an Author from its raw (serialised) form.
   *
   * @param {Object} [raw]
   * @return {Author | null} null when raw is missing
   */
  static fromRaw(raw) {
    return raw ? new Author(raw.id, raw.email, raw.name) : null
  }

  /**
   * Serialise this Author for storage or transmission.
   *
   * @return {Object}
   */
  toRaw() {
    const { id, email, name } = this
    return { id, email, name }
  }

  /**
   * @return {number}
   */
  getId() {
    return this.id
  }

  /**
   * @return {string}
   */
  getEmail() {
    return this.email
  }

  /**
   * @return {string}
   */
  getName() {
    return this.name
  }
}

module.exports = Author
|
||||
45
libraries/overleaf-editor-core/lib/author_list.js
Normal file
45
libraries/overleaf-editor-core/lib/author_list.js
Normal file
@@ -0,0 +1,45 @@
|
||||
/** @module */
|
||||
|
||||
'use strict'
|
||||
|
||||
const _ = require('lodash')
|
||||
const check = require('check-types')
|
||||
|
||||
const Author = require('./author')
|
||||
|
||||
/**
 * Assert that, ignoring null/undefined entries, the author list is
 * homogeneous: either every member is an integer (v1 user id) or every
 * member is an {@link Author} instance. The type of the first non-null
 * entry decides which check is applied to the rest.
 *
 * @param {Array.<number|Author>} authors author list
 * @param {string} msg assertion message
 */
function assertV1(authors, msg) {
  const present = authors.filter(a => a !== null && a !== undefined)
  if (present.length === 0) return

  // Pick the checker from the first present entry, then apply it uniformly.
  const checker = check.integer(present[0])
    ? check.assert.integer
    : _.partial(check.assert.instance, _, Author)
  for (const author of present) {
    checker(author, msg)
  }
}

/**
 * Assert that every member of the list is a v2 author id (a 24-character
 * lowercase hex string), disregarding null or undefined values.
 *
 * @param {Array.<string>} authors author list
 * @param {string} msg assertion message
 */
function assertV2(authors, msg) {
  _.each(authors, function (author) {
    check.assert.maybe.match(author, /^[0-9a-f]{24}$/, msg)
  })
}

module.exports = { assertV1, assertV2 }
|
||||
109
libraries/overleaf-editor-core/lib/blob.js
Normal file
109
libraries/overleaf-editor-core/lib/blob.js
Normal file
@@ -0,0 +1,109 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
const OError = require('@overleaf/o-error')
|
||||
|
||||
const TextOperation = require('./operation/text_operation')
|
||||
|
||||
/** Raised when a blob with the requested hash cannot be found. */
class NotFoundError extends OError {
  constructor(hash) {
    super(`blob ${hash} not found`, { hash })
    this.hash = hash
  }
}

/**
 * Metadata record for the content of a file.
 */
class Blob {
  static HEX_HASH_RX_STRING = '^[0-9a-f]{40,40}$'
  static HEX_HASH_RX = new RegExp(Blob.HEX_HASH_RX_STRING)

  /**
   * Size of the largest file that we'll read to determine whether we can
   * edit it or not, in bytes. Editability is ultimately decided by the
   * number of characters a file contains, but we have to read the file to
   * count them, so an upper bound on the byte length of a possibly-editable
   * file is useful.
   *
   * The factor of 3 comes from UTF-8: we cannot currently edit files with
   * characters outside the basic multilingual plane, i.e. anything that
   * doesn't fit in a single two-byte UCS-2 code unit. The largest such
   * value, 0xFFFF (not actually a valid character), encodes to three UTF-8
   * bytes (0xEF 0xBF 0xBF), so a file made entirely of three-byte UTF-8
   * codepoints is the worst case; in practice this bound is very
   * conservative.
   *
   * @type {number}
   */
  static MAX_EDITABLE_BYTE_LENGTH_BOUND = 3 * TextOperation.MAX_STRING_LENGTH

  static NotFoundError = NotFoundError

  /**
   * @param {string} hash
   * @param {number} byteLength
   * @param {number} [stringLength]
   */
  constructor(hash, byteLength, stringLength) {
    // Route through the setters so all validation lives in one place.
    this.setHash(hash)
    this.setByteLength(byteLength)
    this.setStringLength(stringLength)
  }

  /**
   * Build a Blob from its raw form, or null when raw is missing.
   */
  static fromRaw(raw) {
    return raw ? new Blob(raw.hash, raw.byteLength, raw.stringLength) : null
  }

  /**
   * Serialise this Blob for storage or transmission.
   */
  toRaw() {
    const { hash, byteLength, stringLength } = this
    return { hash, byteLength, stringLength }
  }

  /**
   * Hex hash.
   * @return {String}
   */
  getHash() {
    return this.hash
  }

  setHash(hash) {
    assert.match(hash, Blob.HEX_HASH_RX, 'bad hash')
    this.hash = hash
  }

  /**
   * Length of the blob in bytes.
   * @return {number}
   */
  getByteLength() {
    return this.byteLength
  }

  setByteLength(byteLength) {
    assert.integer(byteLength, 'bad byteLength')
    this.byteLength = byteLength
  }

  /**
   * UTF-8 length of the blob content, if it appears to be valid UTF-8.
   * @return {number|undefined}
   */
  getStringLength() {
    return this.stringLength
  }

  setStringLength(stringLength) {
    assert.maybe.integer(stringLength, 'bad stringLength')
    this.stringLength = stringLength
  }
}

module.exports = Blob
|
||||
352
libraries/overleaf-editor-core/lib/change.js
Normal file
352
libraries/overleaf-editor-core/lib/change.js
Normal file
@@ -0,0 +1,352 @@
|
||||
'use strict'
|
||||
|
||||
const _ = require('lodash')
|
||||
const assert = require('check-types').assert
|
||||
const pMap = require('p-map')
|
||||
|
||||
const AuthorList = require('./author_list')
|
||||
const Operation = require('./operation')
|
||||
const Origin = require('./origin')
|
||||
const Snapshot = require('./snapshot')
|
||||
const FileMap = require('./file_map')
|
||||
const V2DocVersions = require('./v2_doc_versions')
|
||||
|
||||
/**
 * @import Author from "./author"
 * @import { BlobStore } from "./types"
 */

/**
 * A Change is a list of {@link Operation}s applied atomically by given
 * {@link Author}(s) at a given time.
 */
class Change {
  // A project version looks like "123.4" (two dot-separated integers).
  static PROJECT_VERSION_RX_STRING = '^[0-9]+\\.[0-9]+$'
  static PROJECT_VERSION_RX = new RegExp(Change.PROJECT_VERSION_RX_STRING)

  /**
   * @param {Array.<Operation>} operations
   * @param {Date} timestamp
   * @param {number[] | Author[]} [authors]
   * @param {Origin} [origin]
   * @param {string[]} [v2Authors]
   * @param {string} [projectVersion]
   * @param {V2DocVersions} [v2DocVersions]
   */
  constructor(
    operations,
    timestamp,
    authors,
    origin,
    v2Authors,
    projectVersion,
    v2DocVersions
  ) {
    // All fields go through the setters so validation lives in one place.
    this.setOperations(operations)
    this.setTimestamp(timestamp)
    this.setAuthors(authors || [])
    this.setOrigin(origin)
    this.setV2Authors(v2Authors || [])
    this.setProjectVersion(projectVersion)
    this.setV2DocVersions(v2DocVersions)
  }

  /**
   * For serialization. Optional fields are only included when set.
   *
   * @return {Object}
   */
  toRaw() {
    function toRaw(object) {
      return object.toRaw()
    }
    const raw = {
      operations: this.operations.map(toRaw),
      timestamp: this.timestamp.toISOString(),
      authors: this.authors,
    }
    if (this.v2Authors) raw.v2Authors = this.v2Authors
    if (this.origin) raw.origin = this.origin.toRaw()
    if (this.projectVersion) raw.projectVersion = this.projectVersion
    if (this.v2DocVersions) raw.v2DocVersions = this.v2DocVersions.toRaw()
    return raw
  }

  /**
   * Deserialize a Change; returns null when raw is missing.
   *
   * @param {Object} [raw]
   * @return {Change | null}
   */
  static fromRaw(raw) {
    if (!raw) return null
    assert.array.of.object(raw.operations, 'bad raw.operations')
    assert.nonEmptyString(raw.timestamp, 'bad raw.timestamp')

    // Hack to clean up bad data where author id of some changes was 0, instead of
    // null. The root cause of the bug is fixed in
    // https://github.com/overleaf/write_latex/pull/3804 but the bad data persists
    // on S3
    let authors
    if (raw.authors) {
      authors = raw.authors.map(
        // Null represents an anonymous author
        author => (author === 0 ? null : author)
      )
    }

    return new Change(
      raw.operations.map(Operation.fromRaw),
      new Date(raw.timestamp),
      authors,
      raw.origin && Origin.fromRaw(raw.origin),
      raw.v2Authors,
      raw.projectVersion,
      raw.v2DocVersions && V2DocVersions.fromRaw(raw.v2DocVersions)
    )
  }

  /**
   * @return {Operation[]}
   */
  getOperations() {
    return this.operations
  }

  setOperations(operations) {
    assert.array.of.object(operations, 'Change: bad operations')
    this.operations = operations
  }

  /**
   * @return {Date}
   */
  getTimestamp() {
    return this.timestamp
  }

  setTimestamp(timestamp) {
    assert.date(timestamp, 'Change: bad timestamp')
    this.timestamp = timestamp
  }

  /**
   * @return {Array.<Author>} zero or more
   */
  getAuthors() {
    return this.authors
  }

  setAuthors(authors) {
    assert.array(authors, 'Change: bad author ids array')
    // v1 and v2 author lists are mutually exclusive.
    if (authors.length > 1) {
      assert.maybe.emptyArray(
        this.v2Authors,
        'Change: cannot set v1 authors if v2 authors is set'
      )
    }
    AuthorList.assertV1(authors, 'Change: bad author ids')

    this.authors = authors
  }

  /**
   * @return {Array.<Author>} zero or more
   */
  getV2Authors() {
    return this.v2Authors
  }

  setV2Authors(v2Authors) {
    assert.array(v2Authors, 'Change: bad v2 author ids array')
    // v1 and v2 author lists are mutually exclusive.
    if (v2Authors.length > 1) {
      assert.maybe.emptyArray(
        this.authors,
        'Change: cannot set v2 authors if v1 authors is set'
      )
    }
    AuthorList.assertV2(v2Authors, 'Change: not a v2 author id')
    this.v2Authors = v2Authors
  }

  /**
   * @return {Origin | null | undefined}
   */
  getOrigin() {
    return this.origin
  }

  setOrigin(origin) {
    assert.maybe.instance(origin, Origin, 'Change: bad origin')
    this.origin = origin
  }

  /**
   * @return {string | null | undefined}
   */
  getProjectVersion() {
    return this.projectVersion
  }

  setProjectVersion(projectVersion) {
    assert.maybe.match(
      projectVersion,
      Change.PROJECT_VERSION_RX,
      'Change: bad projectVersion'
    )
    this.projectVersion = projectVersion
  }

  /**
   * @return {V2DocVersions | null | undefined}
   */
  getV2DocVersions() {
    return this.v2DocVersions
  }

  setV2DocVersions(v2DocVersions) {
    assert.maybe.instance(
      v2DocVersions,
      V2DocVersions,
      'Change: bad v2DocVersions'
    )
    this.v2DocVersions = v2DocVersions
  }

  /**
   * If this Change references blob hashes, add them to the given set.
   *
   * @param {Set.<String>} blobHashes
   */
  findBlobHashes(blobHashes) {
    for (const operation of this.operations) {
      operation.findBlobHashes(blobHashes)
    }
  }

  /**
   * If this Change contains any File objects, load them.
   *
   * @param {string} kind see {File#load}
   * @param {BlobStore} blobStore
   * @return {Promise<void>}
   */
  async loadFiles(kind, blobStore) {
    // Sequential on purpose: operations may depend on earlier loads.
    for (const operation of this.operations) {
      await operation.loadFiles(kind, blobStore)
    }
  }

  /**
   * Append an operation to the end of the operations list.
   *
   * @param {Operation} operation
   * @return {this} for chaining
   */
  pushOperation(operation) {
    this.getOperations().push(operation)
    return this
  }

  /**
   * Apply this change to a snapshot. All operations are applied, and then the
   * snapshot version is increased.
   *
   * Recoverable errors (caused by historical bad data) are ignored unless
   * opts.strict is true
   *
   * @param {Snapshot} snapshot modified in place
   * @param {object} opts
   * @param {boolean} [opts.strict] - Do not ignore recoverable errors
   */
  applyTo(snapshot, opts = {}) {
    // eslint-disable-next-line no-unused-vars
    for (const operation of this.iterativelyApplyTo(snapshot, opts)) {
      // Nothing to do: we're just consuming the iterator for the side effects
    }
  }

  /**
   * Generator that applies this change to a snapshot and yields each
   * operation after it has been applied.
   *
   * Recoverable errors (caused by historical bad data) are ignored unless
   * opts.strict is true
   *
   * @param {Snapshot} snapshot modified in place
   * @param {object} opts
   * @param {boolean} [opts.strict] - Do not ignore recoverable errors
   */
  *iterativelyApplyTo(snapshot, opts = {}) {
    assert.object(snapshot, 'bad snapshot')

    for (const operation of this.operations) {
      try {
        operation.applyTo(snapshot, opts)
      } catch (err) {
        // Only these two error types are considered recoverable; anything
        // else (or strict mode) propagates to the caller.
        const recoverable =
          err instanceof Snapshot.EditMissingFileError ||
          err instanceof FileMap.FileNotFoundError
        if (!recoverable || opts.strict) {
          throw err
        }
      }
      yield operation
    }

    // update project version if present in change
    if (this.projectVersion) {
      snapshot.setProjectVersion(this.projectVersion)
    }

    // update doc versions
    if (this.v2DocVersions) {
      snapshot.updateV2DocVersions(this.v2DocVersions)
    }
  }

  /**
   * Transform this change to account for the fact that the other change occurred
   * simultaneously and was applied first.
   *
   * This change is modified in place (by transforming its operations).
   *
   * @param {Change} other
   */
  transformAfter(other) {
    assert.object(other, 'bad other')

    // Transform every one of our operations over every one of the other
    // change's operations, in order; [0] is "our" side of the transform pair.
    const thisOperations = this.getOperations()
    const otherOperations = other.getOperations()
    for (let i = 0; i < otherOperations.length; ++i) {
      for (let j = 0; j < thisOperations.length; ++j) {
        thisOperations[j] = Operation.transform(
          thisOperations[j],
          otherOperations[i]
        )[0]
      }
    }
  }

  /**
   * Deep-copy this change via its raw round trip.
   *
   * @return {Change}
   */
  clone() {
    return Change.fromRaw(this.toRaw())
  }

  /**
   * Serialize for storage, persisting each operation through the blob store.
   *
   * @param {BlobStore} blobStore
   * @param {number} [concurrency] max operations stored in parallel (default 1)
   * @return {Promise<Object>} the raw form with stored operations
   */
  async store(blobStore, concurrency) {
    assert.maybe.number(concurrency, 'bad concurrency')

    const raw = this.toRaw()
    raw.authors = _.uniq(raw.authors)

    const rawOperations = await pMap(
      this.operations,
      operation => operation.store(blobStore),
      { concurrency: concurrency || 1 }
    )
    raw.operations = rawOperations
    return raw
  }

  /**
   * Whether this change's single operation can be composed with the other
   * change's single operation.
   *
   * @param {Change} other
   * @return {boolean}
   */
  canBeComposedWith(other) {
    const operations = this.getOperations()
    const otherOperations = other.getOperations()

    // We ignore complex changes with more than 1 operation
    if (operations.length > 1 || otherOperations.length > 1) return false

    return operations[0].canBeComposedWith(otherOperations[0])
  }
}

module.exports = Change
|
||||
61
libraries/overleaf-editor-core/lib/change_note.js
Normal file
61
libraries/overleaf-editor-core/lib/change_note.js
Normal file
@@ -0,0 +1,61 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
|
||||
const Change = require('./change')
|
||||
|
||||
/**
 * A `ChangeNote` is returned when the server has applied a {@link Change}.
 */
class ChangeNote {
  /**
   * @param {number} baseVersion the new base version for the change
   * @param {Change} [change]
   */
  constructor(baseVersion, change) {
    assert.integer(baseVersion, 'bad baseVersion')
    assert.maybe.instance(change, Change, 'bad change')

    this.baseVersion = baseVersion
    this.change = change
  }

  /**
   * Deserialise a ChangeNote.
   *
   * @param {Object} raw
   * @return {ChangeNote}
   */
  static fromRaw(raw) {
    assert.integer(raw.baseVersion, 'bad raw.baseVersion')
    assert.maybe.object(raw.change, 'bad raw.changes')

    return new ChangeNote(raw.baseVersion, Change.fromRaw(raw.change))
  }

  /**
   * Serialise including the applied change.
   *
   * @return {Object}
   */
  toRaw() {
    return {
      baseVersion: this.baseVersion,
      change: this.change.toRaw(),
    }
  }

  /**
   * Serialise without the (potentially large) change payload.
   *
   * @return {Object}
   */
  toRawWithoutChange() {
    return {
      baseVersion: this.baseVersion,
    }
  }

  getBaseVersion() {
    return this.baseVersion
  }

  /** The project version after this change was applied. */
  getResultVersion() {
    return this.baseVersion + 1
  }

  getChange() {
    return this.change
  }
}

module.exports = ChangeNote
|
||||
90
libraries/overleaf-editor-core/lib/change_request.js
Normal file
90
libraries/overleaf-editor-core/lib/change_request.js
Normal file
@@ -0,0 +1,90 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
|
||||
const AuthorList = require('./author_list')
|
||||
const Change = require('./change')
|
||||
const Operation = require('./operation')
|
||||
|
||||
/**
 * @import Author from "./author"
 */

/**
 * A `ChangeRequest` is a list of {@link Operation}s that the server can apply
 * as a {@link Change}.
 *
 * If the change is marked as `untransformable`, the server will not attempt
 * to transform it when it is out of date (i.e. when the baseVersion no longer
 * matches the project's latest version). For example, a client that must
 * ensure a metadata property is set on exactly one file cannot do so reliably
 * if other clients might change the metadata concurrently. The expectation is
 * that a rejected change will be retried by the client on a later version.
 */
class ChangeRequest {
  /**
   * @param {number} baseVersion
   * @param {Array.<Operation>} operations
   * @param {boolean} [untransformable]
   * @param {number[] | Author[]} [authors]
   */
  constructor(baseVersion, operations, untransformable, authors) {
    assert.integer(baseVersion, 'bad baseVersion')
    assert.array.of.object(operations, 'bad operations')
    assert.maybe.boolean(untransformable, 'ChangeRequest: bad untransformable')
    // TODO remove authors once we have JWTs working --- pass as parameter to
    // makeChange instead
    authors = authors || []

    // check all are the same type
    AuthorList.assertV1(authors, 'bad authors')

    this.authors = authors
    this.baseVersion = baseVersion
    this.operations = operations
    this.untransformable = untransformable || false
  }

  /**
   * Deserialise a ChangeRequest.
   *
   * @param {Object} raw
   * @return {ChangeRequest}
   */
  static fromRaw(raw) {
    assert.array.of.object(raw.operations, 'bad raw.operations')
    return new ChangeRequest(
      raw.baseVersion,
      raw.operations.map(Operation.fromRaw),
      raw.untransformable,
      raw.authors
    )
  }

  /**
   * For serialization.
   *
   * @return {Object}
   */
  toRaw() {
    return {
      baseVersion: this.baseVersion,
      operations: this.operations.map(operation => operation.toRaw()),
      untransformable: this.untransformable,
      authors: this.authors,
    }
  }

  getBaseVersion() {
    return this.baseVersion
  }

  isUntransformable() {
    return this.untransformable
  }

  /** Turn this request into a Change stamped with the given timestamp. */
  makeChange(timestamp) {
    return new Change(this.operations, timestamp, this.authors)
  }
}

module.exports = ChangeRequest
|
||||
172
libraries/overleaf-editor-core/lib/chunk.js
Normal file
172
libraries/overleaf-editor-core/lib/chunk.js
Normal file
@@ -0,0 +1,172 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
const OError = require('@overleaf/o-error')
|
||||
|
||||
const History = require('./history')
|
||||
|
||||
/**
|
||||
* @import { BlobStore, RawChunk } from "./types"
|
||||
* @import Change from "./change"
|
||||
* @import Snapshot from "./snapshot"
|
||||
*/
|
||||
|
||||
/**
 * Raised when a client submits updates based on an end version that no longer
 * matches the latest chunk's end version.
 */
class ConflictingEndVersion extends OError {
  constructor(clientEndVersion, latestEndVersion) {
    super(
      `client sent updates with end_version ${clientEndVersion}` +
        ` but latest chunk has end_version ${latestEndVersion}`,
      { clientEndVersion, latestEndVersion }
    )
    this.clientEndVersion = clientEndVersion
    this.latestEndVersion = latestEndVersion
  }
}

/**
 * Raised when no chunk exists for a project.
 *
 * The optional `message` and `info` arguments let subclasses override these
 * values, keeping backwards compatibility with the previous implementation
 * based on the `overleaf-error-type` library.
 */
class NotFoundError extends OError {
  constructor(projectId, message, info) {
    super(message || `no chunks for project ${projectId}`, info || { projectId })
    this.projectId = projectId
  }
}

/** Raised when the chunk covering a particular version cannot be found. */
class VersionNotFoundError extends NotFoundError {
  constructor(projectId, version) {
    super(projectId, `chunk for ${projectId} v ${version} not found`, {
      projectId,
      version,
    })
    this.projectId = projectId
    this.version = version
  }
}

/** Raised when no chunk exists before the requested timestamp. */
class BeforeTimestampNotFoundError extends NotFoundError {
  constructor(projectId, timestamp) {
    super(projectId, `chunk for ${projectId} timestamp ${timestamp} not found`)
    this.projectId = projectId
    this.timestamp = timestamp
  }
}

/** Raised when a project's chunk has not been persisted yet. */
class NotPersistedError extends NotFoundError {
  constructor(projectId) {
    super(projectId, `chunk for ${projectId} not persisted yet`)
    this.projectId = projectId
  }
}
|
||||
|
||||
/**
 * A Chunk is a {@link History} that is part of a project's overall history.
 * It has a start and an end version that place its History in context.
 */
class Chunk {
  static ConflictingEndVersion = ConflictingEndVersion
  static NotFoundError = NotFoundError
  static VersionNotFoundError = VersionNotFoundError
  static BeforeTimestampNotFoundError = BeforeTimestampNotFoundError
  static NotPersistedError = NotPersistedError

  /**
   * @param {History} history
   * @param {number} startVersion
   */
  constructor(history, startVersion) {
    assert.instance(history, History, 'bad history')
    assert.integer(startVersion, 'bad startVersion')

    this.history = history
    this.startVersion = startVersion
  }

  /**
   * Deserialise a Chunk.
   *
   * @param {RawChunk} raw
   * @return {Chunk}
   */
  static fromRaw(raw) {
    return new Chunk(History.fromRaw(raw.history), raw.startVersion)
  }

  /** Serialise this Chunk for storage or transmission. */
  toRaw() {
    return { history: this.history.toRaw(), startVersion: this.startVersion }
  }

  /**
   * The history for this chunk.
   *
   * @return {History}
   */
  getHistory() {
    return this.history
  }

  /**
   * {@see History#getSnapshot}
   * @return {Snapshot}
   */
  getSnapshot() {
    return this.history.getSnapshot()
  }

  /**
   * {@see History#getChanges}
   * @return {Array.<Change>}
   */
  getChanges() {
    return this.history.getChanges()
  }

  /**
   * {@see History#pushChanges}
   * @param {Array.<Change>} changes
   */
  pushChanges(changes) {
    this.history.pushChanges(changes)
  }

  /**
   * The version of the project after applying all changes in this chunk.
   *
   * @return {number} non-negative, greater than or equal to start version
   */
  getEndVersion() {
    return this.startVersion + this.history.countChanges()
  }

  /**
   * The timestamp of the last change in this chunk, or null when the chunk
   * contains no changes.
   */
  getEndTimestamp() {
    if (!this.history.countChanges()) return null
    return this.history.getChanges().at(-1).getTimestamp()
  }

  /**
   * The version of the project before applying all changes in this chunk.
   *
   * @return {number} non-negative, less than or equal to end version
   */
  getStartVersion() {
    return this.startVersion
  }

  /**
   * {@see History#loadFiles}
   *
   * @param {string} kind
   * @param {BlobStore} blobStore
   * @return {Promise<void>}
   */
  async loadFiles(kind, blobStore) {
    await this.history.loadFiles(kind, blobStore)
  }
}

module.exports = Chunk
|
||||
33
libraries/overleaf-editor-core/lib/chunk_response.js
Normal file
33
libraries/overleaf-editor-core/lib/chunk_response.js
Normal file
@@ -0,0 +1,33 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
const Chunk = require('./chunk')
|
||||
|
||||
/**
|
||||
* The ChunkResponse allows for additional data to be sent back with the chunk
|
||||
* at present there are no extra data to send.
|
||||
*/
|
||||
/**
 * Wrapper used to return a {@link Chunk} together with any additional
 * response data; at present no extra fields are sent.
 */
class ChunkResponse {
  /**
   * @param {Chunk} chunk
   */
  constructor(chunk) {
    assert.instance(chunk, Chunk)
    this.chunk = chunk
  }

  /**
   * Deserialize a response, or return null when there is none.
   *
   * @param {Object} raw
   * @return {ChunkResponse | null}
   */
  static fromRaw(raw) {
    if (!raw) return null

    return new ChunkResponse(Chunk.fromRaw(raw.chunk))
  }

  /**
   * Serialize this response.
   */
  toRaw() {
    return { chunk: this.chunk.toRaw() }
  }

  /**
   * @return {Chunk} the wrapped chunk
   */
  getChunk() {
    return this.chunk
  }
}

module.exports = ChunkResponse
|
||||
206
libraries/overleaf-editor-core/lib/comment.js
Normal file
206
libraries/overleaf-editor-core/lib/comment.js
Normal file
@@ -0,0 +1,206 @@
|
||||
// @ts-check
|
||||
const { RetainOp, InsertOp, RemoveOp } = require('./operation/scan_op')
|
||||
const Range = require('./range')
|
||||
|
||||
/**
|
||||
* @import { CommentRawData } from "./types"
|
||||
* @import TextOperation from "./operation/text_operation"
|
||||
*/
|
||||
|
||||
/**
 * A review comment attached to one or more non-overlapping ranges of a text
 * file. Instances are immutable: every update method returns a new Comment.
 */
class Comment {
  /**
   * @readonly
   * @type {ReadonlyArray<Range>}
   */
  ranges = []

  /**
   * @readonly
   * @type {boolean}
   */
  resolved = false

  /**
   * @param {string} id
   * @param {ReadonlyArray<Range>} ranges
   * @param {boolean} [resolved]
   */
  constructor(id, ranges, resolved = false) {
    this.id = id
    this.resolved = resolved
    // Normalise: sort, drop empty ranges, merge adjacent ones, reject overlaps.
    this.ranges = this.mergeRanges(ranges)
  }

  /**
   * Adjust this comment's ranges for an insertion of `length` characters at
   * position `cursor`.
   *
   * @param {number} cursor
   * @param {number} length
   * @param {boolean} [extendComment] when true, text inserted at the edge of
   *   or inside a range extends that range; when false the ranges are moved
   *   or split around the insertion instead
   * @returns {Comment}
   */
  applyInsert(cursor, length, extendComment = false) {
    let existingRangeExtended = false
    const newRanges = []

    for (const commentRange of this.ranges) {
      if (cursor === commentRange.end) {
        // insert right after the comment
        if (extendComment) {
          newRanges.push(commentRange.extendBy(length))
          existingRangeExtended = true
        } else {
          newRanges.push(commentRange)
        }
      } else if (cursor === commentRange.start) {
        // insert at the start of the comment
        if (extendComment) {
          newRanges.push(commentRange.extendBy(length))
          existingRangeExtended = true
        } else {
          newRanges.push(commentRange.moveBy(length))
        }
      } else if (commentRange.startIsAfter(cursor)) {
        // insert before the comment
        newRanges.push(commentRange.moveBy(length))
      } else if (commentRange.containsCursor(cursor)) {
        // insert is inside the comment
        if (extendComment) {
          newRanges.push(commentRange.extendBy(length))
          existingRangeExtended = true
        } else {
          // Split the range: keep the part before the cursor, skip the
          // inserted text, and keep the part after as a separate range.
          const [rangeUpToCursor, , rangeAfterCursor] = commentRange.insertAt(
            cursor,
            length
          )

          // use current commentRange for the part before the cursor
          newRanges.push(new Range(commentRange.pos, rangeUpToCursor.length))
          // add the part after the cursor as a new range
          newRanges.push(rangeAfterCursor)
        }
      } else {
        // insert is after the comment
        newRanges.push(commentRange)
      }
    }

    // if the insert is not inside any range, add a new range
    if (extendComment && !existingRangeExtended) {
      newRanges.push(new Range(cursor, length))
    }

    return new Comment(this.id, newRanges, this.resolved)
  }

  /**
   * Adjust this comment's ranges for the deletion of `deletedRange`.
   *
   * @param {Range} deletedRange
   * @returns {Comment}
   */
  applyDelete(deletedRange) {
    const newRanges = []

    for (const commentRange of this.ranges) {
      if (commentRange.overlaps(deletedRange)) {
        newRanges.push(commentRange.subtract(deletedRange))
      } else if (commentRange.startsAfter(deletedRange)) {
        newRanges.push(commentRange.moveBy(-deletedRange.length))
      } else {
        newRanges.push(commentRange)
      }
    }

    return new Comment(this.id, newRanges, this.resolved)
  }

  /**
   * Apply each op of a text operation to this comment's ranges in order,
   * tracking the cursor position as retains and inserts advance it.
   *
   * @param {TextOperation} operation
   * @param {string} commentId id of this comment; insertions tagged with it
   *   extend the comment rather than splitting it
   * @returns {Comment}
   */
  applyTextOperation(operation, commentId) {
    /** @type {Comment} */
    let comment = this
    let cursor = 0
    for (const op of operation.ops) {
      if (op instanceof RetainOp) {
        cursor += op.length
      } else if (op instanceof InsertOp) {
        comment = comment.applyInsert(
          cursor,
          op.insertion.length,
          op.commentIds?.includes(commentId)
        )
        cursor += op.insertion.length
      } else if (op instanceof RemoveOp) {
        comment = comment.applyDelete(new Range(cursor, op.length))
      }
    }
    return comment
  }

  /**
   * @returns {boolean} true when no ranges remain
   */
  isEmpty() {
    return this.ranges.length === 0
  }

  /**
   * Serialize this comment; `resolved` is only included when true.
   *
   * @returns {CommentRawData}
   */
  toRaw() {
    /** @type CommentRawData */
    const raw = {
      id: this.id,
      ranges: this.ranges.map(range => range.toRaw()),
    }
    if (this.resolved) {
      raw.resolved = true
    }
    return raw
  }

  /**
   * Sort ranges by start position, drop empty ranges, merge mergeable
   * neighbours, and reject overlapping ones.
   *
   * @param {ReadonlyArray<Range>} ranges
   * @returns {ReadonlyArray<Range>}
   * @throws {Error} if two non-empty ranges overlap
   */
  mergeRanges(ranges) {
    /** @type {Range[]} */
    const mergedRanges = []

    const sortedRanges = [...ranges].sort((a, b) => a.start - b.start)
    for (const range of sortedRanges) {
      // Empty ranges carry no commented text; drop them silently.
      // (A later "cannot be empty" throw was unreachable because of this
      // skip, so it does not exist here.)
      if (range.isEmpty()) {
        continue
      }
      const lastMerged = mergedRanges[mergedRanges.length - 1]
      if (lastMerged?.overlaps(range)) {
        throw new Error('Ranges cannot overlap')
      }
      if (lastMerged?.canMerge(range)) {
        mergedRanges[mergedRanges.length - 1] = lastMerged.merge(range)
      } else {
        mergedRanges.push(range)
      }
    }

    return mergedRanges
  }

  /**
   * Deserialize a comment.
   *
   * @param {CommentRawData} rawComment
   * @returns {Comment}
   */
  static fromRaw(rawComment) {
    return new Comment(
      rawComment.id,
      rawComment.ranges.map(range => Range.fromRaw(range)),
      rawComment.resolved
    )
  }
}

module.exports = Comment
|
||||
34
libraries/overleaf-editor-core/lib/errors.js
Normal file
34
libraries/overleaf-editor-core/lib/errors.js
Normal file
@@ -0,0 +1,34 @@
|
||||
const OError = require('@overleaf/o-error')
|
||||
|
||||
/**
 * Base class for errors that mean an operation can never be applied
 * successfully, whatever the retry; subclassed by the specific failure
 * modes below.
 */
class UnprocessableError extends OError {}

/**
 * An operation could not be applied to its operand.
 */
class ApplyError extends UnprocessableError {
  /**
   * @param {string} message description of the failure
   * @param {*} operation the operation that failed to apply
   * @param {*} operand the value it was being applied to
   */
  constructor(message, operation, operand) {
    super(message)
    this.operation = operation
    this.operand = operand
  }
}

/**
 * The inserted text contained characters outside the Basic Multilingual
 * Plane (per the fixed error message below).
 */
class InvalidInsertionError extends UnprocessableError {
  /**
   * @param {string} str the offending string
   * @param {*} operation the insertion operation
   */
  constructor(str, operation) {
    super('inserted text contains non BMP characters')
    this.str = str
    this.operation = operation
  }
}

/**
 * Applying the operation would have produced a string longer than the
 * allowed maximum.
 */
class TooLongError extends UnprocessableError {
  /**
   * @param {*} operation the operation that would overflow the limit
   * @param {number} resultLength the length the result would have had
   *   (also attached as OError info)
   */
  constructor(operation, resultLength) {
    super('resulting string would be too long', { resultLength })
    this.operation = operation
    this.resultLength = resultLength
  }
}

module.exports = {
  UnprocessableError,
  ApplyError,
  InvalidInsertionError,
  TooLongError,
}
|
||||
280
libraries/overleaf-editor-core/lib/file.js
Normal file
280
libraries/overleaf-editor-core/lib/file.js
Normal file
@@ -0,0 +1,280 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const _ = require('lodash')
|
||||
const assert = require('check-types').assert
|
||||
|
||||
const OError = require('@overleaf/o-error')
|
||||
const FileData = require('./file_data')
|
||||
const HashFileData = require('./file_data/hash_file_data')
|
||||
const StringFileData = require('./file_data/string_file_data')
|
||||
|
||||
/**
|
||||
* @import Blob from "./blob"
|
||||
* @import { BlobStore, ReadonlyBlobStore, RawFileData, RawFile } from "./types"
|
||||
* @import { StringFileRawData, CommentRawData } from "./types"
|
||||
* @import CommentList from "./file_data/comment_list"
|
||||
* @import TextOperation from "./operation/text_operation"
|
||||
* @import TrackedChangeList from "./file_data/tracked_change_list"
|
||||
*
|
||||
* @typedef {{filterTrackedDeletes?: boolean}} FileGetContentOptions
|
||||
*/
|
||||
|
||||
/**
 * Thrown by {@link File#edit} when the underlying file data reports that it
 * is not editable (see FileData#isEditable).
 */
class NotEditableError extends OError {
  constructor() {
    super('File is not editable')
  }
}
|
||||
|
||||
/**
|
||||
* A file in a {@link Snapshot}. A file has both data and metadata. There
|
||||
* are several classes of data that represent the various types of file
|
||||
* data that are supported, namely text and binary, and also the various
|
||||
* states that a file's data can be in, namely:
|
||||
*
|
||||
* 1. Hash only: all we know is the file's hash; this is how we encode file
|
||||
* content in long term storage.
|
||||
* 2. Lazily loaded: the hash of the file, its length, and its type are known,
|
||||
* but its content is not loaded. Operations are cached for application
|
||||
* later.
|
||||
* 3. Eagerly loaded: the content of a text file is fully loaded into memory
|
||||
* as a string.
|
||||
* 4. Hollow: only the byte and/or UTF-8 length of the file are known; this is
|
||||
* used to allow for validation of operations when editing collaboratively
|
||||
* without having to keep file data in memory on the server.
|
||||
*/
|
||||
class File {
  /**
   * Blob hash for an empty file.
   *
   * @type {String}
   */
  static EMPTY_FILE_HASH = 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'

  // Exposed on the class so callers can `catch (File.NotEditableError)`.
  static NotEditableError = NotEditableError

  /**
   * @param {FileData} data
   * @param {Object} [metadata] defaults to an empty object
   */
  constructor(data, metadata) {
    assert.instance(data, FileData, 'File: bad data')

    this.data = data
    this.metadata = {}
    // setMetadata validates that metadata is an object.
    this.setMetadata(metadata || {})
  }

  /**
   * Deserialize a file, dispatching on the raw shape via FileData.fromRaw.
   *
   * @param {RawFile} raw
   * @return {File|null} null when raw is falsy
   */
  static fromRaw(raw) {
    if (!raw) return null
    return new File(FileData.fromRaw(raw), raw.metadata)
  }

  /**
   * Build a hash-only file (content not loaded).
   *
   * @param {string} hash
   * @param {string} [rangesHash]
   * @param {Object} [metadata]
   * @return {File}
   */
  static fromHash(hash, rangesHash, metadata) {
    return new File(new HashFileData(hash, rangesHash), metadata)
  }

  /**
   * Build an eagerly-loaded text file from its full content.
   *
   * @param {string} string
   * @param {Object} [metadata]
   * @return {File}
   */
  static fromString(string, metadata) {
    return new File(new StringFileData(string), metadata)
  }

  /**
   * Build a hollow file that records only lengths, not content.
   *
   * @param {number} byteLength
   * @param {number} [stringLength]
   * @param {Object} [metadata]
   * @return {File}
   */
  static createHollow(byteLength, stringLength, metadata) {
    return new File(FileData.createHollow(byteLength, stringLength), metadata)
  }

  /**
   * Build a lazily-loaded file from stored blobs.
   *
   * @param {Blob} blob
   * @param {Blob} [rangesBlob]
   * @param {Object} [metadata]
   * @return {File}
   */
  static createLazyFromBlobs(blob, rangesBlob, metadata) {
    return new File(FileData.createLazyFromBlobs(blob, rangesBlob), metadata)
  }

  /**
   * Serialize this file: the raw data plus (when non-empty) a deep copy of
   * the metadata.
   *
   * @returns {RawFile}
   */
  toRaw() {
    /** @type RawFile */
    const rawFileData = this.data.toRaw()
    storeRawMetadata(this.metadata, rawFileData)
    return rawFileData
  }

  /**
   * Hexadecimal SHA-1 hash of the file's content, if known.
   *
   * @return {string | null | undefined}
   */
  getHash() {
    return this.data.getHash()
  }

  /**
   * Hexadecimal SHA-1 hash of the ranges content (comments + tracked changes),
   * if known.
   *
   * @return {string | null | undefined}
   */
  getRangesHash() {
    return this.data.getRangesHash()
  }

  /**
   * The content of the file, if it is known and if this file has UTF-8 encoded
   * content.
   *
   * @param {FileGetContentOptions} [opts]
   * @return {string | null | undefined}
   */
  getContent(opts = {}) {
    return this.data.getContent(opts)
  }

  /**
   * Whether this file has string content and is small enough to be edited using
   * {@link TextOperation}s.
   *
   * @return {boolean | null | undefined} null if it is not currently known
   */
  isEditable() {
    return this.data.isEditable()
  }

  /**
   * The length of the file's content in bytes, if known.
   *
   * @return {number | null | undefined}
   */
  getByteLength() {
    return this.data.getByteLength()
  }

  /**
   * The length of the file's content in characters, if known.
   *
   * @return {number | null | undefined}
   */
  getStringLength() {
    return this.data.getStringLength()
  }

  /**
   * Return the metadata object for this file.
   *
   * @return {Object}
   */
  getMetadata() {
    return this.metadata
  }

  /**
   * Set the metadata object for this file.
   *
   * @param {Object} metadata must be an object (asserted)
   */
  setMetadata(metadata) {
    assert.object(metadata, 'File: bad metadata')
    this.metadata = metadata
  }

  /**
   * Edit this file, if possible.
   *
   * @param {TextOperation} textOperation
   * @throws {File.NotEditableError} when the data is not editable
   */
  edit(textOperation) {
    if (!this.data.isEditable()) throw new File.NotEditableError()
    this.data.edit(textOperation)
  }

  /**
   * Get the comments for this file.
   *
   * @return {CommentList}
   */
  getComments() {
    return this.data.getComments()
  }

  /**
   * Get the tracked changes for this file.
   * @return {TrackedChangeList}
   */
  getTrackedChanges() {
    return this.data.getTrackedChanges()
  }

  /**
   * Clone a file.
   *
   * Implemented as a serialize/deserialize round trip, so the clone shares
   * no mutable state with this file.
   *
   * @return {File} a new object of the same type
   */
  clone() {
    return /** @type {File} */ (File.fromRaw(this.toRaw()))
  }

  /**
   * Convert this file's data to the given kind. This may require us to load file
   * size or content from the given blob store, so this is an asynchronous
   * operation.
   *
   * @param {string} kind
   * @param {ReadonlyBlobStore} blobStore
   * @return {Promise.<File>} for this
   */
  async load(kind, blobStore) {
    const data = await this.data.load(kind, blobStore)
    // Replace our data with the converted representation.
    this.data = data
    return this
  }

  /**
   * Store the file's content in the blob store and return a raw file with
   * the corresponding hash. As a side effect, make this object consistent with
   * the hash.
   *
   * @param {BlobStore} blobStore
   * @return {Promise<RawFile>} a raw HashFile
   */
  async store(blobStore) {
    /** @type RawFile */
    const raw = await this.data.store(blobStore)
    storeRawMetadata(this.metadata, raw)
    return raw
  }
}
|
||||
|
||||
/**
 * Attach a deep copy of `metadata` to a raw file object, unless the
 * metadata is empty (empty metadata is simply omitted from the raw form).
 *
 * @param {Object} metadata
 * @param {RawFile} raw
 */
function storeRawMetadata(metadata, raw) {
  if (_.isEmpty(metadata)) return
  raw.metadata = _.cloneDeep(metadata)
}

module.exports = File
|
||||
@@ -0,0 +1,82 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
|
||||
const Blob = require('../blob')
|
||||
const FileData = require('./')
|
||||
|
||||
/**
|
||||
* @import { RawBinaryFileData } from '../types'
|
||||
*/
|
||||
|
||||
/**
 * File data for a binary file: we hold its blob hash and byte length, never
 * its content, and it cannot be edited.
 */
class BinaryFileData extends FileData {
  /**
   * @param {string} hash
   * @param {number} byteLength
   * @see FileData
   */
  constructor(hash, byteLength) {
    super()
    assert.match(hash, Blob.HEX_HASH_RX, 'BinaryFileData: bad hash')
    assert.integer(byteLength, 'BinaryFileData: bad byteLength')
    assert.greaterOrEqual(byteLength, 0, 'BinaryFileData: low byteLength')

    this.hash = hash
    this.byteLength = byteLength
  }

  /**
   * @param {RawBinaryFileData} raw
   * @returns {BinaryFileData}
   */
  static fromRaw(raw) {
    return new BinaryFileData(raw.hash, raw.byteLength)
  }

  /**
   * @inheritdoc
   * @returns {RawBinaryFileData}
   */
  toRaw() {
    return { hash: this.hash, byteLength: this.byteLength }
  }

  /** @inheritdoc */
  getHash() {
    return this.hash
  }

  /** @inheritdoc */
  isEditable() {
    return false
  }

  /** @inheritdoc */
  getByteLength() {
    return this.byteLength
  }

  /** @inheritdoc
   * Already the most loaded form we support for binary data.
   */
  async toEager() {
    return this
  }

  /** @inheritdoc */
  async toLazy() {
    return this
  }

  /** @inheritdoc */
  async toHollow() {
    return FileData.createHollow(this.byteLength, null)
  }

  /** @inheritdoc
   * Returns only the hash; nothing is written here — presumably the blob
   * already exists in the blob store (TODO confirm against callers).
   * @return {Promise<RawFileData>}
   */
  async store() {
    return { hash: this.hash }
  }
}

module.exports = BinaryFileData
|
||||
@@ -0,0 +1,28 @@
|
||||
// @ts-check
|
||||
|
||||
/**
|
||||
* @import { ClearTrackingPropsRawData } from '../types'
|
||||
*/
|
||||
|
||||
/**
 * Marker value for "no tracking": serialises as `{ type: 'none' }` and is
 * equal to any other instance of itself.
 */
class ClearTrackingProps {
  // All instances carry the fixed discriminator 'none'.
  type = 'none'

  /**
   * Structural equality: anything that is a ClearTrackingProps instance.
   *
   * @param {any} other
   * @returns {boolean}
   */
  equals(other) {
    return other instanceof ClearTrackingProps
  }

  /**
   * @returns {ClearTrackingPropsRawData}
   */
  toRaw() {
    return { type: 'none' }
  }
}
|
||||
|
||||
module.exports = ClearTrackingProps
|
||||
124
libraries/overleaf-editor-core/lib/file_data/comment_list.js
Normal file
124
libraries/overleaf-editor-core/lib/file_data/comment_list.js
Normal file
@@ -0,0 +1,124 @@
|
||||
// @ts-check
|
||||
const Comment = require('../comment')
|
||||
|
||||
/**
|
||||
* @import { CommentRawData } from "../types"
|
||||
* @import Range from "../range"
|
||||
*/
|
||||
|
||||
/**
 * A collection of {@link Comment}s keyed by comment id. Mutating updates
 * replace the stored Comment instances in place (Comments themselves are
 * immutable).
 */
class CommentList {
  /**
   * @param {Comment[]} comments later duplicates of an id overwrite earlier
   *   ones
   */
  constructor(comments) {
    this.comments = new Map(comments.map(comment => [comment.id, comment]))
  }

  /**
   * @returns {IterableIterator<Comment>}
   */
  [Symbol.iterator]() {
    return this.comments.values()
  }

  /**
   * Returns the contents of this list in an array
   *
   * @returns {Comment[]}
   */
  toArray() {
    return Array.from(this)
  }

  /**
   * Return the length of the comment list
   *
   * @returns {number}
   */
  get length() {
    return this.comments.size
  }

  /**
   * Return the raw version of the comment list
   *
   * @returns {CommentRawData[]}
   */
  toRaw() {
    const raw = []
    for (const comment of this.comments.values()) {
      raw.push(comment.toRaw())
    }
    return raw
  }

  /**
   * @param {string} id
   * @returns {Comment | undefined}
   */
  getComment(id) {
    return this.comments.get(id)
  }

  /**
   * Add a comment, replacing any existing comment with the same id.
   *
   * @param {Comment} newComment
   */
  add(newComment) {
    this.comments.set(newComment.id, newComment)
  }

  /**
   * @param {string} id
   * @returns {boolean} true if a comment was removed
   */
  delete(id) {
    return this.comments.delete(id)
  }

  /**
   * @param {CommentRawData[]} rawComments
   */
  static fromRaw(rawComments) {
    // Wrap in an arrow so map's extra (index, array) arguments are not
    // forwarded to Comment.fromRaw.
    return new CommentList(rawComments.map(raw => Comment.fromRaw(raw)))
  }

  /**
   * Update every comment for an insertion at `range`.
   *
   * @param {Range} range
   * @param {{ commentIds?: string[] }} opts ids of the comments the inserted
   *   text belongs to
   */
  applyInsert(range, opts = {}) {
    // Read the option into a local; the previous implementation assigned a
    // default back onto `opts`, mutating the caller's object.
    const commentIds = opts.commentIds ?? []
    for (const [commentId, comment] of this.comments) {
      const commentAfterInsert = comment.applyInsert(
        range.pos,
        range.length,
        commentIds.includes(commentId)
      )
      this.comments.set(commentId, commentAfterInsert)
    }
  }

  /**
   * Update every comment for the deletion of `range`.
   *
   * @param {Range} range
   */
  applyDelete(range) {
    for (const [commentId, comment] of this.comments) {
      const commentAfterDelete = comment.applyDelete(range)
      this.comments.set(commentId, commentAfterDelete)
    }
  }

  /**
   * Ids of the comments that have at least one range fully containing
   * `range`.
   *
   * @param {Range} range
   * @returns {string[]}
   */
  idsCoveringRange(range) {
    return Array.from(this.comments.entries())
      .filter(([, comment]) => comment.ranges.some(r => r.contains(range)))
      .map(([id]) => id)
  }
}

module.exports = CommentList
|
||||
134
libraries/overleaf-editor-core/lib/file_data/hash_file_data.js
Normal file
134
libraries/overleaf-editor-core/lib/file_data/hash_file_data.js
Normal file
@@ -0,0 +1,134 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
|
||||
const Blob = require('../blob')
|
||||
const FileData = require('./')
|
||||
/**
|
||||
* @import StringFileData from './string_file_data'
|
||||
* @import LazyStringFileData from './lazy_string_file_data'
|
||||
* @import HollowStringFileData from './hollow_string_file_data'
|
||||
* @import { BlobStore, RawHashFileData } from '../types'
|
||||
*/
|
||||
|
||||
/**
 * File data represented only by its content hash (and optionally a ranges
 * hash); content must be fetched from a blob store to do anything further.
 */
class HashFileData extends FileData {
  /**
   * @constructor
   * @param {string} hash
   * @param {string} [rangesHash]
   * @see FileData
   */
  constructor(hash, rangesHash) {
    super()
    assert.match(hash, Blob.HEX_HASH_RX, 'HashFileData: bad hash')
    if (rangesHash) {
      assert.match(
        rangesHash,
        Blob.HEX_HASH_RX,
        'HashFileData: bad ranges hash'
      )
    }
    this.hash = hash
    this.rangesHash = rangesHash
  }

  /**
   * Deserialize from a raw hash file.
   *
   * @param {RawHashFileData} raw
   */
  static fromRaw(raw) {
    return new HashFileData(raw.hash, raw.rangesHash)
  }

  /**
   * @inheritdoc
   * rangesHash is only included when present.
   * @returns {RawHashFileData}
   */
  toRaw() {
    /** @type RawHashFileData */
    const raw = { hash: this.hash }
    if (this.rangesHash) {
      raw.rangesHash = this.rangesHash
    }
    return raw
  }

  /**
   * @inheritdoc
   * @returns {string}
   */
  getHash() {
    return this.hash
  }

  /**
   * @inheritdoc
   * @returns {string | undefined}
   */
  getRangesHash() {
    return this.rangesHash
  }

  /**
   * @inheritdoc
   * Convert to an eager representation via the lazy one.
   * @param {BlobStore} blobStore
   * @returns {Promise<StringFileData>}
   */
  async toEager(blobStore) {
    const lazyFileData = await this.toLazy(blobStore)
    return await lazyFileData.toEager(blobStore)
  }

  /**
   * @inheritdoc
   * Fetches the content blob and (when we have a rangesHash) the ranges blob
   * in parallel.
   * @param {BlobStore} blobStore
   * @returns {Promise<LazyStringFileData>}
   */
  async toLazy(blobStore) {
    const [blob, rangesBlob] = await Promise.all([
      blobStore.getBlob(this.hash),
      this.rangesHash
        ? blobStore.getBlob(this.rangesHash)
        : Promise.resolve(undefined),
    ])
    // Note the null/undefined distinction: undefined means we never asked
    // for a ranges blob; null means we asked and the store had none.
    if (rangesBlob === null) {
      // We attempted to look up the blob, but none was found
      throw new Error('Failed to look up rangesHash in blobStore')
    }
    if (!blob) throw new Error('blob not found: ' + this.hash)
    // TODO(das7pad): inline 2nd path of FileData.createLazyFromBlobs?
    // @ts-ignore
    return FileData.createLazyFromBlobs(blob, rangesBlob)
  }

  /**
   * @inheritdoc
   * Only the blob's lengths are needed for a hollow representation.
   * @param {BlobStore} blobStore
   * @returns {Promise<HollowStringFileData>}
   */
  async toHollow(blobStore) {
    const blob = await blobStore.getBlob(this.hash)
    if (!blob) {
      throw new Error('Failed to look up hash in blobStore')
    }
    // TODO(das7pad): inline 2nd path of FileData.createHollow?
    // @ts-ignore
    return FileData.createHollow(blob.getByteLength(), blob.getStringLength())
  }

  /**
   * @inheritdoc
   * Returns the hashes we already hold; nothing is written to the store.
   * @returns {Promise<RawHashFileData>}
   */
  async store() {
    /** @type RawHashFileData */
    const raw = { hash: this.hash }
    if (this.rangesHash) {
      raw.rangesHash = this.rangesHash
    }
    return raw
  }
}

module.exports = HashFileData
|
||||
@@ -0,0 +1,55 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
|
||||
const FileData = require('./')
|
||||
|
||||
/**
|
||||
* @import { RawHollowBinaryFileData } from '../types'
|
||||
*/
|
||||
|
||||
/**
 * Placeholder for binary file content: records only the size in bytes,
 * with no hash and no way to recover the actual bytes.
 */
class HollowBinaryFileData extends FileData {
  /**
   * @param {number} byteLength
   * @see FileData
   */
  constructor(byteLength) {
    super()
    assert.integer(byteLength, 'HollowBinaryFileData: bad byteLength')
    assert.greaterOrEqual(byteLength, 0, 'HollowBinaryFileData: low byteLength')
    this.byteLength = byteLength
  }

  /**
   * @param {RawHollowBinaryFileData} rawData
   * @returns {HollowBinaryFileData}
   */
  static fromRaw(rawData) {
    return new HollowBinaryFileData(rawData.byteLength)
  }

  /**
   * @inheritdoc
   * @returns {RawHollowBinaryFileData}
   */
  toRaw() {
    const { byteLength } = this
    return { byteLength }
  }

  /** @inheritdoc */
  isEditable() {
    return false
  }

  /** @inheritdoc */
  getByteLength() {
    return this.byteLength
  }

  /** @inheritdoc */
  async toHollow() {
    return this
  }
}

module.exports = HollowBinaryFileData
|
||||
@@ -0,0 +1,69 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
|
||||
const FileData = require('./')
|
||||
|
||||
/**
|
||||
* @import { RawHollowStringFileData } from '../types'
|
||||
* @import EditOperation from '../operation/edit_operation'
|
||||
*/
|
||||
|
||||
/**
 * Placeholder for text file content: records only the length in
 * characters. Edit operations are applied length-only via
 * {@link EditOperation#applyToLength}, so operations can be validated
 * without holding the text itself.
 */
class HollowStringFileData extends FileData {
  /**
   * @param {number} stringLength
   * @see FileData
   */
  constructor(stringLength) {
    super()
    assert.integer(stringLength, 'HollowStringFileData: bad stringLength')
    assert.greaterOrEqual(
      stringLength,
      0,
      'HollowStringFileData: low stringLength'
    )
    this.stringLength = stringLength
  }

  /**
   * @param {RawHollowStringFileData} rawData
   * @returns {HollowStringFileData}
   */
  static fromRaw(rawData) {
    return new HollowStringFileData(rawData.stringLength)
  }

  /**
   * @inheritdoc
   * @returns {RawHollowStringFileData}
   */
  toRaw() {
    const { stringLength } = this
    return { stringLength }
  }

  /** @inheritdoc */
  isEditable() {
    return true
  }

  /** @inheritdoc */
  getStringLength() {
    return this.stringLength
  }

  /** @inheritdoc */
  async toHollow() {
    return this
  }

  /**
   * Apply an operation by tracking only its effect on the length.
   *
   * @inheritdoc
   * @param {EditOperation} operation
   */
  edit(operation) {
    this.stringLength = operation.applyToLength(this.stringLength)
  }
}

module.exports = HollowStringFileData
|
||||
229
libraries/overleaf-editor-core/lib/file_data/index.js
Normal file
229
libraries/overleaf-editor-core/lib/file_data/index.js
Normal file
@@ -0,0 +1,229 @@
|
||||
// @ts-check
|
||||
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
|
||||
const Blob = require('../blob')
|
||||
|
||||
/**
|
||||
* @import { BlobStore, ReadonlyBlobStore, RawFileData, CommentRawData } from "../types"
|
||||
* @import EditOperation from "../operation/edit_operation"
|
||||
* @import CommentList from "../file_data/comment_list"
|
||||
* @import TrackedChangeList from "../file_data/tracked_change_list"
|
||||
*/
|
||||
|
||||
/**
|
||||
* Helper to represent the content of a file. This class and its subclasses
|
||||
* should be used only through {@link File}.
|
||||
*/
|
||||
class FileData {
|
||||
  /** @see File.fromRaw
   * Dispatch on the raw object's keys to pick the concrete FileData
   * subclass:
   *   hash + byteLength   -> BinaryFileData
   *   hash + stringLength -> LazyStringFileData
   *   hash only           -> HashFileData
   *   byteLength only     -> HollowBinaryFileData
   *   stringLength only   -> HollowStringFileData
   *   content             -> StringFileData
   * @param {RawFileData} raw
   */
  static fromRaw(raw) {
    // TODO(das7pad): can we teach typescript to understand our polymorphism?
    if (Object.prototype.hasOwnProperty.call(raw, 'hash')) {
      if (Object.prototype.hasOwnProperty.call(raw, 'byteLength'))
        // @ts-ignore
        return BinaryFileData.fromRaw(raw)
      if (Object.prototype.hasOwnProperty.call(raw, 'stringLength'))
        // @ts-ignore
        return LazyStringFileData.fromRaw(raw)
      // @ts-ignore
      return HashFileData.fromRaw(raw)
    }
    if (Object.prototype.hasOwnProperty.call(raw, 'byteLength'))
      // @ts-ignore
      return HollowBinaryFileData.fromRaw(raw)
    if (Object.prototype.hasOwnProperty.call(raw, 'stringLength'))
      // @ts-ignore
      return HollowStringFileData.fromRaw(raw)
    if (Object.prototype.hasOwnProperty.call(raw, 'content'))
      // @ts-ignore
      return StringFileData.fromRaw(raw)
    throw new Error('FileData: bad raw object ' + JSON.stringify(raw))
  }

  /** @see File.createHollow
   * A null/undefined stringLength means the data is binary.
   * @param {number} byteLength
   * @param {number} [stringLength]
   */
  static createHollow(byteLength, stringLength) {
    if (stringLength == null) {
      return new HollowBinaryFileData(byteLength)
    }
    return new HollowStringFileData(stringLength)
  }

  /**
   * @see File.createLazyFromBlob
   * A blob without a string length is treated as binary.
   * @param {Blob} blob
   * @param {Blob} [rangesBlob]
   */
  static createLazyFromBlobs(blob, rangesBlob) {
    assert.instance(blob, Blob, 'FileData: bad blob')
    const stringLength = blob.getStringLength()
    if (stringLength == null) {
      return new BinaryFileData(blob.getHash(), blob.getByteLength())
    }
    return new LazyStringFileData(
      blob.getHash(),
      rangesBlob?.getHash(),
      stringLength
    )
  }

  /**
   * Serialize this file data; must be implemented by subclasses.
   *
   * @returns {RawFileData}
   */
  toRaw() {
    throw new Error('FileData: toRaw not implemented')
  }

  /**
   * @see File#getHash
   * @return {string | null | undefined}
   */

  getHash() {
    return null
  }

  /**
   * @see File#getHash
   * @return {string | null | undefined}
   */
  getRangesHash() {
    return null
  }

  /**
   * @see File#getContent
   * @param {import('../file').FileGetContentOptions} [opts]
   * @return {string | null | undefined}
   */
  getContent(opts = {}) {
    return null
  }

  /**
   * @see File#isEditable
   * @return {boolean | null | undefined} null if it is not currently known
   */
  isEditable() {
    return null
  }

  /**
   * @see File#getByteLength
   * @return {number | null | undefined}
   */
  getByteLength() {
    return null
  }

  /**
   * @see File#getStringLength
   * @return {number | null | undefined}
   */
  getStringLength() {
    return null
  }

  /**
   * @see File#edit
   * Default implementation rejects editing; editable subclasses override.
   * @param {EditOperation} editOperation
   */
  edit(editOperation) {
    throw new Error('edit not implemented for ' + JSON.stringify(this))
  }

  /**
   * @function
   * @param {ReadonlyBlobStore} blobStore
   * @return {Promise<FileData>}
   * @abstract
   * @see FileData#load
   */
  async toEager(blobStore) {
    throw new Error('toEager not implemented for ' + JSON.stringify(this))
  }
|
||||
|
||||
/**
|
||||
* @function
|
||||
* @param {ReadonlyBlobStore} blobStore
|
||||
* @return {Promise<FileData>}
|
||||
* @abstract
|
||||
* @see FileData#load
|
||||
*/
|
||||
async toLazy(blobStore) {
|
||||
throw new Error('toLazy not implemented for ' + JSON.stringify(this))
|
||||
}
|
||||
|
||||
/**
|
||||
* @function
|
||||
* @param {ReadonlyBlobStore} blobStore
|
||||
* @return {Promise<FileData>}
|
||||
* @abstract
|
||||
* @see FileData#load
|
||||
*/
|
||||
async toHollow(blobStore) {
|
||||
throw new Error('toHollow not implemented for ' + JSON.stringify(this))
|
||||
}
|
||||
|
||||
/**
|
||||
* @see File#load
|
||||
* @param {string} kind
|
||||
* @param {ReadonlyBlobStore} blobStore
|
||||
* @return {Promise<FileData>}
|
||||
*/
|
||||
async load(kind, blobStore) {
|
||||
if (kind === 'eager') return await this.toEager(blobStore)
|
||||
if (kind === 'lazy') return await this.toLazy(blobStore)
|
||||
if (kind === 'hollow') return await this.toHollow(blobStore)
|
||||
throw new Error('bad file data load kind: ' + kind)
|
||||
}
|
||||
|
||||
/**
|
||||
* @see File#store
|
||||
* @function
|
||||
* @param {BlobStore} blobStore
|
||||
* @return {Promise<RawFileData>} a raw HashFile
|
||||
* @abstract
|
||||
*/
|
||||
async store(blobStore) {
|
||||
throw new Error('store not implemented for ' + JSON.stringify(this))
|
||||
}
|
||||
|
||||
/**
|
||||
* @see File#getComments
|
||||
* @function
|
||||
* @return {CommentList}
|
||||
* @abstract
|
||||
*/
|
||||
getComments() {
|
||||
throw new Error('getComments not implemented for ' + JSON.stringify(this))
|
||||
}
|
||||
|
||||
/**
|
||||
* @see File#getTrackedChanges
|
||||
* @function
|
||||
* @return {TrackedChangeList}
|
||||
* @abstract
|
||||
*/
|
||||
getTrackedChanges() {
|
||||
throw new Error(
|
||||
'getTrackedChanges not implemented for ' + JSON.stringify(this)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = FileData
|
||||
|
||||
const BinaryFileData = require('./binary_file_data')
|
||||
const HashFileData = require('./hash_file_data')
|
||||
const HollowBinaryFileData = require('./hollow_binary_file_data')
|
||||
const HollowStringFileData = require('./hollow_string_file_data')
|
||||
const LazyStringFileData = require('./lazy_string_file_data')
|
||||
const StringFileData = require('./string_file_data')
|
||||
@@ -0,0 +1,190 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const _ = require('lodash')
|
||||
const assert = require('check-types').assert
|
||||
|
||||
const Blob = require('../blob')
|
||||
const FileData = require('./')
|
||||
const EagerStringFileData = require('./string_file_data')
|
||||
const EditOperation = require('../operation/edit_operation')
|
||||
const EditOperationBuilder = require('../operation/edit_operation_builder')
|
||||
|
||||
/**
|
||||
* @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawFileData, RawLazyStringFileData } from '../types'
|
||||
*/
|
||||
|
||||
class LazyStringFileData extends FileData {
  /**
   * A text file whose base content lives in the blob store (identified by
   * `hash`), plus a queue of edit operations not yet flushed to the store.
   * The effective content is blob(hash) with `operations` applied in order.
   *
   * @param {string} hash
   * @param {string | undefined} rangesHash
   * @param {number} stringLength length of the content *after* applying the
   *        cached operations
   * @param {Array.<EditOperation>} [operations] pending edits, oldest first
   * @see FileData
   */
  constructor(hash, rangesHash, stringLength, operations) {
    super()
    assert.match(hash, Blob.HEX_HASH_RX)
    if (rangesHash) {
      assert.match(rangesHash, Blob.HEX_HASH_RX)
    }
    assert.greaterOrEqual(stringLength, 0)
    assert.maybe.array.of.instance(operations, EditOperation)

    this.hash = hash
    this.rangesHash = rangesHash
    this.stringLength = stringLength
    this.operations = operations || []
  }

  /**
   * @param {RawLazyStringFileData} raw
   * @returns {LazyStringFileData}
   */
  static fromRaw(raw) {
    return new LazyStringFileData(
      raw.hash,
      raw.rangesHash,
      raw.stringLength,
      raw.operations && _.map(raw.operations, EditOperationBuilder.fromJSON)
    )
  }

  /**
   * @inheritdoc
   * @returns {RawLazyStringFileData}
   */
  toRaw() {
    /** @type RawLazyStringFileData */
    const raw = {
      hash: this.hash,
      stringLength: this.stringLength,
    }
    // Optional fields are only serialized when present, keeping raw compact.
    if (this.rangesHash) {
      raw.rangesHash = this.rangesHash
    }
    if (this.operations.length) {
      raw.operations = _.map(this.operations, function (operation) {
        return operation.toJSON()
      })
    }
    return raw
  }

  /**
   * @inheritdoc
   * Returns null while edits are pending: `hash` then refers to the pre-edit
   * content, not the current content.
   */
  getHash() {
    if (this.operations.length) return null
    return this.hash
  }

  /**
   * @inheritdoc
   * Returns null while edits are pending, for the same reason as getHash().
   */
  getRangesHash() {
    if (this.operations.length) return null
    return this.rangesHash
  }

  /** @inheritdoc */
  isEditable() {
    return true
  }

  /**
   * For project quota checking, we approximate the byte length by the UTF-8
   * length for hollow files. This isn't strictly speaking correct; it is an
   * underestimate of byte length.
   *
   * @return {number}
   */
  getByteLength() {
    return this.stringLength
  }

  /** @inheritdoc */
  getStringLength() {
    return this.stringLength
  }

  /**
   * Get the cached text operations that are to be applied to this file to get
   * from the content with its last known hash to its latest content.
   *
   * @return {Array.<EditOperation>}
   */
  getOperations() {
    return this.operations
  }

  /**
   * Materialize the content: fetch the base string (and, when present, the
   * ranges blob), then replay the pending operations on top.
   *
   * @inheritdoc
   * @param {ReadonlyBlobStore} blobStore
   * @returns {Promise<EagerStringFileData>}
   */
  async toEager(blobStore) {
    const [content, ranges] = await Promise.all([
      blobStore.getString(this.hash),
      this.rangesHash
        ? /** @type {Promise<RangesBlob>} */ (
            blobStore.getObject(this.rangesHash)
          )
        : Promise.resolve(undefined),
    ])
    const file = new EagerStringFileData(
      content,
      ranges?.comments,
      ranges?.trackedChanges
    )
    applyOperations(this.operations, file)
    return file
  }

  /** @inheritdoc */
  async toLazy() {
    return this
  }

  /** @inheritdoc */
  async toHollow() {
    // TODO(das7pad): inline 2nd path of FileData.createLazyFromBlobs?
    // @ts-ignore
    return FileData.createHollow(null, this.stringLength)
  }

  /** @inheritdoc
   * @param {EditOperation} operation
   */
  edit(operation) {
    this.stringLength = operation.applyToLength(this.stringLength)
    this.operations.push(operation)
  }

  /** @inheritdoc
   * @param {BlobStore} blobStore
   * @return {Promise<RawFileData>}
   */
  async store(blobStore) {
    if (this.operations.length === 0) {
      // Nothing pending: the stored blobs are already up to date.
      /** @type RawFileData */
      const raw = { hash: this.hash }
      if (this.rangesHash) {
        raw.rangesHash = this.rangesHash
      }
      return raw
    }
    // Flush the pending operations by materializing the content and storing
    // it as new blob(s).
    const eager = await this.toEager(blobStore)
    /** @type RawFileData */
    const raw = await eager.store(blobStore)
    // Bug fix: keep this instance consistent after flushing. Previously the
    // operations were cleared *before* the store (losing the edits if the
    // store failed) while hash/rangesHash still pointed at the pre-edit
    // blobs, so getHash() would report stale content afterwards.
    this.hash = raw.hash
    this.rangesHash = raw.rangesHash
    this.operations.length = 0
    return raw
  }
}
|
||||
|
||||
/**
 * Apply a sequence of edit operations, in order, to an eagerly-loaded file.
 *
 * @param {EditOperation[]} operations
 * @param {EagerStringFileData} file
 * @returns {void}
 */
function applyOperations(operations, file) {
  for (const operation of operations) {
    operation.apply(file)
  }
}
|
||||
|
||||
module.exports = LazyStringFileData
|
||||
151
libraries/overleaf-editor-core/lib/file_data/string_file_data.js
Normal file
151
libraries/overleaf-editor-core/lib/file_data/string_file_data.js
Normal file
@@ -0,0 +1,151 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
|
||||
const FileData = require('./')
|
||||
const CommentList = require('./comment_list')
|
||||
const TrackedChangeList = require('./tracked_change_list')
|
||||
|
||||
/**
|
||||
* @import { StringFileRawData, RawFileData, BlobStore, CommentRawData } from "../types"
|
||||
* @import { TrackedChangeRawData, RangesBlob } from "../types"
|
||||
* @import EditOperation from "../operation/edit_operation"
|
||||
*/
|
||||
|
||||
class StringFileData extends FileData {
  /**
   * Eagerly-loaded text file content, together with its comments and tracked
   * changes.
   *
   * @param {string} content
   * @param {CommentRawData[]} [rawComments]
   * @param {TrackedChangeRawData[]} [rawTrackedChanges]
   */
  constructor(content, rawComments = [], rawTrackedChanges = []) {
    super()
    assert.string(content)
    this.content = content
    this.comments = CommentList.fromRaw(rawComments)
    this.trackedChanges = TrackedChangeList.fromRaw(rawTrackedChanges)
  }

  /**
   * @param {StringFileRawData} raw
   * @returns {StringFileData}
   */
  static fromRaw(raw) {
    return new StringFileData(
      raw.content,
      raw.comments || [],
      raw.trackedChanges || []
    )
  }

  /**
   * @inheritdoc
   * @returns {StringFileRawData}
   */
  toRaw() {
    /** @type StringFileRawData */
    const raw = { content: this.content }

    // Ranges are only serialized when present, keeping raw objects compact.
    if (this.comments.length) {
      raw.comments = this.comments.toRaw()
    }

    if (this.trackedChanges.length) {
      raw.trackedChanges = this.trackedChanges.toRaw()
    }

    return raw
  }

  /** @inheritdoc */
  isEditable() {
    return true
  }

  /**
   * Get the content, optionally skipping any text covered by tracked-delete
   * ranges.
   *
   * @inheritdoc
   * @param {import('../file').FileGetContentOptions} [opts]
   */
  getContent(opts = {}) {
    let content = ''
    let cursor = 0
    if (opts.filterTrackedDeletes) {
      for (const tc of this.trackedChanges.asSorted()) {
        if (tc.tracking.type !== 'delete') {
          continue
        }
        if (cursor < tc.range.start) {
          content += this.content.slice(cursor, tc.range.start)
        }
        // skip the tracked change
        cursor = tc.range.end
      }
    }
    if (cursor < this.content.length) {
      content += this.content.slice(cursor)
    }
    return content
  }

  /** @inheritdoc UTF-8 byte length of the content */
  getByteLength() {
    return Buffer.byteLength(this.content)
  }

  /** @inheritdoc UTF-16 code-unit length of the content */
  getStringLength() {
    return this.content.length
  }

  /**
   * @inheritdoc
   * @param {EditOperation} operation */
  edit(operation) {
    operation.apply(this)
  }

  /** @inheritdoc */
  getComments() {
    return this.comments
  }

  /** @inheritdoc */
  getTrackedChanges() {
    return this.trackedChanges
  }

  /**
   * @inheritdoc
   * @returns {Promise<StringFileData>}
   */
  async toEager() {
    return this
  }

  /** @inheritdoc */
  async toHollow() {
    return FileData.createHollow(this.getByteLength(), this.getStringLength())
  }

  /**
   * Store the content as a blob; when comments or tracked changes exist,
   * store them as a separate ranges blob.
   *
   * @inheritdoc
   * @param {BlobStore} blobStore
   * @return {Promise<RawFileData>}
   */
  async store(blobStore) {
    const blob = await blobStore.putString(this.content)
    // Consistency fix: use the CommentList's public `length` (as toRaw()
    // does) instead of reaching into its internal `comments` collection.
    if (this.comments.length || this.trackedChanges.length) {
      /** @type {RangesBlob} */
      const ranges = {
        comments: this.getComments().toRaw(),
        trackedChanges: this.trackedChanges.toRaw(),
      }
      const rangesBlob = await blobStore.putObject(ranges)
      return { hash: blob.getHash(), rangesHash: rangesBlob.getHash() }
    }
    return { hash: blob.getHash() }
  }
}
|
||||
|
||||
module.exports = StringFileData
|
||||
@@ -0,0 +1,89 @@
|
||||
// @ts-check
|
||||
const Range = require('../range')
|
||||
const TrackingProps = require('./tracking_props')
|
||||
|
||||
/**
|
||||
* @import { TrackedChangeRawData } from "../types"
|
||||
*/
|
||||
|
||||
class TrackedChange {
  /**
   * A contiguous span of text carrying tracking metadata (who changed it,
   * what kind of change, and when).
   *
   * @param {Range} range
   * @param {TrackingProps} tracking
   */
  constructor(range, tracking) {
    /**
     * @readonly
     * @type {Range}
     */
    this.range = range
    /**
     * @readonly
     * @type {TrackingProps}
     */
    this.tracking = tracking
  }

  /**
   * Deserialize a tracked change from its raw form.
   *
   * @param {TrackedChangeRawData} raw
   * @returns {TrackedChange}
   */
  static fromRaw(raw) {
    const range = Range.fromRaw(raw.range)
    const tracking = TrackingProps.fromRaw(raw.tracking)
    return new TrackedChange(range, tracking)
  }

  /**
   * Serialize this tracked change.
   *
   * @returns {TrackedChangeRawData}
   */
  toRaw() {
    return {
      range: this.range.toRaw(),
      tracking: this.tracking.toRaw(),
    }
  }

  /**
   * Checks whether the tracked change can be merged with another: same change
   * type, same author, and adjacent/mergeable ranges.
   *
   * @param {TrackedChange} other
   * @returns {boolean}
   */
  canMerge(other) {
    if (!(other instanceof TrackedChange)) {
      return false
    }
    if (this.tracking.type !== other.tracking.type) {
      return false
    }
    if (this.tracking.userId !== other.tracking.userId) {
      return false
    }
    return this.range.touches(other.range) && this.range.canMerge(other.range)
  }

  /**
   * Merges another tracked change into this, updating the range and tracking
   * timestamp (the newer of the two timestamps wins).
   *
   * @param {TrackedChange} other
   * @returns {TrackedChange}
   */
  merge(other) {
    if (!this.canMerge(other)) {
      throw new Error('Cannot merge tracked changes')
    }
    const newerTs =
      this.tracking.ts.getTime() > other.tracking.ts.getTime()
        ? this.tracking.ts
        : other.tracking.ts
    const mergedTracking = new TrackingProps(
      this.tracking.type,
      this.tracking.userId,
      newerTs
    )
    return new TrackedChange(this.range.merge(other.range), mergedTracking)
  }
}
|
||||
|
||||
module.exports = TrackedChange
|
||||
@@ -0,0 +1,276 @@
|
||||
// @ts-check
|
||||
const Range = require('../range')
|
||||
const TrackedChange = require('./tracked_change')
|
||||
const TrackingProps = require('../file_data/tracking_props')
|
||||
|
||||
/**
|
||||
* @import { TrackingDirective, TrackedChangeRawData } from "../types"
|
||||
*/
|
||||
|
||||
class TrackedChangeList {
  /**
   * An ordered collection of non-overlapping tracked-change ranges over a
   * single document, with helpers to rebase them through insert, delete and
   * retain operations.
   *
   * @param {TrackedChange[]} trackedChanges
   */
  constructor(trackedChanges) {
    /**
     * @type {TrackedChange[]}
     */
    this._trackedChanges = trackedChanges
  }

  /**
   * Deserialize a list of tracked changes.
   *
   * @param {TrackedChangeRawData[]} raw
   * @returns {TrackedChangeList}
   */
  static fromRaw(raw) {
    return new TrackedChangeList(raw.map(TrackedChange.fromRaw))
  }

  /**
   * Converts the tracked changes to a raw object
   * @returns {TrackedChangeRawData[]}
   */
  toRaw() {
    return this._trackedChanges.map(change => change.toRaw())
  }

  // Number of tracked changes in the list.
  get length() {
    return this._trackedChanges.length
  }

  /**
   * Returns a defensive copy of the tracked changes. No sorting happens here;
   * the list is sorted by range start whenever _mergeRanges() has run.
   * NOTE(review): the constructor does not sort, so raw input order is
   * preserved until the first mutation — confirm callers pass sorted data.
   *
   * @returns {readonly TrackedChange[]}
   */
  asSorted() {
    // NOTE: Once all code dependent on this is typed, we can just return
    // _trackedChanges.
    return Array.from(this._trackedChanges)
  }

  /**
   * Returns the tracked changes that are fully included in the range
   * @param {Range} range
   * @returns {TrackedChange[]}
   */
  inRange(range) {
    return this._trackedChanges.filter(change => range.contains(change.range))
  }

  /**
   * Returns the tracking props of the first tracked change whose range fully
   * contains the given range, or undefined if there is none.
   * @param {Range} range
   * @returns {TrackingProps | undefined}
   */
  propsAtRange(range) {
    return this._trackedChanges.find(change => change.range.contains(range))
      ?.tracking
  }

  /**
   * Removes the tracked changes that are fully included in the range
   * @param {Range} range
   */
  removeInRange(range) {
    this._trackedChanges = this._trackedChanges.filter(
      change => !range.contains(change.range)
    )
  }

  /**
   * Adds a tracked change to the list, then collapses any newly-adjacent
   * compatible ranges.
   * @param {TrackedChange} trackedChange
   */
  add(trackedChange) {
    this._trackedChanges.push(trackedChange)
    this._mergeRanges()
  }

  /**
   * Collapses consecutive (and compatible) ranges. Sorts the list by range
   * start as a side effect. Throws if any two ranges overlap or if a range
   * is empty — both violate the list's invariants.
   * @returns {void}
   */
  _mergeRanges() {
    if (this._trackedChanges.length < 2) {
      return
    }
    // ranges are non-overlapping so we can sort based on their first indices
    this._trackedChanges.sort((a, b) => a.range.start - b.range.start)
    const newTrackedChanges = [this._trackedChanges[0]]
    for (let i = 1; i < this._trackedChanges.length; i++) {
      const last = newTrackedChanges[newTrackedChanges.length - 1]
      const current = this._trackedChanges[i]
      if (last.range.overlaps(current.range)) {
        throw new Error('Ranges cannot overlap')
      }
      if (current.range.isEmpty()) {
        throw new Error('Tracked changes range cannot be empty')
      }
      if (last.canMerge(current)) {
        newTrackedChanges[newTrackedChanges.length - 1] = last.merge(current)
      } else {
        newTrackedChanges.push(current)
      }
    }
    this._trackedChanges = newTrackedChanges
  }

  /**
   * Rebase the tracked changes through an insertion of `insertedText` at
   * `cursor`. When opts.tracking is set, the inserted span itself becomes a
   * new tracked change.
   *
   * @param {number} cursor
   * @param {string} insertedText
   * @param {{tracking?: TrackingProps}} opts
   */
  applyInsert(cursor, insertedText, opts = {}) {
    const newTrackedChanges = []
    for (const trackedChange of this._trackedChanges) {
      if (
        // The insertion point is before (or exactly at) the start of this
        // tracked change: shift the whole range right.
        trackedChange.range.startIsAfter(cursor) ||
        cursor === trackedChange.range.start
      ) {
        newTrackedChanges.push(
          new TrackedChange(
            trackedChange.range.moveBy(insertedText.length),
            trackedChange.tracking
          )
        )
      } else if (cursor === trackedChange.range.end) {
        // The insertion is at the end of the tracked change. So we don't need
        // to move it.
        newTrackedChanges.push(trackedChange)
      } else if (trackedChange.range.containsCursor(cursor)) {
        // The insertion falls strictly inside this tracked change: split it
        // in three chunks. The middle one is added if it is a tracked insertion
        const [firstRange, , thirdRange] = trackedChange.range.insertAt(
          cursor,
          insertedText.length
        )
        const firstPart = new TrackedChange(firstRange, trackedChange.tracking)
        if (!firstPart.range.isEmpty()) {
          newTrackedChanges.push(firstPart)
        }
        // second part will be added at the end if it is a tracked insertion
        const thirdPart = new TrackedChange(thirdRange, trackedChange.tracking)
        if (!thirdPart.range.isEmpty()) {
          newTrackedChanges.push(thirdPart)
        }
      } else {
        // The tracked change lies entirely before the insertion: unchanged.
        newTrackedChanges.push(trackedChange)
      }
    }

    if (opts.tracking) {
      // This is a new tracked change covering the inserted text
      const newTrackedChange = new TrackedChange(
        new Range(cursor, insertedText.length),
        opts.tracking
      )
      newTrackedChanges.push(newTrackedChange)
    }
    this._trackedChanges = newTrackedChanges
    this._mergeRanges()
  }

  /**
   * Rebase the tracked changes through a deletion of `length` characters
   * starting at `cursor`. Fully-deleted tracked changes are dropped; partial
   * overlaps are trimmed; later changes are shifted left.
   *
   * @param {number} cursor
   * @param {number} length
   */
  applyDelete(cursor, length) {
    const newTrackedChanges = []
    for (const trackedChange of this._trackedChanges) {
      const deletedRange = new Range(cursor, length)
      // Drop tracked changes that are entirely inside the deleted span.
      if (deletedRange.contains(trackedChange.range)) {
        continue
      } else if (deletedRange.overlaps(trackedChange.range)) {
        // Partial overlap: keep whatever survives the subtraction.
        const newRange = trackedChange.range.subtract(deletedRange)
        if (!newRange.isEmpty()) {
          newTrackedChanges.push(
            new TrackedChange(newRange, trackedChange.tracking)
          )
        }
      } else if (trackedChange.range.startIsAfter(cursor)) {
        // Entirely after the deletion: shift left.
        newTrackedChanges.push(
          new TrackedChange(
            trackedChange.range.moveBy(-length),
            trackedChange.tracking
          )
        )
      } else {
        // Entirely before the deletion: unchanged.
        newTrackedChanges.push(trackedChange)
      }
    }
    this._trackedChanges = newTrackedChanges
    this._mergeRanges()
  }

  /**
   * Rebase the tracked changes through a retain over [cursor, cursor+length).
   * Without a tracking directive this is a no-op; with one, existing tracked
   * changes inside the retained span are removed/trimmed, and (when the
   * directive is a TrackingProps) the retained span becomes a new tracked
   * change.
   *
   * @param {number} cursor
   * @param {number} length
   * @param {{tracking?: TrackingDirective}} opts
   */
  applyRetain(cursor, length, opts = {}) {
    // If there's no tracking info, leave everything as-is
    if (!opts.tracking) {
      return
    }
    const newTrackedChanges = []
    const retainedRange = new Range(cursor, length)
    for (const trackedChange of this._trackedChanges) {
      if (retainedRange.contains(trackedChange.range)) {
        // Remove the range
      } else if (retainedRange.overlaps(trackedChange.range)) {
        if (trackedChange.range.contains(retainedRange)) {
          // The retain punches a hole in the middle: keep both sides.
          const [leftRange, rightRange] = trackedChange.range.splitAt(cursor)
          if (!leftRange.isEmpty()) {
            newTrackedChanges.push(
              new TrackedChange(leftRange, trackedChange.tracking)
            )
          }
          if (!rightRange.isEmpty() && rightRange.length > length) {
            newTrackedChanges.push(
              new TrackedChange(
                rightRange.moveBy(length).shrinkBy(length),
                trackedChange.tracking
              )
            )
          }
        } else if (retainedRange.start <= trackedChange.range.start) {
          // overlaps to the left
          const [, reducedRange] = trackedChange.range.splitAt(
            retainedRange.end
          )
          if (!reducedRange.isEmpty()) {
            newTrackedChanges.push(
              new TrackedChange(reducedRange, trackedChange.tracking)
            )
          }
        } else {
          // overlaps to the right
          const [reducedRange] = trackedChange.range.splitAt(cursor)
          if (!reducedRange.isEmpty()) {
            newTrackedChanges.push(
              new TrackedChange(reducedRange, trackedChange.tracking)
            )
          }
        }
      } else {
        // keep the range
        newTrackedChanges.push(trackedChange)
      }
    }
    if (opts.tracking instanceof TrackingProps) {
      // This is a new tracked change covering the retained span
      const newTrackedChange = new TrackedChange(retainedRange, opts.tracking)
      newTrackedChanges.push(newTrackedChange)
    }
    this._trackedChanges = newTrackedChanges
    this._mergeRanges()
  }
}
|
||||
|
||||
module.exports = TrackedChangeList
|
||||
@@ -0,0 +1,67 @@
|
||||
// @ts-check
|
||||
/**
|
||||
* @import { TrackingPropsRawData, TrackingDirective } from "../types"
|
||||
*/
|
||||
|
||||
class TrackingProps {
  /**
   * Immutable metadata for a tracked change: the kind of change, the author,
   * and the timestamp.
   *
   * @param {'insert' | 'delete'} type
   * @param {string} userId
   * @param {Date} ts
   */
  constructor(type, userId, ts) {
    /**
     * @readonly
     * @type {'insert' | 'delete'}
     */
    this.type = type
    /**
     * @readonly
     * @type {string}
     */
    this.userId = userId
    /**
     * @readonly
     * @type {Date}
     */
    this.ts = ts
  }

  /**
   * Deserialize tracking props; the raw timestamp string becomes a Date.
   *
   * @param {TrackingPropsRawData} raw
   * @returns {TrackingProps}
   */
  static fromRaw(raw) {
    return new TrackingProps(raw.type, raw.userId, new Date(raw.ts))
  }

  /**
   * Serialize tracking props; the timestamp is stored as an ISO string.
   *
   * @returns {TrackingPropsRawData}
   */
  toRaw() {
    const { type, userId, ts } = this
    return {
      type,
      userId,
      ts: ts.toISOString(),
    }
  }

  /**
   * Value equality: same type, same author, same instant.
   *
   * @param {TrackingDirective} [other]
   * @returns {boolean}
   */
  equals(other) {
    if (!(other instanceof TrackingProps)) {
      return false
    }
    if (this.type !== other.type) {
      return false
    }
    if (this.userId !== other.userId) {
      return false
    }
    return this.ts.getTime() === other.ts.getTime()
  }
}
|
||||
|
||||
module.exports = TrackingProps
|
||||
390
libraries/overleaf-editor-core/lib/file_map.js
Normal file
390
libraries/overleaf-editor-core/lib/file_map.js
Normal file
@@ -0,0 +1,390 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const _ = require('lodash')
|
||||
const assert = require('check-types').assert
|
||||
const OError = require('@overleaf/o-error')
|
||||
const pMap = require('p-map')
|
||||
|
||||
const File = require('./file')
|
||||
const safePathname = require('./safe_pathname')
|
||||
|
||||
/**
|
||||
* @import { RawFile, RawFileMap } from './types'
|
||||
*
|
||||
* @typedef {Record<String, File | null>} FileMapData
|
||||
*/
|
||||
|
||||
/** Base class for all pathname-related FileMap errors below. */
class PathnameError extends OError {}
|
||||
|
||||
class NonUniquePathnameError extends PathnameError {
  /**
   * Signals that a set of pathnames contains duplicates.
   *
   * @param {string[]} pathnames the offending pathnames
   */
  constructor(pathnames) {
    super('pathnames are not unique', { pathnames })
    this.pathnames = pathnames
  }
}
|
||||
|
||||
class BadPathnameError extends PathnameError {
  /**
   * Signals a syntactically invalid pathname.
   *
   * @param {string} pathname
   * @param {string} reason
   */
  constructor(pathname, reason) {
    // Truncate long pathnames (keep the first and last 5 characters) before
    // recording them, both in the error info and on the instance.
    const shown =
      pathname.length > 10
        ? pathname.slice(0, 5) + '...' + pathname.slice(-5)
        : pathname
    super('invalid pathname', { reason, pathname: shown })
    this.pathname = shown
  }
}
|
||||
|
||||
class PathnameConflictError extends PathnameError {
  /**
   * Signals that a pathname conflicts with another file in the map (see
   * FileMap#wouldConflict: one path being a strict prefix of another).
   *
   * @param {string} pathname
   */
  constructor(pathname) {
    super('pathname conflicts with another file', { pathname })
    this.pathname = pathname
  }
}
|
||||
|
||||
class FileNotFoundError extends PathnameError {
  /**
   * Signals that no file exists at the given pathname.
   *
   * @param {string} pathname
   */
  constructor(pathname) {
    super('file does not exist', { pathname })
    this.pathname = pathname
  }
}
|
||||
|
||||
/**
|
||||
* A set of {@link File}s. Several properties are enforced on the pathnames:
|
||||
*
|
||||
* 1. File names and paths are case sensitive and can differ by case alone. This
|
||||
* is consistent with most Linux file systems, but it is not consistent with
|
||||
* Windows or OS X. Ideally, we would be case-preserving and case insensitive,
|
||||
* like they are. And we used to be, but it caused too many incompatibilities
|
||||
* with the old system, which was case sensitive. See
|
||||
* https://github.com/overleaf/overleaf-ot-prototype/blob/
|
||||
* 19ed046c09f5a4d14fa12b3ea813ce0d977af88a/editor/core/lib/file_map.js
|
||||
* for an implementation of this map with those properties.
|
||||
*
|
||||
* 2. Uniqueness: No two pathnames are the same.
|
||||
*
|
||||
* 3. No type conflicts: A pathname cannot refer to both a file and a directory
|
||||
* within the same snapshot. That is, you can't have pathnames `a` and `a/b` in
|
||||
* the same file map; {@see FileMap#wouldConflict}.
|
||||
*/
|
||||
class FileMap {
|
||||
static PathnameError = PathnameError
|
||||
static NonUniquePathnameError = NonUniquePathnameError
|
||||
static BadPathnameError = BadPathnameError
|
||||
static PathnameConflictError = PathnameConflictError
|
||||
static FileNotFoundError = FileNotFoundError
|
||||
|
||||
  /**
   * @param {Record<String, File | null>} files initial pathname -> File
   *        entries; copied into a prototype-less object
   */
  constructor(files) {
    // create bare object for use as Map
    // http://ryanmorr.com/true-hash-maps-in-javascript/
    /** @type FileMapData */
    this.files = Object.create(null)
    _.assign(this.files, files)
    // Enforce the class invariants up front: unique pathnames and no
    // file/directory type conflicts.
    checkPathnamesAreUnique(this.files)
    checkPathnamesDoNotConflict(this)
  }
|
||||
|
||||
  /**
   * Build a FileMap from its raw serialized form.
   *
   * @param {RawFileMap} raw
   * @returns {FileMap}
   */
  static fromRaw(raw) {
    assert.object(raw, 'bad raw files')
    return new FileMap(_.mapValues(raw, File.fromRaw))
  }
|
||||
|
||||
  /**
   * Convert to raw object for serialization.
   *
   * @return {RawFileMap}
   */
  toRaw() {
    /**
     * @param {File} file
     * @return {RawFile}
     */
    function fileToRaw(file) {
      return file.toRaw()
    }
    // TODO(das7pad): refine types to enforce no nulls in FileMapData
    // @ts-ignore
    return _.mapValues(this.files, fileToRaw)
  }
|
||||
|
||||
  /**
   * Create the given file. Per the class doc, adding at an existing pathname
   * overwrites the existing file; a conflicting pathname throws.
   *
   * @param {string} pathname
   * @param {File} file
   */
  addFile(pathname, file) {
    checkPathname(pathname)
    assert.object(file, 'bad file')
    // TODO(das7pad): make ignoredPathname argument fully optional
    // @ts-ignore
    checkNewPathnameDoesNotConflict(this, pathname)
    addFile(this.files, pathname, file)
  }
|
||||
|
||||
  /**
   * Remove the given file.
   *
   * @param {string} pathname
   * @throws {FileMap.FileNotFoundError} if no file exists at `pathname`
   */
  removeFile(pathname) {
    checkPathname(pathname)

    const key = findPathnameKey(this.files, pathname)
    if (!key) {
      throw new FileMap.FileNotFoundError(pathname)
    }
    delete this.files[key]
  }
|
||||
|
||||
  /**
   * Move or remove a file. If the old and new paths are identical, this has
   * no effect.
   *
   * NOTE(review): contrary to an earlier version of this comment, a missing
   * origin file is NOT a no-op — the code below throws FileNotFoundError.
   *
   * @param {string} pathname
   * @param {string} newPathname if a blank string, {@link FileMap#removeFile}
   * @throws {FileMap.FileNotFoundError} if no file exists at `pathname`
   */
  moveFile(pathname, newPathname) {
    if (pathname === newPathname) return
    if (newPathname === '') return this.removeFile(pathname)
    checkPathname(pathname)
    checkPathname(newPathname)
    checkNewPathnameDoesNotConflict(this, newPathname, pathname)

    const key = findPathnameKey(this.files, pathname)
    if (!key) {
      throw new FileMap.FileNotFoundError(pathname)
    }
    // Capture the file before deleting its old entry, then re-add it under
    // the new pathname.
    const file = this.files[key]
    delete this.files[key]

    addFile(this.files, newPathname, file)
  }
|
||||
|
||||
/**
|
||||
* The number of files in the file map.
|
||||
*
|
||||
* @return {number}
|
||||
*/
|
||||
countFiles() {
|
||||
return _.size(this.files)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a file by its pathname.
|
||||
*
|
||||
* @param {string} pathname
|
||||
* @return {File | null | undefined}
|
||||
*/
|
||||
getFile(pathname) {
|
||||
const key = findPathnameKey(this.files, pathname)
|
||||
if (key) return this.files[key]
|
||||
}
|
||||
|
||||
  /**
   * Whether the given pathname conflicts with any file in the map.
   *
   * Paths conflict in type if one path is a strict prefix of the other path. For
   * example, 'a/b' conflicts with 'a', because in the former case 'a' is a
   * folder, but in the latter case it is a file. Similarly, the pathname 'a/b/c'
   * conflicts with 'a' and 'a/b', but it does not conflict with 'a/b/c', 'a/x',
   * or 'a/b/x'. (In our case, identical paths don't conflict, because AddFile
   * and MoveFile overwrite existing files.)
   *
   * @param {string} pathname
   * @param {string?} ignoredPathname pretend this pathname does not exist
   * @return {boolean} true if adding pathname would create a file/folder clash
   */
  wouldConflict(pathname, ignoredPathname) {
    checkPathname(pathname)
    assert.maybe.string(ignoredPathname)
    const pathnames = this.getPathnames()
    const dirname = pathname + '/'
    // Check the filemap to see whether the supplied pathname is a
    // parent of any entry, or any entry is a parent of the pathname.
    for (let i = 0; i < pathnames.length; i++) {
      // First check if pathname is a strict prefix of pathnames[i] (and that
      // pathnames[i] is not ignored)
      if (
        pathnames[i].startsWith(dirname) &&
        !pathnamesEqual(pathnames[i], ignoredPathname)
      ) {
        return true
      }
      // Now make the reverse check, whether pathnames[i] is a strict prefix of
      // pathname. To avoid expensive string concatenation on each pathname we
      // first perform a partial check with a.startsWith(b), and then do the
      // full check for a subsequent '/' if this passes. This saves about 25%
      // of the runtime. Again only return a conflict if pathnames[i] is not
      // ignored.
      if (
        pathname.startsWith(pathnames[i]) &&
        pathname.length > pathnames[i].length &&
        pathname[pathnames[i].length] === '/' &&
        !pathnamesEqual(pathnames[i], ignoredPathname)
      ) {
        return true
      }
    }
    // No conflicts - after excluding ignoredPathname, there were no entries
    // which were a strict prefix of pathname, and pathname was not a strict
    // prefix of any entry.
    return false
  }
|
||||
|
||||
/** @see Snapshot#getFilePathnames */
|
||||
getPathnames() {
|
||||
return _.keys(this.files)
|
||||
}
|
||||
|
||||
/**
|
||||
* Map the files in this map to new values.
|
||||
* @template T
|
||||
* @param {(file: File | null, path: string) => T} iteratee
|
||||
* @return {Record<String, T>}
|
||||
*/
|
||||
map(iteratee) {
|
||||
return _.mapValues(this.files, iteratee)
|
||||
}
|
||||
|
||||
/**
|
||||
* Map the files in this map to new values asynchronously, with an optional
|
||||
* limit on concurrency.
|
||||
* @template T
|
||||
* @param {(file: File | null | undefined, path: string, pathnames: string[]) => T} iteratee
|
||||
* @param {number} [concurrency]
|
||||
* @return {Promise<Record<String, T>>}
|
||||
*/
|
||||
async mapAsync(iteratee, concurrency) {
|
||||
assert.maybe.number(concurrency, 'bad concurrency')
|
||||
|
||||
const pathnames = this.getPathnames()
|
||||
const files = await pMap(
|
||||
pathnames,
|
||||
file => {
|
||||
return iteratee(this.getFile(file), file, pathnames)
|
||||
},
|
||||
{ concurrency: concurrency || 1 }
|
||||
)
|
||||
return _.zipObject(pathnames, files)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Test whether two pathnames refer to the same entry.
 *
 * Comparison is currently an exact (case-sensitive) string match; this is
 * kept as a function so a different comparison policy could be restored
 * later (the map used to be case-insensitive).
 *
 * @param {string} pathname0
 * @param {string?} pathname1
 * @returns {boolean}
 */
function pathnamesEqual(pathname0, pathname1) {
  return pathname0 === pathname1
}
|
||||
|
||||
/**
 * Whether no two keys of the file map compare equal under pathnamesEqual.
 * With exact string comparison, plain-object keys cannot collide; this is
 * kept for the historical case-insensitive semantics.
 *
 * @param {FileMapData} files
 * @returns {boolean}
 */
function pathnamesAreUnique(files) {
  const keys = _.keys(files)
  const distinct = _.uniqWith(keys, pathnamesEqual)
  return distinct.length === keys.length
}
|
||||
|
||||
/**
 * Throw if the file map contains duplicate pathnames.
 *
 * @param {FileMapData} files
 * @throws {FileMap.NonUniquePathnameError}
 */
function checkPathnamesAreUnique(files) {
  if (!pathnamesAreUnique(files)) {
    throw new FileMap.NonUniquePathnameError(_.keys(files))
  }
}
|
||||
|
||||
/**
 * Validate a pathname, throwing when it is empty or unsafe.
 *
 * @param {string} pathname
 * @throws {FileMap.BadPathnameError} with the reason reported by safePathname
 */
function checkPathname(pathname) {
  assert.nonEmptyString(pathname, 'bad pathname')
  const [isClean, reason] = safePathname.isCleanDebug(pathname)
  if (!isClean) {
    throw new FileMap.BadPathnameError(pathname, reason)
  }
}
|
||||
|
||||
/**
 * Throw if adding pathname to the map would create a file/folder clash.
 *
 * @param {FileMap} fileMap
 * @param {string} pathname
 * @param {string?} ignoredPathname pretend this pathname does not exist
 * @throws {FileMap.PathnameConflictError}
 */
function checkNewPathnameDoesNotConflict(fileMap, pathname, ignoredPathname) {
  if (!fileMap.wouldConflict(pathname, ignoredPathname)) return
  throw new FileMap.PathnameConflictError(pathname)
}
|
||||
|
||||
/**
 * Check that no pathname in the file map is a strict prefix (i.e. a parent
 * folder) of another pathname.
 *
 * @param {FileMap} fileMap
 * @throws {FileMap.BadPathnameError} if any pathname is invalid
 * @throws {FileMap.PathnameConflictError} if one entry would be both a file
 *   and a folder
 */
function checkPathnamesDoNotConflict(fileMap) {
  const pathnames = fileMap.getPathnames()
  // check pathnames for validity first
  pathnames.forEach(checkPathname)
  // convert pathnames to candidate directory names
  const dirnames = []
  for (let i = 0; i < pathnames.length; i++) {
    dirnames[i] = pathnames[i] + '/'
  }
  // sort in lexical order and check if one directory contains another
  dirnames.sort()
  for (let i = 0; i < dirnames.length - 1; i++) {
    if (dirnames[i + 1].startsWith(dirnames[i])) {
      // Strip the trailing slash to recover the original pathname.
      // Bug fix: substr(0, -1) clamps the negative length to 0 and always
      // produced '', so the error used to report an empty pathname;
      // slice(0, -1) drops exactly the trailing '/'.
      const conflictPathname = dirnames[i + 1].slice(0, -1)
      throw new FileMap.PathnameConflictError(conflictPathname)
    }
  }
}
|
||||
|
||||
/**
 * This function is somewhat vestigial: it was used when this map used
 * case-insensitive pathname comparison. We could probably simplify some of the
 * logic in the callers, but in the hope that we will one day return to
 * case-insensitive semantics, we've just left things as-is for now.
 *
 * TODO(das7pad): In a followup, inline this function and make types stricter.
 *
 * @param {FileMapData} files
 * @param {string} pathname
 * @returns {string | undefined} the matching key, or undefined when absent
 */
function findPathnameKey(files, pathname) {
  // A plain `in` check suffices because `files` is a bare object, so
  // prototype properties cannot produce false positives.
  return pathname in files ? pathname : undefined
}
|
||||
|
||||
/**
 * Insert a file at the given pathname, replacing any existing entry whose
 * key matches under findPathnameKey.
 *
 * @param {FileMapData} files mutated in place
 * @param {string} pathname
 * @param {File?} file
 */
function addFile(files, pathname, file) {
  const existingKey = findPathnameKey(files, pathname)
  if (existingKey) {
    delete files[existingKey]
  }
  files[pathname] = file
}
|
||||
|
||||
module.exports = FileMap
|
||||
135
libraries/overleaf-editor-core/lib/history.js
Normal file
135
libraries/overleaf-editor-core/lib/history.js
Normal file
@@ -0,0 +1,135 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
const pMap = require('p-map')
|
||||
|
||||
const Change = require('./change')
|
||||
const Snapshot = require('./snapshot')
|
||||
|
||||
/**
|
||||
* @import { BlobStore } from "./types"
|
||||
*/
|
||||
|
||||
class History {
  /**
   * @constructor
   * @param {Snapshot} snapshot the base state
   * @param {Array.<Change>} changes optional changes applied on top of the
   *   snapshot; defaults to an empty list
   *
   * @classdesc
   * A History is a {@link Snapshot} and a sequence of {@link Change}s that can
   * be applied to produce a new snapshot.
   */
  constructor(snapshot, changes) {
    assert.instance(snapshot, Snapshot, 'bad snapshot')
    assert.maybe.array.of.instance(changes, Change, 'bad changes')

    this.snapshot = snapshot
    /** @type {Array<Change>} */
    this.changes = changes || []
  }

  /**
   * Deserialize a History from its raw form.
   *
   * @param {Object} raw object with `snapshot` and `changes` properties
   * @return {History}
   */
  static fromRaw(raw) {
    return new History(
      Snapshot.fromRaw(raw.snapshot),
      raw.changes.map(Change.fromRaw)
    )
  }

  /**
   * Serialize to a raw object for transmission or storage.
   *
   * @return {Object} with `snapshot` and `changes` properties
   */
  toRaw() {
    function changeToRaw(change) {
      return change.toRaw()
    }
    return {
      snapshot: this.snapshot.toRaw(),
      changes: this.changes.map(changeToRaw),
    }
  }

  /** @return {Snapshot} */
  getSnapshot() {
    return this.snapshot
  }

  /** @return {Array<Change>} */
  getChanges() {
    return this.changes
  }

  /** @return {number} the number of changes in this history */
  countChanges() {
    return this.changes.length
  }

  /**
   * Add changes to this history.
   *
   * @param {Array.<Change>} changes appended in place, in order
   */
  pushChanges(changes) {
    // push.apply appends all elements without allocating a new array
    this.changes.push.apply(this.changes, changes)
  }

  /**
   * If this History references blob hashes, either in the Snapshot or the
   * Changes, add them to the given set.
   *
   * @param {Set.<String>} blobHashes mutated in place
   */
  findBlobHashes(blobHashes) {
    function findChangeBlobHashes(change) {
      change.findBlobHashes(blobHashes)
    }
    this.snapshot.findBlobHashes(blobHashes)
    this.changes.forEach(findChangeBlobHashes)
  }

  /**
   * If this History contains any File objects, load them.
   *
   * @param {string} kind see {File#load}
   * @param {BlobStore} blobStore
   * @return {Promise<void>}
   */
  async loadFiles(kind, blobStore) {
    // Changes load sequentially (one at a time), in parallel with the
    // snapshot load.
    async function loadChangeFiles(changes) {
      for (const change of changes) {
        await change.loadFiles(kind, blobStore)
      }
    }

    await Promise.all([
      this.snapshot.loadFiles(kind, blobStore),
      loadChangeFiles(this.changes),
    ])
  }

  /**
   * Return a version of this history that is suitable for long term storage.
   * This requires that we store the content of file objects in the provided
   * blobStore.
   *
   * @param {BlobStore} blobStore
   * @param {number} [concurrency] applies separately to files, changes and
   * operations
   * @return {Promise<import('overleaf-editor-core/lib/types').RawHistory>}
   */
  async store(blobStore, concurrency) {
    assert.maybe.number(concurrency, 'bad concurrency')

    /**
     * @param {Change} change
     */
    async function storeChange(change) {
      return await change.store(blobStore, concurrency)
    }

    // Snapshot and changes are stored in parallel; the change list is
    // bounded by the requested concurrency (default 1).
    const [rawSnapshot, rawChanges] = await Promise.all([
      this.snapshot.store(blobStore, concurrency),
      pMap(this.changes, storeChange, { concurrency: concurrency || 1 }),
    ])
    return {
      snapshot: rawSnapshot,
      changes: rawChanges,
    }
  }
}
|
||||
|
||||
module.exports = History
|
||||
99
libraries/overleaf-editor-core/lib/label.js
Normal file
99
libraries/overleaf-editor-core/lib/label.js
Normal file
@@ -0,0 +1,99 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
|
||||
/**
|
||||
* @import { RawLabel } from './types'
|
||||
*/
|
||||
|
||||
/**
 * @classdesc
 * A user-configurable label that can be attached to a specific change. Labels
 * are not versioned, and they are not stored alongside the Changes in Chunks.
 * They are instead intended to provide external markers into the history of the
 * project.
 */
class Label {
  /**
   * @constructor
   * @param {string} text
   * @param {number?} authorId
   * @param {Date} timestamp
   * @param {number} version
   */
  constructor(text, authorId, timestamp, version) {
    assert.string(text, 'bad text')
    assert.maybe.integer(authorId, 'bad author id')
    assert.date(timestamp, 'bad timestamp')
    assert.integer(version, 'bad version')

    this.text = text
    this.authorId = authorId
    this.timestamp = timestamp
    this.version = version
  }

  /**
   * Create a Label from its raw form.
   *
   * @param {RawLabel} raw
   * @return {Label}
   */
  static fromRaw(raw) {
    const { text, authorId, timestamp, version } = raw
    return new Label(text, authorId, new Date(timestamp), version)
  }

  /**
   * Convert the Label to raw form for transmission.
   *
   * @return {RawLabel}
   */
  toRaw() {
    const { text, authorId, timestamp, version } = this
    return {
      text,
      authorId,
      timestamp: timestamp.toISOString(),
      version,
    }
  }

  /** @return {string} the label text */
  getText() {
    return this.text
  }

  /**
   * The ID of the author, if any. Note that we now require all saved versions
   * to have an author, but this was not always the case, so we have to allow
   * nulls here for historical reasons.
   *
   * @return {number | null | undefined}
   */
  getAuthorId() {
    return this.authorId
  }

  /** @return {Date} when the label was created */
  getTimestamp() {
    return this.timestamp
  }

  /** @return {number} the version this label points at */
  getVersion() {
    return this.version
  }
}
|
||||
|
||||
module.exports = Label
|
||||
@@ -0,0 +1,150 @@
|
||||
// @ts-check
|
||||
const core = require('../../index')
|
||||
const Comment = require('../comment')
|
||||
const Range = require('../range')
|
||||
const EditOperation = require('./edit_operation')
|
||||
|
||||
/**
|
||||
* @import DeleteCommentOperation from './delete_comment_operation'
|
||||
* @import { CommentRawData, RawAddCommentOperation } from '../types'
|
||||
* @import StringFileData from '../file_data/string_file_data'
|
||||
*/
|
||||
|
||||
/**
 * Adds a comment (an id plus the text ranges it covers and a resolved flag)
 * to a file.
 *
 * @extends EditOperation
 */
class AddCommentOperation extends EditOperation {
  /**
   * @param {string} commentId
   * @param {ReadonlyArray<Range>} ranges text ranges covered by the comment;
   *   empty ranges are rejected
   * @param {boolean} resolved whether the comment starts out resolved
   */
  constructor(commentId, ranges, resolved = false) {
    super()

    // A comment must always cover some text; reject degenerate ranges early.
    for (const range of ranges) {
      if (range.isEmpty()) {
        throw new Error("AddCommentOperation can't be built with empty ranges")
      }
    }

    /** @readonly */
    this.commentId = commentId

    /** @readonly */
    this.ranges = ranges

    /** @readonly */
    this.resolved = resolved
  }

  /**
   * Serialize to raw form. The `resolved` flag is only emitted when true, to
   * keep the raw representation compact.
   *
   * @returns {RawAddCommentOperation}
   */
  toJSON() {
    /** @type RawAddCommentOperation */
    const raw = {
      commentId: this.commentId,
      ranges: this.ranges.map(range => range.toRaw()),
    }
    if (this.resolved) {
      raw.resolved = true
    }
    return raw
  }

  /**
   * Add the comment to the file's comment collection.
   *
   * @param {StringFileData} fileData
   */
  apply(fileData) {
    fileData.comments.add(
      new Comment(this.commentId, this.ranges, this.resolved)
    )
  }

  /**
   * @inheritdoc
   * @param {StringFileData} previousState
   * @returns {EditOperation}
   */
  invert(previousState) {
    const comment = previousState.comments.getComment(this.commentId)
    // If the comment did not exist before, undoing the add means deleting
    // it; otherwise restore the previous comment (ranges and resolved
    // state).
    if (!comment) {
      return new core.DeleteCommentOperation(this.commentId)
    }

    return new core.AddCommentOperation(
      comment.id,
      comment.ranges.slice(),
      comment.resolved
    )
  }

  /**
   * Composition is possible with any comment operation that targets the same
   * comment id.
   *
   * @inheritdoc
   * @param {EditOperation} other
   * @returns {boolean}
   */
  canBeComposedWith(other) {
    return (
      (other instanceof AddCommentOperation &&
        this.commentId === other.commentId) ||
      (other instanceof core.DeleteCommentOperation &&
        this.commentId === other.commentId) ||
      (other instanceof core.SetCommentStateOperation &&
        this.commentId === other.commentId)
    )
  }

  /**
   * @inheritdoc
   * @param {EditOperation} other
   * @returns {EditOperation}
   */
  compose(other) {
    // add followed by delete collapses to the delete
    if (
      other instanceof core.DeleteCommentOperation &&
      other.commentId === this.commentId
    ) {
      return other
    }

    // add followed by add: the later add wins
    if (
      other instanceof AddCommentOperation &&
      other.commentId === this.commentId
    ) {
      return other
    }

    // add followed by set-state: keep the ranges, take the new resolved flag
    if (
      other instanceof core.SetCommentStateOperation &&
      other.commentId === this.commentId
    ) {
      return new AddCommentOperation(
        this.commentId,
        this.ranges,
        other.resolved
      )
    }

    throw new Error(
      `Trying to compose AddCommentOperation with ${other?.constructor?.name}.`
    )
  }

  /**
   * @inheritdoc
   * @param {RawAddCommentOperation} raw
   * @returns {AddCommentOperation}
   */
  static fromJSON(raw) {
    return new AddCommentOperation(
      raw.commentId,
      raw.ranges.map(Range.fromRaw),
      raw.resolved ?? false
    )
  }
}
|
||||
|
||||
module.exports = AddCommentOperation
|
||||
@@ -0,0 +1,78 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
|
||||
const File = require('../file')
|
||||
const Operation = require('./')
|
||||
|
||||
/**
 * Adds a new file to a project.
 */
class AddFileOperation extends Operation {
  /**
   * @param {string} pathname path at which the file is created
   * @param {File} file the file to add
   */
  constructor(pathname, file) {
    super()
    assert.string(pathname, 'bad pathname')
    assert.object(file, 'bad file')

    this.pathname = pathname
    this.file = file
  }

  /**
   * @return {String} the path of the file being added
   */
  getPathname() {
    return this.pathname
  }

  /**
   * Deserialize from raw form.
   *
   * @param {Object} raw
   * @return {AddFileOperation}
   */
  static fromRaw(raw) {
    const file = File.fromRaw(raw.file)
    return new AddFileOperation(raw.pathname, file)
  }

  /**
   * @inheritdoc
   */
  toRaw() {
    return {
      pathname: this.pathname,
      file: this.file.toRaw(),
    }
  }

  /**
   * @inheritdoc
   */
  getFile() {
    return this.file
  }

  /** @inheritdoc */
  findBlobHashes(blobHashes) {
    const hash = this.file.getHash()
    if (hash) {
      blobHashes.add(hash)
    }
  }

  /** @inheritdoc */
  async loadFiles(kind, blobStore) {
    return await this.file.load(kind, blobStore)
  }

  /**
   * Store the file's content in the blob store and return the raw operation
   * that references it.
   */
  async store(blobStore) {
    return {
      pathname: this.pathname,
      file: await this.file.store(blobStore),
    }
  }

  /**
   * @inheritdoc
   */
  applyTo(snapshot) {
    snapshot.addFile(this.pathname, this.file.clone())
  }
}
|
||||
module.exports = AddFileOperation
|
||||
@@ -0,0 +1,70 @@
|
||||
// @ts-check
|
||||
const core = require('../../index')
|
||||
const EditNoOperation = require('./edit_no_operation')
|
||||
const EditOperation = require('./edit_operation')
|
||||
|
||||
/**
|
||||
* @import AddCommentOperation from './add_comment_operation'
|
||||
* @import StringFileData from '../file_data/string_file_data'
|
||||
* @import { RawDeleteCommentOperation } from '../types'
|
||||
*/
|
||||
|
||||
/**
 * Removes a comment from a file.
 *
 * @extends EditOperation
 */
class DeleteCommentOperation extends EditOperation {
  /**
   * @param {string} commentId id of the comment to delete
   */
  constructor(commentId) {
    super()
    this.commentId = commentId
  }

  /**
   * @inheritdoc
   * @returns {RawDeleteCommentOperation}
   */
  toJSON() {
    return { deleteComment: this.commentId }
  }

  /**
   * @inheritdoc
   * @param {StringFileData} fileData
   */
  apply(fileData) {
    fileData.comments.delete(this.commentId)
  }

  /**
   * Undo: re-add the deleted comment, or do nothing if it never existed in
   * the previous state.
   *
   * @inheritdoc
   * @param {StringFileData} previousState
   * @returns {AddCommentOperation | EditNoOperation}
   */
  invert(previousState) {
    const deleted = previousState.comments.getComment(this.commentId)
    if (!deleted) {
      return new EditNoOperation()
    }

    return new core.AddCommentOperation(
      deleted.id,
      deleted.ranges.slice(),
      deleted.resolved
    )
  }

  /**
   * @inheritdoc
   * @param {RawDeleteCommentOperation} raw
   * @returns {DeleteCommentOperation}
   */
  static fromJSON(raw) {
    return new DeleteCommentOperation(raw.deleteComment)
  }
}
|
||||
|
||||
module.exports = DeleteCommentOperation
|
||||
@@ -0,0 +1,105 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
/**
|
||||
* @import EditOperation from './edit_operation'
|
||||
* @import { RawEditFileOperation } from '../types'
|
||||
* @import Snapshot from "../snapshot"
|
||||
*/
|
||||
|
||||
const Operation = require('./')
|
||||
const EditOperationBuilder = require('./edit_operation_builder')
|
||||
|
||||
/**
 * Edit a file in place. It is a wrapper around a single EditOperation.
 */
class EditFileOperation extends Operation {
  /**
   * @param {string} pathname path of the file being edited
   * @param {EditOperation} operation the edit to apply to that file
   */
  constructor(pathname, operation) {
    super()
    this.pathname = pathname
    this.operation = operation
  }

  /**
   * @inheritdoc
   */
  toRaw() {
    // The raw form is the raw edit operation with the pathname mixed in.
    return {
      pathname: this.pathname,
      ...this.operation.toJSON(),
    }
  }

  /**
   * Deserialize an EditFileOperation.
   *
   * @param {RawEditFileOperation} raw
   * @return {EditFileOperation}
   */
  static fromRaw(raw) {
    return new EditFileOperation(
      raw.pathname,
      EditOperationBuilder.fromJSON(raw)
    )
  }

  /** @return {string} */
  getPathname() {
    return this.pathname
  }

  /** @return {EditOperation} the wrapped edit operation */
  getOperation() {
    return this.operation
  }

  /**
   * @inheritdoc
   * @param {Snapshot} snapshot
   */
  applyTo(snapshot) {
    // TODO(das7pad): can we teach typescript our polymorphism?
    // @ts-ignore
    snapshot.editFile(this.pathname, this.operation)
  }

  /**
   * @inheritdoc
   * @param {Operation} other
   * @return {boolean}
   */
  canBeComposedWithForUndo(other) {
    // canBeComposedWith narrows `other` to EditFileOperation (same file), so
    // accessing other.operation afterwards is safe.
    return (
      this.canBeComposedWith(other) &&
      this.operation.canBeComposedWithForUndo(other.operation)
    )
  }

  /**
   * @inheritdoc
   * @param {Operation} other
   * @return {other is EditFileOperation}
   */
  canBeComposedWith(other) {
    // Ensure that other operation is an edit file operation
    if (!(other instanceof EditFileOperation)) return false
    // Ensure that both operations are editing the same file
    if (this.getPathname() !== other.getPathname()) return false

    return this.operation.canBeComposedWith(other.operation)
  }

  /**
   * @inheritdoc
   * @param {EditFileOperation} other
   */
  compose(other) {
    return new EditFileOperation(
      this.pathname,
      this.operation.compose(other.operation)
    )
  }
}
|
||||
|
||||
module.exports = EditFileOperation
|
||||
@@ -0,0 +1,29 @@
|
||||
const EditOperation = require('./edit_operation')
|
||||
|
||||
/**
|
||||
* @import { RawEditNoOperation } from '../types'
|
||||
*/
|
||||
|
||||
/**
 * An edit operation that changes nothing. Serves as the identity element,
 * e.g. as a transform or inversion result when there is nothing to do.
 */
class EditNoOperation extends EditOperation {
  /**
   * @inheritdoc
   * @param {StringFileData} fileData untouched
   */
  apply(fileData) {}

  /**
   * @inheritdoc
   * @returns {RawEditNoOperation}
   */
  toJSON() {
    return { noOp: true }
  }

  /** @returns {EditNoOperation} */
  static fromJSON() {
    return new EditNoOperation()
  }
}
|
||||
|
||||
module.exports = EditNoOperation
|
||||
@@ -0,0 +1,91 @@
|
||||
// @ts-check
|
||||
/**
|
||||
* @import FileData from '../file_data'
|
||||
* @import { RawEditOperation } from '../types'
|
||||
*/
|
||||
|
||||
class EditOperation {
  constructor() {
    // EditOperation is abstract: refuse direct instantiation.
    if (this.constructor === EditOperation) {
      throw new Error('Cannot instantiate abstract class')
    }
  }

  /**
   * Converts operation into a JSON value.
   * @abstract
   * @returns {RawEditOperation}
   * @throws {Error} unless overridden by a subclass
   */
  toJSON() {
    throw new Error('Abstract method not implemented')
  }

  /**
   * Apply this operation to a file's data, mutating it in place.
   * @abstract
   * @param {FileData} fileData
   * @throws {Error} unless overridden by a subclass
   */
  apply(fileData) {
    throw new Error('Abstract method not implemented')
  }

  /**
   * Determine the effect of this operation on the length of the text.
   *
   * NB: This is an Overleaf addition to the original OT system.
   *
   * The base implementation reports no change; subclasses that insert or
   * remove text override it.
   *
   * @param {number} length of the original string; non-negative
   * @return {number} length of the new string; non-negative
   */
  applyToLength(length) {
    return length
  }

  /**
   * Computes the inverse of an operation. The inverse of an operation is the
   * operation that reverts the effects of the operation, e.g. when you have an
   * operation 'insert("hello "); skip(6);' then the inverse is 'remove("hello ");
   * skip(6);'. The inverse should be used for implementing undo.
   * @abstract
   * @param {FileData} previousState
   * @returns {EditOperation}
   * @throws {Error} unless overridden by a subclass
   */
  invert(previousState) {
    throw new Error('Abstract method not implemented')
  }

  /**
   * Whether this operation can be merged with `other` into a single
   * operation via {@link EditOperation#compose}. The base class defaults
   * to false.
   *
   * @param {EditOperation} other
   * @returns {boolean}
   */
  canBeComposedWith(other) {
    return false
  }

  /**
   * When you use ctrl-z to undo your latest changes, you expect the program not
   * to undo every single keystroke but to undo your last sentence you wrote at
   * a stretch or the deletion you did by holding the backspace key down. This
   * can be implemented by composing operations on the undo stack. This
   * method can help decide whether two operations should be composed. It
   * returns true if the operations are consecutive insert operations or both
   * operations delete text at the same position. You may want to include other
   * factors like the time since the last change in your decision.
   * The base class defaults to false.
   * @param {EditOperation} other
   */
  canBeComposedWithForUndo(other) {
    return false
  }

  /**
   * Compose merges two consecutive operations into one operation, that
   * preserves the changes of both. Or, in other words, for each input string S
   * and a pair of consecutive operations A and B,
   * apply(apply(S, A), B) = apply(S, compose(A, B)) must hold.
   * @abstract
   * @param {EditOperation} other
   * @returns {EditOperation}
   * @throws {Error} unless overridden by a subclass
   */
  compose(other) {
    throw new Error('Abstract method not implemented')
  }
}
|
||||
|
||||
module.exports = EditOperation
|
||||
@@ -0,0 +1,93 @@
|
||||
// @ts-check
|
||||
|
||||
/**
|
||||
* @import EditOperation from './edit_operation'
|
||||
* @import { RawTextOperation, RawAddCommentOperation, RawEditOperation } from '../types'
|
||||
* @import { RawDeleteCommentOperation, RawSetCommentStateOperation } from '../types'
|
||||
*/
|
||||
|
||||
const DeleteCommentOperation = require('./delete_comment_operation')
|
||||
const AddCommentOperation = require('./add_comment_operation')
|
||||
const TextOperation = require('./text_operation')
|
||||
const SetCommentStateOperation = require('./set_comment_state_operation')
|
||||
const EditNoOperation = require('./edit_no_operation')
|
||||
|
||||
class EditOperationBuilder {
  /**
   * Deserialize a raw edit operation into the matching EditOperation
   * subclass, dispatching on the shape of the raw object.
   *
   * Note on ordering: the add-comment shape (commentId + ranges) is tested
   * before the set-comment-state shape (commentId + resolved), so a raw
   * object carrying both `ranges` and `resolved` parses as an add.
   *
   * @param {RawEditOperation} raw
   * @returns {EditOperation}
   * @throws {Error} when the raw object matches no known operation shape
   */
  static fromJSON(raw) {
    if (isTextOperation(raw)) {
      return TextOperation.fromJSON(raw)
    }
    if (isRawAddCommentOperation(raw)) {
      return AddCommentOperation.fromJSON(raw)
    }
    if (isRawDeleteCommentOperation(raw)) {
      return DeleteCommentOperation.fromJSON(raw)
    }
    if (isRawSetCommentStateOperation(raw)) {
      return SetCommentStateOperation.fromJSON(raw)
    }
    if (isRawEditNoOperation(raw)) {
      return EditNoOperation.fromJSON()
    }
    throw new Error('Unsupported operation in EditOperationBuilder.fromJSON')
  }
}
|
||||
|
||||
/**
 * Type guard: does the raw operation carry a text-operation payload?
 *
 * @param {unknown} raw
 * @returns {raw is RawTextOperation}
 */
function isTextOperation(raw) {
  if (raw === null || typeof raw !== 'object') return false
  return 'textOperation' in raw
}
|
||||
|
||||
/**
 * Type guard: does the raw operation look like an add-comment operation
 * (a comment id plus an array of ranges)?
 *
 * @param {unknown} raw
 * @returns {raw is RawAddCommentOperation}
 */
function isRawAddCommentOperation(raw) {
  if (raw === null || typeof raw !== 'object') return false
  if (!('commentId' in raw) || !('ranges' in raw)) return false
  return Array.isArray(raw.ranges)
}
|
||||
|
||||
/**
 * Type guard: does the raw operation look like a delete-comment operation?
 *
 * @param {unknown} raw
 * @returns {raw is RawDeleteCommentOperation}
 */
function isRawDeleteCommentOperation(raw) {
  if (raw === null || typeof raw !== 'object') return false
  return 'deleteComment' in raw
}
|
||||
|
||||
/**
 * Type guard: does the raw operation look like a set-comment-state
 * operation (a comment id plus a boolean resolved flag)?
 *
 * @param {unknown} raw
 * @returns {raw is RawSetCommentStateOperation}
 */
function isRawSetCommentStateOperation(raw) {
  if (raw === null || typeof raw !== 'object') return false
  if (!('commentId' in raw) || !('resolved' in raw)) return false
  return typeof raw.resolved === 'boolean'
}
|
||||
|
||||
/**
 * Type guard: does the raw operation look like a no-op?
 *
 * @param {unknown} raw
 * @returns {raw is RawEditNoOperation}
 */
function isRawEditNoOperation(raw) {
  if (raw === null || typeof raw !== 'object') return false
  return 'noOp' in raw
}
|
||||
|
||||
module.exports = EditOperationBuilder
|
||||
@@ -0,0 +1,162 @@
|
||||
// @ts-check
|
||||
const core = require('../..')
|
||||
const Comment = require('../comment')
|
||||
const EditNoOperation = require('./edit_no_operation')
|
||||
const TextOperation = require('./text_operation')
|
||||
|
||||
/**
|
||||
* @import EditOperation from './edit_operation'
|
||||
*/
|
||||
|
||||
class EditOperationTransformer {
  /**
   * Transform two edit operations against each other.
   *
   * Produces [a', b'] such that applying a then b' reaches the same state
   * as applying b then a'.
   *
   * @param {EditOperation} a
   * @param {EditOperation} b
   * @returns {[EditOperation, EditOperation]}
   */
  static transform(a, b) {
    // Read the comment-operation classes off `core` lazily, at call time,
    // to avoid a circular-import problem at module load.
    const {
      AddCommentOperation,
      DeleteCommentOperation,
      SetCommentStateOperation,
    } = core

    // A no-op never conflicts with anything; pass both through unchanged.
    if (a instanceof EditNoOperation || b instanceof EditNoOperation) {
      return [a, b]
    }

    // Each entry handles one unordered pair of operation types; the first
    // transformer whose type pair matches (in either order) is used.
    const transformers = [
      createTransformer(TextOperation, TextOperation, TextOperation.transform),
      createTransformer(TextOperation, DeleteCommentOperation, noConflict),
      createTransformer(TextOperation, SetCommentStateOperation, noConflict),
      createTransformer(TextOperation, AddCommentOperation, (a, b) => {
        // apply the text operation to the comment
        const originalComment = new Comment(b.commentId, b.ranges, b.resolved)
        const movedComment = originalComment.applyTextOperation(a, b.commentId)
        return [
          a,
          new AddCommentOperation(
            movedComment.id,
            movedComment.ranges,
            movedComment.resolved
          ),
        ]
      }),
      createTransformer(AddCommentOperation, AddCommentOperation, (a, b) => {
        // Two adds of the same comment id: by convention the second wins.
        if (a.commentId === b.commentId) {
          return [new EditNoOperation(), b]
        }
        return [a, b]
      }),
      createTransformer(AddCommentOperation, DeleteCommentOperation, (a, b) => {
        if (a.commentId === b.commentId) {
          // delete wins
          return [new EditNoOperation(), b]
        }
        return [a, b]
      }),
      createTransformer(
        AddCommentOperation,
        SetCommentStateOperation,
        (a, b) => {
          if (a.commentId === b.commentId) {
            // Fold the state change into the add so both sides converge.
            const newA = new AddCommentOperation(
              a.commentId,
              a.ranges,
              b.resolved
            )
            return [newA, b]
          }
          return [a, b]
        }
      ),
      createTransformer(
        DeleteCommentOperation,
        DeleteCommentOperation,
        (a, b) => {
          if (a.commentId === b.commentId) {
            // if both operations delete the same comment, we can ignore both
            return [new EditNoOperation(), new EditNoOperation()]
          }
          return [a, b]
        }
      ),
      createTransformer(
        DeleteCommentOperation,
        SetCommentStateOperation,
        (a, b) => {
          if (a.commentId === b.commentId) {
            // delete wins
            return [a, new EditNoOperation()]
          }
          return [a, b]
        }
      ),
      createTransformer(
        SetCommentStateOperation,
        SetCommentStateOperation,
        (a, b) => {
          if (a.commentId !== b.commentId) {
            return [a, b]
          }

          // Identical state changes cancel out.
          if (a.resolved === b.resolved) {
            return [new EditNoOperation(), new EditNoOperation()]
          }

          // Conflicting states: resolve only if BOTH sides resolved, so an
          // unresolve on either side wins the conflict.
          const shouldResolve = a.resolved && b.resolved
          if (a.resolved === shouldResolve) {
            return [a, new EditNoOperation()]
          } else {
            return [new EditNoOperation(), b]
          }
        }
      ),
    ]

    for (const transformer of transformers) {
      const result = transformer(a, b)
      if (result) {
        return result
      }
    }

    // No transformer matched this pair of operation types.
    throw new Error(
      `Transform not implemented for ${a.constructor.name}○${b.constructor.name}`
    )
  }
}
|
||||
|
||||
/**
 * Build a symmetric transform function for one pair of operation classes.
 *
 * The returned function applies `transformer` when its arguments match
 * (ClassA, ClassB) in either order — swapping the result back when the
 * arguments arrived reversed — and returns `false` when the pair does not
 * match at all.
 *
 * @template {EditOperation} X
 * @template {EditOperation} Y
 * @param {new(...args: any[]) => X} ClassA
 * @param {new(...args: any[]) => Y} ClassB
 * @param {(a: X, b: Y) => [EditOperation, EditOperation]} transformer
 * @returns {(a: EditOperation, b: EditOperation) => [EditOperation, EditOperation] | false}
 */
function createTransformer(ClassA, ClassB, transformer) {
  return (opA, opB) => {
    if (opA instanceof ClassA && opB instanceof ClassB) {
      return transformer(opA, opB)
    }
    if (opB instanceof ClassA && opA instanceof ClassB) {
      // Arguments arrived in the opposite order: transform them swapped,
      // then swap the primed results back into caller order.
      const [primeB, primeA] = transformer(opB, opA)
      return [primeA, primeB]
    }
    // Not a (ClassA, ClassB) pair in either order.
    return false
  }
}
|
||||
|
||||
/**
 * Transform function for operation pairs that never interact: both
 * operations pass through unchanged.
 *
 * @param {EditOperation} a
 * @param {EditOperation} b
 * @returns {[EditOperation, EditOperation]}
 */
function noConflict(a, b) {
  const unchanged = [a, b]
  return unchanged
}
|
||||
|
||||
module.exports = EditOperationTransformer
|
||||
462
libraries/overleaf-editor-core/lib/operation/index.js
Normal file
462
libraries/overleaf-editor-core/lib/operation/index.js
Normal file
@@ -0,0 +1,462 @@
|
||||
'use strict'
|
||||
|
||||
const _ = require('lodash')
|
||||
const assert = require('check-types').assert
|
||||
const EditOperationTransformer = require('./edit_operation_transformer')
|
||||
|
||||
// Dependencies are loaded at the bottom of the file to mitigate circular
|
||||
// dependency
|
||||
let NoOperation = null
|
||||
let AddFileOperation = null
|
||||
let MoveFileOperation = null
|
||||
let EditFileOperation = null
|
||||
let SetFileMetadataOperation = null
|
||||
|
||||
/**
|
||||
* @import { BlobStore } from "../types"
|
||||
* @import Snapshot from "../snapshot"
|
||||
*/
|
||||
|
||||
/**
 * An `Operation` changes a `Snapshot` when it is applied. See the
 * {@tutorial OT} tutorial for background.
 *
 * This is the abstract base class; concrete behaviour lives in the
 * subclasses (AddFileOperation, EditFileOperation, MoveFileOperation,
 * SetFileMetadataOperation, NoOperation), which are loaded lazily at the
 * bottom of this file to break a circular dependency.
 */
class Operation {
  /**
   * Deserialize an Operation.
   *
   * Dispatches on which discriminating key is present in the raw object;
   * an entirely empty object deserializes to a NoOperation.
   *
   * @param {Object} raw
   * @return {Operation} one of the subclasses
   */
  static fromRaw(raw) {
    // 'file' identifies an AddFileOperation.
    if ('file' in raw) {
      return AddFileOperation.fromRaw(raw)
    }
    // Any of these keys identifies a wrapped edit operation (text or
    // comment edit) on a single file.
    if (
      'textOperation' in raw ||
      'commentId' in raw ||
      'deleteComment' in raw
    ) {
      return EditFileOperation.fromRaw(raw)
    }
    if ('newPathname' in raw) {
      return new MoveFileOperation(raw.pathname, raw.newPathname)
    }
    if ('metadata' in raw) {
      return new SetFileMetadataOperation(raw.pathname, raw.metadata)
    }
    if (_.isEmpty(raw)) {
      return new NoOperation()
    }
    throw new Error('invalid raw operation ' + JSON.stringify(raw))
  }

  /**
   * Serialize an Operation.
   *
   * Base class serializes to an empty object (a raw NoOperation);
   * subclasses override.
   *
   * @return {Object}
   */
  toRaw() {
    return {}
  }

  /**
   * Whether this operation does nothing when applied.
   *
   * @return {Boolean}
   */
  isNoOp() {
    return false
  }

  /**
   * If this Operation references blob hashes, add them to the given Set.
   *
   * Base class references none; subclasses that hold blobs override.
   *
   * @param {Set.<String>} blobHashes
   */
  findBlobHashes(blobHashes) {}

  /**
   * If this operation references any files, load the files.
   *
   * @param {string} kind see {File#load}
   * @param {BlobStore} blobStore
   * @return {Promise<void>}
   */
  async loadFiles(kind, blobStore) {}

  /**
   * Return a version of this operation that is suitable for long term storage.
   * In most cases, we just need to convert the operation to raw form, but if
   * the operation involves File objects, we may need to store their content.
   *
   * @param {BlobStore} blobStore
   * @return {Promise.<Object>}
   */
  async store(blobStore) {
    return this.toRaw()
  }

  /**
   * Apply this Operation to a snapshot.
   *
   * The snapshot is modified in place.
   *
   * Base class only validates the argument; subclasses perform the change.
   *
   * @param {Snapshot} snapshot
   */
  applyTo(snapshot) {
    assert.object(snapshot, 'bad snapshot')
  }

  /**
   * Whether this operation can be composed with another operation to produce a
   * single operation of the same type as this one, while keeping the composed
   * operation small and logical enough to be used in the undo stack.
   *
   * @param {Operation} other
   * @return {Boolean}
   */
  canBeComposedWithForUndo(other) {
    return false
  }

  /**
   * Whether this operation can be composed with another operation to produce a
   * single operation of the same type as this one.
   *
   * TODO Moves can be composed. For example, if you rename a to b and then decide
   * shortly after that actually you want to call it c, we could compose the two
   * to get a -> c). Edits can also be composed --- see rules in TextOperation.
   * We also need to consider the Change --- we will need to consider both time
   * and author(s) when composing changes. I guess that AddFile can also be
   * composed in some cases --- if you upload a file and then decide it was the
   * wrong one and upload a new one, we could drop the one in the middle, but
   * that seems like a pretty rare case.
   *
   * @param {Operation} other
   * @return {Boolean}
   */
  canBeComposedWith(other) {
    return false
  }

  /**
   * Compose this operation with another operation to produce a single operation
   * of the same type as this one.
   *
   * @param {Operation} other
   * @return {Operation}
   */
  compose(other) {
    throw new Error('not implemented')
  }

  /**
   * Transform takes two operations A and B that happened concurrently and
   * produces two operations A' and B' (in an array) such that
   * `apply(apply(S, A), B') = apply(apply(S, B), A')`.
   *
   * That is, if one client applies A and then B', they get the same result as
   * another client who applies B and then A'.
   *
   * @param {Operation} a
   * @param {Operation} b
   * @return {Operation[]} operations `[a', b']`
   */
  static transform(a, b) {
    if (a.isNoOp() || b.isNoOp()) return [a, b]

    // Helper for pair types whose transform is only written one way round:
    // run the transformer with swapped arguments and swap the results back.
    function transpose(transformer) {
      return transformer(b, a).reverse()
    }

    const bIsAddFile = b instanceof AddFileOperation
    const bIsEditFile = b instanceof EditFileOperation
    const bIsMoveFile = b instanceof MoveFileOperation
    const bIsSetFileMetadata = b instanceof SetFileMetadataOperation

    // Dispatch over the 4x4 grid of concrete operation types; each pairwise
    // transform is implemented by a helper function below the class.
    if (a instanceof AddFileOperation) {
      if (bIsAddFile) return transformAddFileAddFile(a, b)
      if (bIsMoveFile) return transformAddFileMoveFile(a, b)
      if (bIsEditFile) return transformAddFileEditFile(a, b)
      if (bIsSetFileMetadata) return transformAddFileSetFileMetadata(a, b)
      throw new Error('bad op b')
    }
    if (a instanceof MoveFileOperation) {
      if (bIsAddFile) return transpose(transformAddFileMoveFile)
      if (bIsMoveFile) return transformMoveFileMoveFile(a, b)
      if (bIsEditFile) return transformMoveFileEditFile(a, b)
      if (bIsSetFileMetadata) return transformMoveFileSetFileMetadata(a, b)
      throw new Error('bad op b')
    }
    if (a instanceof EditFileOperation) {
      if (bIsAddFile) return transpose(transformAddFileEditFile)
      if (bIsMoveFile) return transpose(transformMoveFileEditFile)
      if (bIsEditFile) return transformEditFileEditFile(a, b)
      if (bIsSetFileMetadata) return transformEditFileSetFileMetadata(a, b)
      throw new Error('bad op b')
    }
    if (a instanceof SetFileMetadataOperation) {
      if (bIsAddFile) return transpose(transformAddFileSetFileMetadata)
      if (bIsMoveFile) return transpose(transformMoveFileSetFileMetadata)
      if (bIsEditFile) return transpose(transformEditFileSetFileMetadata)
      if (bIsSetFileMetadata) return transformSetFileMetadatas(a, b)
      throw new Error('bad op b')
    }
    throw new Error('bad op a')
  }

  /**
   * Transform each operation in `a` by each operation in `b` and save the primed
   * operations in place.
   *
   * @param {Array.<Operation>} as - modified in place
   * @param {Array.<Operation>} bs - modified in place
   */
  static transformMultiple(as, bs) {
    for (let i = 0; i < as.length; ++i) {
      for (let j = 0; j < bs.length; ++j) {
        const primes = Operation.transform(as[i], bs[j])
        as[i] = primes[0]
        bs[j] = primes[1]
      }
    }
  }

  // Convenience factories for the concrete operation subclasses.

  static addFile(pathname, file) {
    return new AddFileOperation(pathname, file)
  }

  static editFile(pathname, editOperation) {
    return new EditFileOperation(pathname, editOperation)
  }

  static moveFile(pathname, newPathname) {
    return new MoveFileOperation(pathname, newPathname)
  }

  // A remove is represented as a move to the empty pathname.
  static removeFile(pathname) {
    return new MoveFileOperation(pathname, '')
  }

  static setFileMetadata(pathname, metadata) {
    return new SetFileMetadataOperation(pathname, metadata)
  }
}
|
||||
|
||||
//
|
||||
// Transform
|
||||
//
|
||||
// The way to read these transform functions is that
|
||||
// 1. return_value[0] is the op to be applied after arguments[1], and
|
||||
// 2. return_value[1] is the op to be applied after arguments[0],
|
||||
// in order to arrive at the same project state.
|
||||
//
|
||||
|
||||
// Concurrent adds of the same pathname conflict; by convention the second
// add wins and the first becomes a no-op.
function transformAddFileAddFile(add1, add2) {
  const samePath = add1.getPathname() === add2.getPathname()
  if (!samePath) {
    return [add1, add2]
  }
  return [Operation.NO_OP, add2] // add2 wins
}
|
||||
|
||||
// Transform a concurrent AddFile and MoveFile.
function transformAddFileMoveFile(add, move) {
  // The same add, re-targeted at the move's destination (content cloned).
  const relocateAddFile = () =>
    new AddFileOperation(move.getNewPathname(), add.getFile().clone())

  const addPath = add.getPathname()

  if (addPath === move.getPathname()) {
    if (move.isRemoveFile()) {
      // The added file replaces the removed one; the remove becomes moot.
      return [add, Operation.NO_OP]
    }
    // The added file follows the move to its new location.
    return [
      relocateAddFile(),
      new MoveFileOperation(addPath, move.getNewPathname()),
    ]
  }

  if (addPath === move.getNewPathname()) {
    // The add lands on the move's destination: keep the add there and turn
    // the move into a removal of its source.
    return [relocateAddFile(), new MoveFileOperation(move.getPathname(), '')]
  }

  return [add, move]
}
|
||||
|
||||
// An add and an edit of the same pathname: the add wins, the edit is dropped.
function transformAddFileEditFile(add, edit) {
  const conflict = add.getPathname() === edit.getPathname()
  if (conflict) {
    return [add, Operation.NO_OP] // the add wins
  }
  return [add, edit]
}
|
||||
|
||||
// An add and a metadata update on the same pathname: fold the metadata into
// the added file so both orderings converge on the same state.
function transformAddFileSetFileMetadata(add, set) {
  if (add.getPathname() !== set.getPathname()) {
    return [add, set]
  }
  const fileWithMetadata = add.getFile().clone()
  fileWithMetadata.setMetadata(set.getMetadata())
  return [new AddFileOperation(add.getPathname(), fileWithMetadata), set]
}
|
||||
|
||||
//
|
||||
// This is one of the trickier ones. There are 15 possible equivalence
|
||||
// relationships between our four variables:
|
||||
//
|
||||
// path1, newPath1, path2, newPath2 --- "same move" (all equal)
|
||||
//
|
||||
// path1, newPath1, path2 | newPath2 --- "no-ops" (1)
|
||||
// path1, newPath1, newPath2 | path2 --- "no-ops" (1)
|
||||
// path1, path2, newPath2 | newPath1 --- "no-ops" (2)
|
||||
// newPath1, path2, newPath2 | path1 --- "no-ops" (2)
|
||||
//
|
||||
// path1, newPath1 | path2, newPath2 --- "no-ops" (1 and 2)
|
||||
// path1, path2 | newPath1, newPath2 --- "same move"
|
||||
// path1, newPath2 | newPath1, path2 --- "opposite moves"
|
||||
//
|
||||
// path1, newPath1 | path2 | newPath2 --- "no-ops" (1)
|
||||
// path1, path2 | newPath1 | newPath2 --- "divergent moves"
|
||||
// path1, newPath2 | newPath1 | path2 --- "transitive move"
|
||||
// newPath1, path2 | path1 | newPath2 --- "transitive move"
|
||||
// newPath1, newPath2 | path1 | path2 --- "convergent move"
|
||||
// path2, newPath2 | path1 | newPath1 --- "no-ops" (2)
|
||||
//
|
||||
// path1 | newPath1 | path2 | newPath2 --- "no conflict"
|
||||
//
|
||||
// Transform two concurrent moves. See the 15-case equivalence analysis in
// the comment above; the convention throughout is that move2 wins ties.
function transformMoveFileMoveFile(move1, move2) {
  const path1 = move1.getPathname()
  const path2 = move2.getPathname()
  const newPath1 = move1.getNewPathname()
  const newPath2 = move2.getNewPathname()

  // the same move
  if (path1 === path2 && newPath1 === newPath2) {
    return [Operation.NO_OP, Operation.NO_OP]
  }

  // no-ops: a move whose source equals its destination changes nothing,
  // so the other move passes through unchanged.
  if (path1 === newPath1 && path2 === newPath2) {
    return [Operation.NO_OP, Operation.NO_OP]
  }
  if (path1 === newPath1) {
    return [Operation.NO_OP, move2]
  }
  if (path2 === newPath2) {
    return [move1, Operation.NO_OP]
  }

  // opposite moves (foo -> bar, bar -> foo)
  if (path1 === newPath2 && path2 === newPath1) {
    // We can't handle this very well: if we wanted move2 (say) to win, move2'
    // would have to be addFile(foo) with the content of bar, but we don't have
    // the content of bar available here. So, we just destroy both files.
    return [Operation.removeFile(path1), Operation.removeFile(path2)]
  }

  // divergent moves (foo -> bar, foo -> baz); convention: move2 wins
  if (path1 === path2 && newPath1 !== newPath2) {
    return [Operation.NO_OP, Operation.moveFile(newPath1, newPath2)]
  }

  // convergent move (foo -> baz, bar -> baz); convention: move2 wins
  if (newPath1 === newPath2 && path1 !== path2) {
    return [Operation.removeFile(path1), move2]
  }

  // transitive move:
  // 1: foo -> baz, 2: bar -> foo (result: bar -> baz) or
  // 1: foo -> bar, 2: bar -> baz (result: foo -> baz)
  if (path1 === newPath2 && newPath1 !== path2) {
    return [
      Operation.moveFile(newPath2, newPath1),
      Operation.moveFile(path2, newPath1),
    ]
  }
  if (newPath1 === path2 && path1 !== newPath2) {
    return [
      Operation.moveFile(path1, newPath2),
      Operation.moveFile(newPath1, newPath2),
    ]
  }

  // no conflict: the four paths are all distinct.
  return [move1, move2]
}
|
||||
|
||||
// Transform a concurrent MoveFile and EditFile.
function transformMoveFileEditFile(move, edit) {
  const editPath = edit.getPathname()

  if (move.getPathname() === editPath) {
    if (move.isRemoveFile()) {
      // let the remove win; the edit is dropped
      return [move, Operation.NO_OP]
    }
    // The edit follows the file to its new location.
    return [
      move,
      Operation.editFile(move.getNewPathname(), edit.getOperation()),
    ]
  }

  if (move.getNewPathname() === editPath) {
    // The move overwrote the edited file — let the move win.
    return [move, Operation.NO_OP]
  }

  return [move, edit]
}
|
||||
|
||||
// Transform a concurrent MoveFile and SetFileMetadata.
function transformMoveFileSetFileMetadata(move, set) {
  const setPath = set.getPathname()

  if (move.getPathname() === setPath) {
    // The metadata update follows the file to its new location.
    return [
      move,
      Operation.setFileMetadata(move.getNewPathname(), set.getMetadata()),
    ]
  }

  // A: mv foo -> bar
  // B: set bar.x
  //
  // A': mv foo -> bar
  // B': nothing
  if (move.getNewPathname() === setPath) {
    return [move, Operation.NO_OP] // let the move win
  }

  return [move, set]
}
|
||||
|
||||
// Transform two concurrent edits. Edits to different files never conflict;
// edits to the same file are delegated to the edit-operation transformer.
function transformEditFileEditFile(edit1, edit2) {
  if (edit1.getPathname() !== edit2.getPathname()) {
    return [edit1, edit2]
  }

  const [prime1, prime2] = EditOperationTransformer.transform(
    edit1.getOperation(),
    edit2.getOperation()
  )
  return [
    Operation.editFile(edit1.getPathname(), prime1),
    Operation.editFile(edit2.getPathname(), prime2),
  ]
}
|
||||
|
||||
// Editing content and setting metadata never interact: no conflict.
function transformEditFileSetFileMetadata(edit, set) {
  const unchanged = [edit, set]
  return unchanged
}
|
||||
|
||||
// Concurrent metadata updates on the same pathname: the second one wins.
function transformSetFileMetadatas(set1, set2) {
  const samePath = set1.getPathname() === set2.getPathname()
  if (samePath) {
    return [Operation.NO_OP, set2] // set2 wins
  }
  return [set1, set2]
}
|
||||
|
||||
module.exports = Operation
|
||||
|
||||
// Work around circular import
|
||||
NoOperation = require('./no_operation')
|
||||
AddFileOperation = require('./add_file_operation')
|
||||
MoveFileOperation = require('./move_file_operation')
|
||||
EditFileOperation = require('./edit_file_operation')
|
||||
SetFileMetadataOperation = require('./set_file_metadata_operation')
|
||||
|
||||
Operation.NO_OP = new NoOperation()
|
||||
@@ -0,0 +1,54 @@
|
||||
'use strict'
|
||||
|
||||
const Operation = require('./')
|
||||
|
||||
/**
|
||||
* Moves or removes a file from a project.
|
||||
*/
|
||||
class MoveFileOperation extends Operation {
  /**
   * @param {string} pathname - current path of the file
   * @param {string} newPathname - destination path; '' encodes a removal
   */
  constructor(pathname, newPathname) {
    super()
    this.pathname = pathname
    this.newPathname = newPathname
  }

  /**
   * @inheritdoc
   */
  toRaw() {
    const { pathname, newPathname } = this
    return { pathname, newPathname }
  }

  /** @return {string} the source path of this move */
  getPathname() {
    return this.pathname
  }

  /** @return {string} the destination path of this move */
  getNewPathname() {
    return this.newPathname
  }

  /**
   * Whether this operation is a MoveFile operation that deletes the file.
   *
   * A move to the empty pathname is the representation of a removal.
   *
   * @return {boolean}
   */
  isRemoveFile() {
    return this.getNewPathname() === ''
  }

  /**
   * @inheritdoc
   */
  applyTo(snapshot) {
    // The snapshot handles both a plain rename and a removal ('' target).
    snapshot.moveFile(this.getPathname(), this.getNewPathname())
  }
}
|
||||
|
||||
module.exports = MoveFileOperation
|
||||
20
libraries/overleaf-editor-core/lib/operation/no_operation.js
Normal file
20
libraries/overleaf-editor-core/lib/operation/no_operation.js
Normal file
@@ -0,0 +1,20 @@
|
||||
'use strict'
|
||||
|
||||
const Operation = require('./')
|
||||
|
||||
/**
|
||||
* An explicit no-operation.
|
||||
*
|
||||
* There are several no-ops, such as moving a file to itself, but it's useful
|
||||
* to have a generic no-op as well.
|
||||
*/
|
||||
class NoOperation extends Operation {
  /**
   * @inheritdoc
   *
   * Always true: applying a NoOperation leaves the snapshot unchanged.
   */
  isNoOp() {
    return true
  }
}
|
||||
|
||||
module.exports = NoOperation
|
||||
457
libraries/overleaf-editor-core/lib/operation/scan_op.js
Normal file
457
libraries/overleaf-editor-core/lib/operation/scan_op.js
Normal file
@@ -0,0 +1,457 @@
|
||||
// @ts-check
|
||||
const { containsNonBmpChars } = require('../util')
|
||||
const {
|
||||
ApplyError,
|
||||
InvalidInsertionError,
|
||||
UnprocessableError,
|
||||
} = require('../errors')
|
||||
const ClearTrackingProps = require('../file_data/clear_tracking_props')
|
||||
const TrackingProps = require('../file_data/tracking_props')
|
||||
|
||||
/**
|
||||
* @import { RawScanOp, RawInsertOp, RawRetainOp, RawRemoveOp, TrackingDirective } from '../types'
|
||||
*
|
||||
* @typedef {{ length: number, inputCursor: number, readonly inputLength: number}} LengthApplyContext
|
||||
*/
|
||||
|
||||
/**
 * Abstract base class for the primitive components of a text operation
 * scan: insertions, retains and removals.
 */
class ScanOp {
  constructor() {
    // Guard against direct instantiation of the abstract base class.
    if (this.constructor === ScanOp) {
      throw new Error('Cannot instantiate abstract class')
    }
  }

  /**
   * Applies an operation to a length
   * @param {LengthApplyContext} current
   * @returns {LengthApplyContext}
   */
  applyToLength(current) {
    throw new Error('abstract method')
  }

  /**
   * @returns {RawScanOp}
   */
  toJSON() {
    throw new Error('abstract method')
  }

  /**
   * Deserialize a raw scan op into the matching concrete subclass.
   * @param {RawScanOp} raw
   * @returns {ScanOp}
   * @throws {UnprocessableError} if the raw form matches no known op
   */
  static fromJSON(raw) {
    if (isRetain(raw)) {
      return RetainOp.fromJSON(raw)
    } else if (isInsert(raw)) {
      return InsertOp.fromJSON(raw)
    } else if (isRemove(raw)) {
      return RemoveOp.fromJSON(raw)
    }
    throw new UnprocessableError(`Invalid ScanOp ${JSON.stringify(raw)}`)
  }

  /**
   * Tests whether two ScanOps are equal
   * @param {ScanOp} _other
   * @returns {boolean}
   */
  equals(_other) {
    return false
  }

  /**
   * Tests whether two ScanOps can be merged into a single operation
   * @param {ScanOp} _other
   * @returns {boolean}
   */
  canMergeWith(_other) {
    return false
  }

  /**
   * Merge two ScanOps into a single operation
   * @param {ScanOp} _other
   * @returns {void}
   */
  mergeWith(_other) {
    throw new Error('abstract method')
  }

  /**
   * @returns {string}
   */
  toString() {
    // Fix: the string literal was previously a bare expression statement,
    // so the method implicitly returned undefined.
    return 'ScanOp'
  }
}
|
||||
|
||||
/**
 * A scan op that inserts a string, optionally carrying tracked-change
 * information and the ids of comments covering the inserted text.
 */
class InsertOp extends ScanOp {
  /**
   *
   * @param {string} insertion
   * @param {TrackingProps | undefined} tracking
   * @param {string[] | undefined} commentIds
   * @throws {InvalidInsertionError} if the insertion is not a string or
   *   contains characters outside the Basic Multilingual Plane
   */
  constructor(insertion, tracking = undefined, commentIds = undefined) {
    super()
    if (typeof insertion !== 'string') {
      throw new InvalidInsertionError('insertion must be a string')
    }
    // Astral (non-BMP) characters are rejected — presumably because
    // downstream length accounting is in UTF-16 code units; confirm
    // against TextOperation before relaxing this.
    if (containsNonBmpChars(insertion)) {
      throw new InvalidInsertionError('insertion contains non-BMP characters')
    }
    /** @type {string} */
    this.insertion = insertion
    /** @type {TrackingProps | undefined} */
    this.tracking = tracking
    /** @type {string[] | undefined} */
    this.commentIds = commentIds
  }

  /**
   * Deserialize a raw insert op: either a plain string or an object with
   * an 'i' property plus optional tracking/commentIds.
   * @param {RawInsertOp} op
   * @returns {InsertOp}
   */
  static fromJSON(op) {
    if (typeof op === 'string') {
      return new InsertOp(op)
    }
    // It must be an object with an 'i' property.
    if (typeof op.i !== 'string') {
      throw new InvalidInsertionError(
        'insert operation must have a string property'
      )
    }
    return new InsertOp(
      op.i,
      op.tracking && TrackingProps.fromRaw(op.tracking),
      op.commentIds
    )
  }

  /**
   * @inheritdoc
   * An insertion grows the output but consumes no input characters.
   * @param {LengthApplyContext} current
   * @returns {LengthApplyContext}
   */
  applyToLength(current) {
    current.length += this.insertion.length
    return current
  }

  /** @inheritdoc
   * @param {ScanOp} other
   */
  equals(other) {
    if (!(other instanceof InsertOp)) {
      return false
    }
    if (this.insertion !== other.insertion) {
      return false
    }
    if (this.tracking) {
      if (!this.tracking.equals(other.tracking)) {
        return false
      }
    } else if (other.tracking) {
      return false
    }

    // commentIds are compared as unordered sets of the same size.
    if (this.commentIds) {
      return (
        this.commentIds.length === other.commentIds?.length &&
        this.commentIds.every(id => other.commentIds?.includes(id))
      )
    }
    return !other.commentIds
  }

  /**
   * Two inserts can merge when their tracking info and comment ids match;
   * the inserted text itself is simply concatenated by mergeWith.
   * @param {ScanOp} other
   * @return {other is InsertOp}
   */
  canMergeWith(other) {
    if (!(other instanceof InsertOp)) {
      return false
    }
    if (this.tracking) {
      if (!this.tracking.equals(other.tracking)) {
        return false
      }
    } else if (other.tracking) {
      return false
    }
    if (this.commentIds) {
      return (
        this.commentIds.length === other.commentIds?.length &&
        this.commentIds.every(id => other.commentIds?.includes(id))
      )
    }
    return !other.commentIds
  }

  /**
   * Append the other insert's text to this one (in place).
   * @param {ScanOp} other
   */
  mergeWith(other) {
    if (!this.canMergeWith(other)) {
      throw new Error('Cannot merge with incompatible operation')
    }
    this.insertion += other.insertion
    // We already have the same tracking info and commentIds
  }

  /**
   * Serialize: a bare string when there is no tracking/comment metadata,
   * otherwise the object form.
   * @returns {RawInsertOp}
   */
  toJSON() {
    if (!this.tracking && !this.commentIds) {
      return this.insertion
    }
    /** @type RawInsertOp */
    const obj = { i: this.insertion }
    if (this.tracking) {
      obj.tracking = this.tracking.toRaw()
    }
    if (this.commentIds) {
      obj.commentIds = this.commentIds
    }
    return obj
  }

  toString() {
    return `insert '${this.insertion}'`
  }
}
|
||||
|
||||
/**
 * A scan op that keeps `length` characters of the input unchanged,
 * optionally applying a tracked-change directive to the retained range.
 */
class RetainOp extends ScanOp {
  /**
   * @param {number} length
   * @param {TrackingDirective | undefined} tracking
   */
  constructor(length, tracking = undefined) {
    super()
    if (length < 0) {
      throw new Error('length must be non-negative')
    }
    /** @type {number} */
    this.length = length
    /** @type {TrackingDirective | undefined} */
    this.tracking = tracking
  }

  /**
   * @inheritdoc
   * A retain consumes and emits the same number of characters; it fails if
   * it would run past the end of the input.
   * @param {LengthApplyContext} current
   * @returns {LengthApplyContext}
   */
  applyToLength(current) {
    if (current.inputCursor + this.length > current.inputLength) {
      throw new ApplyError(
        "Operation can't retain more chars than are left in the string.",
        this.toJSON(),
        current.inputLength
      )
    }
    current.length += this.length
    current.inputCursor += this.length
    return current
  }

  /**
   * Deserialize a raw retain op: either a positive number or an object
   * with an 'r' property plus optional tracking.
   * @param {RawRetainOp} op
   * @returns {RetainOp}
   */
  static fromJSON(op) {
    if (typeof op === 'number') {
      return new RetainOp(op)
    }
    // It must be an object with a 'r' property.
    if (typeof op.r !== 'number') {
      throw new Error('retain operation must have a number property')
    }
    if (op.tracking) {
      // tracking type 'none' is the directive that clears tracking info.
      const tracking =
        op.tracking.type === 'none'
          ? new ClearTrackingProps()
          : TrackingProps.fromRaw(op.tracking)
      return new RetainOp(op.r, tracking)
    }
    return new RetainOp(op.r)
  }

  /** @inheritdoc
   * @param {ScanOp} other
   */
  equals(other) {
    if (!(other instanceof RetainOp)) {
      return false
    }
    if (this.length !== other.length) {
      return false
    }
    if (this.tracking) {
      return this.tracking.equals(other.tracking)
    }
    return !other.tracking
  }

  /**
   * Two retains can merge when their tracking directives match; the
   * lengths are simply summed by mergeWith.
   * @param {ScanOp} other
   * @return {other is RetainOp}
   */
  canMergeWith(other) {
    if (!(other instanceof RetainOp)) {
      return false
    }
    if (this.tracking) {
      return this.tracking.equals(other.tracking)
    }
    return !other.tracking
  }

  /**
   * Extend this retain by the other's length (in place).
   * @param {ScanOp} other
   */
  mergeWith(other) {
    if (!this.canMergeWith(other)) {
      throw new Error('Cannot merge with incompatible operation')
    }
    this.length += other.length
  }

  /**
   * Serialize: a bare number when there is no tracking, otherwise the
   * object form.
   * @returns {RawRetainOp}
   */
  toJSON() {
    if (!this.tracking) {
      return this.length
    }
    return { r: this.length, tracking: this.tracking.toRaw() }
  }

  toString() {
    return `retain ${this.length}`
  }
}
|
||||
|
||||
/**
 * A scan op that removes `length` characters from the input.
 */
class RemoveOp extends ScanOp {
  /**
   * @param {number} length - number of characters removed; must be >= 0
   */
  constructor(length) {
    super()
    if (length < 0) {
      throw new Error('length must be non-negative')
    }
    /** @type {number} */
    this.length = length
  }

  /**
   * @inheritdoc
   * Removed characters are consumed from the input but contribute nothing
   * to the output length.
   * @param {LengthApplyContext} current
   * @returns {LengthApplyContext}
   */
  applyToLength(current) {
    current.inputCursor += this.length
    return current
  }

  /**
   * Deserialize a raw remove op, which is encoded as a non-positive number.
   * @param {RawRemoveOp} op
   * @returns {RemoveOp}
   */
  static fromJSON(op) {
    if (typeof op === 'number' && op <= 0) {
      return new RemoveOp(-op)
    }
    throw new Error('delete operation must be a negative number')
  }

  /**
   * @inheritdoc
   * @param {ScanOp} other
   * @return {boolean}
   */
  equals(other) {
    return other instanceof RemoveOp && this.length === other.length
  }

  /**
   * Any two removes can be merged.
   * @param {ScanOp} other
   * @return {other is RemoveOp}
   */
  canMergeWith(other) {
    return other instanceof RemoveOp
  }

  /**
   * Extend this remove by the other's length (in place).
   * @param {ScanOp} other
   */
  mergeWith(other) {
    if (!this.canMergeWith(other)) {
      throw new Error('Cannot merge with incompatible operation')
    }
    this.length += other.length
  }

  /**
   * @returns {RawRemoveOp} the negated length
   */
  toJSON() {
    return -this.length
  }

  toString() {
    return `remove ${this.length}`
  }
}
|
||||
|
||||
/**
 * Type guard for a raw retain op: a positive number, or an object whose
 * `r` property is a positive number.
 * @param {RawScanOp} op
 * @returns {op is RawRetainOp}
 */
function isRetain(op) {
  if (typeof op === 'number') {
    return op > 0
  }
  return (
    typeof op === 'object' && 'r' in op && typeof op.r === 'number' && op.r > 0
  )
}
|
||||
|
||||
/**
 * Type guard for the raw encoding of an insert op: either a plain
 * string or an object `{i: s}` with a string `s`.
 * @param {RawScanOp} op
 * @returns {op is RawInsertOp}
 */
function isInsert(op) {
  if (typeof op === 'string') {
    return true
  }
  return typeof op === 'object' && 'i' in op && typeof op.i === 'string'
}
|
||||
|
||||
/**
 * Type guard for the raw encoding of a remove op: a negative number.
 * @param {RawScanOp} op
 * @returns {op is RawRemoveOp}
 */
function isRemove(op) {
  if (typeof op !== 'number') {
    return false
  }
  return op < 0
}
|
||||
|
||||
// Public API of the scan-op module: the abstract base class, the three
// concrete op types (insert / retain / remove), and the type guards for
// their raw (JSON) encodings.
module.exports = {
  ScanOp,
  InsertOp,
  RetainOp,
  RemoveOp,
  isRetain,
  isInsert,
  isRemove,
}
|
||||
@@ -0,0 +1,112 @@
|
||||
// @ts-check
|
||||
const core = require('../../index')
|
||||
const Comment = require('../comment')
|
||||
const EditNoOperation = require('./edit_no_operation')
|
||||
const EditOperation = require('./edit_operation')
|
||||
|
||||
/**
|
||||
* @import DeleteCommentOperation from './delete_comment_operation'
|
||||
* @import { CommentRawData } from '../types'
|
||||
* @import { RawSetCommentStateOperation } from '../types'
|
||||
* @import StringFileData from '../file_data/string_file_data'
|
||||
*/
|
||||
|
||||
/**
 * An edit operation that sets the resolved/unresolved state of a comment.
 * @extends EditOperation
 */
class SetCommentStateOperation extends EditOperation {
  /**
   * @param {string} commentId id of the comment to update
   * @param {boolean} resolved new resolved state
   */
  constructor(commentId, resolved) {
    super()
    this.commentId = commentId
    this.resolved = resolved
  }

  /**
   * @returns {RawSetCommentStateOperation}
   */
  toJSON() {
    return {
      resolved: this.resolved,
      commentId: this.commentId,
    }
  }

  /**
   * Update the resolved state of the target comment, if it exists.
   * Missing comments are silently ignored.
   * @param {StringFileData} fileData
   */
  apply(fileData) {
    const existing = fileData.comments.getComment(this.commentId)
    if (!existing) {
      return
    }
    // Replace the stored comment with a copy carrying the new state;
    // `add` overwrites the entry with the same id.
    const updated = new Comment(existing.id, existing.ranges, this.resolved)
    fileData.comments.add(updated)
  }

  /**
   * Build the operation that restores the comment's previous state.
   * @param {StringFileData} previousState
   * @returns {SetCommentStateOperation | EditNoOperation}
   */
  invert(previousState) {
    const previous = previousState.comments.getComment(this.commentId)
    return previous
      ? new SetCommentStateOperation(this.commentId, previous.resolved)
      : new EditNoOperation()
  }

  /**
   * @inheritdoc
   * @param {EditOperation} other
   * @returns {boolean}
   */
  canBeComposedWith(other) {
    const composableType =
      other instanceof SetCommentStateOperation ||
      other instanceof core.DeleteCommentOperation
    return composableType && this.commentId === other.commentId
  }

  /**
   * @inheritdoc
   * @param {EditOperation} other
   * @returns {SetCommentStateOperation | core.DeleteCommentOperation}
   */
  compose(other) {
    // A later state change or a deletion of the same comment supersedes
    // this operation entirely.
    const supersedes =
      (other instanceof SetCommentStateOperation ||
        other instanceof core.DeleteCommentOperation) &&
      other.commentId === this.commentId
    if (supersedes) {
      return other
    }
    throw new Error(
      `Trying to compose SetCommentStateOperation with ${other?.constructor?.name}.`
    )
  }

  /**
   * @inheritdoc
   * @param {RawSetCommentStateOperation} raw
   * @returns {SetCommentStateOperation}
   */
  static fromJSON(raw) {
    return new SetCommentStateOperation(raw.commentId, raw.resolved)
  }
}
|
||||
|
||||
// Single-class module: consumed via core's operation registry.
module.exports = SetCommentStateOperation
|
||||
@@ -0,0 +1,53 @@
|
||||
'use strict'
|
||||
|
||||
const _ = require('lodash')
|
||||
const assert = require('check-types').assert
|
||||
|
||||
const Operation = require('./')
|
||||
|
||||
/**
 * Sets the metadata object on a file in a project.
 *
 * (The previous doc comment — "Moves or removes a file from a project" —
 * described a different operation; this one only updates file metadata.)
 */
class SetFileMetadataOperation extends Operation {
  /**
   * @param {string} pathname path of the file whose metadata is set
   * @param {Object} metadata metadata object to attach to the file
   */
  constructor(pathname, metadata) {
    super()
    assert.string(pathname, 'SetFileMetadataOperation: bad pathname')
    assert.object(metadata, 'SetFileMetadataOperation: bad metadata')

    this.pathname = pathname
    this.metadata = metadata
  }

  /**
   * @inheritdoc
   */
  toRaw() {
    return {
      pathname: this.pathname,
      // Deep-clone so later mutation of this operation's metadata does not
      // leak into the raw representation (and vice versa).
      metadata: _.cloneDeep(this.metadata),
    }
  }

  getPathname() {
    return this.pathname
  }

  getMetadata() {
    return this.metadata
  }

  /**
   * @inheritdoc
   * Applies the metadata to the file at `pathname`; a missing file is a
   * no-op (the operation may target a file removed by a concurrent change).
   */
  applyTo(snapshot) {
    const file = snapshot.getFile(this.pathname)
    if (!file) {
      return
    }
    file.setMetadata(this.metadata)
  }
}
|
||||
|
||||
// Single-class module.
module.exports = SetFileMetadataOperation
|
||||
929
libraries/overleaf-editor-core/lib/operation/text_operation.js
Normal file
929
libraries/overleaf-editor-core/lib/operation/text_operation.js
Normal file
@@ -0,0 +1,929 @@
|
||||
// @ts-check
|
||||
/**
|
||||
* The text operation from OT.js with some minor cosmetic changes.
|
||||
*
|
||||
* Specifically, this is based on
|
||||
* https://github.com/Operational-Transformation/ot.js/
|
||||
* blob/298825f58fb51fefb352e7df5ddbc668f4d5646f/lib/text-operation.js
|
||||
* from 18 Mar 2013.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
const containsNonBmpChars = require('../util').containsNonBmpChars
|
||||
const EditOperation = require('./edit_operation')
|
||||
const {
|
||||
RetainOp,
|
||||
InsertOp,
|
||||
RemoveOp,
|
||||
isRetain,
|
||||
isInsert,
|
||||
isRemove,
|
||||
} = require('./scan_op')
|
||||
const {
|
||||
UnprocessableError,
|
||||
ApplyError,
|
||||
InvalidInsertionError,
|
||||
TooLongError,
|
||||
} = require('../errors')
|
||||
const Range = require('../range')
|
||||
const ClearTrackingProps = require('../file_data/clear_tracking_props')
|
||||
const TrackingProps = require('../file_data/tracking_props')
|
||||
|
||||
/**
|
||||
* @import StringFileData from '../file_data/string_file_data'
|
||||
* @import { RawTextOperation, TrackingDirective } from '../types'
|
||||
* @import { ScanOp } from '../operation/scan_op'
|
||||
* @import TrackedChangeList from '../file_data/tracked_change_list'
|
||||
*
|
||||
* @typedef {{tracking?: TrackingProps, commentIds?: string[]}} InsertOptions
|
||||
*/
|
||||
|
||||
/**
 * Create an empty text operation.
 *
 * A TextOperation is an ordered list of scan ops (retain / insert / remove)
 * that, applied left-to-right, rewrites an entire document string. It also
 * carries tracked-changes and comment bookkeeping through apply/invert.
 * @extends EditOperation
 */
class TextOperation extends EditOperation {
  /**
   * Length of the longest file that we'll attempt to edit, in characters.
   *
   * @type {number}
   */
  static MAX_STRING_LENGTH = 2 * Math.pow(1024, 2)
  static UnprocessableError = UnprocessableError
  static ApplyError = ApplyError
  static InvalidInsertionError = InvalidInsertionError
  static TooLongError = TooLongError

  constructor() {
    super()

    /**
     * When an operation is applied to an input string, you can think of this as
     * if an imaginary cursor runs over the entire string and skips over some
     * parts, removes some parts and inserts characters at some positions. These
     * actions (skip/remove/insert) are stored as an array in the "ops" property.
     * @type {ScanOp[]}
     */
    this.ops = []

    /**
     * An operation's baseLength is the length of every string the operation
     * can be applied to.
     */
    this.baseLength = 0

    /**
     * The targetLength is the length of every string that results from applying
     * the operation on a valid input string.
     */
    this.targetLength = 0

    /**
     * The expected content hash after this operation is applied
     *
     * @type {string | null}
     */
    this.contentHash = null
  }

  /**
   * Structural equality: same base/target lengths and pairwise-equal ops.
   * @param {TextOperation} other
   * @return {boolean}
   */
  equals(other) {
    if (this.baseLength !== other.baseLength) {
      return false
    }
    if (this.targetLength !== other.targetLength) {
      return false
    }
    if (this.ops.length !== other.ops.length) {
      return false
    }
    for (let i = 0; i < this.ops.length; i++) {
      if (!this.ops[i].equals(other.ops[i])) {
        return false
      }
    }
    return true
  }

  // After an operation is constructed, the user of the library can specify the
  // actions of an operation (skip/insert/remove) with these three builder
  // methods. They all return the operation for convenient chaining.

  /**
   * Skip over a given number of characters.
   * @param {number | {r: number}} n
   * @param {{tracking?: TrackingDirective}} opts
   * @returns {TextOperation}
   */
  retain(n, opts = {}) {
    if (n === 0) {
      return this
    }

    if (!isRetain(n)) {
      throw new Error('retain expects an integer or a retain object')
    }
    const newOp = RetainOp.fromJSON(n)
    newOp.tracking = opts.tracking

    if (newOp.length === 0) {
      return this
    }

    // Retains advance both cursors: input is consumed and copied to output.
    this.baseLength += newOp.length
    this.targetLength += newOp.length

    const lastOperation = this.ops[this.ops.length - 1]
    if (lastOperation?.canMergeWith(newOp)) {
      // The last op is a retain op => we can merge them into one op.
      lastOperation.mergeWith(newOp)
    } else {
      // Create a new op.
      this.ops.push(newOp)
    }
    return this
  }

  /**
   * Insert a string at the current position.
   * @param {string | {i: string}} insertValue
   * @param {InsertOptions} opts
   * @returns {TextOperation}
   */
  insert(insertValue, opts = {}) {
    if (!isInsert(insertValue)) {
      throw new Error('insert expects a string or an insert object')
    }
    const newOp = InsertOp.fromJSON(insertValue)
    newOp.tracking = opts.tracking
    newOp.commentIds = opts.commentIds
    if (newOp.insertion === '') {
      return this
    }
    this.targetLength += newOp.insertion.length
    const ops = this.ops
    const lastOp = this.ops[this.ops.length - 1]
    if (lastOp?.canMergeWith(newOp)) {
      // Merge insert op.
      lastOp.mergeWith(newOp)
    } else if (lastOp instanceof RemoveOp) {
      // It doesn't matter when an operation is applied whether the operation
      // is remove(3), insert("something") or insert("something"), remove(3).
      // Here we enforce that in this case, the insert op always comes first.
      // This makes all operations that have the same effect when applied to
      // a document of the right length equal in respect to the `equals` method.
      const secondToLastOp = ops[ops.length - 2]
      if (secondToLastOp?.canMergeWith(newOp)) {
        secondToLastOp.mergeWith(newOp)
      } else {
        // Shift the trailing remove one slot right and put the insert
        // before it (the first assignment grows ops by one).
        ops[ops.length] = ops[ops.length - 1]
        ops[ops.length - 2] = newOp
      }
    } else {
      ops.push(newOp)
    }
    return this
  }

  /**
   * Remove a string at the current position.
   * @param {number | string} n
   * @returns {TextOperation}
   */
  remove(n) {
    if (typeof n === 'string') {
      n = n.length
    }
    if (typeof n !== 'number') {
      throw new Error('remove expects an integer or a string')
    }
    if (n === 0) {
      return this
    }
    if (n > 0) {
      n = -n
    }
    const newOp = RemoveOp.fromJSON(n)
    // n is negative at this point, so this actually grows baseLength.
    this.baseLength -= n
    const lastOp = this.ops[this.ops.length - 1]
    if (lastOp?.canMergeWith(newOp)) {
      lastOp.mergeWith(newOp)
    } else {
      this.ops.push(newOp)
    }
    return this
  }

  /**
   * Tests whether this operation has no effect.
   */
  isNoop() {
    // A single retain spans the whole document without changing it.
    return (
      this.ops.length === 0 ||
      (this.ops.length === 1 && this.ops[0] instanceof RetainOp)
    )
  }

  /**
   * Pretty printing.
   */
  toString() {
    return this.ops.map(op => op.toString()).join(', ')
  }

  /**
   * @inheritdoc
   * @returns {RawTextOperation}
   */
  toJSON() {
    /** @type {RawTextOperation} */
    const json = { textOperation: this.ops.map(op => op.toJSON()) }
    if (this.contentHash != null) {
      json.contentHash = this.contentHash
    }
    return json
  }

  /**
   * Converts a plain JS object into an operation and validates it.
   * @param {RawTextOperation} obj
   * @returns {TextOperation}
   */
  static fromJSON = function ({ textOperation: ops, contentHash }) {
    const o = new TextOperation()
    for (const op of ops) {
      // Rebuild via the builder methods so lengths and op merging stay
      // consistent with operations constructed programmatically.
      if (isRetain(op)) {
        const retain = RetainOp.fromJSON(op)
        o.retain(retain.length, { tracking: retain.tracking })
      } else if (isInsert(op)) {
        const insert = InsertOp.fromJSON(op)
        o.insert(insert.insertion, {
          commentIds: insert.commentIds,
          tracking: insert.tracking,
        })
      } else if (isRemove(op)) {
        const remove = RemoveOp.fromJSON(op)
        o.remove(-remove.length)
      } else {
        throw new UnprocessableError('unknown operation: ' + JSON.stringify(op))
      }
    }
    if (contentHash != null) {
      o.contentHash = contentHash
    }
    return o
  }

  /**
   * Apply an operation to a string, returning a new string. Throws an error if
   * there's a mismatch between the input string and the operation.
   * @override
   * @inheritdoc
   * @param {StringFileData} file
   */
  apply(file) {
    const str = file.getContent()
    const operation = this
    if (containsNonBmpChars(str)) {
      throw new TextOperation.ApplyError(
        'The string contains non BMP characters.',
        operation,
        str
      )
    }
    if (str.length !== operation.baseLength) {
      throw new TextOperation.ApplyError(
        "The operation's base length must be equal to the string's length.",
        operation,
        str
      )
    }

    const ops = this.ops
    let inputCursor = 0
    let result = ''
    for (const op of ops) {
      if (op instanceof RetainOp) {
        if (inputCursor + op.length > str.length) {
          throw new ApplyError(
            "Operation can't retain more chars than are left in the string.",
            op.toJSON(),
            str
          )
        }
        file.trackedChanges.applyRetain(result.length, op.length, {
          tracking: op.tracking,
        })
        result += str.slice(inputCursor, inputCursor + op.length)
        inputCursor += op.length
      } else if (op instanceof InsertOp) {
        if (containsNonBmpChars(op.insertion)) {
          throw new InvalidInsertionError(str, op.toJSON())
        }
        file.trackedChanges.applyInsert(result.length, op.insertion, {
          tracking: op.tracking,
        })
        file.comments.applyInsert(
          new Range(result.length, op.insertion.length),
          { commentIds: op.commentIds }
        )
        result += op.insertion
      } else if (op instanceof RemoveOp) {
        // Removed characters are consumed from the input but not copied to
        // the result; tracked changes and comments shift accordingly.
        file.trackedChanges.applyDelete(result.length, op.length)
        file.comments.applyDelete(new Range(result.length, op.length))
        inputCursor += op.length
      } else {
        throw new UnprocessableError('Unknown ScanOp type during apply')
      }
    }

    if (inputCursor !== str.length) {
      throw new TextOperation.ApplyError(
        "The operation didn't operate on the whole string.",
        operation,
        str
      )
    }

    if (result.length > TextOperation.MAX_STRING_LENGTH) {
      throw new TextOperation.TooLongError(operation, result.length)
    }

    file.content = result
  }

  /**
   * @inheritdoc
   * @param {number} length of the original string; non-negative
   * @return {number} length of the new string; non-negative
   */
  applyToLength(length) {
    const operation = this
    if (length !== operation.baseLength) {
      throw new TextOperation.ApplyError(
        "The operation's base length must be equal to the string's length.",
        operation,
        length
      )
    }

    // Fold each op's length effect over an accumulator that tracks the
    // output length and how much input has been consumed.
    const { length: newLength, inputCursor } = this.ops.reduce(
      (intermediate, op) => op.applyToLength(intermediate),
      { length: 0, inputCursor: 0, inputLength: length }
    )

    if (inputCursor !== length) {
      throw new TextOperation.ApplyError(
        "The operation didn't operate on the whole string.",
        operation,
        length
      )
    }
    if (newLength > TextOperation.MAX_STRING_LENGTH) {
      throw new TextOperation.TooLongError(operation, newLength)
    }
    return newLength
  }

  /**
   * @inheritdoc
   * Builds the inverse operation: retains restore prior tracking info,
   * inserts become removes, and removes become inserts of the original
   * text with its tracking/comment segments restored.
   * @param {StringFileData} previousState
   */
  invert(previousState) {
    const str = previousState.getContent()
    let strIndex = 0
    const inverse = new TextOperation()
    const ops = this.ops
    for (let i = 0, l = ops.length; i < l; i++) {
      const op = ops[i]
      if (op instanceof RetainOp) {
        // Where we need to end up after the retains
        const target = strIndex + op.length
        // A previous retain could have overriden some tracking info. Now we
        // need to restore it.
        const previousRanges = previousState.trackedChanges.inRange(
          new Range(strIndex, op.length)
        )

        let removeTrackingInfoIfNeeded
        if (op.tracking) {
          removeTrackingInfoIfNeeded = new ClearTrackingProps()
        }

        for (const trackedChange of previousRanges) {
          if (strIndex < trackedChange.range.start) {
            inverse.retain(trackedChange.range.start - strIndex, {
              tracking: removeTrackingInfoIfNeeded,
            })
            strIndex = trackedChange.range.start
          }
          if (trackedChange.range.end < strIndex + op.length) {
            inverse.retain(trackedChange.range.length, {
              tracking: trackedChange.tracking,
            })
            strIndex = trackedChange.range.end
          }
          if (trackedChange.range.end !== strIndex) {
            // No need to split the range at the end
            const [left] = trackedChange.range.splitAt(strIndex)
            inverse.retain(left.length, { tracking: trackedChange.tracking })
            strIndex = left.end
          }
        }
        if (strIndex < target) {
          inverse.retain(target - strIndex, {
            tracking: removeTrackingInfoIfNeeded,
          })
          strIndex = target
        }
      } else if (op instanceof InsertOp) {
        inverse.remove(op.insertion.length)
      } else if (op instanceof RemoveOp) {
        const segments = calculateTrackingCommentSegments(
          strIndex,
          op.length,
          previousState.comments,
          previousState.trackedChanges
        )
        for (const segment of segments) {
          inverse.insert(str.slice(strIndex, strIndex + segment.length), {
            tracking: segment.tracking,
            commentIds: segment.commentIds,
          })
          strIndex += segment.length
        }
      } else {
        throw new UnprocessableError('unknown scanop during inversion')
      }
    }
    return inverse
  }

  /**
   * @inheritdoc
   * True when the two operations form a single "simple" edit (one insert
   * run or one delete run) that undo should treat as one step.
   * @param {EditOperation} other
   */
  canBeComposedWithForUndo(other) {
    if (!(other instanceof TextOperation)) {
      return false
    }

    if (this.isNoop() || other.isNoop()) {
      return true
    }

    const startA = getStartIndex(this)
    const startB = getStartIndex(other)
    const simpleA = getSimpleOp(this)
    const simpleB = getSimpleOp(other)
    if (!simpleA || !simpleB) {
      return false
    }

    if (simpleA instanceof InsertOp && simpleB instanceof InsertOp) {
      return startA + simpleA.insertion.length === startB
    }

    if (simpleA instanceof RemoveOp && simpleB instanceof RemoveOp) {
      // there are two possibilities to delete: with backspace and with the
      // delete key.
      return startB + simpleB.length === startA || startA === startB
    }

    return false
  }

  /**
   * @inheritdoc
   * @param {EditOperation} other
   */
  canBeComposedWith(other) {
    if (!(other instanceof TextOperation)) {
      return false
    }
    return this.targetLength === other.baseLength
  }

  /**
   * @inheritdoc
   * Produce a single operation equivalent to applying this, then
   * `operation2`. Walks both op lists with a shared imaginary cursor,
   * splitting ops whenever the two sides advance by different amounts.
   * @param {EditOperation} operation2
   */
  compose(operation2) {
    if (!(operation2 instanceof TextOperation)) {
      throw new Error(
        `Trying to compose TextOperation with ${operation2?.constructor?.name}.`
      )
    }
    const operation1 = this
    if (operation1.targetLength !== operation2.baseLength) {
      throw new Error(
        'The base length of the second operation has to be the ' +
          'target length of the first operation'
      )
    }

    const operation = new TextOperation() // the combined operation
    const ops1 = operation1.ops
    const ops2 = operation2.ops // for fast access
    let i1 = 0
    let i2 = 0 // current index into ops1 respectively ops2
    let op1 = ops1[i1++]
    let op2 = ops2[i2++] // current ops
    for (;;) {
      // Dispatch on the type of op1 and op2
      if (typeof op1 === 'undefined' && typeof op2 === 'undefined') {
        // end condition: both ops1 and ops2 have been processed
        break
      }

      if (op1 instanceof RemoveOp) {
        // Text removed by the first operation is invisible to the second.
        operation.remove(-op1.length)
        op1 = ops1[i1++]
        continue
      }

      if (op2 instanceof InsertOp) {
        // Text inserted by the second operation is independent of the first.
        operation.insert(op2.insertion, {
          tracking: op2.tracking,
          commentIds: op2.commentIds,
        })
        op2 = ops2[i2++]
        continue
      }

      if (typeof op1 === 'undefined') {
        throw new Error(
          'Cannot compose operations: first operation is too short.'
        )
      }
      if (typeof op2 === 'undefined') {
        throw new Error(
          'Cannot compose operations: first operation is too long.'
        )
      }

      if (op1 instanceof RetainOp && op2 instanceof RetainOp) {
        // If both have tracking info, use the latter one. Otherwise use the
        // tracking info from the former.
        const tracking = op2.tracking ?? op1.tracking
        if (op1.length > op2.length) {
          operation.retain(op2.length, {
            tracking,
          })
          op1 = new RetainOp(op1.length - op2.length, op1.tracking)
          op2 = ops2[i2++]
        } else if (op1.length === op2.length) {
          operation.retain(op1.length, {
            tracking,
          })
          op1 = ops1[i1++]
          op2 = ops2[i2++]
        } else {
          operation.retain(op1.length, {
            tracking,
          })
          op2 = new RetainOp(op2.length - op1.length, op2.tracking)
          op1 = ops1[i1++]
        }
      } else if (op1 instanceof InsertOp && op2 instanceof RemoveOp) {
        // The second operation deletes (part of) what the first inserted:
        // the overlap cancels out entirely.
        if (op1.insertion.length > op2.length) {
          op1 = new InsertOp(
            op1.insertion.slice(op2.length),
            op1.tracking,
            op1.commentIds
          )
          op2 = ops2[i2++]
        } else if (op1.insertion.length === op2.length) {
          op1 = ops1[i1++]
          op2 = ops2[i2++]
        } else {
          op2 = RemoveOp.fromJSON(op1.insertion.length - op2.length)
          op1 = ops1[i1++]
        }
      } else if (op1 instanceof InsertOp && op2 instanceof RetainOp) {
        /** @type InsertOptions */
        const opts = {
          commentIds: op1.commentIds,
        }
        if (op2.tracking instanceof TrackingProps) {
          // Prefer the tracking info on the second operation
          opts.tracking = op2.tracking
        } else if (!(op2.tracking instanceof ClearTrackingProps)) {
          // The second operation does not cancel the first operation's tracking
          opts.tracking = op1.tracking
        }
        if (op1.insertion.length > op2.length) {
          operation.insert(op1.insertion.slice(0, op2.length), opts)
          op1 = new InsertOp(
            op1.insertion.slice(op2.length),
            op1.tracking,
            op1.commentIds
          )
          op2 = ops2[i2++]
        } else if (op1.insertion.length === op2.length) {
          operation.insert(op1.insertion, opts)
          op1 = ops1[i1++]
          op2 = ops2[i2++]
        } else {
          operation.insert(op1.insertion, opts)
          op2 = new RetainOp(op2.length - op1.insertion.length, op2.tracking)
          op1 = ops1[i1++]
        }
      } else if (op1 instanceof RetainOp && op2 instanceof RemoveOp) {
        if (op1.length > op2.length) {
          operation.remove(-op2.length)
          op1 = new RetainOp(op1.length - op2.length, op1.tracking)
          op2 = ops2[i2++]
        } else if (op1.length === op2.length) {
          operation.remove(-op2.length)
          op1 = ops1[i1++]
          op2 = ops2[i2++]
        } else {
          operation.remove(op1.length)
          op2 = RemoveOp.fromJSON(op1.length - op2.length)
          op1 = ops1[i1++]
        }
      } else {
        throw new Error(
          "This shouldn't happen: op1: " +
            JSON.stringify(op1) +
            ', op2: ' +
            JSON.stringify(op2)
        )
      }
    }
    return operation
  }

  /**
   * Transform takes two operations A and B that happened concurrently and
   * produces two operations A' and B' (in an array) such that
   * `apply(apply(S, A), B') = apply(apply(S, B), A')`. This function is the
   * heart of OT.
   * @param {TextOperation} operation1
   * @param {TextOperation} operation2
   * @returns {[TextOperation, TextOperation]}
   */
  static transform(operation1, operation2) {
    if (operation1.baseLength !== operation2.baseLength) {
      throw new Error('Both operations have to have the same base length')
    }

    const operation1prime = new TextOperation()
    const operation2prime = new TextOperation()
    const ops1 = operation1.ops
    const ops2 = operation2.ops
    let i1 = 0
    let i2 = 0
    let op1 = ops1[i1++]
    let op2 = ops2[i2++]
    for (;;) {
      // At every iteration of the loop, the imaginary cursor that both
      // operation1 and operation2 have that operates on the input string must
      // have the same position in the input string.

      if (typeof op1 === 'undefined' && typeof op2 === 'undefined') {
        // end condition: both ops1 and ops2 have been processed
        break
      }

      // next two cases: one or both ops are insert ops
      // => insert the string in the corresponding prime operation, skip it in
      // the other one. If both op1 and op2 are insert ops, prefer op1.
      if (op1 instanceof InsertOp) {
        operation1prime.insert(op1.insertion, {
          tracking: op1.tracking,
          commentIds: op1.commentIds,
        })
        operation2prime.retain(op1.insertion.length)
        op1 = ops1[i1++]
        continue
      }
      if (op2 instanceof InsertOp) {
        operation1prime.retain(op2.insertion.length)
        operation2prime.insert(op2.insertion, {
          tracking: op2.tracking,
          commentIds: op2.commentIds,
        })
        op2 = ops2[i2++]
        continue
      }

      if (typeof op1 === 'undefined') {
        throw new Error(
          'Cannot compose operations: first operation is too short.'
        )
      }
      if (typeof op2 === 'undefined') {
        throw new Error(
          'Cannot compose operations: first operation is too long.'
        )
      }

      let minl
      if (op1 instanceof RetainOp && op2 instanceof RetainOp) {
        // Simple case: retain/retain

        // If both have tracking info, we use the one from op1
        /** @type {TrackingProps | ClearTrackingProps | undefined} */
        let operation1primeTracking
        /** @type {TrackingProps | ClearTrackingProps | undefined} */
        let operation2primeTracking
        if (op1.tracking) {
          operation1primeTracking = op1.tracking
        } else {
          operation2primeTracking = op2.tracking
        }

        if (op1.length > op2.length) {
          minl = op2.length
          op1 = new RetainOp(op1.length - op2.length, op1.tracking)
          op2 = ops2[i2++]
        } else if (op1.length === op2.length) {
          minl = op2.length
          op1 = ops1[i1++]
          op2 = ops2[i2++]
        } else {
          minl = op1.length
          op2 = new RetainOp(op2.length - op1.length, op2.tracking)
          op1 = ops1[i1++]
        }
        operation1prime.retain(minl, { tracking: operation1primeTracking })
        operation2prime.retain(minl, { tracking: operation2primeTracking })
      } else if (op1 instanceof RemoveOp && op2 instanceof RemoveOp) {
        // Both operations remove the same string at the same position. We don't
        // need to produce any operations, we just skip over the remove ops and
        // handle the case that one operation removes more than the other.
        if (op1.length > op2.length) {
          op1 = RemoveOp.fromJSON(op2.length - op1.length)
          op2 = ops2[i2++]
        } else if (op1.length === op2.length) {
          op1 = ops1[i1++]
          op2 = ops2[i2++]
        } else {
          op2 = RemoveOp.fromJSON(op1.length - op2.length)
          op1 = ops1[i1++]
        }
        // next two cases: remove/retain and retain/remove
      } else if (op1 instanceof RemoveOp && op2 instanceof RetainOp) {
        if (op1.length > op2.length) {
          minl = op2.length
          op1 = RemoveOp.fromJSON(op2.length - op1.length)
          op2 = ops2[i2++]
        } else if (op1.length === op2.length) {
          minl = op2.length
          op1 = ops1[i1++]
          op2 = ops2[i2++]
        } else {
          minl = op1.length
          op2 = new RetainOp(op2.length - op1.length, op2.tracking)
          op1 = ops1[i1++]
        }
        operation1prime.remove(minl)
      } else if (op1 instanceof RetainOp && op2 instanceof RemoveOp) {
        if (op1.length > op2.length) {
          minl = op2.length
          op1 = new RetainOp(op1.length - op2.length, op1.tracking)
          op2 = ops2[i2++]
        } else if (op1.length === op2.length) {
          minl = op1.length
          op1 = ops1[i1++]
          op2 = ops2[i2++]
        } else {
          minl = op1.length
          op2 = RemoveOp.fromJSON(op1.length - op2.length)
          op1 = ops1[i1++]
        }
        operation2prime.remove(minl)
      } else {
        throw new Error("The two operations aren't compatible")
      }
    }

    return [operation1prime, operation2prime]
  }
}
|
||||
|
||||
// Operation are essentially lists of ops. There are three types of ops:
|
||||
//
|
||||
// * Retain ops: Advance the cursor position by a given number of characters.
|
||||
// Represented by positive ints.
|
||||
// * Insert ops: Insert a given string at the current cursor position.
|
||||
// Represented by strings.
|
||||
// * Remove ops: Remove the next n characters. Represented by negative ints.
|
||||
|
||||
/**
 * Extract the single "interesting" (non-retain) op of a simple operation:
 * one op alone, one op next to a retain, or one op between two retains.
 * Returns null for anything more complex.
 * @param {TextOperation} operation
 * @returns {ScanOp | null}
 */
function getSimpleOp(operation) {
  const ops = operation.ops
  if (ops.length === 1) {
    return ops[0]
  }
  if (ops.length === 2) {
    if (ops[0] instanceof RetainOp) {
      return ops[1]
    }
    if (ops[1] instanceof RetainOp) {
      return ops[0]
    }
    return null
  }
  if (ops.length === 3 && ops[0] instanceof RetainOp && ops[2] instanceof RetainOp) {
    return ops[1]
  }
  return null
}
|
||||
|
||||
/**
 * Position at which the operation starts modifying the document: the length
 * of a leading retain, or 0 when the operation begins editing immediately.
 *
 * @param {TextOperation} operation
 * @return {number}
 */
function getStartIndex(operation) {
  const [firstOp] = operation.ops
  return firstOp instanceof RetainOp ? firstOp.length : 0
}
|
||||
|
||||
/**
 * Constructs the segments defined as each overlapping range of tracked
 * changes and comments. Each segment can have its own tracking props and
 * attached comment ids.
 *
 *     The quick brown fox jumps over the lazy dog
 *     Tracked inserts: "quick brown", "jumps"
 *     Tracked deletes: "over "
 *     Comments may overlap both of the above and each other.
 *
 * Every boundary (start/end) of a comment range, tracked change, or the
 * operation itself splits the operation range into sub-ranges; each sub-range
 * has a single consistent set of comment ids and tracking props.
 *
 * @param {number} cursor
 * @param {number} length
 * @param {import('../file_data/comment_list')} commentsList
 * @param {TrackedChangeList} trackedChangeList
 * @returns {{length: number, commentIds?: string[], tracking?: TrackingProps}[]}
 */
function calculateTrackingCommentSegments(
  cursor,
  length,
  commentsList,
  trackedChangeList
) {
  const opStart = cursor
  const opEnd = cursor + length
  const boundaries = new Set()

  /**
   * Record a boundary, discarding any that fall outside the operation range.
   * @param {number} rangeBoundary
   */
  const collectBoundary = rangeBoundary => {
    if (rangeBoundary >= opStart && rangeBoundary <= opEnd) {
      boundaries.add(rangeBoundary)
    }
  }

  // Boundaries contributed by comment ranges
  for (const comment of commentsList.comments.values()) {
    for (const range of comment.ranges) {
      collectBoundary(range.start)
      collectBoundary(range.end)
    }
  }
  // Boundaries contributed by tracked changes
  for (const trackedChange of trackedChangeList.asSorted()) {
    collectBoundary(trackedChange.range.start)
    collectBoundary(trackedChange.range.end)
  }
  // The operation itself bounds the first and last segment
  collectBoundary(opStart)
  collectBoundary(opEnd)

  // Walk the sorted boundaries pairwise to build the sub-ranges between them
  const sortedBoundaries = [...boundaries].sort((a, b) => a - b)

  const segments = []
  for (let i = 1; i < sortedBoundaries.length; i++) {
    const start = sortedBoundaries[i - 1]
    const end = sortedBoundaries[i]
    const subRange = new Range(start, end - start)
    // Comment ids covering this sub-range apply to the whole segment
    const commentIds = commentsList.idsCoveringRange(subRange)
    // As does any tracking info covering it
    const tracking = trackedChangeList.propsAtRange(subRange)
    segments.push({
      length: subRange.length,
      commentIds: commentIds.length > 0 ? commentIds : undefined,
      tracking,
    })
  }
  return segments
}
|
||||
|
||||
module.exports = TextOperation
|
||||
64
libraries/overleaf-editor-core/lib/origin/index.js
Normal file
64
libraries/overleaf-editor-core/lib/origin/index.js
Normal file
@@ -0,0 +1,64 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
|
||||
// Dependencies are loaded at the bottom of the file to mitigate circular
|
||||
// dependency
|
||||
let RestoreOrigin = null
|
||||
let RestoreFileOrigin = null
|
||||
let RestoreProjectOrigin = null
|
||||
|
||||
/**
 * An Origin records where a {@link Change} came from. The Origin class handles
 * simple tag origins, like "it came from rich text mode", or "it came from
 * uploading files". Its subclasses record more detailed data for Changes such
 * as restoring a version.
 */
class Origin {
  /**
   * @param {string} kind tag describing the source of the change
   */
  constructor(kind) {
    assert.string(kind, 'Origin: bad kind')

    this.kind = kind
  }

  /**
   * Create an Origin from its raw form.
   *
   * @param {Object} [raw]
   * @return {Origin | null} null when no raw origin was given
   */
  static fromRaw(raw) {
    if (!raw) return null
    switch (raw.kind) {
      case RestoreOrigin.KIND:
        return RestoreOrigin.fromRaw(raw)
      case RestoreFileOrigin.KIND:
        return RestoreFileOrigin.fromRaw(raw)
      case RestoreProjectOrigin.KIND:
        return RestoreProjectOrigin.fromRaw(raw)
      default:
        // Unrecognised kinds fall back to a plain tag origin.
        return new Origin(raw.kind)
    }
  }

  /**
   * Convert the Origin to raw form for storage or transmission.
   *
   * @return {Object}
   */
  toRaw() {
    return { kind: this.kind }
  }

  /**
   * The origin's tag.
   * @return {string}
   */
  getKind() {
    return this.kind
  }
}
|
||||
|
||||
module.exports = Origin
|
||||
|
||||
RestoreOrigin = require('./restore_origin')
|
||||
RestoreFileOrigin = require('./restore_file_origin')
|
||||
RestoreProjectOrigin = require('./restore_project_origin')
|
||||
@@ -0,0 +1,62 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
|
||||
const Origin = require('.')
|
||||
|
||||
class RestoreFileOrigin extends Origin {
  /**
   * Records that a change restored a single file to an earlier state.
   *
   * @param {number} version that was restored
   * @param {string} path that was restored
   * @param {Date} timestamp from the restored version
   */
  constructor(version, path, timestamp) {
    assert.integer(version, 'RestoreFileOrigin: bad version')
    assert.string(path, 'RestoreFileOrigin: bad path')
    assert.date(timestamp, 'RestoreFileOrigin: bad timestamp')

    super(RestoreFileOrigin.KIND)
    this.version = version
    this.path = path
    this.timestamp = timestamp
  }

  /**
   * Rehydrate from raw form; the stored ISO timestamp becomes a Date.
   * @param {{version: number, path: string, timestamp: string}} raw
   */
  static fromRaw(raw) {
    const { version, path, timestamp } = raw
    return new RestoreFileOrigin(version, path, new Date(timestamp))
  }

  /** @inheritdoc */
  toRaw() {
    const { version, path } = this
    return {
      kind: RestoreFileOrigin.KIND,
      version,
      path,
      timestamp: this.timestamp.toISOString(),
    }
  }

  /**
   * The version that was restored.
   * @return {number}
   */
  getVersion() {
    return this.version
  }

  /**
   * The path that was restored.
   * @return {string}
   */
  getPath() {
    return this.path
  }

  /**
   * Timestamp of the restored version.
   * @return {Date}
   */
  getTimestamp() {
    return this.timestamp
  }
}
|
||||
|
||||
RestoreFileOrigin.KIND = 'file-restore'
|
||||
|
||||
module.exports = RestoreFileOrigin
|
||||
62
libraries/overleaf-editor-core/lib/origin/restore_origin.js
Normal file
62
libraries/overleaf-editor-core/lib/origin/restore_origin.js
Normal file
@@ -0,0 +1,62 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
|
||||
const Origin = require('./')
|
||||
|
||||
/**
 * When a {@link Change} is generated by restoring a previous version, this
 * records the original version. We also store the timestamp of the restored
 * version for display; technically, this is redundant, because we could
 * recover it using the version ID. However, it would be very expensive to
 * recover all referenced versions, and it is also possible that the change
 * for the restored version will no longer exist, either because it was merged
 * with other changes or was deleted.
 *
 * @see Origin
 */
class RestoreOrigin extends Origin {
  /**
   * @param {number} version that was restored
   * @param {Date} timestamp from the restored version
   */
  constructor(version, timestamp) {
    assert.integer(version, 'RestoreOrigin: bad version')
    assert.date(timestamp, 'RestoreOrigin: bad timestamp')

    super(RestoreOrigin.KIND)
    this.version = version
    this.timestamp = timestamp
  }

  /**
   * Rehydrate from raw form; the stored ISO timestamp becomes a Date.
   */
  static fromRaw(raw) {
    const { version, timestamp } = raw
    return new RestoreOrigin(version, new Date(timestamp))
  }

  /** @inheritdoc */
  toRaw() {
    const raw = {
      kind: RestoreOrigin.KIND,
      version: this.version,
      timestamp: this.timestamp.toISOString(),
    }
    return raw
  }

  /**
   * The version that was restored.
   * @return {number}
   */
  getVersion() {
    return this.version
  }

  /**
   * Timestamp of the restored version.
   * @return {Date}
   */
  getTimestamp() {
    return this.timestamp
  }
}
|
||||
|
||||
RestoreOrigin.KIND = 'restore'
|
||||
|
||||
module.exports = RestoreOrigin
|
||||
@@ -0,0 +1,51 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
|
||||
const Origin = require('.')
|
||||
|
||||
class RestoreProjectOrigin extends Origin {
  /**
   * Records that a change restored the whole project to an earlier version.
   *
   * @param {number} version that was restored
   * @param {Date} timestamp from the restored version
   */
  constructor(version, timestamp) {
    assert.integer(version, 'RestoreProjectOrigin: bad version')
    assert.date(timestamp, 'RestoreProjectOrigin: bad timestamp')

    super(RestoreProjectOrigin.KIND)
    this.version = version
    this.timestamp = timestamp
  }

  /**
   * Rehydrate from raw form; the stored ISO timestamp becomes a Date.
   */
  static fromRaw(raw) {
    return new RestoreProjectOrigin(raw.version, new Date(raw.timestamp))
  }

  /** @inheritdoc */
  toRaw() {
    const { version, timestamp } = this
    return {
      kind: RestoreProjectOrigin.KIND,
      version,
      timestamp: timestamp.toISOString(),
    }
  }

  /**
   * The version that was restored.
   * @return {number}
   */
  getVersion() {
    return this.version
  }

  /**
   * Timestamp of the restored version.
   * @return {Date}
   */
  getTimestamp() {
    return this.timestamp
  }
}
|
||||
|
||||
RestoreProjectOrigin.KIND = 'project-restore'
|
||||
|
||||
module.exports = RestoreProjectOrigin
|
||||
239
libraries/overleaf-editor-core/lib/ot_client.js
Normal file
239
libraries/overleaf-editor-core/lib/ot_client.js
Normal file
@@ -0,0 +1,239 @@
|
||||
'use strict'
|
||||
|
||||
const _ = require('lodash')
|
||||
|
||||
const ChangeNote = require('./change_note')
|
||||
const ChangeRequest = require('./change_request')
|
||||
const Chunk = require('./chunk')
|
||||
const Operation = require('./operation')
|
||||
|
||||
/**
 * Operational Transformation client.
 *
 * Tracks the client's copy of the project version and mediates between the
 * local editor and the server: local edits are sent one change at a time,
 * remote changes are buffered and applied in version order, and concurrent
 * edits are reconciled via Operation.transformMultiple.
 *
 * See OT.md for explanation.
 */
class OtClient {
  constructor(_projectId, _editor, _blobStore, _socket) {
    // Protocol state machine values.
    const STATE_DISCONNECTED = 0 // not yet authenticated
    const STATE_LOADING = 1 // authenticated, waiting for the initial 'load'
    const STATE_READY = 2 // up to date; no change awaiting acknowledgement
    const STATE_WAITING = 3 // a change has been sent and not yet acked

    let _version = null // project version the client can currently construct
    let _state = STATE_DISCONNECTED
    const _buffer = [] // remote ChangeNotes not yet applicable (out of order)
    let _ackVersion = null // base version of our acked change, pending processing
    let _outstanding = [] // operations sent to the server, not yet acked
    let _pending = [] // local operations queued behind the outstanding change
    const _waiting = [] // sparse: version -> resolvers from waitForVersion

    // Start the handshake; only valid from the disconnected state.
    this.connect = function otClientConnect() {
      switch (_state) {
        case STATE_DISCONNECTED:
          _state = STATE_LOADING
          _socket.emit('authenticate', {
            projectId: _projectId,
            token: 'letmein',
          })
          break
        default:
          throw new Error('connect in state ' + _state)
      }
    }

    /**
     * The latest project version number for which the client can construct the
     * project content.
     *
     * @return {number} non-negative
     */
    this.getVersion = function () {
      return _version
    }

    // Initial load: rebuild the snapshot from the server's chunk, then hand it
    // to the editor.
    _socket.on('load', function otClientOnLoad(data) {
      switch (_state) {
        case STATE_LOADING: {
          const chunk = Chunk.fromRaw(data)
          const snapshot = chunk.getSnapshot()
          snapshot.applyAll(chunk.getChanges(), { strict: true })
          _version = chunk.getEndVersion()
          // TODO: we can get remote changes here, so it's not correct to wait for
          // the editor to load before transitioning to the READY state
          _editor.load(snapshot).then(function () {
            _state = STATE_READY
          })
          break
        }
        default:
          throw new Error('loaded in state ' + _state)
      }
    })

    //
    // Local Operations
    //

    // Send the outstanding operations as one change based on the current
    // version, then wait for the server's ack.
    function sendOutstandingChange() {
      const changeRequest = new ChangeRequest(_version, _outstanding)
      _socket.emit('change', changeRequest.toRaw())
      _state = STATE_WAITING
    }

    // Add an operation to the outstanding set and send it immediately.
    function sendLocalOperation(operation) {
      _outstanding.push(operation)
      sendOutstandingChange()
    }

    // Queue a local operation behind the change that is already in flight.
    function queueLocalOperation(operation) {
      _pending.push(operation)
    }

    // Entry point for edits made in the local editor.
    this.handleLocalOperation = function otClientHandleLocalOperation(
      operation
    ) {
      switch (_state) {
        case STATE_READY:
          sendLocalOperation(operation)
          break
        case STATE_WAITING:
          queueLocalOperation(operation)
          break
        default:
          throw new Error('local operation in state ' + _state)
      }
    }

    /**
     * A promise that resolves when the project reaches the given version.
     *
     * @param {number} version non-negative
     * @return {Promise}
     */
    this.waitForVersion = function otClientWaitForVersion(version) {
      if (!_waiting[version]) _waiting[version] = []
      return new Promise(function (resolve, reject) {
        _waiting[version].push(resolve)
      })
    }

    // Resolve every waitForVersion promise whose version has been reached.
    // NOTE(review): for...in yields string keys, so `version > _version`
    // relies on implicit numeric coercion — confirm this is intended.
    function resolveWaitingPromises() {
      for (const version in _waiting) {
        if (!Object.prototype.hasOwnProperty.call(_waiting, version)) continue
        if (version > _version) continue
        _waiting[version].forEach(function (resolve) {
          resolve()
        })
        delete _waiting[version]
      }
    }

    //
    // Messages from Server
    //

    // Apply, in version order, whatever is now applicable: first our own
    // acked change, then any buffered remote change based on the current
    // version. Recurses until neither applies.
    function advanceIfReady() {
      if (_ackVersion !== null && _version === _ackVersion) {
        _version += 1
        _ackVersion = null
        handleAckReady()
        advanceIfReady()
        return
      }
      const changeNotes = _.remove(_buffer, function (changeNote) {
        return changeNote.getBaseVersion() === _version
      })
      if (changeNotes.length === 1) {
        handleRemoteChangeReady(changeNotes[0].getChange())
        _version += 1
        advanceIfReady()
        return
      }
      if (changeNotes.length !== 0) {
        throw new Error('multiple remote changes in client version ' + _version)
      }
    }

    // Hold a remote change until our version catches up to its base version.
    function bufferRemoteChangeNote(changeNote) {
      const version = changeNote.getBaseVersion()
      if (_.find(_buffer, 'baseVersion', version)) {
        throw new Error('multiple changes in version ' + version)
      }
      if (version === _ackVersion) {
        throw new Error('received change that was acked in ' + _ackVersion)
      }
      _buffer.push(changeNote)
    }

    // Our outstanding change was accepted: notify the editor, wake waiters,
    // then send any queued (pending) operations as the next change.
    function handleAckReady() {
      // console.log('handleAckReady')
      if (_outstanding.length === 0) {
        throw new Error('ack complete without outstanding change')
      }
      if (_state !== STATE_WAITING) {
        throw new Error('ack complete in state ' + _state)
      }
      _editor.handleChangeAcknowledged()
      resolveWaitingPromises()
      if (_pending.length > 0) {
        _outstanding = _pending
        _pending = []
        sendOutstandingChange()
      } else {
        _outstanding = []
        _state = STATE_READY
      }
    }

    // Transform our unacked local operations against an incoming remote
    // change, then let the editor apply the remote change.
    function handleRemoteChangeReady(change) {
      if (_pending.length > 0) {
        if (_outstanding.length === 0) {
          throw new Error('pending change without outstanding change')
        }
      }

      Operation.transformMultiple(_outstanding, change.getOperations())
      Operation.transformMultiple(_pending, change.getOperations())

      _editor.applyRemoteChange(change)
    }

    // Server acknowledged our change; record its base version and advance.
    _socket.on('ack', function otClientOnAck(data) {
      switch (_state) {
        case STATE_WAITING: {
          const changeNote = ChangeNote.fromRaw(data)
          _ackVersion = changeNote.getBaseVersion()
          advanceIfReady()
          break
        }
        default:
          throw new Error('ack in state ' + _state)
      }
    })

    // Remote change from another client: buffer it and apply when in order.
    _socket.on('change', function otClientOnChange(data) {
      switch (_state) {
        case STATE_READY:
        case STATE_WAITING:
          bufferRemoteChangeNote(ChangeNote.fromRaw(data))
          advanceIfReady()
          break
        default:
          throw new Error('remote change in state ' + _state)
      }
    })

    //
    // Connection State
    // TODO: socket.io error handling
    //

    _socket.on('disconnect', function () {
      _state = STATE_DISCONNECTED
      // eslint-disable-next-line no-console
      console.log('disconnected') // TODO: how do we handle disconnect?
    })
  }
}
|
||||
|
||||
module.exports = OtClient
|
||||
232
libraries/overleaf-editor-core/lib/range.js
Normal file
232
libraries/overleaf-editor-core/lib/range.js
Normal file
@@ -0,0 +1,232 @@
|
||||
// @ts-check
|
||||
|
||||
const OError = require('@overleaf/o-error')
|
||||
|
||||
/**
|
||||
* @import { RawRange } from './types'
|
||||
*/
|
||||
|
||||
class Range {
  /**
   * A contiguous, immutable span starting at `pos` and covering `length`
   * characters. All operations return new Range instances.
   *
   * @param {number} pos zero-based start position; must be non-negative
   * @param {number} length extent of the range; must be non-negative (0 = empty)
   * @throws {OError} if either argument is negative
   */
  constructor(pos, length) {
    if (pos < 0 || length < 0) {
      throw new OError('Invalid range', { pos, length })
    }
    /** @readonly */
    this.pos = pos
    /** @readonly */
    this.length = length
  }

  /**
   * First position covered by the range (alias of `pos`).
   * @return {number}
   */
  get start() {
    return this.pos
  }

  /**
   * Position one past the last covered character (exclusive end).
   * @return {number}
   */
  get end() {
    return this.pos + this.length
  }

  /**
   * Is this range equal to the given range?
   *
   * @param {Range} other
   * @returns {boolean}
   */
  equals(other) {
    return this.pos === other.pos && this.length === other.length
  }

  /**
   * Does this range start at or after the end of the given range?
   *
   * @param {Range} range
   * @returns {boolean}
   */
  startsAfter(range) {
    return this.start >= range.end
  }

  /**
   * Does this range start strictly after the given position?
   *
   * @param {number} pos
   * @returns {boolean}
   */
  startIsAfter(pos) {
    return this.start > pos
  }

  /**
   * Does the range cover zero characters?
   *
   * @returns {boolean}
   */
  isEmpty() {
    return this.length === 0
  }

  /**
   * Checks if the range fully contains a given range (boundaries included).
   * @param {Range} range
   */
  contains(range) {
    return this.start <= range.start && this.end >= range.end
  }

  /**
   * Checks if the range contains a cursor position. The endpoints are
   * included: a cursor at `start` or at `end` satisfies this check
   * (insertAt/splitAt rely on this to allow splitting at either boundary).
   * @param {number} cursor
   */
  containsCursor(cursor) {
    return this.start <= cursor && this.end >= cursor
  }

  /**
   * Do the two ranges share at least one character? (Mere adjacency does not
   * count — see touches.)
   * @param {Range} range
   */
  overlaps(range) {
    return this.start < range.end && this.end > range.start
  }

  /**
   * checks if the range touches a given range (exactly adjacent, no overlap)
   * @param {Range} range
   */
  touches(range) {
    return this.end === range.start || this.start === range.end
  }

  /**
   * Remove the overlapping part of `range` from this range, shifting the
   * start when `range` begins before this range. Disjoint ranges return an
   * unchanged copy.
   *
   * @param {Range} range
   * @returns {Range}
   */
  subtract(range) {
    if (this.contains(range)) {
      // Fully inside: only the length shrinks.
      return this.shrinkBy(range.length)
    }

    if (range.contains(this)) {
      // Entirely swallowed: collapse to an empty range at the same position.
      return new Range(this.pos, 0)
    }

    if (range.overlaps(this)) {
      if (range.start < this.start) {
        // Overlap on the left: start moves back to range.pos.
        const intersectedLength = range.end - this.start
        return new Range(range.pos, this.length - intersectedLength)
      } else {
        // Overlap on the right: start is unchanged.
        const intersectedLength = this.end - range.start
        return new Range(this.pos, this.length - intersectedLength)
      }
    }

    // No intersection: return an equal copy.
    return new Range(this.pos, this.length)
  }

  /**
   * Can the two ranges be merged into one contiguous range?
   *
   * @param {Range} range
   * @returns {boolean}
   */
  canMerge(range) {
    return this.overlaps(range) || this.touches(range)
  }

  /**
   * Merge with an overlapping or adjacent range.
   *
   * @param {Range} range
   * @throws {Error} if the ranges are disjoint and non-adjacent
   */
  merge(range) {
    if (!this.canMerge(range)) {
      throw new Error('Ranges cannot be merged')
    }
    const newPos = Math.min(this.pos, range.pos)
    const newEnd = Math.max(this.end, range.end)

    return new Range(newPos, newEnd - newPos)
  }

  /**
   * Moves the range by a given number (negative moves left but must not push
   * `pos` below zero)
   * @param {number} length
   */
  moveBy(length) {
    return new Range(this.pos + length, this.length)
  }

  /**
   * Extends the range by a given number
   * @param {number} extensionLength
   */
  extendBy(extensionLength) {
    return new Range(this.pos, this.length + extensionLength)
  }

  /**
   * Shrinks the range by a given number
   * @param {number} shrinkLength
   * @throws {Error} if shrinking below zero length
   */
  shrinkBy(shrinkLength) {
    const newLength = this.length - shrinkLength

    if (newLength < 0) {
      throw new Error('Cannot shrink range by more than its length')
    }

    return new Range(this.pos, newLength)
  }

  /**
   * Splits a range on the cursor and inserts a range with the length provided
   * (the cursor may sit at either boundary, producing an empty side).
   * @param {number} cursor
   * @param {number} length
   * @returns {[Range, Range, Range]} [before, inserted, after]
   */
  insertAt(cursor, length) {
    if (!this.containsCursor(cursor)) {
      throw new Error('The cursor must be contained in the range')
    }
    const rangeUpToCursor = new Range(this.pos, cursor - this.pos)
    const insertedRange = new Range(cursor, length)
    const rangeAfterCursor = new Range(
      cursor + length,
      this.length - rangeUpToCursor.length
    )
    return [rangeUpToCursor, insertedRange, rangeAfterCursor]
  }

  /**
   * Convert to raw (JSON-serializable) form.
   */
  toRaw() {
    return {
      pos: this.pos,
      length: this.length,
    }
  }

  /**
   * @param {RawRange} raw
   * @return {Range}
   */
  static fromRaw(raw) {
    return new Range(raw.pos, raw.length)
  }

  /**
   * Splits a range into two ranges, at a given cursor (the cursor may sit at
   * either boundary, producing an empty side)
   * @param {number} cursor
   * @returns {[Range, Range]} [before, after]
   */
  splitAt(cursor) {
    if (!this.containsCursor(cursor)) {
      throw new Error('The cursor must be contained in the range')
    }
    const rangeUpToCursor = new Range(this.pos, cursor - this.pos)
    const rangeAfterCursor = new Range(
      cursor,
      this.length - rangeUpToCursor.length
    )
    return [rangeUpToCursor, rangeAfterCursor]
  }
}
|
||||
|
||||
module.exports = Range
|
||||
142
libraries/overleaf-editor-core/lib/safe_pathname.js
Normal file
142
libraries/overleaf-editor-core/lib/safe_pathname.js
Normal file
@@ -0,0 +1,142 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const path = require('path-browserify')
|
||||
|
||||
/**
|
||||
* Regular expressions for Overleaf v2 taken from
|
||||
* https://github.com/overleaf/internal/blob/f7b287b6a07354000a6b463ca3a5828104e4a811/services/web/app/src/Features/Project/SafePath.js
|
||||
*/
|
||||
|
||||
//
|
||||
// Regex of characters that are invalid in filenames
|
||||
//
|
||||
// eslint-disable-next-line no-control-regex
|
||||
const BAD_CHAR_RX = /[/*\u0000-\u001F\u007F\u0080-\u009F\uD800-\uDFFF]/g
|
||||
|
||||
//
|
||||
// Regex of filename patterns that are invalid ("." ".." and leading/trailing
|
||||
// whitespace)
|
||||
//
|
||||
const BAD_FILE_RX = /(^\.$)|(^\.\.$)|(^\s+)|(\s+$)/g
|
||||
|
||||
//
|
||||
// Put a block on filenames which match javascript property names, as they
|
||||
// can cause exceptions where the code puts filenames into a hash. This is a
|
||||
// temporary workaround until the code in other places is made safe against
|
||||
// property names.
|
||||
//
|
||||
// See https://github.com/overleaf/write_latex/wiki/Using-javascript-Objects-as-Maps
|
||||
//
|
||||
const BLOCKED_FILE_RX =
|
||||
/^(prototype|constructor|toString|toLocaleString|valueOf|hasOwnProperty|isPrototypeOf|propertyIsEnumerable|__defineGetter__|__lookupGetter__|__defineSetter__|__lookupSetter__|__proto__)$/
|
||||
|
||||
//
|
||||
// Maximum path length, in characters. This is fairly arbitrary.
|
||||
//
|
||||
const MAX_PATH = 1024
|
||||
|
||||
/**
 * Replace invalid characters and filename patterns in a filename with
 * underscores.
 *
 * @param {string} filename a single path component (no '/')
 * @return {string} the sanitised filename, same length as the input
 */
function cleanPart(filename) {
  // Replace each forbidden character with a single underscore.
  filename = filename.replace(BAD_CHAR_RX, '_')
  // Replace forbidden patterns ('.', '..', leading/trailing whitespace) with
  // one underscore per matched character, preserving the filename length.
  filename = filename.replace(BAD_FILE_RX, match => '_'.repeat(match.length))
  return filename
}
|
||||
|
||||
/**
|
||||
* All pathnames in a Snapshot must be clean. We want pathnames that:
|
||||
*
|
||||
* 1. are unambiguous (e.g. no `.`s or redundant path separators)
|
||||
* 2. do not allow directory traversal attacks (e.g. no `..`s or absolute paths)
|
||||
* 3. do not contain leading/trailing space
|
||||
* 4. do not contain the character '*' in filenames
|
||||
*
|
||||
* We normalise the pathname, split it by the separator and then clean each part
|
||||
* as a filename
|
||||
*
|
||||
* @param {string} pathname
|
||||
* @return {String}
|
||||
*/
|
||||
exports.clean = function (pathname) {
|
||||
return exports.cleanDebug(pathname)[0]
|
||||
}
|
||||
|
||||
/**
 * See clean. Returns the cleaned pathname together with a comma-separated
 * list of the transformation steps that changed it (empty string when the
 * input was already clean).
 *
 * @param {string} pathname
 * @return {[string,string]} [cleanedPathname, reason]
 */
exports.cleanDebug = function (pathname) {
  let prev = pathname
  let reason = ''

  /**
   * Append `label` to the reason list if the previous step changed the
   * pathname.
   * @param {string} label
   */
  function recordReasonIfChanged(label) {
    if (pathname === prev) return
    if (reason) reason += ','
    reason += label
    prev = pathname
  }
  // Resolve '.' and '..' segments and redundant separators.
  pathname = path.normalize(pathname)
  recordReasonIfChanged('normalize')

  // Convert backslashes to forward slashes.
  pathname = pathname.replace(/\\/g, '/')
  recordReasonIfChanged('workaround for IE')

  // Collapse runs of slashes into a single separator.
  pathname = pathname.replace(/\/+/g, '/')
  recordReasonIfChanged('no multiple slashes')

  // Absolute paths are not allowed; neutralise a leading slash.
  pathname = pathname.replace(/^(\/.*)$/, '_$1')
  recordReasonIfChanged('no leading /')

  // Drop a trailing slash (a lone '/' is left for later steps).
  pathname = pathname.replace(/^(.+)\/$/, '$1')
  recordReasonIfChanged('no trailing /')

  // Strip leading spaces from the whole pathname.
  pathname = pathname.replace(/^ *(.*)$/, '$1')
  recordReasonIfChanged('no leading spaces')

  // Strip trailing spaces from the whole pathname.
  pathname = pathname.replace(/^(.*[^ ]) *$/, '$1')
  recordReasonIfChanged('no trailing spaces')

  // An empty pathname becomes a single underscore.
  if (pathname.length === 0) pathname = '_'
  recordReasonIfChanged('empty')

  // Clean each path component individually (bad characters and patterns).
  pathname = pathname.split('/').map(cleanPart).join('/')
  recordReasonIfChanged('cleanPart')

  // Escape names that collide with Object.prototype properties.
  pathname = pathname.replace(BLOCKED_FILE_RX, '@$1')
  recordReasonIfChanged('BLOCKED_FILE_RX')
  return [pathname, reason]
}
|
||||
|
||||
/**
|
||||
* A pathname is clean (see clean) and not too long.
|
||||
*
|
||||
* @param {string} pathname
|
||||
* @return {Boolean}
|
||||
*/
|
||||
exports.isClean = function pathnameIsClean(pathname) {
|
||||
return exports.isCleanDebug(pathname)[0]
|
||||
}
|
||||
|
||||
/**
|
||||
* A pathname is clean (see clean) and not too long.
|
||||
*
|
||||
* @param {string} pathname
|
||||
* @return {[boolean,string]}
|
||||
*/
|
||||
exports.isCleanDebug = function (pathname) {
|
||||
if (pathname.length > MAX_PATH) return [false, 'MAX_PATH']
|
||||
if (pathname.length === 0) return [false, 'empty']
|
||||
const [cleanPathname, reason] = exports.cleanDebug(pathname)
|
||||
if (cleanPathname !== pathname) return [false, reason]
|
||||
return [true, '']
|
||||
}
|
||||
284
libraries/overleaf-editor-core/lib/snapshot.js
Normal file
284
libraries/overleaf-editor-core/lib/snapshot.js
Normal file
@@ -0,0 +1,284 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const assert = require('check-types').assert
|
||||
const OError = require('@overleaf/o-error')
|
||||
|
||||
const FileMap = require('./file_map')
|
||||
const V2DocVersions = require('./v2_doc_versions')
|
||||
|
||||
const FILE_LOAD_CONCURRENCY = 50
|
||||
|
||||
/**
|
||||
* @import { BlobStore, RawSnapshot, ReadonlyBlobStore } from "./types"
|
||||
* @import Change from "./change"
|
||||
* @import TextOperation from "./operation/text_operation"
|
||||
* @import File from "./file"
|
||||
*/
|
||||
|
||||
class EditMissingFileError extends OError {}
|
||||
|
||||
/**
|
||||
* A Snapshot represents the state of a {@link Project} at a
|
||||
* particular version.
|
||||
*/
|
||||
class Snapshot {
|
||||
static PROJECT_VERSION_RX_STRING = '^[0-9]+\\.[0-9]+$'
|
||||
static PROJECT_VERSION_RX = new RegExp(Snapshot.PROJECT_VERSION_RX_STRING)
|
||||
static EditMissingFileError = EditMissingFileError
|
||||
|
||||
  /**
   * Build a Snapshot from its raw (JSON-serializable) form.
   *
   * @param {RawSnapshot} raw must contain a `files` object
   * @return {Snapshot}
   */
  static fromRaw(raw) {
    assert.object(raw.files, 'bad raw.files')
    return new Snapshot(
      FileMap.fromRaw(raw.files),
      raw.projectVersion,
      V2DocVersions.fromRaw(raw.v2DocVersions)
    )
  }
|
||||
|
||||
  /**
   * Convert to raw form for storage or transmission. Optional fields
   * (projectVersion, v2DocVersions) are omitted when unset.
   *
   * @return {RawSnapshot}
   */
  toRaw() {
    /** @type RawSnapshot */
    const raw = {
      files: this.fileMap.toRaw(),
    }
    // Only include the optional metadata when present.
    if (this.projectVersion) raw.projectVersion = this.projectVersion
    if (this.v2DocVersions) raw.v2DocVersions = this.v2DocVersions.toRaw()
    return raw
  }
|
||||
|
||||
  /**
   * @param {FileMap} [fileMap] defaults to an empty FileMap
   * @param {string} [projectVersion]
   * @param {V2DocVersions} [v2DocVersions]
   */
  constructor(fileMap, projectVersion, v2DocVersions) {
    assert.maybe.instance(fileMap, FileMap, 'bad fileMap')

    // An omitted fileMap yields an empty snapshot.
    this.fileMap = fileMap || new FileMap({})
    this.projectVersion = projectVersion
    this.v2DocVersions = v2DocVersions
  }
|
||||
|
||||
  /**
   * The project version string, if one has been set.
   *
   * @return {string | null | undefined}
   */
  getProjectVersion() {
    return this.projectVersion
  }
|
||||
|
||||
  /**
   * Set the project version; must match PROJECT_VERSION_RX
   * (digits '.' digits).
   *
   * @param {string} projectVersion
   */
  setProjectVersion(projectVersion) {
    assert.maybe.match(
      projectVersion,
      Snapshot.PROJECT_VERSION_RX,
      'Snapshot: bad projectVersion'
    )
    this.projectVersion = projectVersion
  }
|
||||
|
||||
  /**
   * The v2 doc version data, if set.
   *
   * @return {V2DocVersions | null | undefined}
   */
  getV2DocVersions() {
    return this.v2DocVersions
  }
|
||||
|
||||
  /**
   * Replace the v2 doc version data wholesale (see updateV2DocVersions for a
   * merge).
   *
   * @param {V2DocVersions} v2DocVersions
   */
  setV2DocVersions(v2DocVersions) {
    assert.maybe.instance(
      v2DocVersions,
      V2DocVersions,
      'Snapshot: bad v2DocVersions'
    )
    this.v2DocVersions = v2DocVersions
  }
|
||||
|
||||
  /**
   * Merge new v2 doc versions into this snapshot's existing ones (delegated
   * to V2DocVersions#applyTo).
   *
   * @param {V2DocVersions} v2DocVersions
   */
  updateV2DocVersions(v2DocVersions) {
    // merge new v2DocVersions into this.v2DocVersions
    v2DocVersions.applyTo(this)
  }
|
||||
|
||||
/**
|
||||
* The underlying file map.
|
||||
* @return {FileMap}
|
||||
*/
|
||||
getFileMap() {
|
||||
return this.fileMap
|
||||
}
|
||||
|
||||
/**
|
||||
* The pathnames of all of the files.
|
||||
*
|
||||
* @return {Array.<string>} in no particular order
|
||||
*/
|
||||
getFilePathnames() {
|
||||
return this.fileMap.getPathnames()
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a File by its pathname.
|
||||
* @see FileMap#getFile
|
||||
* @param {string} pathname
|
||||
*/
|
||||
getFile(pathname) {
|
||||
return this.fileMap.getFile(pathname)
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the given file to the snapshot.
|
||||
* @see FileMap#addFile
|
||||
* @param {string} pathname
|
||||
* @param {File} file
|
||||
*/
|
||||
addFile(pathname, file) {
|
||||
this.fileMap.addFile(pathname, file)
|
||||
}
|
||||
|
||||
/**
|
||||
* Move or remove a file.
|
||||
* @see FileMap#moveFile
|
||||
* @param {string} pathname
|
||||
* @param {string} newPathname
|
||||
*/
|
||||
moveFile(pathname, newPathname) {
|
||||
this.fileMap.moveFile(pathname, newPathname)
|
||||
if (this.v2DocVersions) this.v2DocVersions.moveFile(pathname, newPathname)
|
||||
}
|
||||
|
||||
/**
|
||||
* The number of files in the snapshot.
|
||||
*
|
||||
* @return {number}
|
||||
*/
|
||||
countFiles() {
|
||||
return this.fileMap.countFiles()
|
||||
}
|
||||
|
||||
/**
|
||||
* Edit the content of an editable file.
|
||||
*
|
||||
* Throws an error if no file with the given name exists.
|
||||
*
|
||||
* @param {string} pathname
|
||||
* @param {TextOperation} textOperation
|
||||
*/
|
||||
editFile(pathname, textOperation) {
|
||||
const file = this.fileMap.getFile(pathname)
|
||||
if (!file) {
|
||||
throw new Snapshot.EditMissingFileError(
|
||||
`can't find file for editing: ${pathname}`
|
||||
)
|
||||
}
|
||||
file.edit(textOperation)
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply all changes in sequence. Modifies the snapshot in place.
|
||||
*
|
||||
* Ignore recoverable errors (caused by historical bad data) unless opts.strict is true
|
||||
*
|
||||
* @param {Change[]} changes
|
||||
* @param {object} [opts]
|
||||
* @param {boolean} opts.strict - do not ignore recoverable errors
|
||||
*/
|
||||
applyAll(changes, opts) {
|
||||
for (const change of changes) {
|
||||
change.applyTo(this, opts)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* If the Files in this Snapshot reference blob hashes, add them to the given
|
||||
* set.
|
||||
*
|
||||
* @param {Set.<String>} blobHashes
|
||||
*/
|
||||
findBlobHashes(blobHashes) {
|
||||
/**
|
||||
* @param {File} file
|
||||
*/
|
||||
function find(file) {
|
||||
const hash = file.getHash()
|
||||
const rangeHash = file.getRangesHash()
|
||||
if (hash) blobHashes.add(hash)
|
||||
if (rangeHash) blobHashes.add(rangeHash)
|
||||
}
|
||||
// TODO(das7pad): refine types to enforce no nulls in FileMapData
|
||||
// @ts-ignore
|
||||
this.fileMap.map(find)
|
||||
}
|
||||
|
||||
/**
|
||||
* Load all of the files in this snapshot.
|
||||
*
|
||||
* @param {string} kind see {File#load}
|
||||
* @param {ReadonlyBlobStore} blobStore
|
||||
* @return {Promise<Record<string, File>>} an object where keys are the pathnames and
|
||||
* values are the files in the snapshot
|
||||
*/
|
||||
async loadFiles(kind, blobStore) {
|
||||
/**
|
||||
* @param {File} file
|
||||
*/
|
||||
function load(file) {
|
||||
return file.load(kind, blobStore)
|
||||
}
|
||||
// TODO(das7pad): refine types to enforce no nulls in FileMapData
|
||||
// @ts-ignore
|
||||
return await this.fileMap.mapAsync(load, FILE_LOAD_CONCURRENCY)
|
||||
}
|
||||
|
||||
/**
|
||||
* Store each of the files in this snapshot and return the raw snapshot for
|
||||
* long term storage.
|
||||
*
|
||||
* @param {BlobStore} blobStore
|
||||
* @param {number} [concurrency]
|
||||
* @return {Promise.<Object>}
|
||||
*/
|
||||
async store(blobStore, concurrency) {
|
||||
assert.maybe.number(concurrency, 'bad concurrency')
|
||||
|
||||
const projectVersion = this.projectVersion
|
||||
const rawV2DocVersions = this.v2DocVersions
|
||||
? this.v2DocVersions.toRaw()
|
||||
: undefined
|
||||
|
||||
/**
|
||||
* @param {File} file
|
||||
*/
|
||||
function store(file) {
|
||||
return file.store(blobStore)
|
||||
}
|
||||
// TODO(das7pad): refine types to enforce no nulls in FileMapData
|
||||
// @ts-ignore
|
||||
const rawFiles = await this.fileMap.mapAsync(store, concurrency)
|
||||
return {
|
||||
files: rawFiles,
|
||||
projectVersion,
|
||||
v2DocVersions: rawV2DocVersions,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a deep clone of this snapshot.
|
||||
*
|
||||
* @return {Snapshot}
|
||||
*/
|
||||
clone() {
|
||||
return Snapshot.fromRaw(this.toRaw())
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Snapshot
|
||||
175
libraries/overleaf-editor-core/lib/types.ts
Normal file
175
libraries/overleaf-editor-core/lib/types.ts
Normal file
@@ -0,0 +1,175 @@
|
||||
// Raw (JSON-serializable) types shared between the editor server and clients.
// "Raw" types mirror the wire/storage format of the corresponding classes.

import Blob from './blob'
import TrackingProps from './file_data/tracking_props'
import ClearTrackingProps from './file_data/clear_tracking_props'

// Content-addressed storage interface: blobs are looked up by hash.
export type BlobStore = {
  getBlob(hash: string): Promise<Blob | null>
  getString(hash: string): Promise<string>
  putString(content: string): Promise<Blob>
  putObject(obj: object): Promise<Blob>
  getObject<T = unknown>(hash: string): Promise<T>
}

// Read-only view of a BlobStore (no put* methods).
export type ReadonlyBlobStore = Pick<BlobStore, 'getString' | 'getObject'>

// Ranges (comments + tracked changes) stored as a separate blob.
export type RangesBlob = {
  comments: CommentRawData[]
  trackedChanges: TrackedChangeRawData[]
}

// A contiguous span of text: start position and length.
export type RawRange = {
  pos: number
  length: number
}

export type CommentRawData = {
  id: string
  ranges: RawRange[]
  resolved?: boolean
}

export type TrackedChangeRawData = {
  range: RawRange
  tracking: TrackingPropsRawData
}

export type TrackingPropsRawData = {
  type: 'insert' | 'delete'
  userId: string
  ts: string
}

// Marker used to clear tracking data from a range.
export type ClearTrackingPropsRawData = {
  type: 'none'
}

export type TrackingDirective = TrackingProps | ClearTrackingProps

export type StringFileRawData = {
  content: string
  comments?: CommentRawData[]
  trackedChanges?: TrackedChangeRawData[]
}

export type RawOrigin = {
  kind: string
}

export type RawChange = {
  operations: RawOperation[]
  timestamp: string
  authors?: (number | null)[]
  v2Authors: string[]
  origin: RawOrigin
  projectVersion: string
  v2DocVersions: RawV2DocVersions
}

export type RawOperation =
  | RawEditFileOperation
  // TODO(das7pad): add types for all the other operations
  | object

export type RawSnapshot = {
  files: RawFileMap
  projectVersion?: string
  v2DocVersions?: RawV2DocVersions | null
}

export type RawHistory = {
  snapshot: RawSnapshot
  changes: RawChange[]
}

// A chunk is a slice of history starting at a given version.
export type RawChunk = {
  history: RawHistory
  startVersion: number
}

export type RawFileMap = Record<string, RawFile>

export type RawFile = { metadata?: Object } & RawFileData

// Union of all file representations (by-value, by-hash, hollow, lazy).
export type RawFileData =
  | RawBinaryFileData
  | RawHashFileData
  | RawHollowBinaryFileData
  | RawHollowStringFileData
  | RawLazyStringFileData
  | StringFileRawData

export type RawHashFileData = { hash: string; rangesHash?: string }
export type RawBinaryFileData = { hash: string; byteLength: number }
export type RawLazyStringFileData = {
  hash: string
  stringLength: number
  rangesHash?: string
  operations?: RawEditOperation[]
}
// "Hollow" files record only their size, not their content.
export type RawHollowBinaryFileData = { byteLength: number }
export type RawHollowStringFileData = { stringLength: number }

export type RawV2DocVersions = Record<string, { pathname: string; v: number }>

// Scan ops: a bare string/number is the compact form of the object form.
export type RawInsertOp =
  | {
      i: string
      commentIds?: string[]
      tracking?: TrackingPropsRawData
    }
  | string

export type RawRemoveOp = number
export type RawRetainOp =
  | {
      r: number
      commentIds?: string[]
      tracking?: TrackingPropsRawData | ClearTrackingPropsRawData
    }
  | number

export type RawScanOp = RawInsertOp | RawRemoveOp | RawRetainOp

export type RawTextOperation = {
  textOperation: RawScanOp[]
  contentHash?: string
}

export type RawAddCommentOperation = {
  commentId: string
  ranges: RawRange[]
  resolved?: boolean
}

export type RawDeleteCommentOperation = { deleteComment: string }

export type RawSetCommentStateOperation = {
  commentId: string
  resolved: boolean
}

export type RawEditNoOperation = {
  noOp: true
}

export type RawEditFileOperation = RawEditOperation & { pathname: string }

export type RawEditOperation =
  | RawTextOperation
  | RawAddCommentOperation
  | RawDeleteCommentOperation
  | RawSetCommentStateOperation
  | RawEditNoOperation

export type LinkedFileData = {
  importedAt: string
  provider: string
  [other: string]: any
}

export type RawLabel = {
  text: string
  authorId: number | null
  timestamp: string
  version: number
}
|
||||
14
libraries/overleaf-editor-core/lib/util.js
Normal file
14
libraries/overleaf-editor-core/lib/util.js
Normal file
@@ -0,0 +1,14 @@
|
||||
/*
|
||||
* Misc functions
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* @param {string} str
|
||||
* @returns {boolean} true if the given string contains non-BMP chars otherwise false
|
||||
*/
|
||||
exports.containsNonBmpChars = function utilContainsNonBmpChars(str) {
|
||||
// check for first (high) surrogate in a non-BMP character
|
||||
return /[\uD800-\uDBFF]/.test(str)
|
||||
}
|
||||
83
libraries/overleaf-editor-core/lib/v2_doc_versions.js
Normal file
83
libraries/overleaf-editor-core/lib/v2_doc_versions.js
Normal file
@@ -0,0 +1,83 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const _ = require('lodash')
|
||||
|
||||
/**
|
||||
* @import File from "./file"
|
||||
* @import Snapshot from "./snapshot"
|
||||
* @import { RawV2DocVersions } from "./types"
|
||||
*/
|
||||
|
||||
class V2DocVersions {
  /**
   * Per-doc version info, keyed by doc id: `{ [docId]: { pathname, v } }`.
   *
   * @param {RawV2DocVersions} data
   */
  constructor(data) {
    this.data = data || {}
  }

  /**
   * Build a V2DocVersions from its raw form.
   *
   * @param {RawV2DocVersions?} [raw]
   * @return {V2DocVersions|undefined} undefined when raw is missing
   */
  static fromRaw(raw) {
    if (!raw) return undefined
    return new V2DocVersions(raw)
  }

  /**
   * Serialise to a plain object.
   *
   * Note: this is a shallow copy — the per-doc `{pathname, v}` records are
   * shared with this instance.
   *
   * @return {RawV2DocVersions|null}
   */
  toRaw() {
    if (!this.data) return null
    // Shallow clone, natively (previously lodash _.clone).
    return { ...this.data }
  }

  /**
   * Clone this object.
   *
   * @return {V2DocVersions|undefined} a new object of the same type
   */
  clone() {
    return V2DocVersions.fromRaw(this.toRaw())
  }

  /**
   * Merge these doc versions into the given snapshot.
   *
   * @param {Snapshot} snapshot
   */
  applyTo(snapshot) {
    // Only update the snapshot versions if we have new versions
    if (Object.keys(this.data).length === 0) return

    // Create v2DocVersions in snapshot if it does not exist,
    // otherwise merge the new versions into it.
    if (!snapshot.v2DocVersions) {
      snapshot.v2DocVersions = this.clone()
    } else {
      Object.assign(snapshot.v2DocVersions.data, this.data)
    }
  }

  /**
   * Move or remove a doc.
   * Must be called after FileMap#moveFile, which validates the paths.
   *
   * @param {string} pathname
   * @param {string} newPathname empty string removes the doc's entry
   */
  moveFile(pathname, newPathname) {
    for (const [id, v] of Object.entries(this.data)) {
      if (v.pathname !== pathname) continue

      if (newPathname === '') {
        delete this.data[id]
      } else {
        v.pathname = newPathname
      }
      // Pathnames are unique, so at most one entry can match.
      break
    }
  }
}
|
||||
|
||||
module.exports = V2DocVersions
|
||||
34
libraries/overleaf-editor-core/package.json
Normal file
34
libraries/overleaf-editor-core/package.json
Normal file
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"name": "overleaf-editor-core",
|
||||
"version": "1.0.0",
|
||||
"description": "Library shared between the editor server and clients.",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "npm run lint && npm run format && npm run types:check && npm run test:unit",
|
||||
"format": "prettier --list-different $PWD/'**/*.{js,cjs,ts}'",
|
||||
"format:fix": "prettier --write $PWD/'**/*.{js,cjs,ts}'",
|
||||
"lint": "eslint --ext .js --ext .cjs --ext .ts --max-warnings 0 --format unix .",
|
||||
"lint:fix": "eslint --fix --ext .js --ext .cjs --ext .ts .",
|
||||
"test:ci": "npm run test:unit",
|
||||
"test:unit": "mocha --exit test/**/*.{js,cjs}",
|
||||
"types:check": "tsc --noEmit"
|
||||
},
|
||||
"author": "team@overleaf.com",
|
||||
"license": "Proprietary",
|
||||
"private": true,
|
||||
"devDependencies": {
|
||||
"@types/check-types": "^7.3.7",
|
||||
"@types/path-browserify": "^1.0.2",
|
||||
"chai": "^3.3.0",
|
||||
"mocha": "^11.1.0",
|
||||
"sinon": "^9.2.4",
|
||||
"typescript": "^5.0.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"@overleaf/o-error": "*",
|
||||
"check-types": "^5.1.0",
|
||||
"lodash": "^4.17.19",
|
||||
"p-map": "^4.0.0",
|
||||
"path-browserify": "^1.0.1"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,127 @@
|
||||
// @ts-check
// Tests for AddCommentOperation: JSON round-trip, application to file data,
// inversion, and composition with DeleteCommentOperation.
const { expect } = require('chai')
const { AddCommentOperation, DeleteCommentOperation } = require('..')
const Range = require('../lib/range')
const StringFileData = require('../lib/file_data/string_file_data')

describe('AddCommentOperation', function () {
  it('constructs an AddCommentOperation fromJSON', function () {
    const op = AddCommentOperation.fromJSON({
      commentId: '123',
      resolved: true,
      ranges: [{ pos: 0, length: 1 }],
    })
    expect(op).to.be.instanceOf(AddCommentOperation)
    expect(op.commentId).to.equal('123')
    expect(op.ranges[0]).to.be.instanceOf(Range)
    expect(op.resolved).to.be.true
  })

  it('should convert to JSON', function () {
    // `resolved` defaults to false and is omitted from the JSON form.
    const op = new AddCommentOperation('123', [new Range(0, 1)])
    expect(op.toJSON()).to.eql({
      commentId: '123',
      ranges: [
        {
          pos: 0,
          length: 1,
        },
      ],
    })
  })

  it('should apply operation', function () {
    const fileData = new StringFileData('abc')
    const op = new AddCommentOperation('123', [new Range(0, 1)])
    op.apply(fileData)
    expect(fileData.getComments().toRaw()).to.eql([
      {
        id: '123',
        ranges: [{ pos: 0, length: 1 }],
      },
    ])
  })

  describe('invert', function () {
    it('should delete added comment', function () {
      const initialFileData = new StringFileData('abc')
      // Work on a copy so the original stays pristine for invert().
      const fileData = StringFileData.fromRaw(initialFileData.toRaw())
      const op = new AddCommentOperation('123', [new Range(0, 1)])
      op.apply(fileData)
      expect(fileData.getComments().toRaw()).to.eql([
        {
          id: '123',
          ranges: [{ pos: 0, length: 1 }],
        },
      ])
      const invertedOp = op.invert(initialFileData)
      invertedOp.apply(fileData)
      expect(fileData.getComments().toRaw()).to.eql([])
    })

    it('should restore previous comment ranges', function () {
      const initialComments = [
        {
          id: '123',
          ranges: [{ pos: 0, length: 1 }],
        },
      ]

      const initialFileData = new StringFileData(
        'the quick brown fox jumps over the lazy dog',
        initialComments
      )
      const fileData = StringFileData.fromRaw(initialFileData.toRaw())
      // Re-adding an existing comment id overwrites its ranges/state.
      const op = new AddCommentOperation('123', [new Range(12, 7)], true)
      op.apply(fileData)
      expect(fileData.getComments().toRaw()).to.eql([
        {
          id: '123',
          ranges: [{ pos: 12, length: 7 }],
          resolved: true,
        },
      ])

      const invertedOp = op.invert(initialFileData)
      invertedOp.apply(fileData)
      expect(fileData.getComments().toRaw()).to.deep.equal(initialComments)
    })

    it('should restore previous comment resolution status', function () {
      const initialComments = [
        {
          id: '123',
          ranges: [{ pos: 0, length: 1 }],
        },
      ]

      const initialFileData = new StringFileData(
        'the quick brown fox jumps over the lazy dog',
        initialComments
      )
      const fileData = StringFileData.fromRaw(initialFileData.toRaw())
      const op = new AddCommentOperation('123', [new Range(0, 1)], true)
      op.apply(fileData)
      expect(fileData.getComments().toRaw()).to.eql([
        {
          id: '123',
          ranges: [{ pos: 0, length: 1 }],
          resolved: true,
        },
      ])

      const invertedOp = op.invert(initialFileData)
      invertedOp.apply(fileData)
      expect(fileData.getComments().toRaw()).to.deep.equal(initialComments)
    })
  })

  it('should compose with DeleteCommentOperation', function () {
    // add + delete of the same comment collapses to just the delete.
    const addOp = new AddCommentOperation('123', [new Range(0, 1)])
    const deleteOp = new DeleteCommentOperation('123')
    expect(addOp.canBeComposedWith(deleteOp)).to.be.true

    const composedOp = addOp.compose(deleteOp)
    expect(composedOp).to.be.instanceOf(DeleteCommentOperation)
  })
})
|
||||
62
libraries/overleaf-editor-core/test/change.test.js
Normal file
62
libraries/overleaf-editor-core/test/change.test.js
Normal file
@@ -0,0 +1,62 @@
|
||||
'use strict'

// Tests for Change: collecting blob hashes from its operations, and
// deserialising origin metadata into the right Origin subclass.
const { expect } = require('chai')
const core = require('..')
const Change = core.Change
const File = core.File
const Operation = core.Operation

describe('Change', function () {
  describe('findBlobHashes', function () {
    it('finds blob hashes from operations', function () {
      const blobHashes = new Set()

      const change = Change.fromRaw({
        operations: [],
        timestamp: '2015-03-05T12:03:53.035Z',
        authors: [null],
      })

      // No operations: nothing to collect.
      change.findBlobHashes(blobHashes)
      expect(blobHashes.size).to.equal(0)

      // AddFile with content doesn't have a hash.
      change.pushOperation(Operation.addFile('a.txt', File.fromString('a')))
      change.findBlobHashes(blobHashes)
      expect(blobHashes.size).to.equal(0)

      // AddFile with hash should give us a hash.
      change.pushOperation(
        Operation.addFile('b.txt', File.fromHash(File.EMPTY_FILE_HASH))
      )
      change.findBlobHashes(blobHashes)
      expect(blobHashes.size).to.equal(1)
      expect(blobHashes.has(File.EMPTY_FILE_HASH)).to.be.true
    })
  })

  describe('RestoreFileOrigin', function () {
    it('should convert to and from raw', function () {
      const origin = new core.RestoreFileOrigin(1, 'path', new Date())
      const raw = origin.toRaw()
      const newOrigin = core.Origin.fromRaw(raw)
      expect(newOrigin).to.eql(origin)
    })

    it('change should have a correct origin class', function () {
      // kind 'file-restore' must map to the RestoreFileOrigin subclass.
      const change = Change.fromRaw({
        operations: [],
        timestamp: '2015-03-05T12:03:53.035Z',
        authors: [null],
        origin: {
          kind: 'file-restore',
          version: 1,
          path: 'path',
          timestamp: '2015-03-05T12:03:53.035Z',
        },
      })

      expect(change.getOrigin()).to.be.an.instanceof(core.RestoreFileOrigin)
    })
  })
})
|
||||
116
libraries/overleaf-editor-core/test/comment.test.js
Normal file
116
libraries/overleaf-editor-core/test/comment.test.js
Normal file
@@ -0,0 +1,116 @@
|
||||
// @ts-check
'use strict'

// Tests for Comment: how its ranges react to inserts and deletes, and how
// overlapping/touching ranges are normalised on construction.
const { expect } = require('chai')
const Comment = require('../lib/comment')
const Range = require('../lib/range')

describe('Comment', function () {
  it('should move ranges to the right of insert', function () {
    const comment = new Comment('c1', [new Range(5, 10)])
    const resComment = comment.applyInsert(3, 5, false)
    expect(resComment.ranges).to.eql([new Range(10, 10)])
  })

  describe('applyInsert', function () {
    it('should insert 1 char before the range', function () {
      const comment = new Comment('c1', [new Range(5, 10)])
      expect(comment.applyInsert(4, 1).ranges).to.eql([new Range(6, 10)])
    })

    it('should insert 1 char at the edge, without expandCommand', function () {
      const comment = new Comment('c1', [new Range(5, 10)])
      expect(comment.applyInsert(5, 1).ranges).to.eql([new Range(6, 10)])
    })

    it('should insert 1 char at the edge, with expandCommand', function () {
      const comment = new Comment('c1', [new Range(5, 10)])
      expect(comment.applyInsert(5, 1, true).ranges).to.eql([new Range(5, 11)])
    })

    it('should expand the range after insert inside it', function () {
      const comment = new Comment('c1', [new Range(5, 10)])
      expect(comment.applyInsert(6, 1, true).ranges).to.eql([new Range(5, 11)])
    })
  })

  it('should split the range if inside another and expandComment is false', function () {
    const comment = new Comment('c1', [new Range(5, 10)])
    const commentRes = comment.applyInsert(6, 10, false)
    expect(commentRes.ranges).to.eql([new Range(5, 1), new Range(16, 9)])
  })

  it('should insert the range if expandComment is false', function () {
    const comment = new Comment('c1', [new Range(5, 10)])
    const commentRes = comment.applyInsert(14, 10, false)
    expect(commentRes.ranges).to.eql([new Range(5, 9), new Range(24, 1)])
  })

  it('should move the range if insert is at range start and expandComment is false', function () {
    const comment = new Comment('c1', [new Range(5, 10)])
    const commentRes = comment.applyInsert(5, 10, false)
    expect(commentRes.ranges).to.eql([new Range(15, 10)])
  })

  it('should ignore the range if insert is at range end and expandComment is false', function () {
    const comment = new Comment('c1', [new Range(5, 10)])
    const commentRes = comment.applyInsert(15, 10, false)
    expect(commentRes.ranges).to.eql([new Range(5, 10)])
  })

  it('should expand the range after inserting on the edge of it if expandComment is true', function () {
    const comment = new Comment('c1', [new Range(5, 10)])
    const commentRes = comment.applyInsert(15, 10, true)
    expect(commentRes.ranges).to.eql([new Range(5, 20)])
  })

  it('should move comment ranges if delete is before it', function () {
    const comment = new Comment('c1', [new Range(5, 10)])
    const commentRes = comment.applyDelete(new Range(3, 5))
    expect(commentRes.ranges).to.eql([new Range(3, 7)])
  })

  it('should merge ranges after delete', function () {
    const comment = new Comment('c1', [new Range(5, 10), new Range(20, 10)])
    const commentRes = comment.applyDelete(new Range(7, 18))
    expect(commentRes.ranges).to.eql([new Range(5, 7)])
  })

  // The constructor normalises the range list: merging, sorting, joining.
  it('should merge overlapping ranges', function () {
    const comment = new Comment('c1', [
      new Range(5, 10),
      new Range(15, 20),
      new Range(50, 10),
    ])
    expect(comment.ranges).to.eql([new Range(5, 30), new Range(50, 10)])
  })

  it('should merge unsorted ranges', function () {
    const comment = new Comment('c1', [
      new Range(15, 20),
      new Range(50, 10),
      new Range(5, 10),
    ])
    expect(comment.ranges).to.eql([new Range(5, 30), new Range(50, 10)])
  })

  it('should throw error when ranges overlap', function () {
    expect(
      () =>
        new Comment('c1', [
          new Range(5, 10),
          new Range(10, 5),
          new Range(50, 10),
        ])
    ).to.throw()
  })

  it('should join touching ranges', function () {
    const comment = new Comment('c1', [
      new Range(5, 10),
      new Range(15, 5),
      new Range(50, 10),
    ])
    expect(comment.ranges).to.eql([new Range(5, 15), new Range(50, 10)])
  })
})
|
||||
430
libraries/overleaf-editor-core/test/comments_list.test.js
Normal file
430
libraries/overleaf-editor-core/test/comments_list.test.js
Normal file
@@ -0,0 +1,430 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const { expect } = require('chai')
|
||||
const CommentList = require('../lib/file_data/comment_list')
|
||||
const Comment = require('../lib/comment')
|
||||
const Range = require('../lib/range')
|
||||
|
||||
describe('commentList', function () {
|
||||
it('checks if toRaw() returns a correct comment list', function () {
|
||||
const commentList = new CommentList([
|
||||
new Comment('comm1', [new Range(5, 10)]),
|
||||
new Comment('comm2', [new Range(20, 5)]),
|
||||
new Comment('comm3', [new Range(30, 15)]),
|
||||
])
|
||||
|
||||
expect(commentList.toRaw()).to.eql([
|
||||
{ id: 'comm1', ranges: [{ pos: 5, length: 10 }] },
|
||||
{ id: 'comm2', ranges: [{ pos: 20, length: 5 }] },
|
||||
{
|
||||
id: 'comm3',
|
||||
ranges: [{ pos: 30, length: 15 }],
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it('should get a comment by id', function () {
|
||||
const commentList = new CommentList([
|
||||
new Comment('comm1', [new Range(5, 10)]),
|
||||
new Comment('comm3', [new Range(30, 15)]),
|
||||
new Comment('comm2', [new Range(20, 5)]),
|
||||
])
|
||||
|
||||
const comment = commentList.getComment('comm2')
|
||||
expect(comment?.toRaw()).to.eql({
|
||||
id: 'comm2',
|
||||
ranges: [
|
||||
{
|
||||
pos: 20,
|
||||
length: 5,
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it('should add new comment to the list', function () {
|
||||
const commentList = new CommentList([
|
||||
new Comment('comm1', [new Range(5, 10)]),
|
||||
new Comment('comm2', [new Range(20, 5)]),
|
||||
new Comment('comm3', [new Range(30, 15)]),
|
||||
])
|
||||
|
||||
commentList.add(new Comment('comm4', [new Range(40, 10)]))
|
||||
expect(commentList.toRaw()).to.eql([
|
||||
{ id: 'comm1', ranges: [{ pos: 5, length: 10 }] },
|
||||
{ id: 'comm2', ranges: [{ pos: 20, length: 5 }] },
|
||||
{
|
||||
id: 'comm3',
|
||||
ranges: [{ pos: 30, length: 15 }],
|
||||
},
|
||||
{
|
||||
id: 'comm4',
|
||||
ranges: [{ pos: 40, length: 10 }],
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it('should overwrite existing comment if new one is added', function () {
|
||||
const commentList = new CommentList([
|
||||
new Comment('comm1', [new Range(5, 10)], false),
|
||||
new Comment('comm2', [new Range(20, 5)], true),
|
||||
new Comment('comm3', [new Range(30, 15)]),
|
||||
])
|
||||
|
||||
commentList.add(new Comment('comm1', [new Range(5, 10)], true))
|
||||
commentList.add(new Comment('comm2', [new Range(40, 10)], true))
|
||||
|
||||
expect(commentList.toRaw()).to.eql([
|
||||
{ id: 'comm1', ranges: [{ pos: 5, length: 10 }], resolved: true },
|
||||
{
|
||||
id: 'comm2',
|
||||
ranges: [{ pos: 40, length: 10 }],
|
||||
resolved: true,
|
||||
},
|
||||
{
|
||||
id: 'comm3',
|
||||
ranges: [{ pos: 30, length: 15 }],
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it('should delete a comment from the list', function () {
|
||||
const commentList = new CommentList([
|
||||
new Comment('comm1', [new Range(5, 10)]),
|
||||
new Comment('comm2', [new Range(20, 5)]),
|
||||
new Comment('comm3', [new Range(30, 15)]),
|
||||
])
|
||||
|
||||
commentList.delete('comm3')
|
||||
expect(commentList.toRaw()).to.eql([
|
||||
{ id: 'comm1', ranges: [{ pos: 5, length: 10 }] },
|
||||
{ id: 'comm2', ranges: [{ pos: 20, length: 5 }] },
|
||||
])
|
||||
})
|
||||
|
||||
it('should not throw an error if comment id does not exist', function () {
|
||||
const commentList = new CommentList([
|
||||
new Comment('comm1', [new Range(5, 10)]),
|
||||
new Comment('comm2', [new Range(20, 5)]),
|
||||
new Comment('comm3', [new Range(30, 15)]),
|
||||
])
|
||||
|
||||
commentList.delete('comm5')
|
||||
|
||||
expect(commentList.toRaw()).to.eql([
|
||||
{ id: 'comm1', ranges: [{ pos: 5, length: 10 }] },
|
||||
{ id: 'comm2', ranges: [{ pos: 20, length: 5 }] },
|
||||
{
|
||||
id: 'comm3',
|
||||
ranges: [{ pos: 30, length: 15 }],
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it('should be iterable', function () {
|
||||
const comment = new Comment('comm1', [new Range(5, 10)])
|
||||
const commentList = new CommentList([comment])
|
||||
expect(Array.from(commentList)).to.deep.equal([comment])
|
||||
})
|
||||
|
||||
describe('inserting a comment between ranges', function () {
|
||||
it('should expand comment on the left', function () {
|
||||
const commentList = CommentList.fromRaw([
|
||||
{
|
||||
id: 'comm1',
|
||||
ranges: [{ pos: 5, length: 10 }],
|
||||
},
|
||||
{
|
||||
id: 'comm2',
|
||||
ranges: [{ pos: 15, length: 10 }],
|
||||
},
|
||||
])
|
||||
|
||||
commentList.applyInsert(new Range(15, 5), { commentIds: ['comm1'] })
|
||||
expect(commentList.toRaw()).to.eql([
|
||||
{ id: 'comm1', ranges: [{ pos: 5, length: 15 }] },
|
||||
{ id: 'comm2', ranges: [{ pos: 20, length: 10 }] },
|
||||
])
|
||||
})
|
||||
|
||||
it('should expand comment on the right', function () {
|
||||
const commentList = CommentList.fromRaw([
|
||||
{
|
||||
id: 'comm1',
|
||||
ranges: [{ pos: 5, length: 10 }],
|
||||
},
|
||||
{
|
||||
id: 'comm2',
|
||||
ranges: [{ pos: 15, length: 10 }],
|
||||
},
|
||||
])
|
||||
|
||||
commentList.applyInsert(new Range(15, 5), { commentIds: ['comm2'] })
|
||||
expect(commentList.toRaw()).to.eql([
|
||||
{ id: 'comm1', ranges: [{ pos: 5, length: 10 }] },
|
||||
{ id: 'comm2', ranges: [{ pos: 15, length: 15 }] },
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
it('should delete a text overlapping two comments', function () {
|
||||
const commentList = CommentList.fromRaw([
|
||||
{
|
||||
id: 'comm1',
|
||||
ranges: [{ pos: 5, length: 10 }], // 5-14
|
||||
},
|
||||
{
|
||||
id: 'comm2',
|
||||
ranges: [{ pos: 15, length: 10 }], // 15-24
|
||||
},
|
||||
])
|
||||
|
||||
commentList.applyDelete(new Range(10, 10)) // 10-19
|
||||
expect(commentList.toRaw()).to.eql([
|
||||
{ id: 'comm1', ranges: [{ pos: 5, length: 5 }] },
|
||||
{ id: 'comm2', ranges: [{ pos: 10, length: 5 }] },
|
||||
])
|
||||
})
|
||||
|
||||
// How comment ranges are remapped when text is inserted into or deleted
// from the document around/inside them.
describe('move ranges after insert/delete operations', function () {
  it('expands comments inside inserted text', function () {
    const commentList = CommentList.fromRaw([
      {
        id: 'comm1',
        ranges: [{ pos: 5, length: 10 }],
      },
      {
        id: 'comm2',
        ranges: [{ pos: 20, length: 5 }],
      },
      {
        id: 'comm3',
        ranges: [{ pos: 30, length: 15 }],
      },
    ])

    // Insert 5 chars at pos 7 (inside comm1) attributed to comm1:
    // comm1 absorbs the insertion; later comments shift right by 5.
    commentList.applyInsert(new Range(7, 5), { commentIds: ['comm1'] })
    expect(commentList.toRaw()).to.eql([
      { id: 'comm1', ranges: [{ pos: 5, length: 15 }] },
      { id: 'comm2', ranges: [{ pos: 25, length: 5 }] },
      { id: 'comm3', ranges: [{ pos: 35, length: 15 }] },
    ])
  })

  it('should insert an overlapping comment without overlapped comment id', function () {
    const commentList = CommentList.fromRaw([
      {
        id: 'comm1',
        ranges: [{ pos: 5, length: 10 }],
      },
      {
        id: 'comm2',
        ranges: [{ pos: 20, length: 5 }],
      },
      {
        id: 'comm3',
        ranges: [{ pos: 30, length: 15 }],
      },
    ])

    // Insert at pos 7 attributed only to comm2: the inserted text lands
    // inside comm1's range but is not part of comm1, so comm1 splits
    // around it and comm2 gains a detached range at pos 7.
    commentList.applyInsert(new Range(7, 5), { commentIds: ['comm2'] })
    expect(commentList.toRaw()).to.eql([
      {
        id: 'comm1',
        ranges: [
          { pos: 5, length: 2 },
          { pos: 12, length: 8 },
        ],
      },
      {
        id: 'comm2',
        ranges: [
          { pos: 7, length: 5 },
          { pos: 25, length: 5 },
        ],
      },
      { id: 'comm3', ranges: [{ pos: 35, length: 15 }] },
    ])
  })

  it('should insert an overlapping comment with overlapped comment id', function () {
    const commentList = CommentList.fromRaw([
      {
        id: 'comm1',
        ranges: [{ pos: 5, length: 15 }],
      },
      {
        id: 'comm2',
        ranges: [{ pos: 20, length: 5 }],
      },
      {
        id: 'comm3',
        ranges: [{ pos: 30, length: 15 }],
      },
    ])

    // Insert attributed to both comm1 (which covers pos 7) and comm2:
    // comm1 simply grows by 5; comm2 gains a new detached range at pos 7.
    commentList.applyInsert(new Range(7, 5), {
      commentIds: ['comm1', 'comm2'],
    })
    expect(commentList.toRaw()).to.eql([
      {
        id: 'comm1',
        ranges: [{ pos: 5, length: 20 }],
      },
      {
        id: 'comm2',
        ranges: [
          { pos: 7, length: 5 },
          { pos: 25, length: 5 },
        ],
      },
      { id: 'comm3', ranges: [{ pos: 35, length: 15 }] },
    ])
  })

  it('moves comments after inserted text', function () {
    const commentList = CommentList.fromRaw([
      {
        id: 'comm1',
        ranges: [{ pos: 5, length: 10 }],
      },
      {
        id: 'comm2',
        ranges: [{ pos: 20, length: 5 }],
      },
      {
        id: 'comm3',
        ranges: [{ pos: 30, length: 15 }],
      },
    ])

    // Insert at pos 16, between comm1 and comm2: only comments at or
    // after the insertion point shift right.
    commentList.applyInsert(new Range(16, 5))
    expect(commentList.toRaw()).to.eql([
      { id: 'comm1', ranges: [{ pos: 5, length: 10 }] },
      { id: 'comm2', ranges: [{ pos: 25, length: 5 }] },
      { id: 'comm3', ranges: [{ pos: 35, length: 15 }] },
    ])
  })

  it('does not affect comments outside of inserted text', function () {
    const commentList = CommentList.fromRaw([
      {
        id: 'comm1',
        ranges: [{ pos: 5, length: 10 }],
      },
      {
        id: 'comm2',
        ranges: [{ pos: 20, length: 5 }],
      },
      {
        id: 'comm3',
        ranges: [{ pos: 30, length: 15 }],
      },
    ])

    // Insert past every comment (pos 50): nothing should move.
    commentList.applyInsert(new Range(50, 5))
    expect(commentList.toRaw()).to.eql([
      { id: 'comm1', ranges: [{ pos: 5, length: 10 }] },
      { id: 'comm2', ranges: [{ pos: 20, length: 5 }] },
      { id: 'comm3', ranges: [{ pos: 30, length: 15 }] },
    ])
  })

  it('should move comments if delete happened before it', function () {
    const commentList = CommentList.fromRaw([
      {
        id: 'comm1',
        ranges: [{ pos: 5, length: 10 }],
      },
      {
        id: 'comm2',
        ranges: [{ pos: 20, length: 5 }],
      },
      {
        id: 'comm3',
        ranges: [{ pos: 30, length: 15 }],
      },
    ])

    // Delete 4 chars entirely before all comments: everything shifts
    // left by 4, lengths unchanged.
    commentList.applyDelete(new Range(0, 4))
    expect(commentList.toRaw()).to.eql([
      { id: 'comm1', ranges: [{ pos: 1, length: 10 }] },
      { id: 'comm2', ranges: [{ pos: 16, length: 5 }] },
      { id: 'comm3', ranges: [{ pos: 26, length: 15 }] },
    ])
  })

  describe('should remove part of a comment on delete overlapping', function () {
    it('should delete intersection from the left', function () {
      const commentList = CommentList.fromRaw([
        {
          id: 'comm1',
          ranges: [{ pos: 5, length: 10 }],
        },
      ])

      // Delete 0-5: one char of comm1 (at pos 5) is removed and the
      // remainder slides to the deletion start.
      commentList.applyDelete(new Range(0, 6))
      expect(commentList.toRaw()).to.eql([
        { id: 'comm1', ranges: [{ pos: 0, length: 9 }] },
      ])
    })

    it('should delete intersection from the right', function () {
      const commentList = CommentList.fromRaw([
        {
          id: 'comm1',
          ranges: [{ pos: 5, length: 10 }],
        },
      ])
      // Delete 7-16: the tail of comm1 (7-14) is removed.
      commentList.applyDelete(new Range(7, 10))
      expect(commentList.toRaw()).to.eql([
        { id: 'comm1', ranges: [{ pos: 5, length: 2 }] },
      ])
    })

    it('should delete intersection in the middle', function () {
      const commentList = CommentList.fromRaw([
        {
          id: 'comm1',
          ranges: [{ pos: 5, length: 10 }],
        },
      ])
      // Delete 6-7, strictly inside comm1: the range just shrinks by 2.
      commentList.applyDelete(new Range(6, 2))
      expect(commentList.toRaw()).to.eql([
        { id: 'comm1', ranges: [{ pos: 5, length: 8 }] },
      ])
    })
  })

  it('should leave comment without ranges', function () {
    const commentList = CommentList.fromRaw([
      {
        id: 'comm1',
        ranges: [{ pos: 5, length: 10 }],
      },
      {
        id: 'comm2',
        ranges: [{ pos: 20, length: 5 }],
      },
      {
        id: 'comm3',
        ranges: [{ pos: 30, length: 15 }],
      },
    ])

    // Delete 19-28 swallows comm2 entirely: the comment survives but its
    // ranges collapse to []; comm3 shifts left by 10.
    commentList.applyDelete(new Range(19, 10))
    expect(commentList.toRaw()).to.eql([
      {
        id: 'comm1',
        ranges: [{ pos: 5, length: 10 }],
      },
      { id: 'comm2', ranges: [] },
      {
        id: 'comm3',
        ranges: [{ pos: 20, length: 15 }],
      },
    ])
  })
})
|
||||
})
|
||||
@@ -0,0 +1,46 @@
|
||||
// @ts-check
|
||||
const { expect } = require('chai')
|
||||
const { AddCommentOperation, DeleteCommentOperation } = require('..')
|
||||
const Comment = require('../lib/comment')
|
||||
const StringFileData = require('../lib/file_data/string_file_data')
|
||||
const Range = require('../lib/range')
|
||||
|
||||
// Round-trip, apply, and inversion behavior of DeleteCommentOperation.
describe('DeleteCommentOperation', function () {
  // Fixed test description grammar: "an DeleteCommentOperation" -> "a".
  it('constructs a DeleteCommentOperation fromJSON', function () {
    const op = DeleteCommentOperation.fromJSON({
      deleteComment: '123',
    })
    expect(op).to.be.instanceOf(DeleteCommentOperation)
  })

  it('should convert to JSON', function () {
    const op = new DeleteCommentOperation('123')
    expect(op.toJSON()).to.eql({
      deleteComment: '123',
    })
  })

  it('should apply operation', function () {
    const fileData = new StringFileData('abc')
    const op = new DeleteCommentOperation('123')
    fileData.comments.add(new Comment('123', [new Range(0, 1)]))
    op.apply(fileData)
    // The comment is gone from the file's comment list.
    expect(fileData.getComments().toRaw()).to.eql([])
  })

  it('should invert operation', function () {
    const fileData = new StringFileData('abc')
    const op = new DeleteCommentOperation('123')
    fileData.comments.add(new Comment('123', [new Range(0, 1)]))
    // Inverting a delete yields an add that restores id and ranges.
    const invertedOp = /** @type {AddCommentOperation} */ (op.invert(fileData))
    expect(invertedOp).to.be.instanceOf(AddCommentOperation)
    expect(invertedOp.commentId).to.equal('123')
    expect(invertedOp.ranges).to.eql([new Range(0, 1)])
  })

  it('should not throw if comment not found', function () {
    const fileData = new StringFileData('abc')
    const op = new DeleteCommentOperation('123')
    // Inverting against a file that never had the comment is a no-op,
    // not an error.
    expect(() => op.invert(fileData)).to.not.throw()
  })
})
|
||||
@@ -0,0 +1,81 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const { expect } = require('chai')
|
||||
|
||||
const ot = require('..')
|
||||
const EditOperationBuilder = require('../lib/operation/edit_operation_builder')
|
||||
const File = ot.File
|
||||
const Operation = ot.Operation
|
||||
|
||||
// Composition rules for EditFileOperation: which pairs of operations can be
// merged into one, and that compose() itself is non-destructive.
describe('EditFileOperation', function () {
  // Build an EditFileOperation on `pathname` wrapping a text operation
  // given in raw JSON form.
  function edit(pathname, textOperationJsonObject) {
    return Operation.editFile(
      pathname,
      EditOperationBuilder.fromJSON({ textOperation: textOperationJsonObject })
    )
  }

  describe('canBeComposedWith', function () {
    it('on the same file', function () {
      const editFileOperation1 = edit('foo.tex', ['x'])
      const editFileOperation2 = edit('foo.tex', [1, 'y'])
      expect(editFileOperation1.canBeComposedWith(editFileOperation2)).to.be
        .true
    })

    it('on different files', function () {
      const editFileOperation1 = edit('foo.tex', ['x'])
      const editFileOperation2 = edit('bar.tex', ['y'])
      expect(editFileOperation1.canBeComposedWith(editFileOperation2)).to.be
        .false
    })

    // Fixed typo in test description: 'opperation' -> 'operation'.
    it('with a different type of operation', function () {
      const editFileOperation1 = edit('foo.tex', ['x'])
      const editFileOperation2 = Operation.addFile(
        'bar.tex',
        File.fromString('')
      )
      expect(editFileOperation1.canBeComposedWith(editFileOperation2)).to.be
        .false
    })

    it('with incompatible lengths', function () {
      // First op produces a 1-char document; second expects at least 2.
      const editFileOperation1 = edit('foo.tex', ['x'])
      const editFileOperation2 = edit('foo.tex', [2, 'y'])
      expect(editFileOperation1.canBeComposedWith(editFileOperation2)).to.be
        .false
    })
  })

  describe('canBeComposedWithForUndo', function () {
    it('can', function () {
      const editFileOperation1 = edit('foo.tex', ['x'])
      const editFileOperation2 = edit('foo.tex', [1, 'y'])
      expect(editFileOperation1.canBeComposedWithForUndo(editFileOperation2)).to
        .be.true
    })

    it('cannot', function () {
      const editFileOperation1 = edit('foo.tex', ['x'])
      const editFileOperation2 = edit('foo.tex', ['y', 1, 'z'])
      expect(editFileOperation1.canBeComposedWithForUndo(editFileOperation2)).to
        .be.false
    })
  })

  describe('compose', function () {
    it('composes text operations', function () {
      const editFileOperation1 = edit('foo.tex', ['x'])
      const editFileOperation2 = edit('foo.tex', [1, 'y'])
      const composedFileOperation =
        editFileOperation1.compose(editFileOperation2)
      const expectedComposedFileOperation = edit('foo.tex', ['xy'])
      expect(composedFileOperation).to.deep.equal(expectedComposedFileOperation)

      // check that the original operation wasn't modified
      expect(editFileOperation1).to.deep.equal(edit('foo.tex', ['x']))
    })
  })
})
|
||||
315
libraries/overleaf-editor-core/test/edit_operation.test.js
Normal file
315
libraries/overleaf-editor-core/test/edit_operation.test.js
Normal file
@@ -0,0 +1,315 @@
|
||||
const { expect } = require('chai')
|
||||
const EditOperationBuilder = require('../lib/operation/edit_operation_builder')
|
||||
const TextOperation = require('../lib/operation/text_operation')
|
||||
const EditOperationTransformer = require('../lib/operation/edit_operation_transformer')
|
||||
const EditOperation = require('../lib/operation/edit_operation')
|
||||
const randomTextOperation = require('./support/random_text_operation')
|
||||
const random = require('./support/random')
|
||||
const AddCommentOperation = require('../lib/operation/add_comment_operation')
|
||||
const DeleteCommentOperation = require('../lib/operation/delete_comment_operation')
|
||||
const SetCommentStateOperation = require('../lib/operation/set_comment_state_operation')
|
||||
const Range = require('../lib/range')
|
||||
const EditNoOperation = require('../lib/operation/edit_no_operation')
|
||||
|
||||
describe('EditOperation', function () {
  it('Cannot be instantiated', function () {
    // EditOperation is an abstract base class; its constructor must refuse
    // direct construction.
    const instantiate = () => new EditOperation()
    expect(instantiate).to.throw('Cannot instantiate abstract class')
  })
})
|
||||
|
||||
// Operational-transform pairings between the concrete EditOperation
// subclasses: text edits, comment add/delete, and comment state changes.
describe('EditOperationTransformer', function () {
  it('Transforms two TextOperations', function () {
    const a = new TextOperation().insert('foo')
    const b = new TextOperation().insert('bar')
    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    expect(aPrime).to.be.an.instanceof(TextOperation)
    expect(bPrime).to.be.an.instanceof(TextOperation)
  })

  it('Transforms TextOperation and EditNoOperation', function () {
    const a = new TextOperation().insert('foo')
    const b = new EditNoOperation()
    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    expect(aPrime).to.be.an.instanceof(TextOperation)
    expect(bPrime).to.be.an.instanceof(EditNoOperation)
  })

  it('Transforms two AddCommentOperations with same commentId', function () {
    const a = new AddCommentOperation('comm1', [new Range(0, 1)])
    const b = new AddCommentOperation('comm1', [new Range(2, 3)])
    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    // Duplicate adds for the same id: the first side becomes a no-op.
    expect(aPrime).to.be.an.instanceof(EditNoOperation)
    expect(bPrime).to.be.an.instanceof(AddCommentOperation)
  })

  it('Transforms two AddCommentOperations with different commentId', function () {
    const a = new AddCommentOperation('comm1', [new Range(0, 1)])
    const b = new AddCommentOperation('comm2', [new Range(2, 3)])
    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    // Independent comments: both operations pass through unchanged.
    expect(aPrime).to.be.an.instanceof(AddCommentOperation)
    expect(aPrime.toJSON()).to.eql(a.toJSON())
    expect(bPrime).to.be.an.instanceof(AddCommentOperation)
    expect(bPrime.toJSON()).to.eql(b.toJSON())
  })

  it('Transforms two DeleteCommentOperations with same commentId', function () {
    const a = new DeleteCommentOperation('comm1')
    const b = new DeleteCommentOperation('comm1')
    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    // Each side's delete already happened on the other side: both no-ops.
    expect(aPrime).to.be.an.instanceof(EditNoOperation)
    expect(bPrime).to.be.an.instanceof(EditNoOperation)
  })

  it('Transforms two DeleteCommentOperations with different commentId', function () {
    const a = new DeleteCommentOperation('comm1')
    const b = new DeleteCommentOperation('comm2')
    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    expect(aPrime).to.be.an.instanceof(DeleteCommentOperation)
    expect(aPrime.toJSON()).to.eql(a.toJSON())
    expect(bPrime).to.be.an.instanceof(DeleteCommentOperation)
    expect(bPrime.toJSON()).to.eql(b.toJSON())
  })

  it('Transforms AddCommentOperation and DeleteCommentOperation with same commentId', function () {
    const a = new AddCommentOperation('comm1', [new Range(0, 1)])
    const b = new DeleteCommentOperation('comm1')
    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    // The delete wins over the concurrent add.
    expect(aPrime).to.be.an.instanceof(EditNoOperation)
    expect(bPrime).to.be.an.instanceof(DeleteCommentOperation)
    expect(bPrime.toJSON()).to.eql(b.toJSON())
  })

  it('Transforms DeleteCommentOperation and AddCommentOperation with same commentId', function () {
    const a = new DeleteCommentOperation('comm1')
    const b = new AddCommentOperation('comm1', [new Range(0, 1)])
    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    // Symmetric case: delete still wins regardless of argument order.
    expect(aPrime).to.be.an.instanceof(DeleteCommentOperation)
    expect(aPrime.toJSON()).to.eql(a.toJSON())
    expect(bPrime).to.be.an.instanceof(EditNoOperation)
  })

  it('Transforms AddCommentOperation and TextOperation', function () {
    // abc hello[ world] xyz - insert(9, " world")
    // abc hello |xyz| - addComment(10, 3, "comment_id")

    const a = new TextOperation().retain(9).insert(' world')
    const b = new AddCommentOperation('comm1', [new Range(10, 3)])

    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    expect(aPrime).to.be.an.instanceof(TextOperation)
    expect(aPrime.toJSON()).to.eql(a.toJSON())
    // Comment range shifts right by the 6 inserted characters.
    expect(bPrime).to.be.an.instanceof(AddCommentOperation)
    expect(bPrime.toJSON()).to.eql({
      commentId: 'comm1',
      ranges: [{ pos: 16, length: 3 }],
    })
  })

  it('Transforms TextOperation and AddCommentOperation', function () {
    // abc hello |xyz| - addComment(10, 3, "comment_id")
    // abc hello[ world] xyz - insert(9, " world")

    const a = new AddCommentOperation('comm1', [new Range(10, 3)])
    const b = new TextOperation().retain(9).insert(' world')

    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    expect(bPrime).to.be.an.instanceof(TextOperation)
    expect(bPrime.toJSON()).to.eql(b.toJSON())
    expect(aPrime).to.be.an.instanceof(AddCommentOperation)
    expect(aPrime.toJSON()).to.eql({
      commentId: 'comm1',
      ranges: [{ pos: 16, length: 3 }],
    })
  })

  it('Transforms AddCommentOperation and TextOperation that makes a detached comment', function () {
    // [abc hello xyz] - delete(0, 13)
    // abc |hello| xyz - addComment(5, 5, "comment_id")

    const a = new TextOperation().remove(13)
    const b = new AddCommentOperation('comm1', [new Range(5, 5)])

    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    expect(aPrime).to.be.an.instanceof(TextOperation)
    expect(aPrime.toJSON()).to.eql(a.toJSON())
    // The whole commented span was deleted: the comment survives with no
    // ranges (detached).
    expect(bPrime).to.be.an.instanceof(AddCommentOperation)
    expect(bPrime.toJSON()).to.eql({
      commentId: 'comm1',
      ranges: [],
    })
  })

  it('Transforms AddCommentOperation and deletion TextOperation', function () {
    // abc hell{o xy}z - retain(8).delete(4)
    // abc hello |xyz| - addComment(10, 3, "comment_id")
    // abc hell|z|

    const a = new TextOperation().retain(8).remove(4)
    const b = new AddCommentOperation('comm1', [new Range(10, 3)])

    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    expect(aPrime).to.be.an.instanceof(TextOperation)
    expect(aPrime.toJSON()).to.eql(a.toJSON())
    // Only 'z' of the commented 'xyz' survives, now at pos 8.
    expect(bPrime).to.be.an.instanceof(AddCommentOperation)
    expect(bPrime.toJSON()).to.eql({
      commentId: 'comm1',
      ranges: [{ pos: 8, length: 1 }],
    })
  })

  it('Transforms AddCommentOperation and complex TextOperation', function () {
    // [foo ]abc hell{o xy}z - insert(0, "foo ").retain(8).delete(4)
    // abc hello |xyz| - addComment(10, 3, "comment_id")
    // foo abc hell|z|

    const a = new TextOperation().insert('foo ').retain(8).remove(4)
    const b = new AddCommentOperation('comm1', [new Range(10, 3)])

    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    expect(aPrime).to.be.an.instanceof(TextOperation)
    expect(aPrime.toJSON()).to.eql(a.toJSON())
    // Net effect of insert + delete: the surviving 'z' lands at pos 12.
    expect(bPrime).to.be.an.instanceof(AddCommentOperation)
    expect(bPrime.toJSON()).to.eql({
      commentId: 'comm1',
      ranges: [{ pos: 12, length: 1 }],
    })
  })

  it('Transforms DeleteCommentOperation and TextOperation', function () {
    const a = new TextOperation().retain(9).insert(' world')
    const b = new DeleteCommentOperation('comm1')

    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    // Text edits never affect a comment deletion, and vice versa.
    expect(aPrime).to.be.an.instanceof(TextOperation)
    expect(aPrime.toJSON()).to.eql(a.toJSON())
    expect(bPrime).to.be.an.instanceof(DeleteCommentOperation)
    expect(bPrime.toJSON()).to.eql(b.toJSON())
  })

  it('Transforms SetCommentStateOperation and TextOperation', function () {
    const a = new TextOperation().retain(9).insert(' world')
    const b = new SetCommentStateOperation('comm1', true)

    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    // A state change carries no ranges, so both pass through unchanged.
    expect(aPrime).to.be.an.instanceof(TextOperation)
    expect(aPrime.toJSON()).to.eql(a.toJSON())
    expect(bPrime).to.be.an.instanceof(SetCommentStateOperation)
    expect(bPrime.toJSON()).to.eql(b.toJSON())
  })

  it('Transforms SetCommentStateOperation and AddCommentOperation', function () {
    const a = new AddCommentOperation('comm1', [new Range(0, 1)])
    const b = new SetCommentStateOperation('comm1', true)

    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    // The add absorbs the concurrent state change (resolved: true).
    expect(aPrime).to.be.an.instanceof(AddCommentOperation)
    expect(aPrime.toJSON()).to.deep.eql({
      commentId: 'comm1',
      ranges: [{ pos: 0, length: 1 }],
      resolved: true,
    })
    expect(bPrime).to.be.an.instanceof(SetCommentStateOperation)
    expect(bPrime.toJSON()).to.deep.eql(b.toJSON())
  })

  it('Transforms SetCommentStateOperation and DeleteCommentOperation', function () {
    const a = new DeleteCommentOperation('comm1')
    const b = new SetCommentStateOperation('comm1', true)

    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    // Changing state of a deleted comment is meaningless: it becomes a no-op.
    expect(aPrime).to.be.an.instanceof(DeleteCommentOperation)
    expect(aPrime.toJSON()).to.deep.eql(a.toJSON())
    expect(bPrime).to.be.an.instanceof(EditNoOperation)
  })

  it('Transforms SetCommentStateOperation and SetCommentStateOperation', function () {
    const a = new SetCommentStateOperation('comm1', false)
    const b = new SetCommentStateOperation('comm1', true)

    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    // Conflicting state changes on the same comment: the first wins, the
    // second is dropped.
    expect(aPrime.toJSON()).to.deep.eql({
      commentId: 'comm1',
      resolved: false,
    })
    expect(bPrime).to.be.an.instanceof(EditNoOperation)
  })

  it('Transforms two SetCommentStateOperation with different commentId', function () {
    const a = new SetCommentStateOperation('comm1', false)
    const b = new SetCommentStateOperation('comm2', true)

    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
    expect(aPrime).to.be.an.instanceof(SetCommentStateOperation)
    expect(aPrime.toJSON()).to.deep.eql(a.toJSON())
    expect(bPrime).to.be.an.instanceof(SetCommentStateOperation)
    expect(bPrime.toJSON()).to.deep.eql(b.toJSON())
  })
})
|
||||
|
||||
// Deserialization dispatch: each raw JSON shape must map to the matching
// EditOperation subclass, and serializing back must round-trip exactly.
describe('EditOperationBuilder', function () {
  it('Constructs TextOperation from JSON', function () {
    const json = { textOperation: [1, 'foo', 3] }
    const operation = EditOperationBuilder.fromJSON(json)
    expect(operation).to.be.an.instanceof(TextOperation)
    expect(operation.toJSON()).to.deep.equal(json)
  })

  it('Constructs AddCommentOperation from JSON', function () {
    const json = { commentId: 'comm1', ranges: [{ pos: 0, length: 1 }] }
    const operation = EditOperationBuilder.fromJSON(json)
    expect(operation).to.be.an.instanceof(AddCommentOperation)
    expect(operation.toJSON()).to.deep.equal(json)
  })

  it('Constructs DeleteCommentOperation from JSON', function () {
    const json = { deleteComment: 'comm1' }
    const operation = EditOperationBuilder.fromJSON(json)
    expect(operation).to.be.an.instanceof(DeleteCommentOperation)
    expect(operation.toJSON()).to.deep.equal(json)
  })

  it('Constructs SetCommentStateOperation from JSON', function () {
    const json = { commentId: 'comm1', resolved: true }
    const operation = EditOperationBuilder.fromJSON(json)
    expect(operation).to.be.an.instanceof(SetCommentStateOperation)
    expect(operation.toJSON()).to.deep.equal(json)
  })

  it('Constructs EditNoOperation from JSON', function () {
    const json = { noOp: true }
    const operation = EditOperationBuilder.fromJSON(json)
    expect(operation).to.be.an.instanceof(EditNoOperation)
    expect(operation.toJSON()).to.deep.equal(json)
  })

  it('Throws error for unsupported operation', function () {
    const json = {
      unsupportedOperation: {
        op: 'foo',
      },
    }
    const build = () => EditOperationBuilder.fromJSON(json)
    expect(build).to.throw(
      'Unsupported operation in EditOperationBuilder.fromJSON'
    )
  })

  it('Constructs TextOperation from JSON (randomised)', function () {
    // A random operation over a random 50-char string must survive a
    // toJSON/fromJSON round trip.
    const str = random.string(50)
    const original = randomTextOperation(str)
    const rebuilt = EditOperationBuilder.fromJSON(original.toJSON())
    expect(rebuilt).to.be.an.instanceof(TextOperation)
    expect(rebuilt.equals(original)).to.be.true
  })
})
|
||||
96
libraries/overleaf-editor-core/test/file.test.js
Normal file
96
libraries/overleaf-editor-core/test/file.test.js
Normal file
@@ -0,0 +1,96 @@
|
||||
'use strict'
|
||||
|
||||
const { expect } = require('chai')
|
||||
const FakeBlobStore = require('./support/fake_blob_store')
|
||||
const ot = require('..')
|
||||
const File = ot.File
|
||||
|
||||
describe('File', function () {
  it('can have attached metadata', function () {
    // No metadata supplied: defaults to an empty object.
    let testFile = File.fromString('foo')
    expect(testFile.getMetadata()).to.eql({})

    // Metadata supplied at construction time.
    testFile = File.fromString('foo', { main: true })
    expect(testFile.getMetadata()).to.eql({ main: true })

    // Metadata replaced after construction.
    testFile.setMetadata({ main: false })
    expect(testFile.getMetadata()).to.eql({ main: false })
  })

  describe('toRaw', function () {
    it('returns non-empty metadata', function () {
      const metadata = { main: true }
      const testFile = File.fromHash(File.EMPTY_FILE_HASH, undefined, metadata)
      expect(testFile.toRaw()).to.eql({
        hash: File.EMPTY_FILE_HASH,
        metadata,
      })

      // Once the metadata object becomes empty it is omitted from raw form.
      delete testFile.getMetadata().main
      expect(testFile.toRaw()).to.eql({ hash: File.EMPTY_FILE_HASH })
    })

    it('returns a deep clone of metadata', function () {
      const metadata = { externalFile: { id: 123 } }
      const testFile = File.fromHash(File.EMPTY_FILE_HASH, undefined, metadata)
      const raw = testFile.toRaw()
      // Same content, but a distinct object graph.
      expect(raw.metadata).not.to.equal(testFile.getMetadata())
      expect(raw.metadata).to.deep.equal(testFile.getMetadata())
    })
  })

  describe('store', function () {
    it('does not return empty metadata', async function () {
      const blobStore = new FakeBlobStore()
      const testFile = File.fromHash(File.EMPTY_FILE_HASH)
      const raw = await testFile.store(blobStore)
      expect(raw).to.eql({ hash: File.EMPTY_FILE_HASH })
    })

    it('returns non-empty metadata', async function () {
      const metadata = { main: true }
      const blobStore = new FakeBlobStore()
      const testFile = File.fromHash(File.EMPTY_FILE_HASH, undefined, metadata)
      const raw = await testFile.store(blobStore)
      expect(raw).to.eql({
        hash: File.EMPTY_FILE_HASH,
        metadata,
      })
    })

    it('returns a deep clone of metadata', async function () {
      const metadata = { externalFile: { id: 123 } }
      const blobStore = new FakeBlobStore()
      const testFile = File.fromHash(File.EMPTY_FILE_HASH, undefined, metadata)
      const raw = await testFile.store(blobStore)
      // Mutating the stored copy must not touch the file's own metadata.
      raw.metadata.externalFile.id = 456
      expect(testFile.getMetadata().externalFile.id).to.equal(123)
    })
  })

  describe('with string data', function () {
    it('can be created from a string', function () {
      const testFile = File.fromString('foo')
      expect(testFile.getContent()).to.equal('foo')
    })
  })

  describe('with hollow string data', function () {
    it('can be cloned', function () {
      const hollow = File.createHollow(null, 0)
      expect(hollow.getStringLength()).to.equal(0)
      const copy = hollow.clone()
      expect(copy.getStringLength()).to.equal(0)
    })
  })

  it('getComments() returns an empty comment list', function () {
    const testFile = File.fromString('foo')
    expect(testFile.getComments().toRaw()).to.eql([])
  })
})
|
||||
195
libraries/overleaf-editor-core/test/file_map.test.js
Normal file
195
libraries/overleaf-editor-core/test/file_map.test.js
Normal file
@@ -0,0 +1,195 @@
|
||||
'use strict'
|
||||
|
||||
const { expect } = require('chai')
|
||||
const _ = require('lodash')
|
||||
|
||||
const ot = require('..')
|
||||
const File = ot.File
|
||||
const FileMap = ot.FileMap
|
||||
|
||||
describe('FileMap', function () {
|
||||
// Create a File whose content is its own pathname — handy for asserting
// which file ended up where after a move.
function makeTestFile(pathname) {
  return File.fromString(pathname)
}

// Map each pathname to a test file keyed by that pathname.
function makeTestFiles(pathnames) {
  return Object.fromEntries(
    pathnames.map(pathname => [pathname, makeTestFile(pathname)])
  )
}

// Convenience: build a FileMap directly from a list of pathnames.
function makeFileMap(pathnames) {
  return new FileMap(makeTestFiles(pathnames))
}
|
||||
|
||||
it('allows construction with a single file', function () {
|
||||
makeFileMap(['a'])
|
||||
})
|
||||
|
||||
it('allows folders to differ by case', function () {
|
||||
expect(() => {
|
||||
makeFileMap(['a/b', 'A/c'])
|
||||
}).not.to.throw
|
||||
expect(() => {
|
||||
makeFileMap(['a/b/c', 'A/b/d'])
|
||||
}).not.to.throw
|
||||
expect(() => {
|
||||
makeFileMap(['a/b/c', 'a/B/d'])
|
||||
}).not.to.throw
|
||||
})
|
||||
|
||||
it('does not allow conflicting paths on construct', function () {
|
||||
expect(() => {
|
||||
makeFileMap(['a', 'a/b'])
|
||||
}).to.throw(FileMap.PathnameConflictError)
|
||||
})
|
||||
|
||||
it('detects conflicting paths with characters that sort before /', function () {
|
||||
const fileMap = makeFileMap(['a', 'a!'])
|
||||
expect(fileMap.wouldConflict('a/b')).to.be.truthy
|
||||
})
|
||||
|
||||
it('detects conflicting paths', function () {
|
||||
const fileMap = makeFileMap(['a/b/c'])
|
||||
expect(fileMap.wouldConflict('a/b/c/d')).to.be.truthy
|
||||
expect(fileMap.wouldConflict('a')).to.be.truthy
|
||||
expect(fileMap.wouldConflict('b')).to.be.falsy
|
||||
expect(fileMap.wouldConflict('a/b')).to.be.truthy
|
||||
expect(fileMap.wouldConflict('a/c')).to.be.falsy
|
||||
expect(fileMap.wouldConflict('a/b/c')).to.be.falsy
|
||||
expect(fileMap.wouldConflict('a/b/d')).to.be.falsy
|
||||
expect(fileMap.wouldConflict('d/b/c')).to.be.falsy
|
||||
})
|
||||
|
||||
it('allows paths that differ by case', function () {
|
||||
const fileMap = makeFileMap(['a/b/c'])
|
||||
expect(fileMap.wouldConflict('a/b/C')).to.be.falsy
|
||||
expect(fileMap.wouldConflict('A')).to.be.falsy
|
||||
expect(fileMap.wouldConflict('A/b')).to.be.falsy
|
||||
expect(fileMap.wouldConflict('a/B')).to.be.falsy
|
||||
expect(fileMap.wouldConflict('A/B')).to.be.falsy
|
||||
})
|
||||
|
||||
it('does not add a file with a conflicting path', function () {
|
||||
const fileMap = makeFileMap(['a/b'])
|
||||
const file = makeTestFile('a/b/c')
|
||||
|
||||
expect(() => {
|
||||
fileMap.addFile('a/b/c', file)
|
||||
}).to.throw(FileMap.PathnameConflictError)
|
||||
})
|
||||
|
||||
it('does not move a file to a conflicting path', function () {
|
||||
const fileMap = makeFileMap(['a/b', 'a/c'])
|
||||
|
||||
expect(() => {
|
||||
fileMap.moveFile('a/b', 'a')
|
||||
}).to.throw(FileMap.PathnameConflictError)
|
||||
})
|
||||
|
||||
it('errors when trying to move a non-existent file', function () {
  const fileMap = makeFileMap(['a'])
  const moveMissing = () => fileMap.moveFile('b', 'a')
  expect(moveMissing).to.throw(FileMap.FileNotFoundError)
})

it('moves a file over an empty folder', function () {
  const fileMap = makeFileMap(['a/b'])
  fileMap.moveFile('a/b', 'a')

  // 'a' was only a folder prefix, so the move succeeds and replaces it.
  const movedFile = fileMap.getFile('a')
  expect(movedFile).to.exist
  expect(movedFile.getContent()).to.equal('a/b')
  expect(fileMap.countFiles()).to.equal(1)
})

it('does not move a file over a non-empty folder', function () {
  const fileMap = makeFileMap(['a/b', 'a/c'])

  // 'a' still contains 'a/c', so the move must be rejected.
  const moveOntoFolder = () => fileMap.moveFile('a/b', 'a')
  expect(moveOntoFolder).to.throw(FileMap.PathnameConflictError)
})
|
||||
|
||||
it('does not overwrite filename that differs by case on add', function () {
  const fileMap = makeFileMap(['a'])
  fileMap.addFile('A', makeTestFile('A'))

  // Both case variants coexist as distinct entries.
  expect(fileMap.countFiles()).to.equal(2)
  expect(fileMap.files.a).to.exist
  expect(fileMap.files.A).to.exist
  expect(fileMap.getFile('a')).to.exist
  expect(fileMap.getFile('A').getContent()).to.equal('A')
})

it('changes case on move', function () {
  const fileMap = makeFileMap(['a'])
  fileMap.moveFile('a', 'A')

  // The move is a rename, not a copy: only the new casing remains.
  expect(fileMap.countFiles()).to.equal(1)
  expect(fileMap.files.A).to.exist
  expect(fileMap.files.a).not.to.exist
  expect(fileMap.getFile('A').getContent()).to.equal('a')
})

it('does not overwrite filename that differs by case on move', function () {
  const fileMap = makeFileMap(['a', 'b'])
  fileMap.moveFile('a', 'B')

  // 'b' and 'B' are different pathnames; 'b' is untouched.
  expect(fileMap.countFiles()).to.equal(2)
  expect(fileMap.files.a).not.to.exist
  expect(fileMap.files.b).to.exist
  expect(fileMap.files.B).to.exist
  expect(fileMap.getFile('B').getContent()).to.equal('a')
})

it('does not find pathname that differs by case', function () {
  const fileMap = makeFileMap(['a'])

  // Lookups are case-sensitive.
  expect(fileMap.getFile('a')).to.exist
  expect(fileMap.getFile('A')).not.to.exist
  expect(fileMap.getFile('b')).not.to.exist
})
|
||||
|
||||
it('does not allow non-safe pathnames', function () {
  // Rejected at construction time.
  expect(() => makeFileMap(['c*'])).to.throw(FileMap.BadPathnameError)

  const fileMap = makeFileMap([])

  // Rejected on add.
  expect(() => {
    fileMap.addFile('c*', makeTestFile('c:'))
  }).to.throw(FileMap.BadPathnameError)

  // Rejected on move.
  fileMap.addFile('a', makeTestFile('a'))
  expect(() => {
    fileMap.moveFile('a', 'c*')
  }).to.throw(FileMap.BadPathnameError)

  // Object-prototype property names must not be usable as pathnames.
  expect(() => {
    fileMap.addFile('hasOwnProperty', makeTestFile('hasOwnProperty'))
    fileMap.addFile('anotherFile', makeTestFile('anotherFile'))
  }).to.throw()
})
|
||||
|
||||
it('removes a file', function () {
  const fileMap = makeFileMap(['a', 'b'])
  fileMap.removeFile('a')

  // Only the removed entry disappears.
  expect(fileMap.countFiles()).to.equal(1)
  expect(fileMap.files.a).not.to.exist
  expect(fileMap.files.b).to.exist
})

it('errors when trying to remove a non-existent file', function () {
  const fileMap = makeFileMap(['a'])
  const removeMissing = () => fileMap.removeFile('b')
  expect(removeMissing).to.throw(FileMap.FileNotFoundError)
})
|
||||
|
||||
it('has mapAsync', async function () {
  const concurrency = 1
  // Each case maps every file to "content-pathname".
  const cases = [
    { pathnames: [], expected: {} },
    { pathnames: ['a'], expected: { a: 'a-a' } },
    { pathnames: ['a', 'b'], expected: { a: 'a-a', b: 'b-b' } },
  ]
  for (const { pathnames, expected } of cases) {
    const fileMap = makeFileMap(pathnames)
    const result = await fileMap.mapAsync(
      (file, pathname) => `${file.getContent()}-${pathname}`,
      concurrency
    )
    expect(result).to.deep.equal(expected)
  }
})
|
||||
})
|
||||
124
libraries/overleaf-editor-core/test/hash_file_data.test.js
Normal file
124
libraries/overleaf-editor-core/test/hash_file_data.test.js
Normal file
@@ -0,0 +1,124 @@
|
||||
const HashFileData = require('../lib/file_data/hash_file_data')
|
||||
const { expect } = require('chai')
|
||||
const StringFileData = require('../lib/file_data/string_file_data')
|
||||
const sinon = require('sinon')
|
||||
const Blob = require('../lib/blob')
|
||||
|
||||
describe('HashFileData', function () {
  beforeEach(function () {
    this.fileHash = 'a5675307b61ec2517330622a6e649b4ca1ee5612'
    this.rangesHash = '380de212d09bf8498065833dbf242aaf11184316'
    // Stubbed blob store; each test wires up only the stubs it needs.
    this.blobStore = {
      getString: sinon.stub(),
      getObject: sinon.stub(),
      getBlob: sinon.stub(),
    }
  })

  describe('constructor', function () {
    it('should create a new instance of HashFileData from content hash and ranges hash', function () {
      const fileData = new HashFileData(this.fileHash, this.rangesHash)

      expect(fileData).to.be.instanceOf(HashFileData)
      expect(fileData.getHash()).to.equal(this.fileHash)
      expect(fileData.getRangesHash()).to.equal(this.rangesHash)
    })

    it('should create a new instance of HashFileData with no ranges hash', function () {
      const fileData = new HashFileData(this.fileHash)

      expect(fileData).to.be.instanceOf(HashFileData)
      expect(fileData.getHash()).to.equal(this.fileHash)
      expect(fileData.getRangesHash()).to.be.undefined
    })
  })

  describe('fromRaw', function () {
    it('should create a new instance of HashFileData from raw data', function () {
      const raw = { hash: this.fileHash, rangesHash: this.rangesHash }
      const fileData = HashFileData.fromRaw(raw)

      expect(fileData).to.be.instanceOf(HashFileData)
      expect(fileData.getHash()).to.equal(raw.hash)
      expect(fileData.getRangesHash()).to.equal(raw.rangesHash)
    })

    it('should create a new instance of HashFileData from raw data without ranges hash', function () {
      const raw = { hash: this.fileHash }
      const fileData = HashFileData.fromRaw(raw)

      expect(fileData).to.be.instanceOf(HashFileData)
      expect(fileData.getHash()).to.equal(raw.hash)
      expect(fileData.getRangesHash()).to.equal(undefined)
    })
  })

  describe('toRaw', function () {
    it('should include ranges hash when present', function () {
      const raw = new HashFileData(this.fileHash, this.rangesHash).toRaw()
      expect(raw).to.deep.equal({
        hash: this.fileHash,
        rangesHash: this.rangesHash,
      })
    })

    it('should omit ranges hash when not present', function () {
      const raw = new HashFileData(this.fileHash).toRaw()
      expect(raw).to.deep.equal({ hash: this.fileHash })
    })
  })

  describe('toEager', function () {
    it('should convert HashFileData to StringFileData including ranges', async function () {
      const trackedChanges = [
        {
          range: { pos: 5, length: 10 },
          tracking: {
            userId: 'foo',
            type: 'insert',
            ts: '2024-01-01T00:00:00.000Z',
          },
        },
      ]
      const comments = [{ id: 'comment-1', ranges: [{ pos: 1, length: 4 }] }]
      const fileData = new HashFileData(this.fileHash, this.rangesHash)

      // Wire up both the content blob and the ranges blob.
      this.blobStore.getString.withArgs(this.fileHash).resolves('content')
      this.blobStore.getObject
        .withArgs(this.rangesHash)
        .resolves({ trackedChanges, comments })
      this.blobStore.getBlob
        .withArgs(this.rangesHash)
        .resolves(new Blob(this.rangesHash, 20, 20))
      this.blobStore.getBlob
        .withArgs(this.fileHash)
        .resolves(new Blob(this.fileHash, 20, 20))

      const eagerFileData = await fileData.toEager(this.blobStore)

      expect(eagerFileData).to.be.instanceOf(StringFileData)
      expect(eagerFileData.getContent()).to.equal('content')
      expect(eagerFileData.trackedChanges.toRaw()).to.deep.equal(trackedChanges)
      expect(eagerFileData.getComments().toRaw()).to.deep.equal(comments)
    })

    it('should convert HashFileData to StringFileData without ranges', async function () {
      const fileData = new HashFileData(this.fileHash, undefined)
      this.blobStore.getString.withArgs(this.fileHash).resolves('content')
      this.blobStore.getBlob
        .withArgs(this.fileHash)
        .resolves(new Blob(this.fileHash, 20, 20))

      const eagerFileData = await fileData.toEager(this.blobStore)

      // No ranges hash: comments and tracked changes come back empty.
      expect(eagerFileData).to.be.instanceOf(StringFileData)
      expect(eagerFileData.getContent()).to.equal('content')
      expect(eagerFileData.trackedChanges.toRaw()).to.deep.equal([])
      expect(eagerFileData.getComments().toRaw()).to.deep.equal([])
    })
  })
})
|
||||
42
libraries/overleaf-editor-core/test/history.test.js
Normal file
42
libraries/overleaf-editor-core/test/history.test.js
Normal file
@@ -0,0 +1,42 @@
|
||||
'use strict'
|
||||
|
||||
const { expect } = require('chai')
|
||||
const core = require('..')
|
||||
const Change = core.Change
|
||||
const File = core.File
|
||||
const History = core.History
|
||||
const Operation = core.Operation
|
||||
const Snapshot = core.Snapshot
|
||||
|
||||
describe('History', function () {
  describe('findBlobHashes', function () {
    it('finds blob hashes from snapshot and changes', function () {
      const history = new History(new Snapshot(), [])
      const blobHashes = new Set()

      // An empty history contributes no hashes.
      history.findBlobHashes(blobHashes)
      expect(blobHashes.size).to.equal(0)

      // A file hash added to the snapshot is found.
      history.getSnapshot().addFile('foo', File.fromHash(File.EMPTY_FILE_HASH))
      history.findBlobHashes(blobHashes)
      expect(Array.from(blobHashes)).to.have.members([File.EMPTY_FILE_HASH])

      // A file hash added via a change is found as well.
      const testHash = 'a'.repeat(40)
      const change = Change.fromRaw({
        operations: [],
        timestamp: '2015-03-05T12:03:53.035Z',
        authors: [null],
      })
      change.pushOperation(Operation.addFile('bar', File.fromHash(testHash)))
      history.pushChanges([change])
      history.findBlobHashes(blobHashes)
      expect(Array.from(blobHashes)).to.have.members([
        File.EMPTY_FILE_HASH,
        testHash,
      ])
    })
  })
})
|
||||
@@ -0,0 +1,22 @@
|
||||
'use strict'
|
||||
|
||||
const { expect } = require('chai')
|
||||
const ot = require('..')
|
||||
const HollowStringFileData = require('../lib/file_data/hollow_string_file_data')
|
||||
const TextOperation = ot.TextOperation
|
||||
|
||||
describe('HollowStringFileData', function () {
  it('validates string length when edited', function () {
    const maxLength = TextOperation.MAX_STRING_LENGTH
    const fileData = new HollowStringFileData(maxLength)
    expect(fileData.getStringLength()).to.equal(maxLength)

    // Growing past the limit is rejected and leaves the length unchanged.
    const overflow = () =>
      fileData.edit(new TextOperation().retain(maxLength).insert('x'))
    expect(overflow).to.throw(TextOperation.TooLongError)
    expect(fileData.getStringLength()).to.equal(maxLength)

    // Shrinking is allowed.
    fileData.edit(new TextOperation().retain(maxLength - 1).remove(1))
    expect(fileData.getStringLength()).to.equal(maxLength - 1)
  })
})
|
||||
17
libraries/overleaf-editor-core/test/label.test.js
Normal file
17
libraries/overleaf-editor-core/test/label.test.js
Normal file
@@ -0,0 +1,17 @@
|
||||
'use strict'
|
||||
|
||||
const { expect } = require('chai')
|
||||
const ot = require('..')
|
||||
const Label = ot.Label
|
||||
|
||||
describe('Label', function () {
  it('can be created by an anonymous author', function () {
    const raw = {
      text: 'test',
      authorId: null,
      timestamp: '2016-01-01T00:00:00Z',
      version: 123,
    }
    const label = Label.fromRaw(raw)
    expect(label.getAuthorId()).to.be.null
  })
})
|
||||
@@ -0,0 +1,196 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const _ = require('lodash')
|
||||
const { expect } = require('chai')
|
||||
const sinon = require('sinon')
|
||||
|
||||
const ot = require('..')
|
||||
const File = ot.File
|
||||
const TextOperation = ot.TextOperation
|
||||
const LazyStringFileData = require('../lib/file_data/lazy_string_file_data')
|
||||
const EagerStringFileData = require('../lib/file_data/string_file_data')
|
||||
|
||||
describe('LazyStringFileData', function () {
  beforeEach(function () {
    this.rangesHash = '380de212d09bf8498065833dbf242aaf11184316'
    this.fileHash = 'a5675307b61ec2517330622a6e649b4ca1ee5612'
    // Stubbed blob store pre-wired with the content and ranges fixtures.
    this.blobStore = {
      getString: sinon.stub(),
      putString: sinon.stub().resolves(new ot.Blob(this.fileHash, 19, 19)),
      getObject: sinon.stub(),
      putObject: sinon.stub().resolves(new ot.Blob(this.rangesHash, 204, 204)),
    }
    this.blobStore.getString.withArgs(File.EMPTY_FILE_HASH).resolves('')
    this.blobStore.getString
      .withArgs(this.fileHash)
      .resolves('the quick brown fox')
    this.blobStore.getObject.withArgs(this.rangesHash).resolves({
      comments: [{ id: 'foo', ranges: [{ pos: 0, length: 3 }] }],
      trackedChanges: [
        {
          range: { pos: 4, length: 5 },
          tracking: {
            type: 'delete',
            userId: 'user1',
            ts: '2024-01-01T00:00:00.000Z',
          },
        },
      ],
    })
  })

  it('uses raw text operations for toRaw and fromRaw', function () {
    const testHash = File.EMPTY_FILE_HASH
    const fileData = new LazyStringFileData(testHash, undefined, 0)
    // Round-trip helper: serialize then deserialize.
    const roundTrip = data => LazyStringFileData.fromRaw(data.toRaw())

    expect(fileData.toRaw()).to.deep.equal({
      hash: testHash,
      stringLength: 0,
    })
    let restored = roundTrip(fileData)
    expect(restored.getHash()).to.equal(testHash)
    expect(restored.getStringLength()).to.equal(0)
    expect(restored.getOperations()).to.have.length(0)

    fileData.edit(new TextOperation().insert('a'))
    expect(fileData.toRaw()).to.deep.equal({
      hash: testHash,
      stringLength: 1,
      operations: [{ textOperation: ['a'] }],
    })
    restored = roundTrip(fileData)
    expect(restored.getHash()).not.to.exist // file has changed
    expect(restored.getStringLength()).to.equal(1)
    expect(restored.getOperations()).to.have.length(1)
    expect(restored.getOperations()[0]).to.be.instanceOf(TextOperation)
    expect(
      /** @type {InstanceType<TextOperation>} */ (restored.getOperations()[0])
        .ops
    ).to.have.length(1)

    fileData.edit(new TextOperation().retain(1).insert('b'))
    expect(fileData.toRaw()).to.deep.equal({
      hash: testHash,
      stringLength: 2,
      operations: [{ textOperation: ['a'] }, { textOperation: [1, 'b'] }],
    })
    restored = roundTrip(fileData)
    expect(restored.getHash()).not.to.exist // file has changed
    expect(restored.getStringLength()).to.equal(2)
    expect(restored.getOperations()).to.have.length(2)
    expect(
      /** @type {InstanceType<TextOperation>} */ (restored.getOperations()[0])
        .ops
    ).to.have.length(1)
    expect(
      /** @type {InstanceType<TextOperation>} */ (restored.getOperations()[1])
        .ops
    ).to.have.length(2)
  })

  it('should include rangesHash in toRaw and fromRaw when available', function () {
    const testHash = File.EMPTY_FILE_HASH
    const fileData = new LazyStringFileData(testHash, this.rangesHash, 19)

    const raw = fileData.toRaw()
    expect(raw).to.deep.equal({
      hash: testHash,
      rangesHash: this.rangesHash,
      stringLength: 19,
    })

    const restored = LazyStringFileData.fromRaw(raw)
    expect(restored.getHash()).to.equal(testHash)
    expect(restored.getRangesHash()).to.equal(this.rangesHash)
    expect(restored.getStringLength()).to.equal(19)
    expect(restored.getOperations()).to.have.length(0)
  })

  it('should fetch content from blob store when loading eager string', async function () {
    const fileData = new LazyStringFileData(this.fileHash, this.rangesHash, 19)
    const eagerString = await fileData.toEager(this.blobStore)

    expect(eagerString).to.be.instanceOf(EagerStringFileData)
    expect(eagerString.getContent()).to.equal('the quick brown fox')
    expect(eagerString.getComments().toRaw()).to.deep.equal([
      { id: 'foo', ranges: [{ pos: 0, length: 3 }] },
    ])
    expect(eagerString.trackedChanges.toRaw()).to.deep.equal([
      {
        range: { pos: 4, length: 5 },
        tracking: {
          type: 'delete',
          userId: 'user1',
          ts: '2024-01-01T00:00:00.000Z',
        },
      },
    ])
    // Both blobs must have been fetched.
    expect(this.blobStore.getObject.calledWith(this.rangesHash)).to.be.true
    expect(this.blobStore.getString.calledWith(this.fileHash)).to.be.true
  })

  it('should not fetch ranges from blob store if not present', async function () {
    const fileData = new LazyStringFileData(this.fileHash, undefined, 19)
    const eagerString = await fileData.toEager(this.blobStore)

    expect(eagerString).to.be.instanceOf(EagerStringFileData)
    expect(eagerString.getContent()).to.equal('the quick brown fox')
    expect(eagerString.getComments().toRaw()).to.be.empty
    expect(eagerString.trackedChanges.length).to.equal(0)
    // Without a ranges hash no object fetch should happen.
    expect(this.blobStore.getObject.called).to.be.false
    expect(this.blobStore.getString.calledWith(this.fileHash)).to.be.true
  })

  it('validates operations when edited', function () {
    const testHash = File.EMPTY_FILE_HASH
    const fileData = new LazyStringFileData(testHash, undefined, 0)
    expect(fileData.getHash()).equal(testHash)
    expect(fileData.getByteLength()).to.equal(0) // approximately
    expect(fileData.getStringLength()).to.equal(0)
    expect(fileData.getOperations()).to.have.length(0)

    fileData.edit(new TextOperation().insert('a'))
    expect(fileData.getHash()).not.to.exist
    expect(fileData.getByteLength()).to.equal(1) // approximately
    expect(fileData.getStringLength()).to.equal(1)
    expect(fileData.getOperations()).to.have.length(1)

    // An operation that does not fit the current string is rejected and
    // must leave the state untouched.
    const badEdit = () => fileData.edit(new TextOperation().retain(10))
    expect(badEdit).to.throw(TextOperation.ApplyError)
    expect(fileData.getHash()).not.to.exist
    expect(fileData.getByteLength()).to.equal(1) // approximately
    expect(fileData.getStringLength()).to.equal(1)
    expect(fileData.getOperations()).to.have.length(1)
  })

  it('validates string length when edited', function () {
    const testHash = File.EMPTY_FILE_HASH
    const fileData = new LazyStringFileData(testHash, undefined, 0)
    expect(fileData.getHash()).equal(testHash)
    expect(fileData.getByteLength()).to.equal(0) // approximately
    expect(fileData.getStringLength()).to.equal(0)
    expect(fileData.getOperations()).to.have.length(0)

    const longString = _.repeat('a', TextOperation.MAX_STRING_LENGTH)
    fileData.edit(new TextOperation().insert(longString))
    expect(fileData.getHash()).not.to.exist
    expect(fileData.getByteLength()).to.equal(longString.length) // approximate
    expect(fileData.getStringLength()).to.equal(longString.length)
    expect(fileData.getOperations()).to.have.length(1)

    // Exceeding the maximum length throws and leaves the state untouched.
    const overflow = () =>
      fileData.edit(new TextOperation().retain(longString.length).insert('x'))
    expect(overflow).to.throw(TextOperation.TooLongError)
    expect(fileData.getHash()).not.to.exist
    expect(fileData.getByteLength()).to.equal(longString.length) // approximate
    expect(fileData.getStringLength()).to.equal(longString.length)
    expect(fileData.getOperations()).to.have.length(1)
  })
})
|
||||
@@ -0,0 +1,64 @@
|
||||
'use strict'
|
||||
|
||||
const { expect } = require('chai')
|
||||
const ot = require('..')
|
||||
const File = ot.File
|
||||
const MoveFileOperation = ot.MoveFileOperation
|
||||
const Snapshot = ot.Snapshot
|
||||
const Operation = ot.Operation
|
||||
const V2DocVersions = ot.V2DocVersions
|
||||
const TextOperation = ot.TextOperation
|
||||
|
||||
describe('MoveFileOperation', function () {
  function makeEmptySnapshot() {
    return new Snapshot()
  }

  // Snapshot with a single file 'foo' whose content is 'test: foo'.
  function makeOneFileSnapshot() {
    const snapshot = makeEmptySnapshot()
    snapshot.addFile('foo', File.fromString('test: foo'))
    return snapshot
  }

  // Snapshot with two files: 'foo' and 'bar'.
  function makeTwoFileSnapshot() {
    const snapshot = makeOneFileSnapshot()
    snapshot.addFile('bar', File.fromString('test: bar'))
    return snapshot
  }

  it('moves a file over another', function () {
    const snapshot = makeOneFileSnapshot()
    new MoveFileOperation('foo', 'bar').applyTo(snapshot)

    expect(snapshot.countFiles()).to.equal(1)
    expect(snapshot.getFile('bar').getContent()).to.equal('test: foo')
  })

  it('moves a file to another pathname', function () {
    const snapshot = makeTwoFileSnapshot()
    new MoveFileOperation('foo', 'a').applyTo(snapshot)

    expect(snapshot.countFiles()).to.equal(2)
    expect(snapshot.getFile('a').getContent()).to.equal('test: foo')
    expect(snapshot.getFile('bar').getContent()).to.equal('test: bar')
  })

  it('should keep v2DocVersions in-sync', function () {
    const snapshot = makeTwoFileSnapshot()
    snapshot.setV2DocVersions(
      V2DocVersions.fromRaw({
        id1: { pathname: 'foo', v: 1 },
        id2: { pathname: 'bar', v: 1 },
      })
    )

    // Rename 'foo', edit it under its new name, then delete 'bar'.
    Operation.moveFile('foo', 'foo-after').applyTo(snapshot)
    Operation.editFile(
      'foo-after',
      TextOperation.fromJSON({ textOperation: [9, 'edit'] })
    ).applyTo(snapshot)
    Operation.removeFile('bar').applyTo(snapshot)

    expect(snapshot.getV2DocVersions().toRaw()).to.deep.equal({
      id1: { pathname: 'foo-after', v: 1 },
    })
  })
})
|
||||
1074
libraries/overleaf-editor-core/test/operation.test.js
Normal file
1074
libraries/overleaf-editor-core/test/operation.test.js
Normal file
File diff suppressed because it is too large
Load Diff
452
libraries/overleaf-editor-core/test/range.test.js
Normal file
452
libraries/overleaf-editor-core/test/range.test.js
Normal file
@@ -0,0 +1,452 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const { expect } = require('chai')
|
||||
const Range = require('../lib/range')
|
||||
|
||||
describe('Range', function () {
|
||||
it('should create a range', function () {
  const range = new Range(5, 10)
  // end is exclusive: pos + length.
  expect(range.start).to.eql(5)
  expect(range.end).to.eql(15)
})

it('should create a range using fromRaw', function () {
  const range = Range.fromRaw({ pos: 5, length: 10 })
  expect(range.start).to.eql(5)
  expect(range.end).to.eql(15)
})

it('should convert to raw', function () {
  expect(new Range(5, 10).toRaw()).to.eql({ pos: 5, length: 10 })
})

it('should check isEmpty method', function () {
  expect(new Range(5, 10).isEmpty()).to.be.false
  expect(new Range(5, 0).isEmpty()).to.be.true
})
|
||||
|
||||
// NOTE: `.to.throw` must be invoked as a method. As a bare property access
// it performs no assertion, so these tests previously passed even if the
// constructor accepted negative arguments.
it('should not create a range with a negative position', function () {
  expect(() => new Range(-1, 10)).to.throw()
})

it('should not create a range with a negative length', function () {
  expect(() => new Range(0, -2)).to.throw()
})
|
||||
|
||||
describe('overlaps', function () {
  it('same ranges should overlap', function () {
    const left = new Range(1, 3)
    const right = new Range(1, 3)
    expect(left.overlaps(right)).to.eql(true)
  })

  it('non-touching ranges should not overlap', function () {
    const left = new Range(1, 3)
    const right = new Range(10, 3)
    expect(left.overlaps(right)).to.eql(false)
    expect(right.overlaps(left)).to.eql(false)
  })

  it('touching ranges should not overlap', function () {
    // [1,4) and [4,7) share only a boundary.
    const left = new Range(1, 3)
    const right = new Range(4, 3)
    expect(left.overlaps(right)).to.eql(false)
    expect(right.overlaps(left)).to.eql(false)
  })

  it('should overlap', function () {
    const left = new Range(1, 3)
    const right = new Range(2, 3)
    expect(left.overlaps(right)).to.eql(true)
    expect(right.overlaps(left)).to.eql(true)
  })
})
|
||||
|
||||
describe('touches', function () {
  it('should not touch if ranges are the same', function () {
    const left = new Range(1, 3)
    const right = new Range(1, 3)
    expect(left.touches(right)).to.eql(false)
    expect(right.touches(left)).to.eql(false)
  })

  it('should return true when ranges touch at one point', function () {
    // [1,4) ends exactly where [4,6) begins.
    const left = new Range(1, 3)
    const right = new Range(4, 2)
    expect(left.touches(right)).to.eql(true)
    expect(right.touches(left)).to.eql(true)
  })

  it('should return false when ranges do not touch', function () {
    const left = new Range(1, 3)
    const right = new Range(5, 2)
    expect(left.touches(right)).to.eql(false)
    expect(right.touches(left)).to.eql(false)
  })

  it('should return false when ranges overlap', function () {
    const left = new Range(1, 3)
    const right = new Range(3, 2)
    expect(left.touches(right)).to.eql(false)
    expect(right.touches(left)).to.eql(false)
  })
})
|
||||
|
||||
it('should check if range contains another', function () {
  const from0to2 = new Range(0, 3)
  const from4to13 = new Range(4, 10)
  const from4to14 = new Range(4, 11)
  const from4to15 = new Range(4, 12)
  const from5to13 = new Range(5, 9)
  const from5to14 = new Range(5, 10)
  const from5to15 = new Range(5, 11)
  const from0to99 = new Range(0, 100)

  const ranges = [
    from0to2,
    from4to13,
    from4to14,
    from4to15,
    from5to13,
    from5to14,
    from5to15,
    from0to99,
  ]
  const T = true
  const F = false
  // Pairwise truth table: expected[i][j] === ranges[i].contains(ranges[j]).
  const expected = [
    [T, F, F, F, F, F, F, F], // from0to2
    [F, T, F, F, T, F, F, F], // from4to13
    [F, T, T, F, T, T, F, F], // from4to14
    [F, T, T, T, T, T, T, F], // from4to15
    [F, F, F, F, T, F, F, F], // from5to13
    [F, F, F, F, T, T, F, F], // from5to14
    [F, F, F, F, T, T, T, F], // from5to15
    [T, T, T, T, T, T, T, T], // from0to99
  ]

  for (let i = 0; i < ranges.length; i++) {
    for (let j = 0; j < ranges.length; j++) {
      expect(ranges[i].contains(ranges[j])).to.eql(expected[i][j])
    }
  }
})
|
||||
|
||||
it('should check if range contains a cursor', function () {
  const range = new Range(5, 10)
  // A cursor on either boundary (5 or 15) counts as contained.
  const cases = [
    [4, false],
    [5, true],
    [6, true],
    [14, true],
    [15, true],
    [16, false],
  ]
  for (const [cursor, expected] of cases) {
    expect(range.containsCursor(cursor)).to.eql(expected)
  }
})
|
||||
|
||||
describe('subtract range from another', function () {
  it('should not subtract', function () {
    const from1to5 = new Range(1, 6)
    const from0to1 = new Range(0, 1)
    const result = from1to5.subtract(from0to1)
    expect(result.start).to.eql(1)
    expect(result.length).to.eql(6)
  })

  it('should subtract from the left', function () {
    const from5to19 = new Range(5, 15)
    const from15to24 = new Range(15, 10)
    const result = from15to24.subtract(from5to19)
    expect(result.start).to.eql(5)
    expect(result.end).to.eql(10)
  })

  it('should subtract from the right', function () {
    const from10to24 = new Range(10, 15)
    const from5to19 = new Range(5, 15)
    const result = from5to19.subtract(from10to24)
    expect(result.start).to.eql(5)
    expect(result.end).to.eql(10)
  })

  it('should subtract from the middle', function () {
    const from5to19 = new Range(5, 15)
    const from10to14 = new Range(10, 5)
    const result = from5to19.subtract(from10to14)
    expect(result.start).to.eql(5)
    expect(result.end).to.eql(15)
  })

  it('should delete entire range', function () {
    const from0to99 = new Range(0, 100)
    const from5to19 = new Range(5, 15)
    const result = from5to19.subtract(from0to99)
    // The range collapses to an empty range at its own start.
    expect(result.start).to.eql(5)
    expect(result.end).to.eql(5)
    expect(result.length).to.eql(0)
  })

  it('should not subtract if ranges do not overlap', function () {
    const from5to14 = new Range(5, 10)
    const from20to29 = new Range(20, 10)
    expect(from5to14.subtract(from20to29).toRaw()).deep.equal(from5to14.toRaw())
    expect(from20to29.subtract(from5to14).toRaw()).deep.equal(
      from20to29.toRaw()
    )
  })
})
|
||||
|
||||
  // Range.merge unions two ranges; canMerge reports whether they touch or
  // overlap. merge() on disjoint ranges throws.
  // NOTE(review): "overlaping" in two test titles is a typo for
  // "overlapping" — titles are runtime strings, left untouched here.
  describe('merge ranges', function () {
    it('should merge ranges overlaping at the end', function () {
      const from5to14 = new Range(5, 10)
      const from10to19 = new Range(10, 10)
      expect(from5to14.canMerge(from10to19)).to.eql(true)
      const result = from5to14.merge(from10to19)
      expect(result.start).to.eql(5)
      expect(result.end).to.eql(20)
    })

    it('should merge ranges overlaping at the start', function () {
      const from5to14 = new Range(5, 10)
      const from0to9 = new Range(0, 10)
      expect(from5to14.canMerge(from0to9)).to.eql(true)
      const result = from5to14.merge(from0to9)
      expect(result.start).to.eql(0)
      expect(result.end).to.eql(15)
    })

    it('should merge ranges if one is covered by another', function () {
      // Merging with a superset yields the superset.
      const from5to14 = new Range(5, 10)
      const from0to19 = new Range(0, 20)
      expect(from5to14.canMerge(from0to19)).to.eql(true)
      const result = from5to14.merge(from0to19)
      expect(result.toRaw()).deep.equal(from0to19.toRaw())
    })

    it('should produce the same length after merge', function () {
      // Merging a subset into a superset leaves the superset unchanged.
      const from5to14 = new Range(5, 10)
      const from0to19 = new Range(0, 20)
      expect(from0to19.canMerge(from5to14)).to.eql(true)
      const result = from0to19.merge(from5to14)
      expect(result.start).to.eql(0)
      expect(result.end).to.eql(20)
    })

    it('should not merge ranges if they do not overlap', function () {
      const from5to14 = new Range(5, 10)
      const from20to29 = new Range(20, 10)
      expect(from5to14.canMerge(from20to29)).to.eql(false)
      expect(from20to29.canMerge(from5to14)).to.eql(false)
      // merge() refuses disjoint ranges rather than bridging the gap.
      expect(() => from5to14.merge(from20to29)).to.throw()
    })
  })
|
||||
|
||||
  // startsAfter(other) is true only when the receiver's start is at or past
  // the other range's end — i.e. strictly no overlap on the left.
  // Exhaustive pairwise check over five ranges with varying overlap.
  it('should check if range starts after a range', function () {
    const from0to4 = new Range(0, 5)
    const from1to5 = new Range(1, 5)
    const from5to9 = new Range(5, 5)
    const from6to10 = new Range(6, 5)
    const from10to14 = new Range(10, 5)

    expect(from0to4.startsAfter(from0to4)).to.eql(false)
    expect(from0to4.startsAfter(from1to5)).to.eql(false)
    expect(from0to4.startsAfter(from5to9)).to.eql(false)
    expect(from0to4.startsAfter(from6to10)).to.eql(false)
    expect(from0to4.startsAfter(from10to14)).to.eql(false)

    expect(from1to5.startsAfter(from0to4)).to.eql(false)
    expect(from1to5.startsAfter(from1to5)).to.eql(false)
    expect(from1to5.startsAfter(from5to9)).to.eql(false)
    expect(from1to5.startsAfter(from6to10)).to.eql(false)
    expect(from1to5.startsAfter(from10to14)).to.eql(false)

    // Range(5, 5) starts exactly at Range(0, 5)'s end — that counts as after.
    expect(from5to9.startsAfter(from0to4)).to.eql(true)
    expect(from5to9.startsAfter(from1to5)).to.eql(false)
    expect(from5to9.startsAfter(from5to9)).to.eql(false)
    expect(from5to9.startsAfter(from6to10)).to.eql(false)
    expect(from5to9.startsAfter(from10to14)).to.eql(false)

    expect(from6to10.startsAfter(from0to4)).to.eql(true)
    expect(from6to10.startsAfter(from1to5)).to.eql(true)
    expect(from6to10.startsAfter(from5to9)).to.eql(false)
    expect(from6to10.startsAfter(from6to10)).to.eql(false)
    expect(from6to10.startsAfter(from10to14)).to.eql(false)

    expect(from10to14.startsAfter(from0to4)).to.eql(true)
    expect(from10to14.startsAfter(from1to5)).to.eql(true)
    expect(from10to14.startsAfter(from5to9)).to.eql(true)
    expect(from10to14.startsAfter(from6to10)).to.eql(false)
    expect(from10to14.startsAfter(from10to14)).to.eql(false)
  })
|
||||
|
||||
  // startIsAfter(pos) is true for positions strictly before the range start.
  it('should check if range starts after a position', function () {
    const from5to14 = new Range(5, 10)
    expect(from5to14.startIsAfter(3)).to.be.true
    expect(from5to14.startIsAfter(4)).to.be.true
    expect(from5to14.startIsAfter(5)).to.be.false
    expect(from5to14.startIsAfter(6)).to.be.false
    expect(from5to14.startIsAfter(15)).to.be.false
    expect(from5to14.startIsAfter(16)).to.be.false
  })

  // extendBy grows the length, keeping the start fixed.
  it('should extend the range', function () {
    const from5to14 = new Range(5, 10)
    const result = from5to14.extendBy(3)
    expect(result.length).to.eql(13)
    expect(result.start).to.eql(5)
    expect(result.end).to.eql(18)
  })

  // shrinkBy reduces the length, keeping the start fixed.
  it('should shrink the range', function () {
    const from5to14 = new Range(5, 10)
    const result = from5to14.shrinkBy(3)
    expect(result.length).to.eql(7)
    expect(result.start).to.eql(5)
    expect(result.end).to.eql(12)
  })

  // Shrinking past zero length is rejected rather than producing a
  // negative-length range.
  it('should throw if shrinking too much', function () {
    const from5to14 = new Range(5, 10)
    expect(() => from5to14.shrinkBy(11)).to.throw()
  })

  // moveBy translates the range, preserving its length.
  it('should move the range', function () {
    const from5to14 = new Range(5, 10)
    const result = from5to14.moveBy(3)
    expect(result.length).to.eql(10)
    expect(result.start).to.eql(8)
    expect(result.end).to.eql(18)
  })
|
||||
|
||||
  // splitAt(cursor) divides the range into [start, cursor) and
  // [cursor, end); the cursor must lie within [start, end].
  describe('splitAt', function () {
    it('should split at the start', function () {
      // Splitting at the start yields an empty left part and the whole
      // range on the right.
      const range = new Range(5, 10)
      const [left, right] = range.splitAt(5)
      expect(left.isEmpty()).to.be.true
      expect(right.start).to.eql(5)
      expect(right.end).to.eql(15)
    })

    it('should not split before the start', function () {
      const range = new Range(5, 10)
      expect(() => range.splitAt(4)).to.throw()
    })

    it('should split at last cursor in range', function () {
      // Cursor 14 is the last interior position of Range(5, 10); the right
      // part keeps the final unit.
      const range = new Range(5, 10)
      const [left, right] = range.splitAt(14)
      expect(left.start).to.equal(5)
      expect(left.end).to.equal(14)
      expect(right.start).to.equal(14)
      expect(right.end).to.equal(15)
    })

    it('should not split after the end', function () {
      const range = new Range(5, 10)
      expect(() => range.splitAt(16)).to.throw()
    })

    it('should split at end', function () {
      // Splitting at the end yields the whole range on the left and an
      // empty right part anchored at the end.
      const range = new Range(5, 10)
      const [left, right] = range.splitAt(15)
      expect(left.start).to.equal(5)
      expect(left.end).to.equal(15)
      expect(right.start).to.equal(15)
      expect(right.end).to.equal(15)
    })

    it('should split in the middle', function () {
      const range = new Range(5, 10)
      const [left, right] = range.splitAt(10)
      expect(left.start).to.equal(5)
      expect(left.end).to.equal(10)
      expect(right.start).to.equal(10)
      expect(right.end).to.equal(15)
    })
  })
|
||||
|
||||
  // insertAt(cursor, length) models inserting `length` units at `cursor`,
  // returning [left, inserted, right]; the pieces tile the extended range.
  describe('insertAt', function () {
    it('should insert at the start', function () {
      const range = new Range(5, 10)
      const [left, inserted, right] = range.insertAt(5, 3)
      expect(left.isEmpty()).to.be.true
      expect(inserted.start).to.eql(5)
      expect(inserted.end).to.eql(8)
      // The original content is shifted right by the insertion length.
      expect(right.start).to.eql(8)
      expect(right.end).to.eql(18)
    })

    it('should insert at the end', function () {
      const range = new Range(5, 10)
      const [left, inserted, right] = range.insertAt(15, 3)
      expect(left.start).to.eql(5)
      expect(left.end).to.eql(15)
      expect(inserted.start).to.eql(15)
      expect(inserted.end).to.eql(18)
      expect(right.isEmpty()).to.be.true
    })

    it('should insert in the middle', function () {
      const range = new Range(5, 10)
      const [left, inserted, right] = range.insertAt(10, 3)
      expect(left.start).to.eql(5)
      expect(left.end).to.eql(10)
      expect(inserted.start).to.eql(10)
      expect(inserted.end).to.eql(13)
      expect(right.start).to.eql(13)
      expect(right.end).to.eql(18)
    })

    it('should throw if cursor is out of range', function () {
      // Cursor must lie within [start, end] inclusive.
      const range = new Range(5, 10)
      expect(() => range.insertAt(4, 3)).to.throw()
      expect(() => range.insertAt(16, 3)).to.throw()
    })
  })
|
||||
})
|
||||
126
libraries/overleaf-editor-core/test/safe_pathname.test.js
Normal file
126
libraries/overleaf-editor-core/test/safe_pathname.test.js
Normal file
@@ -0,0 +1,126 @@
|
||||
'use strict'

const { expect } = require('chai')
const ot = require('..')
const safePathname = ot.safePathname

// Tests for safePathname: cleaning of user-supplied pathnames into a safe,
// relative, slash-separated form.
describe('safePathname', function () {
  /**
   * Assert that cleaning `input` yields `output` for the given `reason`,
   * and that cleaning is idempotent: the cleaned result is already clean.
   *
   * @param {string} input raw pathname
   * @param {string} output expected cleaned pathname
   * @param {string} [reason] expected debug reason string from cleanDebug
   */
  function expectClean(input, output, reason = '') {
    // check expected output and also idempotency
    const [cleanedInput, gotReason] = safePathname.cleanDebug(input)
    expect(cleanedInput).to.equal(output)
    expect(gotReason).to.equal(reason)
    expect(safePathname.clean(cleanedInput)).to.equal(cleanedInput)
    expect(safePathname.isClean(cleanedInput)).to.be.true
  }

  it('cleans pathnames', function () {
    // preserve valid pathnames
    expectClean('llama.jpg', 'llama.jpg')
    expectClean('DSC4056.JPG', 'DSC4056.JPG')

    // detects unclean pathnames
    // FIX: the original used `.to.be.falsy`, which is not a chai assertion
    // property — reading it yields `undefined` and the line asserted
    // nothing. `.to.be.false` actually checks the returned value.
    expect(safePathname.isClean('rm -rf /')).to.be.false

    // replace invalid characters with underscores
    expectClean(
      'test-s*\u0001\u0002m\u0007st\u0008.jpg',
      'test-s___m_st_.jpg',
      'cleanPart'
    )

    // keep slashes, normalize paths, replace ..
    expectClean('./foo', 'foo', 'normalize')
    expectClean('../foo', '__/foo', 'cleanPart')
    expectClean('foo/./bar', 'foo/bar', 'normalize')
    expectClean('foo/../bar', 'bar', 'normalize')
    expectClean('../../tricky/foo.bar', '__/__/tricky/foo.bar', 'cleanPart')
    expectClean(
      'foo/../../tricky/foo.bar',
      '__/tricky/foo.bar',
      'normalize,cleanPart'
    )
    expectClean('foo/bar/../../tricky/foo.bar', 'tricky/foo.bar', 'normalize')
    expectClean(
      'foo/bar/baz/../../tricky/foo.bar',
      'foo/tricky/foo.bar',
      'normalize'
    )

    // remove illegal chars even when there is no extension
    expectClean('**foo', '__foo', 'cleanPart')

    // remove windows file paths
    expectClean('c:\\temp\\foo.txt', 'c:/temp/foo.txt', 'workaround for IE')

    // do not allow a leading slash (relative paths only)
    expectClean('/foo', '_/foo', 'no leading /')
    expectClean('//foo', '_/foo', 'normalize,no leading /')

    // do not allow multiple leading slashes
    expectClean('//foo', '_/foo', 'normalize,no leading /')

    // do not allow a trailing slash
    expectClean('/', '_', 'no leading /,no trailing /')
    expectClean('foo/', 'foo', 'no trailing /')
    expectClean('foo.tex/', 'foo.tex', 'no trailing /')

    // do not allow multiple trailing slashes
    expectClean('//', '_', 'normalize,no leading /,no trailing /')
    expectClean('///', '_', 'normalize,no leading /,no trailing /')
    expectClean('foo//', 'foo', 'normalize,no trailing /')

    // file and folder names that consist of . and .. are not OK
    expectClean('.', '_', 'cleanPart')
    expectClean('..', '__', 'cleanPart')
    // we will allow name with more dots e.g. ... and ....
    expectClean('...', '...')
    expectClean('....', '....')
    expectClean('foo/...', 'foo/...')
    expectClean('foo/....', 'foo/....')
    expectClean('foo/.../bar', 'foo/.../bar')
    expectClean('foo/..../bar', 'foo/..../bar')

    // leading dots are OK
    expectClean('._', '._')
    expectClean('.gitignore', '.gitignore')

    // trailing dots are not OK on Windows but we allow them
    expectClean('_.', '_.')
    expectClean('foo/_.', 'foo/_.')
    expectClean('foo/_./bar', 'foo/_./bar')
    expectClean('foo/_../bar', 'foo/_../bar')

    // spaces are allowed
    expectClean('a b.png', 'a b.png')

    // leading and trailing spaces are not OK
    // NOTE(review): each pair below is byte-identical; the second entry of
    // each pair was presumably meant to use multiple spaces ('  foo',
    // 'foo  ') and may have been collapsed by formatting — confirm against
    // history before changing.
    expectClean(' foo', 'foo', 'no leading spaces')
    expectClean(' foo', 'foo', 'no leading spaces')
    expectClean('foo ', 'foo', 'no trailing spaces')
    expectClean('foo ', 'foo', 'no trailing spaces')

    // reserved file names on Windows should not be OK, but we already have
    // some in the old system, so have to allow them for now
    expectClean('AUX', 'AUX')
    expectClean('foo/AUX', 'foo/AUX')
    expectClean('AUX/foo', 'AUX/foo')

    // multiple dots are OK
    expectClean('a.b.png', 'a.b.png')
    expectClean('a.code.tex', 'a.code.tex')

    // there's no particular reason to allow multiple slashes; sometimes people
    // seem to rename files to URLs (https://domain/path) in an attempt to
    // upload a file, and this results in an empty directory name
    expectClean('foo//bar.png', 'foo/bar.png', 'normalize')
    expectClean('foo///bar.png', 'foo/bar.png', 'normalize')

    // Check javascript property handling
    expectClean('foo/prototype', 'foo/prototype') // OK as part of a pathname
    expectClean('prototype/test.txt', 'prototype/test.txt')
    expectClean('prototype', '@prototype', 'BLOCKED_FILE_RX') // not OK as whole pathname
    expectClean('hasOwnProperty', '@hasOwnProperty', 'BLOCKED_FILE_RX')
    expectClean('**proto**', '@__proto__', 'cleanPart,BLOCKED_FILE_RX')
  })
})
|
||||
477
libraries/overleaf-editor-core/test/scan_op.test.js
Normal file
477
libraries/overleaf-editor-core/test/scan_op.test.js
Normal file
@@ -0,0 +1,477 @@
|
||||
// @ts-check
const { expect } = require('chai')
const {
  RetainOp,
  ScanOp,
  InsertOp,
  RemoveOp,
} = require('../lib/operation/scan_op')
const { UnprocessableError, ApplyError } = require('../lib/errors')
const TrackingProps = require('../lib/file_data/tracking_props')

// ScanOp.fromJSON dispatches on the wire format: positive numbers and
// { r } objects become RetainOp, strings and { i } objects become InsertOp,
// negative numbers become RemoveOp; anything else is rejected.
describe('ScanOp', function () {
  describe('fromJSON', function () {
    it('constructs a RetainOp from object', function () {
      const op = ScanOp.fromJSON({ r: 1 })
      expect(op).to.be.instanceOf(RetainOp)
      expect(/** @type {RetainOp} */ (op).length).to.equal(1)
    })

    it('constructs a RetainOp from number', function () {
      const op = ScanOp.fromJSON(2)
      expect(op).to.be.instanceOf(RetainOp)
      expect(/** @type {RetainOp} */ (op).length).to.equal(2)
    })

    it('constructs an InsertOp from string', function () {
      const op = ScanOp.fromJSON('abc')
      expect(op).to.be.instanceOf(InsertOp)
      expect(/** @type {InsertOp} */ (op).insertion).to.equal('abc')
    })

    it('constructs an InsertOp from object', function () {
      const op = ScanOp.fromJSON({ i: 'abc' })
      expect(op).to.be.instanceOf(InsertOp)
      expect(/** @type {InsertOp} */ (op).insertion).to.equal('abc')
    })

    it('constructs a RemoveOp from number', function () {
      // Negative numbers encode removals; the length is the magnitude.
      const op = ScanOp.fromJSON(-2)
      expect(op).to.be.instanceOf(RemoveOp)
      expect(/** @type {RemoveOp} */ (op).length).to.equal(2)
    })

    it('throws an error for invalid input', function () {
      expect(() => ScanOp.fromJSON(/** @type {any} */ ({}))).to.throw(
        UnprocessableError
      )
    })

    it('throws an error for zero', function () {
      // Zero is neither a retain nor a removal and is rejected.
      expect(() => ScanOp.fromJSON(0)).to.throw(UnprocessableError)
    })
  })
})
|
||||
|
||||
// RetainOp semantics: equality requires matching length AND matching
// tracking info; merging is only allowed between RetainOps whose tracking
// info is identical, and concatenates lengths.
describe('RetainOp', function () {
  it('is equal to another RetainOp with the same length', function () {
    const op1 = new RetainOp(1)
    const op2 = new RetainOp(1)
    expect(op1.equals(op2)).to.be.true
  })

  it('is not equal to another RetainOp with a different length', function () {
    const op1 = new RetainOp(1)
    const op2 = new RetainOp(2)
    expect(op1.equals(op2)).to.be.false
  })

  it('is not equal to another RetainOp with no tracking info', function () {
    const op1 = new RetainOp(
      4,
      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
    )
    const op2 = new RetainOp(4)
    expect(op1.equals(op2)).to.be.false
  })

  it('is not equal to another RetainOp with different tracking info', function () {
    // Same length, same timestamp — only the user differs.
    const op1 = new RetainOp(
      4,
      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
    )
    const op2 = new RetainOp(
      4,
      new TrackingProps('insert', 'user2', new Date('2024-01-01T00:00:00.000Z'))
    )
    expect(op1.equals(op2)).to.be.false
  })

  it('is not equal to an InsertOp', function () {
    const op1 = new RetainOp(1)
    const op2 = new InsertOp('a')
    expect(op1.equals(op2)).to.be.false
  })

  it('is not equal to a RemoveOp', function () {
    const op1 = new RetainOp(1)
    const op2 = new RemoveOp(1)
    expect(op1.equals(op2)).to.be.false
  })

  it('can merge with another RetainOp', function () {
    // mergeWith mutates op1 in place, summing the lengths.
    const op1 = new RetainOp(1)
    const op2 = new RetainOp(2)
    expect(op1.canMergeWith(op2)).to.be.true
    op1.mergeWith(op2)
    expect(op1.equals(new RetainOp(3))).to.be.true
  })

  it('cannot merge with another RetainOp if tracking info is different', function () {
    const op1 = new RetainOp(
      4,
      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
    )
    const op2 = new RetainOp(
      4,
      new TrackingProps('insert', 'user2', new Date('2024-01-01T00:00:00.000Z'))
    )
    expect(op1.canMergeWith(op2)).to.be.false
    expect(() => op1.mergeWith(op2)).to.throw(Error)
  })

  it('can merge with another RetainOp if tracking info is the same', function () {
    const op1 = new RetainOp(
      4,
      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
    )
    const op2 = new RetainOp(
      4,
      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
    )
    op1.mergeWith(op2)
    // The merged op keeps the shared tracking info and sums the lengths.
    expect(
      op1.equals(
        new RetainOp(
          8,
          new TrackingProps(
            'insert',
            'user1',
            new Date('2024-01-01T00:00:00.000Z')
          )
        )
      )
    ).to.be.true
  })

  it('cannot merge with an InsertOp', function () {
    const op1 = new RetainOp(1)
    const op2 = new InsertOp('a')
    expect(op1.canMergeWith(op2)).to.be.false
    expect(() => op1.mergeWith(op2)).to.throw(Error)
  })

  it('cannot merge with a RemoveOp', function () {
    const op1 = new RetainOp(1)
    const op2 = new RemoveOp(1)
    expect(op1.canMergeWith(op2)).to.be.false
    expect(() => op1.mergeWith(op2)).to.throw(Error)
  })

  it('can be converted to JSON', function () {
    // A plain retain serializes to its bare length.
    const op = new RetainOp(3)
    expect(op.toJSON()).to.equal(3)
  })

  it('adds to the length and cursor when applied to length', function () {
    // Retain advances both the output length and the input cursor.
    const op = new RetainOp(3)
    const { length, inputCursor } = op.applyToLength({
      length: 10,
      inputCursor: 10,
      inputLength: 30,
    })
    expect(length).to.equal(13)
    expect(inputCursor).to.equal(13)
  })
})
|
||||
|
||||
// InsertOp semantics: equality requires matching insertion text, tracking
// info, and comment-id set; merging requires identical tracking info and
// comment ids, and concatenates the inserted strings.
describe('InsertOp', function () {
  it('is equal to another InsertOp with the same insertion', function () {
    const op1 = new InsertOp('a')
    const op2 = new InsertOp('a')
    expect(op1.equals(op2)).to.be.true
  })

  it('is not equal to another InsertOp with a different insertion', function () {
    const op1 = new InsertOp('a')
    const op2 = new InsertOp('b')
    expect(op1.equals(op2)).to.be.false
  })

  it('is not equal to another InsertOp with no tracking info', function () {
    const op1 = new InsertOp(
      'a',
      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
    )
    const op2 = new InsertOp('a')
    expect(op1.equals(op2)).to.be.false
  })

  it('is not equal to another InsertOp with different tracking info', function () {
    const op1 = new InsertOp(
      'a',
      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
    )
    const op2 = new InsertOp(
      'a',
      new TrackingProps('insert', 'user2', new Date('2024-01-01T00:00:00.000Z'))
    )
    expect(op1.equals(op2)).to.be.false
  })

  it('is not equal to another InsertOp with no comment ids', function () {
    // Third constructor argument is the list of comment ids.
    const op1 = new InsertOp('a', undefined, ['1'])
    const op2 = new InsertOp('a')
    expect(op1.equals(op2)).to.be.false
  })

  it('is not equal to another InsertOp with tracking info', function () {
    const op1 = new InsertOp('a', undefined)
    const op2 = new InsertOp(
      'a',
      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
    )
    expect(op1.equals(op2)).to.be.false
  })

  it('is not equal to another InsertOp with comment ids', function () {
    const op1 = new InsertOp('a')
    const op2 = new InsertOp('a', undefined, ['1'])
    expect(op1.equals(op2)).to.be.false
  })

  it('is not equal to another InsertOp with different comment ids', function () {
    const op1 = new InsertOp('a', undefined, ['1'])
    const op2 = new InsertOp('a', undefined, ['2'])
    expect(op1.equals(op2)).to.be.false
  })

  it('is not equal to another InsertOp with overlapping comment ids', function () {
    // Comment-id sets must match exactly; overlap is not enough.
    const op1 = new InsertOp('a', undefined, ['1'])
    const op2 = new InsertOp('a', undefined, ['2', '1'])
    expect(op1.equals(op2)).to.be.false
  })

  it('is not equal to a RetainOp', function () {
    const op1 = new InsertOp('a')
    const op2 = new RetainOp(1)
    expect(op1.equals(op2)).to.be.false
  })

  it('is not equal to a RemoveOp', function () {
    const op1 = new InsertOp('a')
    const op2 = new RemoveOp(1)
    expect(op1.equals(op2)).to.be.false
  })

  it('can merge with another InsertOp', function () {
    // mergeWith mutates op1 in place, concatenating the insertions.
    const op1 = new InsertOp('a')
    const op2 = new InsertOp('b')
    expect(op1.canMergeWith(op2)).to.be.true
    op1.mergeWith(op2)
    expect(op1.equals(new InsertOp('ab'))).to.be.true
  })

  it('cannot merge with another InsertOp if comment id info is different', function () {
    const op1 = new InsertOp('a', undefined, ['1'])
    const op2 = new InsertOp('b', undefined, ['1', '2'])
    expect(op1.canMergeWith(op2)).to.be.false
    expect(() => op1.mergeWith(op2)).to.throw(Error)
  })

  it('cannot merge with another InsertOp if comment id info is different while tracking info matches', function () {
    const op1 = new InsertOp(
      'a',
      new TrackingProps(
        'insert',
        'user1',
        new Date('2024-01-01T00:00:00.000Z')
      ),
      ['1', '2']
    )
    const op2 = new InsertOp(
      'b',
      new TrackingProps(
        'insert',
        'user1',
        new Date('2024-01-01T00:00:00.000Z')
      ),
      ['3']
    )
    expect(op1.canMergeWith(op2)).to.be.false
    expect(() => op1.mergeWith(op2)).to.throw(Error)
  })

  it('cannot merge with another InsertOp if comment id is present in other and tracking info matches', function () {
    const op1 = new InsertOp(
      'a',
      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
    )
    const op2 = new InsertOp(
      'b',
      new TrackingProps(
        'insert',
        'user1',
        new Date('2024-01-01T00:00:00.000Z')
      ),
      ['1']
    )
    expect(op1.canMergeWith(op2)).to.be.false
    expect(() => op1.mergeWith(op2)).to.throw(Error)
  })

  it('cannot merge with another InsertOp if tracking info is different', function () {
    const op1 = new InsertOp(
      'a',
      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
    )
    const op2 = new InsertOp(
      'b',
      new TrackingProps('insert', 'user2', new Date('2024-01-01T00:00:00.000Z'))
    )
    expect(op1.canMergeWith(op2)).to.be.false
    expect(() => op1.mergeWith(op2)).to.throw(Error)
  })

  it('can merge with another InsertOp if tracking and comment info is the same', function () {
    const op1 = new InsertOp(
      'a',
      new TrackingProps(
        'insert',
        'user1',
        new Date('2024-01-01T00:00:00.000Z')
      ),
      ['1', '2']
    )
    const op2 = new InsertOp(
      'b',
      new TrackingProps(
        'insert',
        'user1',
        new Date('2024-01-01T00:00:00.000Z')
      ),
      ['1', '2']
    )
    expect(op1.canMergeWith(op2)).to.be.true
    op1.mergeWith(op2)
    expect(
      op1.equals(
        new InsertOp(
          'ab',
          new TrackingProps(
            'insert',
            'user1',
            new Date('2024-01-01T00:00:00.000Z')
          ),
          ['1', '2']
        )
      )
    ).to.be.true
  })

  it('cannot merge with a RetainOp', function () {
    const op1 = new InsertOp('a')
    const op2 = new RetainOp(1)
    expect(op1.canMergeWith(op2)).to.be.false
    expect(() => op1.mergeWith(op2)).to.throw(Error)
  })

  it('cannot merge with a RemoveOp', function () {
    const op1 = new InsertOp('a')
    const op2 = new RemoveOp(1)
    expect(op1.canMergeWith(op2)).to.be.false
    expect(() => op1.mergeWith(op2)).to.throw(Error)
  })

  it('can be converted to JSON', function () {
    // A plain insert serializes to its bare string.
    const op = new InsertOp('a')
    expect(op.toJSON()).to.equal('a')
  })

  it('adds to the length when applied to length', function () {
    // Insert grows the output length but does not consume input.
    const op = new InsertOp('abc')
    const { length, inputCursor } = op.applyToLength({
      length: 10,
      inputCursor: 20,
      inputLength: 40,
    })
    expect(length).to.equal(13)
    expect(inputCursor).to.equal(20)
  })

  // NOTE(review): the two tests below construct RetainOp, not InsertOp —
  // they appear to belong in the RetainOp describe block above; confirm
  // and relocate in a follow-up.
  it('can apply a retain of the rest of the input', function () {
    const op = new RetainOp(10)
    const { length, inputCursor } = op.applyToLength({
      length: 10,
      inputCursor: 5,
      inputLength: 15,
    })
    expect(length).to.equal(20)
    expect(inputCursor).to.equal(15)
  })

  it('cannot apply to length if the input cursor is at the end', function () {
    // Retaining past the end of the input is an application error.
    const op = new RetainOp(10)
    expect(() =>
      op.applyToLength({
        length: 10,
        inputCursor: 10,
        inputLength: 10,
      })
    ).to.throw(ApplyError)
  })
})
|
||||
|
||||
// RemoveOp semantics: equality requires matching length; merging sums
// lengths and is only allowed between RemoveOps.
describe('RemoveOp', function () {
  it('is equal to another RemoveOp with the same length', function () {
    const op1 = new RemoveOp(1)
    const op2 = new RemoveOp(1)
    expect(op1.equals(op2)).to.be.true
  })

  it('is not equal to another RemoveOp with a different length', function () {
    const op1 = new RemoveOp(1)
    const op2 = new RemoveOp(2)
    expect(op1.equals(op2)).to.be.false
  })

  it('is not equal to a RetainOp', function () {
    const op1 = new RemoveOp(1)
    const op2 = new RetainOp(1)
    expect(op1.equals(op2)).to.be.false
  })

  it('is not equal to an InsertOp', function () {
    const op1 = new RemoveOp(1)
    const op2 = new InsertOp('a')
    expect(op1.equals(op2)).to.be.false
  })

  it('can merge with another RemoveOp', function () {
    // mergeWith mutates op1 in place, summing the lengths.
    const op1 = new RemoveOp(1)
    const op2 = new RemoveOp(2)
    expect(op1.canMergeWith(op2)).to.be.true
    op1.mergeWith(op2)
    expect(op1.equals(new RemoveOp(3))).to.be.true
  })

  it('cannot merge with a RetainOp', function () {
    const op1 = new RemoveOp(1)
    const op2 = new RetainOp(1)
    expect(op1.canMergeWith(op2)).to.be.false
    expect(() => op1.mergeWith(op2)).to.throw(Error)
  })

  it('cannot merge with an InsertOp', function () {
    const op1 = new RemoveOp(1)
    const op2 = new InsertOp('a')
    expect(op1.canMergeWith(op2)).to.be.false
    expect(() => op1.mergeWith(op2)).to.throw(Error)
  })

  it('can be converted to JSON', function () {
    // Removals serialize as negative numbers.
    const op = new RemoveOp(3)
    expect(op.toJSON()).to.equal(-3)
  })

  it('adds to the input cursor when applied to length', function () {
    // Remove consumes input without growing the output length.
    const op = new RemoveOp(3)
    const { length, inputCursor } = op.applyToLength({
      length: 10,
      inputCursor: 10,
      inputLength: 30,
    })
    expect(length).to.equal(10)
    expect(inputCursor).to.equal(13)
  })
})
|
||||
92
libraries/overleaf-editor-core/test/snapshot.test.js
Normal file
92
libraries/overleaf-editor-core/test/snapshot.test.js
Normal file
@@ -0,0 +1,92 @@
|
||||
'use strict'

const { expect } = require('chai')
const {
  File,
  Snapshot,
  TextOperation,
  Change,
  EditFileOperation,
} = require('..')

// Tests for Snapshot: blob-hash discovery, single-file edits, and applying
// whole Changes (with and without strict error handling).
describe('Snapshot', function () {
  describe('findBlobHashes', function () {
    it('finds blob hashes from files', function () {
      const snapshot = new Snapshot()

      const blobHashes = new Set()
      snapshot.findBlobHashes(blobHashes)
      expect(blobHashes.size).to.equal(0)

      // Add a file without a hash.
      snapshot.addFile('foo', File.fromString(''))
      snapshot.findBlobHashes(blobHashes)
      expect(blobHashes.size).to.equal(0)

      // Add a file with a hash.
      snapshot.addFile('bar', File.fromHash(File.EMPTY_FILE_HASH))
      snapshot.findBlobHashes(blobHashes)
      expect(Array.from(blobHashes)).to.have.members([File.EMPTY_FILE_HASH])
    })
  })

  describe('editFile', function () {
    let snapshot
    let operation

    beforeEach(function () {
      // One file containing 'hello' and an operation appending ' world!'.
      snapshot = new Snapshot()
      snapshot.addFile('hello.txt', File.fromString('hello'))
      operation = new TextOperation()
      operation.retain(5)
      operation.insert(' world!')
    })

    it('applies text operations to the file', function () {
      snapshot.editFile('hello.txt', operation)
      const file = snapshot.getFile('hello.txt')
      expect(file.getContent()).to.equal('hello world!')
    })

    it('rejects text operations for nonexistent file', function () {
      expect(() => {
        snapshot.editFile('does-not-exist.txt', operation)
      }).to.throw(Snapshot.EditMissingFileError)
    })
  })

  describe('applyAll', function () {
    let snapshot
    let change

    beforeEach(function () {
      // A Change with one failing edit (missing file) followed by one
      // valid edit, to exercise lenient vs strict application.
      snapshot = new Snapshot()
      snapshot.addFile('empty.txt', File.fromString(''))
      const badTextOp = new TextOperation()
      badTextOp.insert('FAIL!')
      const goodTextOp = new TextOperation()
      goodTextOp.insert('SUCCESS!')
      change = new Change(
        [
          new EditFileOperation('missing.txt', badTextOp),
          new EditFileOperation('empty.txt', goodTextOp),
        ],
        new Date()
      )
    })

    it('ignores recoverable errors', function () {
      // Lenient mode: the bad edit is skipped, the good edit still applies.
      snapshot.applyAll([change])
      const file = snapshot.getFile('empty.txt')
      expect(file.getContent()).to.equal('SUCCESS!')
    })

    it('stops on recoverable errors in strict mode', function () {
      // Strict mode: the first failure aborts the change, so the good edit
      // after it is never applied.
      expect(() => {
        snapshot.applyAll([change], { strict: true })
      }).to.throw(Snapshot.EditMissingFileError)
      const file = snapshot.getFile('empty.txt')
      expect(file.getContent()).to.equal('')
    })
  })
})
|
||||
167
libraries/overleaf-editor-core/test/string_file_data.test.js
Normal file
167
libraries/overleaf-editor-core/test/string_file_data.test.js
Normal file
@@ -0,0 +1,167 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const { expect } = require('chai')
|
||||
const _ = require('lodash')
|
||||
|
||||
const ot = require('..')
|
||||
const StringFileData = require('../lib/file_data/string_file_data')
|
||||
const TextOperation = ot.TextOperation
|
||||
|
||||
describe('StringFileData', function () {
  it('throws when it contains non BMP chars', function () {
    const file = new StringFileData('𝌆𝌆𝌆')
    const operation = new TextOperation()
    operation.insert('aa')
    expect(() => file.edit(operation)).to.throw(
      TextOperation.ApplyError,
      /string contains non BMP characters/
    )
  })

  it('validates string length when edited', function () {
    const longString = 'a'.repeat(TextOperation.MAX_STRING_LENGTH)
    const file = new StringFileData(longString)
    expect(file.getByteLength()).to.equal(longString.length)
    expect(file.getStringLength()).to.equal(longString.length)

    // Growing the file past the limit must fail and leave it untouched.
    expect(() =>
      file.edit(new TextOperation().retain(longString.length).insert('x'))
    ).to.throw(TextOperation.TooLongError)
    expect(file.getByteLength()).to.equal(longString.length)
    expect(file.getStringLength()).to.equal(longString.length)

    // Shrinking the file is always allowed.
    file.edit(new TextOperation().retain(longString.length - 1).remove(1))
    expect(file.getByteLength()).to.equal(longString.length - 1)
    expect(file.getStringLength()).to.equal(longString.length - 1)
  })

  it('getComments() should return an empty array', function () {
    const file = new StringFileData('test')
    expect(file.getComments().toRaw()).to.eql([])
  })

  it('creates StringFileData with comments', function () {
    const file = new StringFileData('test', [
      { id: 'comm1', ranges: [{ pos: 5, length: 10 }] },
      { id: 'comm2', ranges: [{ pos: 20, length: 5 }] },
    ])

    expect(file.getComments().toRaw()).to.eql([
      { id: 'comm1', ranges: [{ pos: 5, length: 10 }] },
      { id: 'comm2', ranges: [{ pos: 20, length: 5 }] },
    ])
  })

  it('fromRaw() should create StringFileData with comments', function () {
    const file = StringFileData.fromRaw({
      content: 'test',
      comments: [
        { id: 'comm1', ranges: [{ pos: 5, length: 10 }] },
        { id: 'comm2', ranges: [{ pos: 20, length: 5 }], resolved: true },
      ],
    })

    expect(file.getComments().toRaw()).to.eql([
      { id: 'comm1', ranges: [{ pos: 5, length: 10 }] },
      { id: 'comm2', ranges: [{ pos: 20, length: 5 }], resolved: true },
    ])
  })

  it('getContent should filter out tracked deletions when passed option', function () {
    const file = new StringFileData(
      'the quick brown fox jumps over the lazy dog',
      undefined,
      [
        {
          range: { pos: 4, length: 6 },
          tracking: {
            type: 'delete',
            ts: '2024-01-01T00:00:00.000Z',
            userId: 'user1',
          },
        },
        {
          range: { pos: 35, length: 5 },
          tracking: {
            type: 'delete',
            ts: '2023-01-01T00:00:00.000Z',
            userId: 'user2',
          },
        },
      ]
    )

    // Without the option, tracked deletions are still visible.
    expect(file.getContent()).to.equal(
      'the quick brown fox jumps over the lazy dog'
    )
    expect(file.getContent({ filterTrackedDeletes: true })).to.equal(
      'the brown fox jumps over the dog'
    )
  })

  it('getContent should keep tracked insertions when passed option to remove tracked changes', function () {
    const file = new StringFileData(
      'the quick brown fox jumps over the lazy dog',
      undefined,
      [
        {
          range: { pos: 4, length: 6 },
          tracking: {
            type: 'insert',
            ts: '2024-01-01T00:00:00.000Z',
            userId: 'user1',
          },
        },
        {
          range: { pos: 35, length: 5 },
          tracking: {
            type: 'delete',
            ts: '2023-01-01T00:00:00.000Z',
            userId: 'user2',
          },
        },
      ]
    )

    expect(file.getContent()).to.equal(
      'the quick brown fox jumps over the lazy dog'
    )
    // Only the tracked deletion is filtered; the insertion survives.
    expect(file.getContent({ filterTrackedDeletes: true })).to.equal(
      'the quick brown fox jumps over the dog'
    )
  })
})
|
||||
@@ -0,0 +1,30 @@
|
||||
/**
|
||||
* @import { Blob } from "../.."
|
||||
*/
|
||||
|
||||
/**
|
||||
* Fake blob store for tests
|
||||
*/
|
||||
class FakeBlobStore {
  /**
   * Look up a string by its blob hash.
   *
   * Stub: always throws; tests that exercise this path should stub or
   * subclass it.
   *
   * @param {string} hash
   * @return {Promise<string>}
   */
  getString(hash) {
    throw new Error('Not implemented')
  }

  /**
   * Store a string and return the resulting blob.
   *
   * Stub: always throws; tests that exercise this path should stub or
   * subclass it.
   *
   * @param {string} content
   * @return {Promise<Blob>}
   */
  putString(content) {
    throw new Error('Not implemented')
  }
}
|
||||
|
||||
module.exports = FakeBlobStore
|
||||
66
libraries/overleaf-editor-core/test/support/random.js
Normal file
66
libraries/overleaf-editor-core/test/support/random.js
Normal file
@@ -0,0 +1,66 @@
|
||||
//
|
||||
// Randomised testing helpers from OT.js:
|
||||
// https://github.com/Operational-Transformation/ot.js/blob/
|
||||
// 8873b7e28e83f9adbf6c3a28ec639c9151a838ae/test/helpers.js
|
||||
//
|
||||
'use strict'
|
||||
|
||||
/**
 * Return a uniformly distributed integer in [0, n).
 *
 * @param {number} n
 * @returns {number}
 */
function randomInt(n) {
  return Math.floor(Math.random() * n)
}

/**
 * Build a random string of length n from lowercase ASCII letters,
 * optionally mixing in newline characters (~15% of positions).
 *
 * @param {number} n
 * @param {boolean} [newLine] include newlines (default true)
 * @returns {string}
 */
function randomString(n, newLine = true) {
  let result = ''
  while (n--) {
    result +=
      newLine && Math.random() < 0.15
        ? '\n'
        : String.fromCharCode(97 + randomInt(26))
  }
  return result
}

/**
 * Pick a uniformly random element of arr.
 *
 * @param {Array} arr
 */
function randomElement(arr) {
  return arr[randomInt(arr.length)]
}

/**
 * Wrap a test body so it runs numTrials times when invoked.
 * The returned function is single-use: it consumes numTrials.
 *
 * @param {number} numTrials
 * @param {Function} test
 * @returns {Function}
 */
function randomTest(numTrials, test) {
  return function () {
    while (numTrials--) test()
  }
}

/**
 * Pick a random strict subset of arr (between 0 and arr.length - 1
 * elements), never reusing a position.
 *
 * @param {Array} arr
 * @returns {Array}
 */
function randomSubset(arr) {
  const size = randomInt(arr.length)
  const remaining = arr.map((value, index) => index)
  const picked = []
  for (let i = 0; i < size; i++) {
    const slot = randomInt(remaining.length)
    picked.push(arr[remaining[slot]])
    remaining.splice(slot, 1)
  }
  return picked
}

/**
 * Generate `number` comments with unique random ids, empty ranges, and
 * resolved set to false.
 *
 * @param {number} number
 * @returns {{ids: string[], comments: Array}}
 */
function randomComments(number) {
  const seenIds = new Set()
  const comments = []
  while (comments.length < number) {
    const id = randomString(10, false)
    if (seenIds.has(id)) continue
    comments.push({ id, ranges: [], resolved: false })
    seenIds.add(id)
  }
  return { ids: Array.from(seenIds), comments }
}
|
||||
|
||||
exports.int = randomInt
|
||||
exports.string = randomString
|
||||
exports.element = randomElement
|
||||
exports.test = randomTest
|
||||
exports.comments = randomComments
|
||||
exports.subset = randomSubset
|
||||
@@ -0,0 +1,57 @@
|
||||
const TrackingProps = require('../../lib/file_data/tracking_props')
|
||||
const ClearTrackingProps = require('../../lib/file_data/clear_tracking_props')
|
||||
const TextOperation = require('../../lib/operation/text_operation')
|
||||
const random = require('./random')
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} str
|
||||
* @param {string[]} [commentIds]
|
||||
* @returns {TextOperation}
|
||||
*/
|
||||
/**
 * Build a random TextOperation whose base length matches str, mixing
 * inserts, removes and retains, with occasional tracking info and
 * comment ids for randomised OT testing.
 *
 * @param {string} str
 * @param {string[]} [commentIds]
 * @returns {TextOperation}
 */
function randomTextOperation(str, commentIds) {
  const op = new TextOperation()
  while (true) {
    const remaining = str.length - op.baseLength
    if (remaining === 0) break
    const roll = Math.random()
    const len = 1 + random.int(Math.min(remaining - 1, 20))
    // Roughly 10% of components carry random tracking metadata.
    const tracking =
      Math.random() < 0.1
        ? new TrackingProps(
            random.element(['insert', 'delete']),
            random.element(['user1', 'user2', 'user3']),
            new Date(
              random.element([
                '2024-01-01T00:00:00.000Z',
                '2023-01-01T00:00:00.000Z',
                '2022-01-01T00:00:00.000Z',
              ])
            )
          )
        : undefined
    if (roll < 0.2) {
      // Insert, occasionally attaching a random subset of comment ids.
      let insertCommentIds
      if (commentIds?.length > 0 && Math.random() < 0.3) {
        insertCommentIds = random.subset(commentIds)
      }
      op.insert(random.string(len), {
        tracking,
        commentIds: insertCommentIds,
      })
    } else if (roll < 0.4) {
      op.remove(len)
    } else if (roll < 0.5) {
      op.retain(len, { tracking: new ClearTrackingProps() })
    } else {
      op.retain(len, { tracking })
    }
  }
  if (Math.random() < 0.3) {
    // NOTE(review): `1 + string` coerces to a string prefixed with "1",
    // making the trailing insert 11 chars long — presumably intentional
    // (adds a digit to the alphabet); confirm against OT.js helpers.
    op.insert(1 + random.string(10))
  }
  return op
}
|
||||
|
||||
module.exports = randomTextOperation
|
||||
879
libraries/overleaf-editor-core/test/text_operation.test.js
Normal file
879
libraries/overleaf-editor-core/test/text_operation.test.js
Normal file
@@ -0,0 +1,879 @@
|
||||
// @ts-check
|
||||
//
|
||||
// These tests are based on the OT.js tests:
|
||||
// https://github.com/Operational-Transformation/ot.js/blob/
|
||||
// 8873b7e28e83f9adbf6c3a28ec639c9151a838ae/test/lib/test-text-operation.js
|
||||
//
|
||||
'use strict'
|
||||
|
||||
const { expect } = require('chai')
|
||||
const random = require('./support/random')
|
||||
const randomOperation = require('./support/random_text_operation')
|
||||
|
||||
const ot = require('..')
|
||||
const TextOperation = ot.TextOperation
|
||||
const StringFileData = require('../lib/file_data/string_file_data')
|
||||
const { RetainOp, InsertOp, RemoveOp } = require('../lib/operation/scan_op')
|
||||
const TrackingProps = require('../lib/file_data/tracking_props')
|
||||
const ClearTrackingProps = require('../lib/file_data/clear_tracking_props')
|
||||
|
||||
describe('TextOperation', function () {
|
||||
const numTrials = 500
|
||||
|
||||
it('tracks base and target lengths', function () {
|
||||
const o = new TextOperation()
|
||||
expect(o.baseLength).to.equal(0)
|
||||
expect(o.targetLength).to.equal(0)
|
||||
o.retain(5)
|
||||
expect(o.baseLength).to.equal(5)
|
||||
expect(o.targetLength).to.equal(5)
|
||||
o.insert('abc')
|
||||
expect(o.baseLength).to.equal(5)
|
||||
expect(o.targetLength).to.equal(8)
|
||||
o.retain(2)
|
||||
expect(o.baseLength).to.equal(7)
|
||||
expect(o.targetLength).to.equal(10)
|
||||
o.remove(2)
|
||||
expect(o.baseLength).to.equal(9)
|
||||
expect(o.targetLength).to.equal(10)
|
||||
})
|
||||
|
||||
it('supports chaining', function () {
|
||||
const o = new TextOperation()
|
||||
.retain(5)
|
||||
.retain(0)
|
||||
.insert('lorem')
|
||||
.insert('')
|
||||
.remove('abc')
|
||||
.remove(3)
|
||||
.remove(0)
|
||||
.remove('')
|
||||
expect(o.ops.length).to.equal(3)
|
||||
})
|
||||
|
||||
it('ignores empty operations', function () {
|
||||
const o = new TextOperation()
|
||||
o.retain(0)
|
||||
o.insert('')
|
||||
o.remove('')
|
||||
expect(o.ops.length).to.equal(0)
|
||||
})
|
||||
|
||||
it('checks for equality', function () {
|
||||
const op1 = new TextOperation().remove(1).insert('lo').retain(2).retain(3)
|
||||
const op2 = new TextOperation().remove(-1).insert('l').insert('o').retain(5)
|
||||
expect(op1.equals(op2)).to.be.true
|
||||
op1.remove(1)
|
||||
op2.retain(1)
|
||||
expect(op1.equals(op2)).to.be.false
|
||||
})
|
||||
|
||||
it('merges ops', function () {
|
||||
function last(arr) {
|
||||
return arr[arr.length - 1]
|
||||
}
|
||||
const o = new TextOperation()
|
||||
expect(o.ops.length).to.equal(0)
|
||||
o.retain(2)
|
||||
expect(o.ops.length).to.equal(1)
|
||||
expect(last(o.ops).equals(new RetainOp(2))).to.be.true
|
||||
o.retain(3)
|
||||
expect(o.ops.length).to.equal(1)
|
||||
expect(last(o.ops).equals(new RetainOp(5))).to.be.true
|
||||
o.insert('abc')
|
||||
expect(o.ops.length).to.equal(2)
|
||||
expect(last(o.ops).equals(new InsertOp('abc'))).to.be.true
|
||||
o.insert('xyz')
|
||||
expect(o.ops.length).to.equal(2)
|
||||
expect(last(o.ops).equals(new InsertOp('abcxyz'))).to.be.true
|
||||
o.remove('d')
|
||||
expect(o.ops.length).to.equal(3)
|
||||
expect(last(o.ops).equals(new RemoveOp(1))).to.be.true
|
||||
o.remove('d')
|
||||
expect(o.ops.length).to.equal(3)
|
||||
expect(last(o.ops).equals(new RemoveOp(2))).to.be.true
|
||||
})
|
||||
|
||||
it('checks for no-ops', function () {
|
||||
const o = new TextOperation()
|
||||
expect(o.isNoop()).to.be.true
|
||||
o.retain(5)
|
||||
expect(o.isNoop()).to.be.true
|
||||
o.retain(3)
|
||||
expect(o.isNoop()).to.be.true
|
||||
o.insert('lorem')
|
||||
expect(o.isNoop()).to.be.false
|
||||
})
|
||||
|
||||
it('converts to string', function () {
|
||||
const o = new TextOperation()
|
||||
o.retain(2)
|
||||
o.insert('lorem')
|
||||
o.remove('ipsum')
|
||||
o.retain(5)
|
||||
expect(o.toString()).to.equal(
|
||||
"retain 2, insert 'lorem', remove 5, retain 5"
|
||||
)
|
||||
})
|
||||
|
||||
it('converts from JSON', function () {
|
||||
const ops = [2, -1, -1, 'cde']
|
||||
const o = TextOperation.fromJSON({ textOperation: ops })
|
||||
expect(o.ops.length).to.equal(3)
|
||||
expect(o.baseLength).to.equal(4)
|
||||
expect(o.targetLength).to.equal(5)
|
||||
|
||||
function assertIncorrectAfter(fn) {
|
||||
const ops2 = ops.slice(0)
|
||||
fn(ops2)
|
||||
expect(() => {
|
||||
TextOperation.fromJSON({ textOperation: ops2 })
|
||||
}).to.throw
|
||||
}
|
||||
|
||||
assertIncorrectAfter(ops2 => {
|
||||
ops2.push({ insert: 'x' })
|
||||
})
|
||||
assertIncorrectAfter(ops2 => {
|
||||
ops2.push(null)
|
||||
})
|
||||
})
|
||||
|
||||
it(
|
||||
'applies (randomised)',
|
||||
random.test(numTrials, () => {
|
||||
const str = random.string(50)
|
||||
const comments = random.comments(6)
|
||||
const o = randomOperation(str, comments.ids)
|
||||
expect(str.length).to.equal(o.baseLength)
|
||||
const file = new StringFileData(str, comments.comments)
|
||||
o.apply(file)
|
||||
const result = file.getContent()
|
||||
expect(result.length).to.equal(o.targetLength)
|
||||
})
|
||||
)
|
||||
|
||||
it(
|
||||
'converts to/from JSON (randomised)',
|
||||
random.test(numTrials, () => {
|
||||
const doc = random.string(50)
|
||||
const comments = random.comments(2)
|
||||
const operation = randomOperation(doc, comments.ids)
|
||||
const roundTripOperation = TextOperation.fromJSON(operation.toJSON())
|
||||
expect(operation.equals(roundTripOperation)).to.be.true
|
||||
})
|
||||
)
|
||||
|
||||
it('throws when invalid operations are applied', function () {
|
||||
const operation = new TextOperation().retain(1)
|
||||
expect(() => {
|
||||
operation.apply(new StringFileData(''))
|
||||
}).to.throw(TextOperation.ApplyError)
|
||||
expect(() => {
|
||||
operation.apply(new StringFileData(' '))
|
||||
}).not.to.throw
|
||||
})
|
||||
|
||||
it('throws when insert text contains non BMP chars', function () {
|
||||
const operation = new TextOperation()
|
||||
const str = '𝌆\n'
|
||||
expect(() => {
|
||||
operation.insert(str)
|
||||
}).to.throw(
|
||||
TextOperation.UnprocessableError,
|
||||
/inserted text contains non BMP characters/
|
||||
)
|
||||
})
|
||||
|
||||
it('throws when base string contains non BMP chars', function () {
|
||||
const operation = new TextOperation()
|
||||
const str = '𝌆\n'
|
||||
expect(() => {
|
||||
operation.apply(new StringFileData(str))
|
||||
}).to.throw(
|
||||
TextOperation.UnprocessableError,
|
||||
/string contains non BMP characters/
|
||||
)
|
||||
})
|
||||
|
||||
it('throws at from JSON when it contains non BMP chars', function () {
|
||||
const operation = ['𝌆\n']
|
||||
expect(() => {
|
||||
TextOperation.fromJSON({ textOperation: operation })
|
||||
}).to.throw(
|
||||
TextOperation.UnprocessableError,
|
||||
/inserted text contains non BMP characters/
|
||||
)
|
||||
})
|
||||
|
||||
describe('invert', function () {
|
||||
it(
|
||||
'inverts (randomised)',
|
||||
random.test(numTrials, () => {
|
||||
const str = random.string(50)
|
||||
const comments = random.comments(6)
|
||||
const o = randomOperation(str, comments.ids)
|
||||
const originalFile = new StringFileData(str, comments.comments)
|
||||
const p = o.invert(originalFile)
|
||||
expect(o.baseLength).to.equal(p.targetLength)
|
||||
expect(o.targetLength).to.equal(p.baseLength)
|
||||
const file = new StringFileData(str, comments.comments)
|
||||
o.apply(file)
|
||||
p.apply(file)
|
||||
const result = file.toRaw()
|
||||
expect(result).to.deep.equal(originalFile.toRaw())
|
||||
})
|
||||
)
|
||||
|
||||
it('re-inserts removed range and comment when inverting', function () {
|
||||
expectInverseToLeadToInitialState(
|
||||
new StringFileData(
|
||||
'foo bar baz',
|
||||
[{ id: 'comment1', ranges: [{ pos: 4, length: 3 }] }],
|
||||
[
|
||||
{
|
||||
range: { pos: 4, length: 3 },
|
||||
tracking: {
|
||||
ts: '2024-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user1',
|
||||
},
|
||||
},
|
||||
]
|
||||
),
|
||||
new TextOperation().retain(4).remove(4).retain(3)
|
||||
)
|
||||
})
|
||||
|
||||
it('deletes inserted range and comment when inverting', function () {
|
||||
expectInverseToLeadToInitialState(
|
||||
new StringFileData('foo baz', [
|
||||
{ id: 'comment1', ranges: [], resolved: false },
|
||||
]),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.insert('bar', {
|
||||
commentIds: ['comment1'],
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2024-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user1',
|
||||
}),
|
||||
})
|
||||
.insert(' ')
|
||||
.retain(3)
|
||||
)
|
||||
})
|
||||
|
||||
it('removes a tracked delete', function () {
|
||||
expectInverseToLeadToInitialState(
|
||||
new StringFileData('foo bar baz'),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.retain(4, {
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'delete',
|
||||
userId: 'user1',
|
||||
}),
|
||||
})
|
||||
.retain(3)
|
||||
)
|
||||
})
|
||||
|
||||
it('restores comments that were removed', function () {
|
||||
expectInverseToLeadToInitialState(
|
||||
new StringFileData('foo bar baz', [
|
||||
{
|
||||
id: 'comment1',
|
||||
ranges: [{ pos: 4, length: 3 }],
|
||||
resolved: false,
|
||||
},
|
||||
]),
|
||||
new TextOperation().retain(4).remove(4).retain(3)
|
||||
)
|
||||
})
|
||||
|
||||
it('re-inserting removed part of comment restores original comment range', function () {
|
||||
expectInverseToLeadToInitialState(
|
||||
new StringFileData('foo bar baz', [
|
||||
{
|
||||
id: 'comment1',
|
||||
ranges: [{ pos: 0, length: 11 }],
|
||||
resolved: false,
|
||||
},
|
||||
]),
|
||||
new TextOperation().retain(4).remove(4).retain(3)
|
||||
)
|
||||
})
|
||||
|
||||
it('re-inserting removed part of tracked change restores tracked change range', function () {
|
||||
expectInverseToLeadToInitialState(
|
||||
new StringFileData('foo bar baz', undefined, [
|
||||
{
|
||||
range: { pos: 0, length: 11 },
|
||||
tracking: {
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'delete',
|
||||
userId: 'user1',
|
||||
},
|
||||
},
|
||||
]),
|
||||
new TextOperation().retain(4).remove(4).retain(3)
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('compose', function () {
|
||||
it(
|
||||
'composes (randomised)',
|
||||
random.test(numTrials, () => {
|
||||
// invariant: apply(str, compose(a, b)) === apply(apply(str, a), b)
|
||||
const str = random.string(20)
|
||||
const comments = random.comments(6)
|
||||
const a = randomOperation(str, comments.ids)
|
||||
const file = new StringFileData(str, comments.comments)
|
||||
a.apply(file)
|
||||
const afterA = file.toRaw()
|
||||
expect(afterA.content.length).to.equal(a.targetLength)
|
||||
const b = randomOperation(afterA.content, comments.ids)
|
||||
b.apply(file)
|
||||
const afterB = file.toRaw()
|
||||
expect(afterB.content.length).to.equal(b.targetLength)
|
||||
const ab = a.compose(b)
|
||||
expect(ab.targetLength).to.equal(b.targetLength)
|
||||
ab.apply(new StringFileData(str, comments.comments))
|
||||
const afterAB = file.toRaw()
|
||||
expect(afterAB).to.deep.equal(afterB)
|
||||
})
|
||||
)
|
||||
|
||||
it('composes two operations with comments', function () {
|
||||
expect(
|
||||
compose(
|
||||
new StringFileData('foo baz', [
|
||||
{ id: 'comment1', ranges: [], resolved: false },
|
||||
]),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.insert('bar', {
|
||||
commentIds: ['comment1'],
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2024-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user1',
|
||||
}),
|
||||
})
|
||||
.insert(' ')
|
||||
.retain(3),
|
||||
new TextOperation().retain(4).remove(4).retain(3)
|
||||
)
|
||||
).to.deep.equal({
|
||||
content: 'foo baz',
|
||||
comments: [{ id: 'comment1', ranges: [] }],
|
||||
})
|
||||
})
|
||||
|
||||
it('prioritizes tracked changes info from the latter operation', function () {
|
||||
expect(
|
||||
compose(
|
||||
new StringFileData('foo bar baz'),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.retain(4, {
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'delete',
|
||||
userId: 'user1',
|
||||
}),
|
||||
})
|
||||
.retain(3),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.retain(4, {
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2024-01-01T00:00:00.000Z',
|
||||
type: 'delete',
|
||||
userId: 'user2',
|
||||
}),
|
||||
})
|
||||
.retain(3)
|
||||
)
|
||||
).to.deep.equal({
|
||||
content: 'foo bar baz',
|
||||
trackedChanges: [
|
||||
{
|
||||
range: { pos: 4, length: 4 },
|
||||
tracking: {
|
||||
ts: '2024-01-01T00:00:00.000Z',
|
||||
type: 'delete',
|
||||
userId: 'user2',
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it('does not remove tracked change if not overriden by operation 2', function () {
|
||||
expect(
|
||||
compose(
|
||||
new StringFileData('foo bar baz'),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.retain(4, {
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'delete',
|
||||
userId: 'user1',
|
||||
}),
|
||||
})
|
||||
.retain(3),
|
||||
new TextOperation().retain(11)
|
||||
)
|
||||
).to.deep.equal({
|
||||
content: 'foo bar baz',
|
||||
trackedChanges: [
|
||||
{
|
||||
range: { pos: 4, length: 4 },
|
||||
tracking: {
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'delete',
|
||||
userId: 'user1',
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it('adds comment ranges from both operations', function () {
|
||||
expect(
|
||||
compose(
|
||||
new StringFileData('foo bar baz', [
|
||||
{
|
||||
id: 'comment1',
|
||||
ranges: [{ pos: 4, length: 3 }],
|
||||
resolved: false,
|
||||
},
|
||||
{
|
||||
id: 'comment2',
|
||||
ranges: [{ pos: 8, length: 3 }],
|
||||
resolved: false,
|
||||
},
|
||||
]),
|
||||
new TextOperation()
|
||||
.retain(5)
|
||||
.insert('aa', {
|
||||
commentIds: ['comment1'],
|
||||
})
|
||||
.retain(6),
|
||||
new TextOperation()
|
||||
.retain(11)
|
||||
.insert('bb', { commentIds: ['comment2'] })
|
||||
.retain(2)
|
||||
)
|
||||
).to.deep.equal({
|
||||
content: 'foo baaar bbbaz',
|
||||
comments: [
|
||||
{ id: 'comment1', ranges: [{ pos: 4, length: 5 }] },
|
||||
{ id: 'comment2', ranges: [{ pos: 10, length: 5 }] },
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it('it removes the tracking range from a tracked delete if operation 2 resolves it', function () {
|
||||
expect(
|
||||
compose(
|
||||
new StringFileData('foo bar baz'),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.retain(4, {
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'delete',
|
||||
userId: 'user1',
|
||||
}),
|
||||
})
|
||||
.retain(3),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.retain(4, {
|
||||
tracking: new ClearTrackingProps(),
|
||||
})
|
||||
.retain(3)
|
||||
)
|
||||
).to.deep.equal({
|
||||
content: 'foo bar baz',
|
||||
})
|
||||
})
|
||||
|
||||
it('it removes the tracking from an insert if operation 2 resolves it', function () {
|
||||
expect(
|
||||
compose(
|
||||
new StringFileData('foo bar baz'),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.insert('quux ', {
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user1',
|
||||
}),
|
||||
})
|
||||
.retain(7),
|
||||
new TextOperation()
|
||||
.retain(6)
|
||||
.retain(5, {
|
||||
tracking: new ClearTrackingProps(),
|
||||
})
|
||||
.retain(5)
|
||||
)
|
||||
).to.deep.equal({
|
||||
content: 'foo quux bar baz',
|
||||
trackedChanges: [
|
||||
{
|
||||
range: { pos: 4, length: 2 },
|
||||
tracking: {
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user1',
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('transform', function () {
|
||||
it(
|
||||
'transforms (randomised)',
|
||||
random.test(numTrials, () => {
|
||||
// invariant: compose(a, b') = compose(b, a')
|
||||
// where (a', b') = transform(a, b)
|
||||
const str = random.string(20)
|
||||
const comments = random.comments(6)
|
||||
const a = randomOperation(str, comments.ids)
|
||||
const b = randomOperation(str, comments.ids)
|
||||
const primes = TextOperation.transform(a, b)
|
||||
const aPrime = primes[0]
|
||||
const bPrime = primes[1]
|
||||
const abPrime = a.compose(bPrime)
|
||||
const baPrime = b.compose(aPrime)
|
||||
const abFile = new StringFileData(str, comments.comments)
|
||||
const baFile = new StringFileData(str, comments.comments)
|
||||
abPrime.apply(abFile)
|
||||
baPrime.apply(baFile)
|
||||
expect(abPrime.equals(baPrime)).to.be.true
|
||||
expect(abFile.toRaw()).to.deep.equal(baFile.toRaw())
|
||||
})
|
||||
)
|
||||
|
||||
it('adds a tracked change from operation 1', function () {
|
||||
expect(
|
||||
transform(
|
||||
new StringFileData('foo baz'),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.insert('bar', {
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2024-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user1',
|
||||
}),
|
||||
})
|
||||
.insert(' ')
|
||||
.retain(3),
|
||||
new TextOperation().retain(7).insert(' qux')
|
||||
)
|
||||
).to.deep.equal({
|
||||
content: 'foo bar baz qux',
|
||||
trackedChanges: [
|
||||
{
|
||||
range: { pos: 4, length: 3 },
|
||||
tracking: {
|
||||
ts: '2024-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user1',
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it('prioritizes tracked change from the first operation', function () {
|
||||
expect(
|
||||
transform(
|
||||
new StringFileData('foo bar baz'),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.retain(4, {
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'delete',
|
||||
userId: 'user1',
|
||||
}),
|
||||
})
|
||||
.retain(3),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.retain(4, {
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2024-01-01T00:00:00.000Z',
|
||||
type: 'delete',
|
||||
userId: 'user2',
|
||||
}),
|
||||
})
|
||||
.retain(3)
|
||||
)
|
||||
).to.deep.equal({
|
||||
content: 'foo bar baz',
|
||||
trackedChanges: [
|
||||
{
|
||||
range: { pos: 4, length: 4 },
|
||||
tracking: {
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'delete',
|
||||
userId: 'user1',
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it('splits a tracked change in two to resolve conflicts', function () {
|
||||
expect(
|
||||
transform(
|
||||
new StringFileData('foo bar baz'),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.retain(4, {
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'delete',
|
||||
userId: 'user1',
|
||||
}),
|
||||
})
|
||||
.retain(3),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.retain(5, {
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2024-01-01T00:00:00.000Z',
|
||||
type: 'delete',
|
||||
userId: 'user2',
|
||||
}),
|
||||
})
|
||||
.retain(2)
|
||||
)
|
||||
).to.deep.equal({
|
||||
content: 'foo bar baz',
|
||||
trackedChanges: [
|
||||
{
|
||||
range: { pos: 4, length: 4 },
|
||||
tracking: {
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'delete',
|
||||
userId: 'user1',
|
||||
},
|
||||
},
|
||||
{
|
||||
range: { pos: 8, length: 1 },
|
||||
tracking: {
|
||||
ts: '2024-01-01T00:00:00.000Z',
|
||||
type: 'delete',
|
||||
userId: 'user2',
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it('inserts a tracked change from operation 2 after a tracked change from operation 1', function () {
|
||||
expect(
|
||||
transform(
|
||||
new StringFileData('aaabbbccc'),
|
||||
new TextOperation()
|
||||
.retain(3)
|
||||
.insert('xxx', {
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user1',
|
||||
}),
|
||||
})
|
||||
.retain(6),
|
||||
new TextOperation()
|
||||
.retain(3)
|
||||
.insert('yyy', {
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2024-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user2',
|
||||
}),
|
||||
})
|
||||
.retain(6)
|
||||
)
|
||||
).to.deep.equal({
|
||||
content: 'aaaxxxyyybbbccc',
|
||||
trackedChanges: [
|
||||
{
|
||||
range: { pos: 3, length: 3 },
|
||||
tracking: {
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user1',
|
||||
},
|
||||
},
|
||||
{
|
||||
range: { pos: 6, length: 3 },
|
||||
tracking: {
|
||||
ts: '2024-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user2',
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it('preserves a comment even if it is completely removed in one operation', function () {
|
||||
expect(
|
||||
transform(
|
||||
new StringFileData('foo bar baz', [
|
||||
{
|
||||
id: 'comment1',
|
||||
ranges: [{ pos: 4, length: 3 }],
|
||||
resolved: false,
|
||||
},
|
||||
]),
|
||||
new TextOperation().retain(4).remove(4).retain(3),
|
||||
new TextOperation()
|
||||
.retain(7)
|
||||
.insert('qux ', {
|
||||
commentIds: ['comment1'],
|
||||
})
|
||||
.retain(4)
|
||||
)
|
||||
).to.deep.equal({
|
||||
content: 'foo qux baz',
|
||||
comments: [{ id: 'comment1', ranges: [{ pos: 4, length: 4 }] }],
|
||||
})
|
||||
})
|
||||
|
||||
it('extends a comment to both ranges if both operations add text in it', function () {
|
||||
expect(
|
||||
transform(
|
||||
new StringFileData('foo bar baz', [
|
||||
{
|
||||
id: 'comment1',
|
||||
ranges: [{ pos: 4, length: 3 }],
|
||||
resolved: false,
|
||||
},
|
||||
]),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.insert('qux ', {
|
||||
commentIds: ['comment1'],
|
||||
})
|
||||
.retain(7),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.insert('corge ', { commentIds: ['comment1'] })
|
||||
.retain(7)
|
||||
)
|
||||
).to.deep.equal({
|
||||
content: 'foo qux corge bar baz',
|
||||
comments: [{ id: 'comment1', ranges: [{ pos: 4, length: 13 }] }],
|
||||
})
|
||||
})
|
||||
|
||||
it('adds a tracked change from both operations at different places', function () {
|
||||
expect(
|
||||
transform(
|
||||
new StringFileData('foo bar baz'),
|
||||
new TextOperation()
|
||||
.retain(4)
|
||||
.insert('qux ', {
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user1',
|
||||
}),
|
||||
})
|
||||
.retain(7),
|
||||
new TextOperation()
|
||||
.retain(8)
|
||||
.insert('corge ', {
|
||||
tracking: TrackingProps.fromRaw({
|
||||
ts: '2024-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user2',
|
||||
}),
|
||||
})
|
||||
.retain(3)
|
||||
)
|
||||
).to.deep.equal({
|
||||
content: 'foo qux bar corge baz',
|
||||
trackedChanges: [
|
||||
{
|
||||
range: { pos: 4, length: 4 },
|
||||
tracking: {
|
||||
ts: '2023-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user1',
|
||||
},
|
||||
},
|
||||
{
|
||||
range: { pos: 12, length: 6 },
|
||||
tracking: {
|
||||
ts: '2024-01-01T00:00:00.000Z',
|
||||
type: 'insert',
|
||||
userId: 'user2',
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
/**
 * Assert that applying `operation` and then its inverse restores
 * `fileData` to its original raw state.
 *
 * Note: mutates `fileData` in place (apply + inverse apply).
 *
 * @param {StringFileData} fileData - the file to operate on
 * @param {TextOperation} operation - the operation to invert-check
 */
function expectInverseToLeadToInitialState(fileData, operation) {
  // Snapshot the raw state before any mutation.
  const rawBefore = fileData.toRaw()
  // The inverse must be computed against the pre-apply state.
  const inverse = operation.invert(fileData)
  operation.apply(fileData)
  inverse.apply(fileData)
  expect(fileData.toRaw()).to.deep.equal(rawBefore)
}
|
||||
|
||||
/**
 * Apply `op1` then `op2` sequentially to `fileData`, and verify that
 * `op1.compose(op2)` applied to a copy yields the same raw result.
 *
 * Note: mutates `fileData` in place.
 *
 * @param {StringFileData} fileData
 * @param {TextOperation} op1
 * @param {TextOperation} op2
 * @returns {object} the raw state after both operations were applied
 */
function compose(fileData, op1, op2) {
  // Copy the starting state so the composed operation can be applied
  // independently of the sequential path.
  const copy = StringFileData.fromRaw(fileData.toRaw())

  op1.apply(fileData)
  op2.apply(fileData)
  const sequentialResult = fileData.toRaw()

  op1.compose(op2).apply(copy)
  const composedResult = copy.toRaw()

  expect(sequentialResult).to.deep.equal(composedResult)
  return fileData.toRaw()
}
|
||||
|
||||
/**
 * Transform concurrent operations `a` and `b` against each other and
 * verify convergence: applying (a, b') and (b, a') to copies of the
 * same initial state must produce identical results.
 *
 * @param {StringFileData} fileData - the shared initial state
 * @param {TextOperation} a
 * @param {TextOperation} b
 * @returns {object} the converged raw state
 */
function transform(fileData, a, b) {
  const pathA = StringFileData.fromRaw(fileData.toRaw())
  const pathB = StringFileData.fromRaw(fileData.toRaw())

  const [aPrime, bPrime] = TextOperation.transform(a, b)

  // Path 1: a followed by transformed b.
  a.apply(pathA)
  bPrime.apply(pathA)
  // Path 2: b followed by transformed a.
  b.apply(pathB)
  aPrime.apply(pathB)

  expect(pathA.toRaw()).to.deep.equal(pathB.toRaw())

  return pathA.toRaw()
}
|
||||
55
libraries/overleaf-editor-core/test/tracked_change.test.js
Normal file
55
libraries/overleaf-editor-core/test/tracked_change.test.js
Normal file
@@ -0,0 +1,55 @@
|
||||
// @ts-check
const TrackedChange = require('../lib/file_data/tracked_change')
const Range = require('../lib/range')
const TrackingProps = require('../lib/file_data/tracking_props')
const { expect } = require('chai')

describe('TrackedChange', function () {
  it('should survive serialization', function () {
    // Round-trip through toRaw/fromRaw and expect a deep-equal instance.
    const original = new TrackedChange(
      new Range(1, 2),
      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
    )
    const roundTripped = TrackedChange.fromRaw(original.toRaw())
    expect(roundTripped).to.be.instanceOf(TrackedChange)
    expect(roundTripped).to.deep.equal(original)
  })

  it('can be created from a raw object', function () {
    const parsed = TrackedChange.fromRaw({
      range: { pos: 1, length: 2 },
      tracking: {
        type: 'insert',
        userId: 'user1',
        ts: '2024-01-01T00:00:00.000Z',
      },
    })
    expect(parsed).to.be.instanceOf(TrackedChange)
    expect(parsed).to.deep.equal(
      new TrackedChange(
        new Range(1, 2),
        new TrackingProps(
          'insert',
          'user1',
          new Date('2024-01-01T00:00:00.000Z')
        )
      )
    )
  })

  it('can be serialized to a raw object', function () {
    const change = new TrackedChange(
      new Range(1, 2),
      new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z'))
    )
    expect(change).to.be.instanceOf(TrackedChange)
    expect(change.toRaw()).to.deep.equal({
      range: { pos: 1, length: 2 },
      tracking: {
        type: 'insert',
        userId: 'user1',
        ts: '2024-01-01T00:00:00.000Z',
      },
    })
  })
})
|
||||
869
libraries/overleaf-editor-core/test/tracked_change_list.test.js
Normal file
869
libraries/overleaf-editor-core/test/tracked_change_list.test.js
Normal file
@@ -0,0 +1,869 @@
|
||||
// @ts-check
|
||||
const TrackedChangeList = require('../lib/file_data/tracked_change_list')
|
||||
const TrackingProps = require('../lib/file_data/tracking_props')
|
||||
const ClearTrackingProps = require('../lib/file_data/clear_tracking_props')
|
||||
const { expect } = require('chai')
|
||||
/** @import { TrackedChangeRawData } from '../lib/types' */
|
||||
|
||||
describe('TrackedChangeList', function () {
|
||||
describe('applyInsert', function () {
  const TS_2023 = '2023-01-01T00:00:00.000Z'
  const TS_2024 = '2024-01-01T00:00:00.000Z'

  // Shorthand for a raw tracked-change entry of type 'insert'.
  function insertChange(pos, length, userId, ts) {
    return {
      range: { pos, length },
      tracking: { type: 'insert', userId, ts },
    }
  }

  // applyInsert options carrying 'insert' tracking metadata.
  function trackedBy(userId, ts) {
    return {
      tracking: TrackingProps.fromRaw({ type: 'insert', userId, ts }),
    }
  }

  describe('with same author', function () {
    it('should merge consecutive tracked changes and use the latest timestamp', function () {
      const trackedChanges = TrackedChangeList.fromRaw([
        insertChange(0, 3, 'user1', TS_2023),
      ])
      trackedChanges.applyInsert(3, 'foo', trackedBy('user1', TS_2024))
      expect(trackedChanges.length).to.equal(1)
      expect(trackedChanges.toRaw()).to.deep.equal([
        insertChange(0, 6, 'user1', TS_2024),
      ])
    })

    it('should extend tracked changes when inserting in the middle', function () {
      const trackedChanges = TrackedChangeList.fromRaw([
        insertChange(0, 10, 'user1', TS_2024),
      ])
      trackedChanges.applyInsert(5, 'foobar', trackedBy('user1', TS_2024))
      expect(trackedChanges.length).to.equal(1)
      expect(trackedChanges.toRaw()).to.deep.equal([
        insertChange(0, 16, 'user1', TS_2024),
      ])
    })

    it('should merge two tracked changes starting at the same position', function () {
      const trackedChanges = TrackedChangeList.fromRaw([
        insertChange(0, 3, 'user1', TS_2023),
      ])
      trackedChanges.applyInsert(0, 'foo', trackedBy('user1', TS_2023))
      expect(trackedChanges.length).to.equal(1)
      expect(trackedChanges.toRaw()).to.deep.equal([
        insertChange(0, 6, 'user1', TS_2023),
      ])
    })

    it('should not extend range when there is a gap between the ranges', function () {
      const trackedChanges = TrackedChangeList.fromRaw([
        insertChange(0, 3, 'user1', TS_2023),
      ])
      trackedChanges.applyInsert(4, 'foobar', trackedBy('user1', TS_2024))
      expect(trackedChanges.length).to.equal(2)
      expect(trackedChanges.toRaw()).to.deep.equal([
        insertChange(0, 3, 'user1', TS_2023),
        insertChange(4, 6, 'user1', TS_2024),
      ])
    })

    it('should not merge tracked changes if there is a space between them', function () {
      const trackedChanges = TrackedChangeList.fromRaw([
        insertChange(5, 5, 'user1', TS_2023),
      ])
      trackedChanges.applyInsert(4, 'foo', trackedBy('user1', TS_2024))
      expect(trackedChanges.length).to.equal(2)
      expect(trackedChanges.toRaw()).to.deep.equal([
        insertChange(4, 3, 'user1', TS_2024),
        insertChange(8, 5, 'user1', TS_2023),
      ])
    })
  })

  describe('with different authors', function () {
    it('should not merge consecutive tracked changes', function () {
      const trackedChanges = TrackedChangeList.fromRaw([
        insertChange(0, 3, 'user1', TS_2023),
      ])
      trackedChanges.applyInsert(3, 'foo', trackedBy('user2', TS_2024))
      expect(trackedChanges.length).to.equal(2)
      expect(trackedChanges.toRaw()).to.deep.equal([
        insertChange(0, 3, 'user1', TS_2023),
        insertChange(3, 3, 'user2', TS_2024),
      ])
    })

    it('should not merge tracked changes at same position', function () {
      const trackedChanges = TrackedChangeList.fromRaw([
        insertChange(0, 3, 'user1', TS_2023),
      ])
      trackedChanges.applyInsert(0, 'foo', trackedBy('user2', TS_2024))
      expect(trackedChanges.length).to.equal(2)
      expect(trackedChanges.toRaw()).to.deep.equal([
        insertChange(0, 3, 'user2', TS_2024),
        insertChange(3, 3, 'user1', TS_2023),
      ])
    })

    it('should insert tracked changes in the middle of a tracked range', function () {
      const trackedChanges = TrackedChangeList.fromRaw([
        insertChange(0, 10, 'user1', TS_2023),
      ])
      trackedChanges.applyInsert(5, 'foobar', trackedBy('user2', TS_2024))
      expect(trackedChanges.length).to.equal(3)
      expect(trackedChanges.toRaw()).to.deep.equal([
        insertChange(0, 5, 'user1', TS_2023),
        insertChange(5, 6, 'user2', TS_2024),
        insertChange(11, 5, 'user1', TS_2023),
      ])
    })

    it('should insert tracked changes at the end of a tracked range', function () {
      const trackedChanges = TrackedChangeList.fromRaw([
        insertChange(0, 5, 'user1', TS_2023),
      ])
      trackedChanges.applyInsert(5, 'foobar', trackedBy('user2', TS_2024))
      expect(trackedChanges.length).to.equal(2)
      expect(trackedChanges.toRaw()).to.deep.equal([
        insertChange(0, 5, 'user1', TS_2023),
        insertChange(5, 6, 'user2', TS_2024),
      ])
    })

    it('should split a track range when inserting at last contained cursor', function () {
      const trackedChanges = TrackedChangeList.fromRaw([
        insertChange(0, 5, 'user1', TS_2023),
      ])
      trackedChanges.applyInsert(4, 'foobar', trackedBy('user2', TS_2024))
      expect(trackedChanges.length).to.equal(3)
      expect(trackedChanges.toRaw()).to.deep.equal([
        insertChange(0, 4, 'user1', TS_2023),
        insertChange(4, 6, 'user2', TS_2024),
        insertChange(10, 1, 'user1', TS_2023),
      ])
    })

    it('should insert a new range if inserted just before the first cursor of a tracked range', function () {
      const trackedChanges = TrackedChangeList.fromRaw([
        insertChange(5, 5, 'user1', TS_2023),
      ])
      trackedChanges.applyInsert(5, 'foobar', trackedBy('user2', TS_2024))
      expect(trackedChanges.length).to.equal(2)
      expect(trackedChanges.toRaw()).to.deep.equal([
        insertChange(5, 6, 'user2', TS_2024),
        insertChange(11, 5, 'user1', TS_2023),
      ])
    })
  })
})
|
||||
|
||||
describe('applyDelete', function () {
  // All applyDelete cases share one author and timestamp; only the
  // range geometry varies.
  function insertChange(pos, length) {
    return {
      range: { pos, length },
      tracking: {
        type: 'insert',
        userId: 'user1',
        ts: '2023-01-01T00:00:00.000Z',
      },
    }
  }

  it('should shrink tracked changes', function () {
    const trackedChanges = TrackedChangeList.fromRaw([insertChange(0, 10)])
    trackedChanges.applyDelete(5, 2)
    expect(trackedChanges.length).to.equal(1)
    expect(trackedChanges.toRaw()).to.deep.equal([insertChange(0, 8)])
  })

  it('should delete tracked changes when the whole range is deleted', function () {
    const trackedChanges = TrackedChangeList.fromRaw([insertChange(0, 10)])
    trackedChanges.applyDelete(0, 10)
    expect(trackedChanges.length).to.equal(0)
    expect(trackedChanges.toRaw()).to.deep.equal([])
  })

  it('should delete tracked changes when more than the whole range is deleted', function () {
    const trackedChanges = TrackedChangeList.fromRaw([insertChange(5, 10)])
    trackedChanges.applyDelete(0, 25)
    expect(trackedChanges.length).to.equal(0)
    expect(trackedChanges.toRaw()).to.deep.equal([])
  })

  it('should shrink the tracked change from start with overlap', function () {
    const trackedChanges = TrackedChangeList.fromRaw([insertChange(0, 10)])
    trackedChanges.applyDelete(1, 9)
    expect(trackedChanges.length).to.equal(1)
    expect(trackedChanges.toRaw()).to.deep.equal([insertChange(0, 1)])
  })

  it('should shrink the tracked change from end with overlap', function () {
    const trackedChanges = TrackedChangeList.fromRaw([insertChange(0, 10)])
    trackedChanges.applyDelete(0, 9)
    expect(trackedChanges.length).to.equal(1)
    expect(trackedChanges.toRaw()).to.deep.equal([insertChange(0, 1)])
  })
})
|
||||
|
||||
describe('fromRaw & toRaw', function () {
  it('should survive serialization', function () {
    /** @type {TrackedChangeRawData[]} */
    const initialRaw = [
      {
        range: { pos: 0, length: 10 },
        tracking: {
          type: 'insert',
          userId: 'user1',
          ts: '2024-01-01T00:00:00.000Z',
        },
      },
    ]

    // Parse, serialize, re-parse: both the raw form and the parsed form
    // must round-trip unchanged.
    const trackedChanges = TrackedChangeList.fromRaw(initialRaw)
    const raw = trackedChanges.toRaw()
    const reparsed = TrackedChangeList.fromRaw(raw)

    expect(reparsed).to.deep.equal(trackedChanges)
    expect(raw).to.deep.equal(initialRaw)
  })
})
|
||||
|
||||
// Tests for TrackedChangeList.applyRetain: retains may add, replace, or
// clear tracking metadata on a span of the document.
describe('applyRetain', function () {
  const TS_2023 = '2023-01-01T00:00:00.000Z'
  const TS_2024 = '2024-01-01T00:00:00.000Z'

  // Shorthand for a raw tracked-change entry.
  function rawChange(type, pos, length, userId, ts) {
    return {
      range: { pos, length },
      tracking: { type, userId, ts },
    }
  }

  // applyRetain options carrying tracking metadata.
  function trackedBy(type, userId, ts) {
    return {
      tracking: TrackingProps.fromRaw({ type, userId, ts }),
    }
  }

  it('should add tracking information to an untracked range', function () {
    const trackedChanges = TrackedChangeList.fromRaw([])
    trackedChanges.applyRetain(0, 10, trackedBy('insert', 'user1', TS_2024))
    expect(trackedChanges.length).to.equal(1)
    expect(trackedChanges.toRaw()).to.deep.equal([
      rawChange('insert', 0, 10, 'user1', TS_2024),
    ])
  })

  it('should shrink a tracked range to make room for retained operation', function () {
    const trackedChanges = TrackedChangeList.fromRaw([
      rawChange('insert', 3, 7, 'user1', TS_2023),
    ])
    trackedChanges.applyRetain(0, 5, trackedBy('insert', 'user2', TS_2024))
    expect(trackedChanges.length).to.equal(2)
    expect(trackedChanges.toRaw()).to.deep.equal([
      rawChange('insert', 0, 5, 'user2', TS_2024),
      rawChange('insert', 5, 5, 'user1', TS_2023),
    ])
  })

  it('should break up a tracked range to make room for retained operation', function () {
    const trackedChanges = TrackedChangeList.fromRaw([
      rawChange('insert', 0, 10, 'user1', TS_2023),
    ])
    trackedChanges.applyRetain(5, 1, trackedBy('insert', 'user2', TS_2024))
    expect(trackedChanges.length).to.equal(3)
    expect(trackedChanges.toRaw()).to.deep.equal([
      rawChange('insert', 0, 5, 'user1', TS_2023),
      rawChange('insert', 5, 1, 'user2', TS_2024),
      rawChange('insert', 6, 4, 'user1', TS_2023),
    ])
  })

  it('should update the timestamp of a tracked range', function () {
    const trackedChanges = TrackedChangeList.fromRaw([
      rawChange('insert', 0, 10, 'user1', TS_2023),
    ])
    // The retained span (1..13) extends past the existing range; the
    // same-author range is expected to grow and take the newer timestamp.
    trackedChanges.applyRetain(1, 12, trackedBy('insert', 'user1', TS_2024))
    expect(trackedChanges.length).to.equal(1)
    expect(trackedChanges.toRaw()).to.deep.equal([
      rawChange('insert', 0, 13, 'user1', TS_2024),
    ])
  })

  // Fixed test title: was "should leave ignore a retain operation …".
  it('should ignore a retain operation with no tracking info', function () {
    const trackedChanges = TrackedChangeList.fromRaw([
      rawChange('insert', 0, 10, 'user1', TS_2023),
    ])
    trackedChanges.applyRetain(0, 10)
    expect(trackedChanges.length).to.equal(1)
    expect(trackedChanges.toRaw()).to.deep.equal([
      rawChange('insert', 0, 10, 'user1', TS_2023),
    ])
  })

  // Fixed test title: was "should leave not break up a tracked change …".
  it('should not break up a tracked change for a retain with no tracking info', function () {
    const trackedChanges = TrackedChangeList.fromRaw([
      rawChange('insert', 0, 10, 'user1', TS_2023),
    ])
    trackedChanges.applyRetain(4, 1)
    expect(trackedChanges.length).to.equal(1)
    expect(trackedChanges.toRaw()).to.deep.equal([
      rawChange('insert', 0, 10, 'user1', TS_2023),
    ])
  })

  it('should delete a tracked change which is being resolved', function () {
    const trackedChanges = TrackedChangeList.fromRaw([
      rawChange('insert', 0, 10, 'user1', TS_2023),
    ])
    trackedChanges.applyRetain(0, 10, {
      tracking: new ClearTrackingProps(),
    })
    expect(trackedChanges.length).to.equal(0)
    expect(trackedChanges.toRaw()).to.deep.equal([])
  })

  // NOTE(review): identical to the previous test — ClearTrackingProps
  // carries no user id, so the "by other user" case cannot be expressed
  // differently here. Consider removing or differentiating this test.
  it('should delete a tracked change which is being resolved by other user', function () {
    const trackedChanges = TrackedChangeList.fromRaw([
      rawChange('insert', 0, 10, 'user1', TS_2023),
    ])
    trackedChanges.applyRetain(0, 10, {
      tracking: new ClearTrackingProps(),
    })
    expect(trackedChanges.length).to.equal(0)
    expect(trackedChanges.toRaw()).to.deep.equal([])
  })

  it('should delete a tracked change which is being rejected', function () {
    const trackedChanges = TrackedChangeList.fromRaw([
      rawChange('delete', 0, 10, 'user1', TS_2023),
    ])
    trackedChanges.applyRetain(0, 10, {
      tracking: new ClearTrackingProps(),
    })
    expect(trackedChanges.length).to.equal(0)
    expect(trackedChanges.toRaw()).to.deep.equal([])
  })

  // NOTE(review): identical to the previous test — see the note on the
  // "resolved by other user" case above.
  it('should delete a tracked change which is being rejected by other user', function () {
    const trackedChanges = TrackedChangeList.fromRaw([
      rawChange('delete', 0, 10, 'user1', TS_2023),
    ])
    trackedChanges.applyRetain(0, 10, {
      tracking: new ClearTrackingProps(),
    })
    expect(trackedChanges.length).to.equal(0)
    expect(trackedChanges.toRaw()).to.deep.equal([])
  })

  it('should append a new tracked change when retaining a range from another user with tracking info', function () {
    const trackedChanges = TrackedChangeList.fromRaw([
      rawChange('delete', 4, 4, 'user1', TS_2023),
    ])
    trackedChanges.applyRetain(8, 1, trackedBy('delete', 'user2', TS_2024))
    expect(trackedChanges.length).to.equal(2)
    expect(trackedChanges.toRaw()).to.deep.equal([
      rawChange('delete', 4, 4, 'user1', TS_2023),
      rawChange('delete', 8, 1, 'user2', TS_2024),
    ])
  })
})
|
||||
})
|
||||
7
libraries/overleaf-editor-core/tsconfig.json
Normal file
7
libraries/overleaf-editor-core/tsconfig.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"extends": "../../tsconfig.backend.json",
|
||||
"include": [
|
||||
"**/*.js",
|
||||
"**/*.cjs"
|
||||
]
|
||||
}
|
||||
Reference in New Issue
Block a user