first commit

This commit is contained in:
2025-04-24 13:11:28 +08:00
commit ff9c54d5e4
5960 changed files with 834111 additions and 0 deletions

View File

@@ -0,0 +1,72 @@
'use strict'
const assert = require('check-types').assert
/**
 * An author of a {@link Change}. We want to store user IDs, and then fill in
 * the other properties (which the user can change over time) when changes are
 * loaded.
 *
 * At present, we're assuming that all authors have a user ID; we may need to
 * generalise this to cover users for whom we only have a name and email, e.g.
 * from git. For now, though, this seems to do what we need.
 */
class Author {
  /**
   * @param {number} id
   * @param {string} email
   * @param {string} name
   */
  constructor(id, email, name) {
    // Validate eagerly so an invalid Author can never be constructed.
    assert.number(id, 'bad id')
    assert.string(email, 'bad email')
    assert.string(name, 'bad name')
    this.id = id
    this.email = email
    this.name = name
  }
  /**
   * Create an Author from its raw form.
   *
   * @param {Object} [raw]
   * @return {Author | null} null when raw is null/undefined
   */
  static fromRaw(raw) {
    if (!raw) return null
    return new Author(raw.id, raw.email, raw.name)
  }
  /**
   * Convert the Author to raw form for storage or transmission.
   *
   * @return {Object} plain object with id, email and name
   */
  toRaw() {
    return { id: this.id, email: this.email, name: this.name }
  }
  /**
   * @return {number} the user ID
   */
  getId() {
    return this.id
  }
  /**
   * @return {string} the user's email address
   */
  getEmail() {
    return this.email
  }
  /**
   * @return {string} the user's display name
   */
  getName() {
    return this.name
  }
}
module.exports = Author

View File

@@ -0,0 +1,45 @@
/** @module */
'use strict'
const _ = require('lodash')
const check = require('check-types')
const Author = require('./author')
/**
 * Check that every member of the list is a number or every member is
 * an Author value, disregarding null or undefined values.
 *
 * @param {Array.<number|Author>} authors author list
 * @param {string} msg assertion message used on failure
 */
function assertV1(authors, msg) {
  // Null/undefined entries (anonymous authors) are ignored by the check.
  const present = authors.filter(
    author => author !== null && author !== undefined
  )
  if (present.length === 0) return

  // The first remaining entry determines which representation the whole
  // list must use: plain integer IDs or Author instances.
  const checker = check.integer(present[0])
    ? check.assert.integer
    : (author, message) => check.assert.instance(author, Author, message)

  for (const author of present) {
    checker(author, msg)
  }
}
/**
 * Check that every member of the list is a v2 author ID (24 lowercase hex
 * characters), disregarding null or undefined values.
 *
 * @param {Array.<string>} authors author list
 * @param {string} msg assertion message used on failure
 */
function assertV2(authors, msg) {
  for (const author of authors) {
    check.assert.maybe.match(author, /^[0-9a-f]{24}$/, msg)
  }
}
module.exports = { assertV1, assertV2 }

View File

@@ -0,0 +1,109 @@
'use strict'
const assert = require('check-types').assert
const OError = require('@overleaf/o-error')
const TextOperation = require('./operation/text_operation')
// Thrown when a blob with the given hash cannot be found in storage.
class NotFoundError extends OError {
  // `hash` is the hex content hash of the missing blob; it is attached both
  // as OError info (for logging) and as a property (for programmatic access).
  constructor(hash) {
    super(`blob ${hash} not found`, { hash })
    this.hash = hash
  }
}
/**
 * Metadata record for the content of a file.
 */
class Blob {
  static HEX_HASH_RX_STRING = '^[0-9a-f]{40,40}$'
  static HEX_HASH_RX = new RegExp(Blob.HEX_HASH_RX_STRING)

  /**
   * Size of the largest file that we'll read to determine whether we can edit
   * it or not, in bytes. The final editability decision is based on the number
   * of characters the file contains, but we must read the file to find that
   * out; this constant is an upper bound on the byte length of any file that
   * could possibly be editable.
   *
   * Why a factor of 3: we cannot currently edit files containing characters
   * outside the basic multilingual plane, so every editable character fits in
   * a single two-byte UCS-2 code unit. The largest such value, 0xFFFF (not
   * actually a valid character), encodes to three bytes in UTF-8 (0xEF 0xBF
   * 0xBF). A file made entirely of three-byte UTF-8 codepoints is therefore
   * the worst case; in practice this is a very conservative bound.
   *
   * @type {number}
   */
  static MAX_EDITABLE_BYTE_LENGTH_BOUND = 3 * TextOperation.MAX_STRING_LENGTH

  static NotFoundError = NotFoundError

  /**
   * @param {string} hash hex SHA-1 hash of the content
   * @param {number} byteLength length of the content in bytes
   * @param {number} [stringLength] UTF-8 length, when the content is valid UTF-8
   */
  constructor(hash, byteLength, stringLength) {
    // Each setter validates its argument.
    this.setHash(hash)
    this.setByteLength(byteLength)
    this.setStringLength(stringLength)
  }

  /**
   * Deserialize a Blob from its raw form.
   *
   * @param {Object} [raw]
   * @return {Blob | null} null when raw is missing
   */
  static fromRaw(raw) {
    if (!raw) return null
    return new Blob(raw.hash, raw.byteLength, raw.stringLength)
  }

  /**
   * Serialize this Blob for storage or transmission.
   */
  toRaw() {
    return {
      hash: this.hash,
      byteLength: this.byteLength,
      stringLength: this.stringLength,
    }
  }

  /**
   * Hex hash.
   * @return {String}
   */
  getHash() {
    return this.hash
  }

  setHash(hash) {
    assert.match(hash, Blob.HEX_HASH_RX, 'bad hash')
    this.hash = hash
  }

  /**
   * Length of the blob in bytes.
   * @return {number}
   */
  getByteLength() {
    return this.byteLength
  }

  setByteLength(byteLength) {
    assert.integer(byteLength, 'bad byteLength')
    this.byteLength = byteLength
  }

  /**
   * Utf-8 length of the blob content, if it appears to be valid UTF-8.
   * @return {number|undefined}
   */
  getStringLength() {
    return this.stringLength
  }

  setStringLength(stringLength) {
    // stringLength is optional: binary blobs have no meaningful string length.
    assert.maybe.integer(stringLength, 'bad stringLength')
    this.stringLength = stringLength
  }
}
module.exports = Blob

View File

@@ -0,0 +1,352 @@
'use strict'
const _ = require('lodash')
const assert = require('check-types').assert
const pMap = require('p-map')
const AuthorList = require('./author_list')
const Operation = require('./operation')
const Origin = require('./origin')
const Snapshot = require('./snapshot')
const FileMap = require('./file_map')
const V2DocVersions = require('./v2_doc_versions')
/**
* @import Author from "./author"
* @import { BlobStore } from "./types"
*/
/**
 * A Change is a list of {@link Operation}s applied atomically by given
 * {@link Author}(s) at a given time.
 */
class Change {
  // Project versions look like "<major>.<minor>", e.g. "123.4".
  static PROJECT_VERSION_RX_STRING = '^[0-9]+\\.[0-9]+$'
  static PROJECT_VERSION_RX = new RegExp(Change.PROJECT_VERSION_RX_STRING)

  /**
   * @param {Array.<Operation>} operations
   * @param {Date} timestamp
   * @param {number[] | Author[]} [authors]
   * @param {Origin} [origin]
   * @param {string[]} [v2Authors]
   * @param {string} [projectVersion]
   * @param {V2DocVersions} [v2DocVersions]
   */
  constructor(
    operations,
    timestamp,
    authors,
    origin,
    v2Authors,
    projectVersion,
    v2DocVersions
  ) {
    // Each setter validates its argument, so a constructed Change is
    // internally consistent. Note that setAuthors runs before setV2Authors,
    // so its cross-check against this.v2Authors sees undefined here.
    this.setOperations(operations)
    this.setTimestamp(timestamp)
    this.setAuthors(authors || [])
    this.setOrigin(origin)
    this.setV2Authors(v2Authors || [])
    this.setProjectVersion(projectVersion)
    this.setV2DocVersions(v2DocVersions)
  }

  /**
   * For serialization.
   *
   * @return {Object}
   */
  toRaw() {
    function toRaw(object) {
      return object.toRaw()
    }
    const raw = {
      operations: this.operations.map(toRaw),
      timestamp: this.timestamp.toISOString(),
      authors: this.authors,
    }
    // Optional fields are omitted entirely when unset rather than
    // serialized as null/undefined.
    if (this.v2Authors) raw.v2Authors = this.v2Authors
    if (this.origin) raw.origin = this.origin.toRaw()
    if (this.projectVersion) raw.projectVersion = this.projectVersion
    if (this.v2DocVersions) raw.v2DocVersions = this.v2DocVersions.toRaw()
    return raw
  }

  /**
   * Deserialize a Change from its raw form.
   *
   * @param {Object} [raw]
   * @return {Change | null} null when raw is missing
   */
  static fromRaw(raw) {
    if (!raw) return null
    assert.array.of.object(raw.operations, 'bad raw.operations')
    assert.nonEmptyString(raw.timestamp, 'bad raw.timestamp')
    // Hack to clean up bad data where author id of some changes was 0, instead of
    // null. The root cause of the bug is fixed in
    // https://github.com/overleaf/write_latex/pull/3804 but the bad data persists
    // on S3
    let authors
    if (raw.authors) {
      authors = raw.authors.map(
        // Null represents an anonymous author
        author => (author === 0 ? null : author)
      )
    }
    return new Change(
      raw.operations.map(Operation.fromRaw),
      new Date(raw.timestamp),
      authors,
      raw.origin && Origin.fromRaw(raw.origin),
      raw.v2Authors,
      raw.projectVersion,
      raw.v2DocVersions && V2DocVersions.fromRaw(raw.v2DocVersions)
    )
  }

  /**
   * @return {Operation[]}
   */
  getOperations() {
    return this.operations
  }

  setOperations(operations) {
    assert.array.of.object(operations, 'Change: bad operations')
    this.operations = operations
  }

  /** @return {Date} time at which the change was applied */
  getTimestamp() {
    return this.timestamp
  }

  setTimestamp(timestamp) {
    assert.date(timestamp, 'Change: bad timestamp')
    this.timestamp = timestamp
  }

  /**
   * @return {Array.<Author>} zero or more
   */
  getAuthors() {
    return this.authors
  }

  setAuthors(authors) {
    assert.array(authors, 'Change: bad author ids array')
    // v1 and v2 author lists are mutually exclusive.
    // NOTE(review): this exclusivity check only runs when more than one
    // author is supplied — confirm whether single-author changes are meant
    // to bypass it.
    if (authors.length > 1) {
      assert.maybe.emptyArray(
        this.v2Authors,
        'Change: cannot set v1 authors if v2 authors is set'
      )
    }
    AuthorList.assertV1(authors, 'Change: bad author ids')
    this.authors = authors
  }

  /**
   * @return {Array.<Author>} zero or more
   */
  getV2Authors() {
    return this.v2Authors
  }

  setV2Authors(v2Authors) {
    assert.array(v2Authors, 'Change: bad v2 author ids array')
    // Mirror of the check in setAuthors; same single-entry caveat applies.
    if (v2Authors.length > 1) {
      assert.maybe.emptyArray(
        this.authors,
        'Change: cannot set v2 authors if v1 authors is set'
      )
    }
    AuthorList.assertV2(v2Authors, 'Change: not a v2 author id')
    this.v2Authors = v2Authors
  }

  /**
   * @return {Origin | null | undefined}
   */
  getOrigin() {
    return this.origin
  }

  setOrigin(origin) {
    assert.maybe.instance(origin, Origin, 'Change: bad origin')
    this.origin = origin
  }

  /**
   * @return {string | null | undefined}
   */
  getProjectVersion() {
    return this.projectVersion
  }

  setProjectVersion(projectVersion) {
    assert.maybe.match(
      projectVersion,
      Change.PROJECT_VERSION_RX,
      'Change: bad projectVersion'
    )
    this.projectVersion = projectVersion
  }

  /**
   * @return {V2DocVersions | null | undefined}
   */
  getV2DocVersions() {
    return this.v2DocVersions
  }

  setV2DocVersions(v2DocVersions) {
    assert.maybe.instance(
      v2DocVersions,
      V2DocVersions,
      'Change: bad v2DocVersions'
    )
    this.v2DocVersions = v2DocVersions
  }

  /**
   * If this Change references blob hashes, add them to the given set.
   *
   * @param {Set.<String>} blobHashes modified in place
   */
  findBlobHashes(blobHashes) {
    for (const operation of this.operations) {
      operation.findBlobHashes(blobHashes)
    }
  }

  /**
   * If this Change contains any File objects, load them.
   *
   * @param {string} kind see {File#load}
   * @param {BlobStore} blobStore
   * @return {Promise<void>}
   */
  async loadFiles(kind, blobStore) {
    // Sequential on purpose: operations are loaded one at a time.
    for (const operation of this.operations) {
      await operation.loadFiles(kind, blobStore)
    }
  }

  /**
   * Append an operation to the end of the operations list.
   *
   * @param {Operation} operation
   * @return {this} for chaining
   */
  pushOperation(operation) {
    this.getOperations().push(operation)
    return this
  }

  /**
   * Apply this change to a snapshot. All operations are applied, and then the
   * snapshot version is increased.
   *
   * Recoverable errors (caused by historical bad data) are ignored unless
   * opts.strict is true
   *
   * @param {Snapshot} snapshot modified in place
   * @param {object} opts
   * @param {boolean} [opts.strict] - Do not ignore recoverable errors
   */
  applyTo(snapshot, opts = {}) {
    // eslint-disable-next-line no-unused-vars
    for (const operation of this.iterativelyApplyTo(snapshot, opts)) {
      // Nothing to do: we're just consuming the iterator for the side effects
    }
  }

  /**
   * Generator that applies this change to a snapshot and yields each
   * operation after it has been applied.
   *
   * Recoverable errors (caused by historical bad data) are ignored unless
   * opts.strict is true
   *
   * @param {Snapshot} snapshot modified in place
   * @param {object} opts
   * @param {boolean} [opts.strict] - Do not ignore recoverable errors
   */
  *iterativelyApplyTo(snapshot, opts = {}) {
    assert.object(snapshot, 'bad snapshot')
    for (const operation of this.operations) {
      try {
        operation.applyTo(snapshot, opts)
      } catch (err) {
        // Only these two error types are known to come from historical bad
        // data; anything else is rethrown unconditionally.
        const recoverable =
          err instanceof Snapshot.EditMissingFileError ||
          err instanceof FileMap.FileNotFoundError
        if (!recoverable || opts.strict) {
          throw err
        }
      }
      // Yield even when the operation failed recoverably, so consumers see
      // every operation exactly once.
      yield operation
    }
    // update project version if present in change
    if (this.projectVersion) {
      snapshot.setProjectVersion(this.projectVersion)
    }
    // update doc versions
    if (this.v2DocVersions) {
      snapshot.updateV2DocVersions(this.v2DocVersions)
    }
  }

  /**
   * Transform this change to account for the fact that the other change occurred
   * simultaneously and was applied first.
   *
   * This change is modified in place (by transforming its operations).
   *
   * @param {Change} other
   */
  transformAfter(other) {
    assert.object(other, 'bad other')
    // Transform each of our operations over each of the other change's
    // operations, in order, keeping the "this after other" side of each
    // transform pair.
    const thisOperations = this.getOperations()
    const otherOperations = other.getOperations()
    for (let i = 0; i < otherOperations.length; ++i) {
      for (let j = 0; j < thisOperations.length; ++j) {
        thisOperations[j] = Operation.transform(
          thisOperations[j],
          otherOperations[i]
        )[0]
      }
    }
  }

  // Deep copy via a raw-form round trip.
  clone() {
    return Change.fromRaw(this.toRaw())
  }

  /**
   * Store the operations' file content in the blob store and return the raw
   * form of this change with stored operations.
   *
   * @param {BlobStore} blobStore
   * @param {number} [concurrency] max parallel operation stores (default 1)
   * @return {Promise<Object>} raw change
   */
  async store(blobStore, concurrency) {
    assert.maybe.number(concurrency, 'bad concurrency')
    const raw = this.toRaw()
    // Deduplicate author ids in the stored form only; the in-memory list
    // is left untouched.
    raw.authors = _.uniq(raw.authors)
    const rawOperations = await pMap(
      this.operations,
      operation => operation.store(blobStore),
      { concurrency: concurrency || 1 }
    )
    raw.operations = rawOperations
    return raw
  }

  // NOTE(review): assumes both changes contain at least one operation;
  // operations[0] would be undefined otherwise — confirm callers guarantee
  // non-empty changes.
  canBeComposedWith(other) {
    const operations = this.getOperations()
    const otherOperations = other.getOperations()
    // We ignore complex changes with more than 1 operation
    if (operations.length > 1 || otherOperations.length > 1) return false
    return operations[0].canBeComposedWith(otherOperations[0])
  }
}
module.exports = Change

View File

@@ -0,0 +1,61 @@
'use strict'
const assert = require('check-types').assert
const Change = require('./change')
/**
 * A `ChangeNote` is returned when the server has applied a {@link Change}.
 */
class ChangeNote {
  /**
   * @param {number} baseVersion the new base version for the change
   * @param {Change} [change]
   */
  constructor(baseVersion, change) {
    assert.integer(baseVersion, 'bad baseVersion')
    assert.maybe.instance(change, Change, 'bad change')
    this.baseVersion = baseVersion
    this.change = change
  }

  /**
   * Deserialize a ChangeNote from its raw form.
   */
  static fromRaw(raw) {
    assert.integer(raw.baseVersion, 'bad raw.baseVersion')
    assert.maybe.object(raw.change, 'bad raw.changes')
    return new ChangeNote(raw.baseVersion, Change.fromRaw(raw.change))
  }

  /**
   * For serialization. Note this serializes the attached change and
   * therefore throws if none was provided; use {@link toRawWithoutChange}
   * in that case.
   *
   * @return {Object}
   */
  toRaw() {
    const raw = { baseVersion: this.baseVersion }
    raw.change = this.change.toRaw()
    return raw
  }

  /** Raw form carrying only the version, not the change itself. */
  toRawWithoutChange() {
    return { baseVersion: this.baseVersion }
  }

  getBaseVersion() {
    return this.baseVersion
  }

  /** The project version after the change has been applied. */
  getResultVersion() {
    return this.baseVersion + 1
  }

  getChange() {
    return this.change
  }
}
module.exports = ChangeNote

View File

@@ -0,0 +1,90 @@
'use strict'
const assert = require('check-types').assert
const AuthorList = require('./author_list')
const Change = require('./change')
const Operation = require('./operation')
/**
* @import Author from "./author"
*/
/**
 * A `ChangeRequest` is a list of {@link Operation}s that the server can apply
 * as a {@link Change}.
 *
 * If the change is marked as `untransformable`, then the server will not
 * attempt to transform it if it is out of date (i.e. if the baseVersion no
 * longer matches the project's latest version). For example, if the client
 * needs to ensure that a metadata property is set on exactly one file, it can't
 * do that reliably if there's a chance that other clients will also change the
 * metadata at the same time. The expectation is that if the change is rejected,
 * the client will retry on a later version.
 */
class ChangeRequest {
  /**
   * @param {number} baseVersion
   * @param {Array.<Operation>} operations
   * @param {boolean} [untransformable] defaults to false
   * @param {number[] | Author[]} [authors] defaults to an empty list
   */
  constructor(baseVersion, operations, untransformable, authors) {
    assert.integer(baseVersion, 'bad baseVersion')
    assert.array.of.object(operations, 'bad operations')
    assert.maybe.boolean(untransformable, 'ChangeRequest: bad untransformable')
    // TODO remove authors once we have JWTs working --- pass as parameter to
    // makeChange instead
    const authorList = authors || []
    // check all are the same type
    AuthorList.assertV1(authorList, 'bad authors')
    this.authors = authorList
    this.baseVersion = baseVersion
    this.operations = operations
    this.untransformable = untransformable || false
  }

  /**
   * Deserialize a ChangeRequest from its raw form.
   */
  static fromRaw(raw) {
    assert.array.of.object(raw.operations, 'bad raw.operations')
    return new ChangeRequest(
      raw.baseVersion,
      raw.operations.map(Operation.fromRaw),
      raw.untransformable,
      raw.authors
    )
  }

  /**
   * For serialization.
   *
   * @return {Object}
   */
  toRaw() {
    return {
      baseVersion: this.baseVersion,
      operations: this.operations.map(operation => operation.toRaw()),
      untransformable: this.untransformable,
      authors: this.authors,
    }
  }

  getBaseVersion() {
    return this.baseVersion
  }

  isUntransformable() {
    return this.untransformable
  }

  /**
   * Build the {@link Change} that applies this request at the given time.
   *
   * @param {Date} timestamp
   * @return {Change}
   */
  makeChange(timestamp) {
    return new Change(this.operations, timestamp, this.authors)
  }
}
module.exports = ChangeRequest

View File

@@ -0,0 +1,172 @@
'use strict'
const assert = require('check-types').assert
const OError = require('@overleaf/o-error')
const History = require('./history')
/**
* @import { BlobStore, RawChunk } from "./types"
* @import Change from "./change"
* @import Snapshot from "./snapshot"
*/
// Thrown when a client submits updates based on an end version that no longer
// matches the latest chunk's end version.
class ConflictingEndVersion extends OError {
  constructor(clientEndVersion, latestEndVersion) {
    const message =
      'client sent updates with end_version ' +
      clientEndVersion +
      ' but latest chunk has end_version ' +
      latestEndVersion
    super(message, { clientEndVersion, latestEndVersion })
    this.clientEndVersion = clientEndVersion
    this.latestEndVersion = latestEndVersion
  }
}

// Thrown when no chunk exists for a project. Base class for the more
// specific not-found errors below.
class NotFoundError extends OError {
  // `message` and `info` optional arguments allow children classes to override
  // these values, ensuring backwards compatibility with previous implementation
  // based on the `overleaf-error-type` library
  constructor(projectId, message, info) {
    const errorMessage = message || `no chunks for project ${projectId}`
    const errorInfo = info || { projectId }
    super(errorMessage, errorInfo)
    this.projectId = projectId
  }
}

// Thrown when no chunk covers the requested project version.
class VersionNotFoundError extends NotFoundError {
  constructor(projectId, version) {
    super(projectId, `chunk for ${projectId} v ${version} not found`, {
      projectId,
      version,
    })
    this.projectId = projectId
    this.version = version
  }
}

// Thrown when no chunk exists before the requested timestamp.
class BeforeTimestampNotFoundError extends NotFoundError {
  constructor(projectId, timestamp) {
    super(projectId, `chunk for ${projectId} timestamp ${timestamp} not found`)
    this.projectId = projectId
    this.timestamp = timestamp
  }
}

// Thrown when the requested chunk has not been persisted yet.
class NotPersistedError extends NotFoundError {
  constructor(projectId) {
    super(projectId, `chunk for ${projectId} not persisted yet`)
    this.projectId = projectId
  }
}
/**
 * A Chunk is a {@link History} that is part of a project's overall history. It
 * has a start and an end version that place its History in context.
 */
class Chunk {
  static ConflictingEndVersion = ConflictingEndVersion
  static NotFoundError = NotFoundError
  static VersionNotFoundError = VersionNotFoundError
  static BeforeTimestampNotFoundError = BeforeTimestampNotFoundError
  static NotPersistedError = NotPersistedError

  /**
   * @param {History} history
   * @param {number} startVersion project version before this chunk's changes
   */
  constructor(history, startVersion) {
    assert.instance(history, History, 'bad history')
    assert.integer(startVersion, 'bad startVersion')
    this.history = history
    this.startVersion = startVersion
  }

  /**
   * Deserialize a Chunk from its raw form.
   *
   * @param {RawChunk} raw
   * @return {Chunk}
   */
  static fromRaw(raw) {
    return new Chunk(History.fromRaw(raw.history), raw.startVersion)
  }

  /** For serialization. */
  toRaw() {
    return {
      history: this.history.toRaw(),
      startVersion: this.startVersion,
    }
  }

  /**
   * The history for this chunk.
   *
   * @return {History}
   */
  getHistory() {
    return this.history
  }

  /**
   * {@see History#getSnapshot}
   * @return {Snapshot}
   */
  getSnapshot() {
    return this.history.getSnapshot()
  }

  /**
   * {@see History#getChanges}
   * @return {Array.<Change>}
   */
  getChanges() {
    return this.history.getChanges()
  }

  /**
   * {@see History#pushChanges}
   * @param {Array.<Change>} changes
   */
  pushChanges(changes) {
    this.history.pushChanges(changes)
  }

  /**
   * The version of the project before applying all changes in this chunk.
   *
   * @return {number} non-negative, less than or equal to end version
   */
  getStartVersion() {
    return this.startVersion
  }

  /**
   * The version of the project after applying all changes in this chunk.
   *
   * @return {number} non-negative, greater than or equal to start version
   */
  getEndVersion() {
    return this.startVersion + this.history.countChanges()
  }

  /**
   * The timestamp of the last change in this chunk, or null for a chunk
   * with no changes.
   */
  getEndTimestamp() {
    if (!this.history.countChanges()) return null
    const changes = this.history.getChanges()
    return changes[changes.length - 1].getTimestamp()
  }

  /**
   * {@see History#loadFiles}
   *
   * @param {string} kind
   * @param {BlobStore} blobStore
   * @return {Promise<void>}
   */
  async loadFiles(kind, blobStore) {
    await this.history.loadFiles(kind, blobStore)
  }
}
module.exports = Chunk

View File

@@ -0,0 +1,33 @@
'use strict'
const assert = require('check-types').assert
const Chunk = require('./chunk')
/**
 * The ChunkResponse allows for additional data to be sent back with the chunk;
 * at present there is no extra data to send.
 */
class ChunkResponse {
  /**
   * @param {Chunk} chunk
   */
  constructor(chunk) {
    assert.instance(chunk, Chunk)
    this.chunk = chunk
  }

  /** Deserialize from raw form; returns null when raw is missing. */
  static fromRaw(raw) {
    if (!raw) return null
    return new ChunkResponse(Chunk.fromRaw(raw.chunk))
  }

  /** For serialization. */
  toRaw() {
    return { chunk: this.chunk.toRaw() }
  }

  getChunk() {
    return this.chunk
  }
}
module.exports = ChunkResponse

View File

@@ -0,0 +1,206 @@
// @ts-check
const { RetainOp, InsertOp, RemoveOp } = require('./operation/scan_op')
const Range = require('./range')
/**
* @import { CommentRawData } from "./types"
* @import TextOperation from "./operation/text_operation"
*/
/**
 * A comment anchored to one or more non-overlapping {@link Range}s of a
 * document. Comments are immutable: all mutating operations return a new
 * Comment.
 */
class Comment {
  /**
   * @readonly
   * @type {ReadonlyArray<Range>}
   */
  ranges = []

  /**
   * @readonly
   * @type {boolean}
   */
  resolved = false

  /**
   * @param {string} id
   * @param {ReadonlyArray<Range>} ranges
   * @param {boolean} [resolved]
   */
  constructor(id, ranges, resolved = false) {
    this.id = id
    this.resolved = resolved
    // Normalise on construction: empty ranges are dropped, adjacent ranges
    // are merged, and overlapping ranges are rejected.
    this.ranges = this.mergeRanges(ranges)
  }

  /**
   * Adjust this comment's ranges for an insertion of `length` characters at
   * `cursor`, returning a new Comment.
   *
   * @param {number} cursor
   * @param {number} length
   * @param {boolean} [extendComment] whether the inserted text belongs to
   *   this comment (extends a range instead of moving/splitting it)
   * @returns {Comment}
   */
  applyInsert(cursor, length, extendComment = false) {
    let existingRangeExtended = false
    const newRanges = []
    for (const commentRange of this.ranges) {
      if (cursor === commentRange.end) {
        // insert right after the comment
        if (extendComment) {
          newRanges.push(commentRange.extendBy(length))
          existingRangeExtended = true
        } else {
          newRanges.push(commentRange)
        }
      } else if (cursor === commentRange.start) {
        // insert at the start of the comment
        if (extendComment) {
          newRanges.push(commentRange.extendBy(length))
          existingRangeExtended = true
        } else {
          newRanges.push(commentRange.moveBy(length))
        }
      } else if (commentRange.startIsAfter(cursor)) {
        // insert before the comment
        newRanges.push(commentRange.moveBy(length))
      } else if (commentRange.containsCursor(cursor)) {
        // insert is inside the comment
        if (extendComment) {
          newRanges.push(commentRange.extendBy(length))
          existingRangeExtended = true
        } else {
          // A non-comment insertion inside a range splits it in two around
          // the inserted text.
          const [rangeUpToCursor, , rangeAfterCursor] = commentRange.insertAt(
            cursor,
            length
          )
          // use current commentRange for the part before the cursor
          newRanges.push(new Range(commentRange.pos, rangeUpToCursor.length))
          // add the part after the cursor as a new range
          newRanges.push(rangeAfterCursor)
        }
      } else {
        // insert is after the comment
        newRanges.push(commentRange)
      }
    }
    // if the insert is not inside any range, add a new range
    if (extendComment && !existingRangeExtended) {
      newRanges.push(new Range(cursor, length))
    }
    return new Comment(this.id, newRanges, this.resolved)
  }

  /**
   * Adjust this comment's ranges for a deletion, returning a new Comment.
   *
   * @param {Range} deletedRange
   * @returns {Comment}
   */
  applyDelete(deletedRange) {
    const newRanges = []
    for (const commentRange of this.ranges) {
      if (commentRange.overlaps(deletedRange)) {
        newRanges.push(commentRange.subtract(deletedRange))
      } else if (commentRange.startsAfter(deletedRange)) {
        newRanges.push(commentRange.moveBy(-deletedRange.length))
      } else {
        newRanges.push(commentRange)
      }
    }
    return new Comment(this.id, newRanges, this.resolved)
  }

  /**
   * Apply a whole text operation to this comment by walking its ops with a
   * cursor, returning a new Comment.
   *
   * @param {TextOperation} operation
   * @param {string} commentId id used to decide whether inserted text with
   *   comment ids extends this comment
   * @returns {Comment}
   */
  applyTextOperation(operation, commentId) {
    /** @type {Comment} */
    let comment = this
    let cursor = 0
    for (const op of operation.ops) {
      if (op instanceof RetainOp) {
        cursor += op.length
      } else if (op instanceof InsertOp) {
        comment = comment.applyInsert(
          cursor,
          op.insertion.length,
          op.commentIds?.includes(commentId)
        )
        cursor += op.insertion.length
      } else if (op instanceof RemoveOp) {
        // Removals do not advance the cursor: subsequent ops apply at the
        // same position in the shortened document.
        comment = comment.applyDelete(new Range(cursor, op.length))
      }
    }
    return comment
  }

  /** @returns {boolean} true when no ranges remain */
  isEmpty() {
    return this.ranges.length === 0
  }

  /**
   * For serialization. The `resolved` flag is only present when true.
   *
   * @returns {CommentRawData}
   */
  toRaw() {
    /** @type CommentRawData */
    const raw = {
      id: this.id,
      ranges: this.ranges.map(range => range.toRaw()),
    }
    if (this.resolved) {
      raw.resolved = true
    }
    return raw
  }

  /**
   * Sort ranges by start position, drop empty ranges, and merge adjacent
   * mergeable ranges.
   *
   * Note: a previous version also threw 'Comment range cannot be empty'
   * after the empty-range skip; that branch was unreachable and has been
   * removed.
   *
   * @param {ReadonlyArray<Range>} ranges
   * @returns {ReadonlyArray<Range>}
   * @throws {Error} if two non-empty ranges overlap
   */
  mergeRanges(ranges) {
    /** @type {Range[]} */
    const mergedRanges = []
    const sortedRanges = [...ranges].sort((a, b) => a.start - b.start)
    for (const range of sortedRanges) {
      // Empty ranges carry no information; drop them.
      if (range.isEmpty()) {
        continue
      }
      const lastMerged = mergedRanges[mergedRanges.length - 1]
      if (lastMerged?.overlaps(range)) {
        throw new Error('Ranges cannot overlap')
      }
      if (lastMerged?.canMerge(range)) {
        mergedRanges[mergedRanges.length - 1] = lastMerged.merge(range)
      } else {
        mergedRanges.push(range)
      }
    }
    return mergedRanges
  }

  /**
   * Deserialize a Comment from its raw form.
   *
   * @param {CommentRawData} rawComment
   * @returns {Comment}
   */
  static fromRaw(rawComment) {
    return new Comment(
      rawComment.id,
      rawComment.ranges.map(range => Range.fromRaw(range)),
      rawComment.resolved
    )
  }
}
module.exports = Comment

View File

@@ -0,0 +1,34 @@
const OError = require('@overleaf/o-error')
// Base class for errors raised when an operation cannot be processed.
class UnprocessableError extends OError {}

// Thrown when applying an operation to its operand fails.
class ApplyError extends UnprocessableError {
  // `operation` is the failing operation and `operand` is the value it was
  // being applied to; both are kept for diagnostics.
  constructor(message, operation, operand) {
    super(message)
    this.operation = operation
    this.operand = operand
  }
}

// Thrown when inserted text contains characters outside the basic
// multilingual plane, which the editor cannot currently handle.
class InvalidInsertionError extends UnprocessableError {
  // `str` is the offending text; `operation` is the operation that tried to
  // insert it.
  constructor(str, operation) {
    super('inserted text contains non BMP characters')
    this.str = str
    this.operation = operation
  }
}

// Thrown when applying an operation would produce a string longer than the
// allowed maximum.
class TooLongError extends UnprocessableError {
  // `resultLength` is the length the result would have had; it is also
  // attached as OError info.
  constructor(operation, resultLength) {
    super('resulting string would be too long', { resultLength })
    this.operation = operation
    this.resultLength = resultLength
  }
}

module.exports = {
  UnprocessableError,
  ApplyError,
  InvalidInsertionError,
  TooLongError,
}

View File

@@ -0,0 +1,280 @@
// @ts-check
'use strict'
const _ = require('lodash')
const assert = require('check-types').assert
const OError = require('@overleaf/o-error')
const FileData = require('./file_data')
const HashFileData = require('./file_data/hash_file_data')
const StringFileData = require('./file_data/string_file_data')
/**
* @import Blob from "./blob"
* @import { BlobStore, ReadonlyBlobStore, RawFileData, RawFile } from "./types"
* @import { StringFileRawData, CommentRawData } from "./types"
* @import CommentList from "./file_data/comment_list"
* @import TextOperation from "./operation/text_operation"
* @import TrackedChangeList from "./file_data/tracked_change_list"
*
* @typedef {{filterTrackedDeletes?: boolean}} FileGetContentOptions
*/
// Thrown by File#edit when the file's data cannot be edited (see
// FileData#isEditable); also exposed as File.NotEditableError.
class NotEditableError extends OError {
  constructor() {
    super('File is not editable')
  }
}
/**
 * A file in a {@link Snapshot}. A file has both data and metadata. There
 * are several classes of data that represent the various types of file
 * data that are supported, namely text and binary, and also the various
 * states that a file's data can be in, namely:
 *
 * 1. Hash only: all we know is the file's hash; this is how we encode file
 *    content in long term storage.
 * 2. Lazily loaded: the hash of the file, its length, and its type are known,
 *    but its content is not loaded. Operations are cached for application
 *    later.
 * 3. Eagerly loaded: the content of a text file is fully loaded into memory
 *    as a string.
 * 4. Hollow: only the byte and/or UTF-8 length of the file are known; this is
 *    used to allow for validation of operations when editing collaboratively
 *    without having to keep file data in memory on the server.
 */
class File {
  /**
   * Blob hash for an empty file.
   *
   * @type {String}
   */
  static EMPTY_FILE_HASH = 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'

  static NotEditableError = NotEditableError

  /**
   * @param {FileData} data the file's content/state representation
   * @param {Object} [metadata] defaults to an empty object
   */
  constructor(data, metadata) {
    assert.instance(data, FileData, 'File: bad data')
    this.data = data
    this.metadata = {}
    this.setMetadata(metadata || {})
  }

  /**
   * Deserialize a File from its raw form.
   *
   * @param {RawFile} raw
   * @return {File|null} null when raw is missing
   */
  static fromRaw(raw) {
    if (!raw) return null
    return new File(FileData.fromRaw(raw), raw.metadata)
  }

  /**
   * Build a hash-only File (state 1 above).
   *
   * @param {string} hash
   * @param {string} [rangesHash]
   * @param {Object} [metadata]
   * @return {File}
   */
  static fromHash(hash, rangesHash, metadata) {
    return new File(new HashFileData(hash, rangesHash), metadata)
  }

  /**
   * Build an eagerly-loaded text File (state 3 above).
   *
   * @param {string} string
   * @param {Object} [metadata]
   * @return {File}
   */
  static fromString(string, metadata) {
    return new File(new StringFileData(string), metadata)
  }

  /**
   * Build a hollow File (state 4 above), knowing only its lengths.
   *
   * @param {number} byteLength
   * @param {number} [stringLength]
   * @param {Object} [metadata]
   * @return {File}
   */
  static createHollow(byteLength, stringLength, metadata) {
    return new File(FileData.createHollow(byteLength, stringLength), metadata)
  }

  /**
   * Build a lazily-loaded File (state 2 above) from stored blobs.
   *
   * @param {Blob} blob
   * @param {Blob} [rangesBlob]
   * @param {Object} [metadata]
   * @return {File}
   */
  static createLazyFromBlobs(blob, rangesBlob, metadata) {
    return new File(FileData.createLazyFromBlobs(blob, rangesBlob), metadata)
  }

  /**
   * For serialization: the raw file data plus metadata (when non-empty).
   *
   * @returns {RawFile}
   */
  toRaw() {
    /** @type RawFile */
    const raw = this.data.toRaw()
    storeRawMetadata(this.metadata, raw)
    return raw
  }

  /**
   * Hexadecimal SHA-1 hash of the file's content, if known.
   *
   * @return {string | null | undefined}
   */
  getHash() {
    return this.data.getHash()
  }

  /**
   * Hexadecimal SHA-1 hash of the ranges content (comments + tracked changes),
   * if known.
   *
   * @return {string | null | undefined}
   */
  getRangesHash() {
    return this.data.getRangesHash()
  }

  /**
   * The content of the file, if it is known and if this file has UTF-8 encoded
   * content.
   *
   * @param {FileGetContentOptions} [opts]
   * @return {string | null | undefined}
   */
  getContent(opts = {}) {
    return this.data.getContent(opts)
  }

  /**
   * Whether this file has string content and is small enough to be edited using
   * {@link TextOperation}s.
   *
   * @return {boolean | null | undefined} null if it is not currently known
   */
  isEditable() {
    return this.data.isEditable()
  }

  /**
   * The length of the file's content in bytes, if known.
   *
   * @return {number | null | undefined}
   */
  getByteLength() {
    return this.data.getByteLength()
  }

  /**
   * The length of the file's content in characters, if known.
   *
   * @return {number | null | undefined}
   */
  getStringLength() {
    return this.data.getStringLength()
  }

  /**
   * Return the metadata object for this file.
   *
   * @return {Object}
   */
  getMetadata() {
    return this.metadata
  }

  /**
   * Set the metadata object for this file.
   *
   * @param {Object} metadata
   */
  setMetadata(metadata) {
    assert.object(metadata, 'File: bad metadata')
    this.metadata = metadata
  }

  /**
   * Edit this file, if possible; throws {@link File.NotEditableError}
   * otherwise.
   *
   * @param {TextOperation} textOperation
   */
  edit(textOperation) {
    if (!this.data.isEditable()) throw new File.NotEditableError()
    this.data.edit(textOperation)
  }

  /**
   * Get the comments for this file.
   *
   * @return {CommentList}
   */
  getComments() {
    return this.data.getComments()
  }

  /**
   * Get the tracked changes for this file.
   *
   * @return {TrackedChangeList}
   */
  getTrackedChanges() {
    return this.data.getTrackedChanges()
  }

  /**
   * Clone a file via a raw-form round trip.
   *
   * @return {File} a new object of the same type
   */
  clone() {
    return /** @type {File} */ (File.fromRaw(this.toRaw()))
  }

  /**
   * Convert this file's data to the given kind. This may require us to load
   * file size or content from the given blob store, so this is an asynchronous
   * operation.
   *
   * @param {string} kind
   * @param {ReadonlyBlobStore} blobStore
   * @return {Promise.<File>} for this
   */
  async load(kind, blobStore) {
    this.data = await this.data.load(kind, blobStore)
    return this
  }

  /**
   * Store the file's content in the blob store and return a raw file with
   * the corresponding hash. As a side effect, make this object consistent with
   * the hash.
   *
   * @param {BlobStore} blobStore
   * @return {Promise<RawFile>} a raw HashFile
   */
  async store(blobStore) {
    /** @type RawFile */
    const raw = await this.data.store(blobStore)
    storeRawMetadata(this.metadata, raw)
    return raw
  }
}
/**
* @param {Object} metadata
* @param {RawFile} raw
*/
function storeRawMetadata(metadata, raw) {
  // Only attach metadata when there is any; deep-clone so later mutations
  // of the File's metadata do not leak into the raw form.
  if (_.isEmpty(metadata)) return
  raw.metadata = _.cloneDeep(metadata)
}
module.exports = File

View File

@@ -0,0 +1,82 @@
'use strict'
const assert = require('check-types').assert
const Blob = require('../blob')
const FileData = require('./')
/**
* @import { RawBinaryFileData } from '../types'
*/
class BinaryFileData extends FileData {
  /**
   * Data for a binary file that is already stored: we hold only its content
   * hash and size in bytes.
   *
   * @param {string} hash
   * @param {number} byteLength
   * @see FileData
   */
  constructor(hash, byteLength) {
    super()
    assert.match(hash, Blob.HEX_HASH_RX, 'BinaryFileData: bad hash')
    assert.integer(byteLength, 'BinaryFileData: bad byteLength')
    assert.greaterOrEqual(byteLength, 0, 'BinaryFileData: low byteLength')
    this.hash = hash
    this.byteLength = byteLength
  }

  /**
   * Build a BinaryFileData from its raw form.
   *
   * @param {RawBinaryFileData} raw
   * @returns {BinaryFileData}
   */
  static fromRaw(raw) {
    const { hash, byteLength } = raw
    return new BinaryFileData(hash, byteLength)
  }

  /**
   * @inheritdoc
   * @returns {RawBinaryFileData}
   */
  toRaw() {
    const { hash, byteLength } = this
    return { hash, byteLength }
  }

  /** @inheritdoc */
  getHash() {
    return this.hash
  }

  /** @inheritdoc */
  isEditable() {
    // Binary content cannot take edit operations.
    return false
  }

  /** @inheritdoc */
  getByteLength() {
    return this.byteLength
  }

  /** @inheritdoc */
  async toEager() {
    // Already as eager as binary data gets: hash + size.
    return this
  }

  /** @inheritdoc */
  async toLazy() {
    // Nothing further to defer.
    return this
  }

  /** @inheritdoc */
  async toHollow() {
    // Binary data has no string length.
    return FileData.createHollow(this.byteLength, null)
  }

  /** @inheritdoc
   * @return {Promise<RawFileData>}
   */
  async store() {
    // Content is already in the blob store; just reference it by hash.
    return { hash: this.hash }
  }
}
module.exports = BinaryFileData

View File

@@ -0,0 +1,28 @@
// @ts-check
/**
* @import { ClearTrackingPropsRawData } from '../types'
*/
class ClearTrackingProps {
  /**
   * Tracking directive that removes tracking info from a range. Serialized
   * as `{ type: 'none' }`.
   */
  constructor() {
    this.type = 'none'
  }

  /**
   * All ClearTrackingProps instances are interchangeable, so equality is a
   * pure instanceof check.
   *
   * @param {any} other
   * @returns {boolean}
   */
  equals(other) {
    return other instanceof ClearTrackingProps
  }

  /**
   * @returns {ClearTrackingPropsRawData}
   */
  toRaw() {
    return { type: 'none' }
  }
}
module.exports = ClearTrackingProps

View File

@@ -0,0 +1,124 @@
// @ts-check
const Comment = require('../comment')
/**
* @import { CommentRawData } from "../types"
* @import Range from "../range"
*/
class CommentList {
  /**
   * A collection of comments, keyed by comment id.
   *
   * @param {Comment[]} comments
   */
  constructor(comments) {
    this.comments = new Map(comments.map(comment => [comment.id, comment]))
  }

  /**
   * Iterate over the comments in insertion order.
   *
   * @returns {IterableIterator<Comment>}
   */
  [Symbol.iterator]() {
    return this.comments.values()
  }

  /**
   * Returns the contents of this list in an array
   *
   * @returns {Comment[]}
   */
  toArray() {
    return Array.from(this)
  }

  /**
   * Return the length of the comment list
   *
   * @returns {number}
   */
  get length() {
    return this.comments.size
  }

  /**
   * Return the raw version of the comment list
   *
   * @returns {CommentRawData[]}
   */
  toRaw() {
    const raw = []
    for (const comment of this.comments.values()) {
      raw.push(comment.toRaw())
    }
    return raw
  }

  /**
   * Look up a comment by id.
   *
   * @param {string} id
   * @returns {Comment | undefined}
   */
  getComment(id) {
    return this.comments.get(id)
  }

  /**
   * Add a comment, replacing any existing comment with the same id.
   *
   * @param {Comment} newComment
   */
  add(newComment) {
    this.comments.set(newComment.id, newComment)
  }

  /**
   * Delete a comment by id.
   *
   * @param {string} id
   * @returns {boolean} true if a comment was removed
   */
  delete(id) {
    return this.comments.delete(id)
  }

  /**
   * @param {CommentRawData[]} rawComments
   */
  static fromRaw(rawComments) {
    return new CommentList(rawComments.map(Comment.fromRaw))
  }

  /**
   * Update all comments to account for an inserted range.
   *
   * @param {Range} range the inserted range
   * @param {{ commentIds?: string[] }} opts comments listed in commentIds
   *   are extended to cover the inserted text
   */
  applyInsert(range, opts = {}) {
    // Read the option into a local; do NOT mutate the caller's opts object
    // (the previous implementation assigned opts.commentIds = []).
    const commentIds = opts.commentIds ?? []
    for (const [commentId, comment] of this.comments) {
      const commentAfterInsert = comment.applyInsert(
        range.pos,
        range.length,
        commentIds.includes(commentId)
      )
      this.comments.set(commentId, commentAfterInsert)
    }
  }

  /**
   * Update all comments to account for a deleted range.
   *
   * @param {Range} range the deleted range
   */
  applyDelete(range) {
    for (const [commentId, comment] of this.comments) {
      const commentAfterDelete = comment.applyDelete(range)
      this.comments.set(commentId, commentAfterDelete)
    }
  }

  /**
   * Ids of comments that have at least one range containing the given range.
   *
   * @param {Range} range
   * @returns {string[]}
   */
  idsCoveringRange(range) {
    return Array.from(this.comments.entries())
      .filter(([, comment]) => comment.ranges.some(r => r.contains(range)))
      .map(([id]) => id)
  }
}
module.exports = CommentList

View File

@@ -0,0 +1,134 @@
// @ts-check
'use strict'
const assert = require('check-types').assert
const Blob = require('../blob')
const FileData = require('./')
/**
* @import StringFileData from './string_file_data'
* @import LazyStringFileData from './lazy_string_file_data'
* @import HollowStringFileData from './hollow_string_file_data'
* @import { BlobStore, RawHashFileData } from '../types'
*/
class HashFileData extends FileData {
  /**
   * File data for which we know only the content hash (and optionally the
   * hash of the ranges blob). Content must be fetched from a blob store.
   *
   * @constructor
   * @param {string} hash
   * @param {string} [rangesHash]
   * @see FileData
   */
  constructor(hash, rangesHash) {
    super()
    assert.match(hash, Blob.HEX_HASH_RX, 'HashFileData: bad hash')
    if (rangesHash) {
      assert.match(
        rangesHash,
        Blob.HEX_HASH_RX,
        'HashFileData: bad ranges hash'
      )
    }
    this.hash = hash
    this.rangesHash = rangesHash
  }

  /**
   * Build a HashFileData from its raw form.
   *
   * @param {RawHashFileData} raw
   */
  static fromRaw(raw) {
    return new HashFileData(raw.hash, raw.rangesHash)
  }

  /**
   * @inheritdoc
   * @returns {RawHashFileData}
   */
  toRaw() {
    /** @type RawHashFileData */
    const raw = { hash: this.hash }
    if (this.rangesHash) raw.rangesHash = this.rangesHash
    return raw
  }

  /**
   * @inheritdoc
   * @returns {string}
   */
  getHash() {
    return this.hash
  }

  /**
   * @inheritdoc
   * @returns {string | undefined}
   */
  getRangesHash() {
    return this.rangesHash
  }

  /**
   * @inheritdoc
   * @param {BlobStore} blobStore
   * @returns {Promise<StringFileData>}
   */
  async toEager(blobStore) {
    // Go via the lazy representation, which knows how to materialize both
    // content and ranges.
    const lazy = await this.toLazy(blobStore)
    return await lazy.toEager(blobStore)
  }

  /**
   * @inheritdoc
   * @param {BlobStore} blobStore
   * @returns {Promise<LazyStringFileData>}
   */
  async toLazy(blobStore) {
    // Fetch the content blob and (when present) the ranges blob in parallel.
    const rangesBlobPromise = this.rangesHash
      ? blobStore.getBlob(this.rangesHash)
      : Promise.resolve(undefined)
    const [blob, rangesBlob] = await Promise.all([
      blobStore.getBlob(this.hash),
      rangesBlobPromise,
    ])
    if (rangesBlob === null) {
      // We attempted to look up the blob, but none was found
      throw new Error('Failed to look up rangesHash in blobStore')
    }
    if (!blob) throw new Error('blob not found: ' + this.hash)
    // TODO(das7pad): inline 2nd path of FileData.createLazyFromBlobs?
    // @ts-ignore
    return FileData.createLazyFromBlobs(blob, rangesBlob)
  }

  /**
   * @inheritdoc
   * @param {BlobStore} blobStore
   * @returns {Promise<HollowStringFileData>}
   */
  async toHollow(blobStore) {
    const blob = await blobStore.getBlob(this.hash)
    if (!blob) {
      throw new Error('Failed to look up hash in blobStore')
    }
    // TODO(das7pad): inline 2nd path of FileData.createHollow?
    // @ts-ignore
    return FileData.createHollow(blob.getByteLength(), blob.getStringLength())
  }

  /**
   * @inheritdoc
   * @returns {Promise<RawHashFileData>}
   */
  async store() {
    // Content is already stored; serialize the hashes we hold.
    /** @type RawHashFileData */
    const raw = { hash: this.hash }
    if (this.rangesHash) raw.rangesHash = this.rangesHash
    return raw
  }
}
module.exports = HashFileData

View File

@@ -0,0 +1,55 @@
'use strict'
const assert = require('check-types').assert
const FileData = require('./')
/**
* @import { RawHollowBinaryFileData } from '../types'
*/
class HollowBinaryFileData extends FileData {
  /**
   * Binary file data for which we know only the size in bytes: no hash and
   * no content.
   *
   * @param {number} byteLength
   * @see FileData
   */
  constructor(byteLength) {
    super()
    assert.integer(byteLength, 'HollowBinaryFileData: bad byteLength')
    assert.greaterOrEqual(byteLength, 0, 'HollowBinaryFileData: low byteLength')
    this.byteLength = byteLength
  }

  /**
   * Build from raw form.
   *
   * @param {RawHollowBinaryFileData} raw
   * @returns {HollowBinaryFileData}
   */
  static fromRaw(raw) {
    return new HollowBinaryFileData(raw.byteLength)
  }

  /**
   * @inheritdoc
   * @returns {RawHollowBinaryFileData}
   */
  toRaw() {
    return { byteLength: this.byteLength }
  }

  /** @inheritdoc */
  getByteLength() {
    return this.byteLength
  }

  /** @inheritdoc */
  isEditable() {
    // There is no content to edit.
    return false
  }

  /** @inheritdoc */
  async toHollow() {
    // Already hollow.
    return this
  }
}
module.exports = HollowBinaryFileData

View File

@@ -0,0 +1,69 @@
// @ts-check
'use strict'
const assert = require('check-types').assert
const FileData = require('./')
/**
* @import { RawHollowStringFileData } from '../types'
* @import EditOperation from '../operation/edit_operation'
*/
class HollowStringFileData extends FileData {
  /**
   * Text file data for which we know only the length in characters: no hash
   * and no content.
   *
   * @param {number} stringLength
   * @see FileData
   */
  constructor(stringLength) {
    super()
    assert.integer(stringLength, 'HollowStringFileData: bad stringLength')
    assert.greaterOrEqual(
      stringLength,
      0,
      'HollowStringFileData: low stringLength'
    )
    this.stringLength = stringLength
  }

  /**
   * Build from raw form.
   *
   * @param {RawHollowStringFileData} raw
   * @returns {HollowStringFileData}
   */
  static fromRaw(raw) {
    return new HollowStringFileData(raw.stringLength)
  }

  /**
   * @inheritdoc
   * @returns {RawHollowStringFileData}
   */
  toRaw() {
    return { stringLength: this.stringLength }
  }

  /** @inheritdoc */
  getStringLength() {
    return this.stringLength
  }

  /** @inheritdoc */
  isEditable() {
    // Edits can be applied to the length even without the content itself.
    return true
  }

  /** @inheritdoc */
  async toHollow() {
    // Already hollow.
    return this
  }

  /**
   * Apply an edit by tracking only its effect on the string length.
   *
   * @inheritdoc
   * @param {EditOperation} operation
   */
  edit(operation) {
    this.stringLength = operation.applyToLength(this.stringLength)
  }
}
module.exports = HollowStringFileData

View File

@@ -0,0 +1,229 @@
// @ts-check
'use strict'
const assert = require('check-types').assert
const Blob = require('../blob')
/**
* @import { BlobStore, ReadonlyBlobStore, RawFileData, CommentRawData } from "../types"
* @import EditOperation from "../operation/edit_operation"
* @import CommentList from "../file_data/comment_list"
* @import TrackedChangeList from "../file_data/tracked_change_list"
*/
/**
* Helper to represent the content of a file. This class and its subclasses
* should be used only through {@link File}.
*/
class FileData {
  /** @see File.fromRaw
   * @param {RawFileData} raw
   */
  static fromRaw(raw) {
    // Dispatch on which keys the raw object carries:
    //   hash + byteLength   -> BinaryFileData
    //   hash + stringLength -> LazyStringFileData
    //   hash alone          -> HashFileData
    //   byteLength          -> HollowBinaryFileData
    //   stringLength        -> HollowStringFileData
    //   content             -> StringFileData
    // TODO(das7pad): can we teach typescript to understand our polymorphism?
    if (Object.prototype.hasOwnProperty.call(raw, 'hash')) {
      if (Object.prototype.hasOwnProperty.call(raw, 'byteLength'))
        // @ts-ignore
        return BinaryFileData.fromRaw(raw)
      if (Object.prototype.hasOwnProperty.call(raw, 'stringLength'))
        // @ts-ignore
        return LazyStringFileData.fromRaw(raw)
      // @ts-ignore
      return HashFileData.fromRaw(raw)
    }
    if (Object.prototype.hasOwnProperty.call(raw, 'byteLength'))
      // @ts-ignore
      return HollowBinaryFileData.fromRaw(raw)
    if (Object.prototype.hasOwnProperty.call(raw, 'stringLength'))
      // @ts-ignore
      return HollowStringFileData.fromRaw(raw)
    if (Object.prototype.hasOwnProperty.call(raw, 'content'))
      // @ts-ignore
      return StringFileData.fromRaw(raw)
    throw new Error('FileData: bad raw object ' + JSON.stringify(raw))
  }
  /** @see File.createHollow
   * @param {number} byteLength
   * @param {number} [stringLength]
   */
  static createHollow(byteLength, stringLength) {
    // A null/undefined stringLength means the content is binary.
    if (stringLength == null) {
      return new HollowBinaryFileData(byteLength)
    }
    return new HollowStringFileData(stringLength)
  }
  /**
   * @see File.createLazyFromBlob
   * @param {Blob} blob
   * @param {Blob} [rangesBlob]
   */
  static createLazyFromBlobs(blob, rangesBlob) {
    assert.instance(blob, Blob, 'FileData: bad blob')
    // A blob without a string length holds binary content.
    const stringLength = blob.getStringLength()
    if (stringLength == null) {
      return new BinaryFileData(blob.getHash(), blob.getByteLength())
    }
    return new LazyStringFileData(
      blob.getHash(),
      rangesBlob?.getHash(),
      stringLength
    )
  }
  /**
   * @returns {RawFileData}
   */
  toRaw() {
    throw new Error('FileData: toRaw not implemented')
  }
  /**
   * @see File#getHash
   * @return {string | null | undefined}
   */
  getHash() {
    // Base implementation: hash not known.
    return null
  }
  /**
   * @see File#getHash
   * @return {string | null | undefined}
   */
  getRangesHash() {
    // Base implementation: ranges hash not known.
    return null
  }
  /**
   * @see File#getContent
   * @param {import('../file').FileGetContentOptions} [opts]
   * @return {string | null | undefined}
   */
  getContent(opts = {}) {
    // Base implementation: content not available.
    return null
  }
  /**
   * @see File#isEditable
   * @return {boolean | null | undefined} null if it is not currently known
   */
  isEditable() {
    return null
  }
  /**
   * @see File#getByteLength
   * @return {number | null | undefined}
   */
  getByteLength() {
    return null
  }
  /**
   * @see File#getStringLength
   * @return {number | null | undefined}
   */
  getStringLength() {
    return null
  }
  /**
   * @see File#edit
   * @param {EditOperation} editOperation
   */
  edit(editOperation) {
    throw new Error('edit not implemented for ' + JSON.stringify(this))
  }
  /**
   * @function
   * @param {ReadonlyBlobStore} blobStore
   * @return {Promise<FileData>}
   * @abstract
   * @see FileData#load
   */
  async toEager(blobStore) {
    throw new Error('toEager not implemented for ' + JSON.stringify(this))
  }
  /**
   * @function
   * @param {ReadonlyBlobStore} blobStore
   * @return {Promise<FileData>}
   * @abstract
   * @see FileData#load
   */
  async toLazy(blobStore) {
    throw new Error('toLazy not implemented for ' + JSON.stringify(this))
  }
  /**
   * @function
   * @param {ReadonlyBlobStore} blobStore
   * @return {Promise<FileData>}
   * @abstract
   * @see FileData#load
   */
  async toHollow(blobStore) {
    throw new Error('toHollow not implemented for ' + JSON.stringify(this))
  }
  /**
   * Convert this data to the requested representation kind.
   *
   * @see File#load
   * @param {string} kind one of 'eager', 'lazy' or 'hollow'
   * @param {ReadonlyBlobStore} blobStore
   * @return {Promise<FileData>}
   */
  async load(kind, blobStore) {
    if (kind === 'eager') return await this.toEager(blobStore)
    if (kind === 'lazy') return await this.toLazy(blobStore)
    if (kind === 'hollow') return await this.toHollow(blobStore)
    throw new Error('bad file data load kind: ' + kind)
  }
  /**
   * @see File#store
   * @function
   * @param {BlobStore} blobStore
   * @return {Promise<RawFileData>} a raw HashFile
   * @abstract
   */
  async store(blobStore) {
    throw new Error('store not implemented for ' + JSON.stringify(this))
  }
  /**
   * @see File#getComments
   * @function
   * @return {CommentList}
   * @abstract
   */
  getComments() {
    throw new Error('getComments not implemented for ' + JSON.stringify(this))
  }
  /**
   * @see File#getTrackedChanges
   * @function
   * @return {TrackedChangeList}
   * @abstract
   */
  getTrackedChanges() {
    throw new Error(
      'getTrackedChanges not implemented for ' + JSON.stringify(this)
    )
  }
}
module.exports = FileData
const BinaryFileData = require('./binary_file_data')
const HashFileData = require('./hash_file_data')
const HollowBinaryFileData = require('./hollow_binary_file_data')
const HollowStringFileData = require('./hollow_string_file_data')
const LazyStringFileData = require('./lazy_string_file_data')
const StringFileData = require('./string_file_data')

View File

@@ -0,0 +1,190 @@
// @ts-check
'use strict'
const _ = require('lodash')
const assert = require('check-types').assert
const Blob = require('../blob')
const FileData = require('./')
const EagerStringFileData = require('./string_file_data')
const EditOperation = require('../operation/edit_operation')
const EditOperationBuilder = require('../operation/edit_operation_builder')
/**
* @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawFileData, RawLazyStringFileData } from '../types'
*/
class LazyStringFileData extends FileData {
  /**
   * Text file data addressed by the hash of its last stored content, plus a
   * list of edit operations applied since that content was stored. Content
   * is only fetched from the blob store on demand.
   *
   * @param {string} hash
   * @param {string | undefined} rangesHash
   * @param {number} stringLength length of the *current* content, i.e.
   *   after `operations` are applied
   * @param {Array.<EditOperation>} [operations]
   * @see FileData
   */
  constructor(hash, rangesHash, stringLength, operations) {
    super()
    assert.match(hash, Blob.HEX_HASH_RX)
    if (rangesHash) {
      assert.match(rangesHash, Blob.HEX_HASH_RX)
    }
    assert.greaterOrEqual(stringLength, 0)
    assert.maybe.array.of.instance(operations, EditOperation)
    this.hash = hash
    this.rangesHash = rangesHash
    this.stringLength = stringLength
    this.operations = operations || []
  }
  /**
   * @param {RawLazyStringFileData} raw
   * @returns {LazyStringFileData}
   */
  static fromRaw(raw) {
    return new LazyStringFileData(
      raw.hash,
      raw.rangesHash,
      raw.stringLength,
      raw.operations && _.map(raw.operations, EditOperationBuilder.fromJSON)
    )
  }
  /**
   * @inheritdoc
   * @returns {RawLazyStringFileData}
   */
  toRaw() {
    /** @type RawLazyStringFileData */
    const raw = {
      hash: this.hash,
      stringLength: this.stringLength,
    }
    if (this.rangesHash) {
      raw.rangesHash = this.rangesHash
    }
    if (this.operations.length) {
      raw.operations = _.map(this.operations, function (operation) {
        return operation.toJSON()
      })
    }
    return raw
  }
  /** @inheritdoc */
  getHash() {
    // The stored hash is stale while edits are pending.
    if (this.operations.length) return null
    return this.hash
  }
  /** @inheritdoc */
  getRangesHash() {
    // The stored ranges hash is stale while edits are pending.
    if (this.operations.length) return null
    return this.rangesHash
  }
  /** @inheritdoc */
  isEditable() {
    return true
  }
  /**
   * For project quota checking, we approximate the byte length by the UTF-8
   * length for hollow files. This isn't strictly speaking correct; it is an
   * underestimate of byte length.
   *
   * @return {number}
   */
  getByteLength() {
    return this.stringLength
  }
  /** @inheritdoc */
  getStringLength() {
    return this.stringLength
  }
  /**
   * Get the cached text operations that are to be applied to this file to get
   * from the content with its last known hash to its latest content.
   *
   * @return {Array.<EditOperation>}
   */
  getOperations() {
    return this.operations
  }
  /**
   * @inheritdoc
   * @param {ReadonlyBlobStore} blobStore
   * @returns {Promise<EagerStringFileData>}
   */
  async toEager(blobStore) {
    // Fetch the content and (if present) the ranges blob in parallel.
    const [content, ranges] = await Promise.all([
      blobStore.getString(this.hash),
      this.rangesHash
        ? /** @type {Promise<RangesBlob>} */ (
            blobStore.getObject(this.rangesHash)
          )
        : Promise.resolve(undefined),
    ])
    const file = new EagerStringFileData(
      content,
      ranges?.comments,
      ranges?.trackedChanges
    )
    // Replay the pending edits on top of the stored content.
    applyOperations(this.operations, file)
    return file
  }
  /** @inheritdoc */
  async toLazy() {
    return this
  }
  /** @inheritdoc */
  async toHollow() {
    // TODO(das7pad): inline 2nd path of FileData.createLazyFromBlobs?
    // @ts-ignore
    return FileData.createHollow(null, this.stringLength)
  }
  /** @inheritdoc
   * @param {EditOperation} operation
   */
  edit(operation) {
    // Track the new length eagerly and queue the operation for replay.
    this.stringLength = operation.applyToLength(this.stringLength)
    this.operations.push(operation)
  }
  /** @inheritdoc
   * @param {BlobStore} blobStore
   * @return {Promise<RawFileData>}
   */
  async store(blobStore) {
    if (this.operations.length === 0) {
      // No pending edits: the stored hashes are still valid.
      /** @type RawFileData */
      const raw = { hash: this.hash }
      if (this.rangesHash) {
        raw.rangesHash = this.rangesHash
      }
      return raw
    }
    // Materialize the edited content and store that instead.
    const eager = await this.toEager(blobStore)
    // NOTE(review): the pending operations are cleared before eager.store()
    // resolves; if that store fails they are not restored — confirm callers
    // discard this object on error.
    this.operations.length = 0
    /** @type RawFileData */
    return await eager.store(blobStore)
  }
}
/**
*
* @param {EditOperation[]} operations
* @param {EagerStringFileData} file
* @returns {void}
*/
function applyOperations(operations, file) {
  // Apply each cached operation in order; each one mutates the eager file.
  for (const operation of operations) {
    operation.apply(file)
  }
}
module.exports = LazyStringFileData

View File

@@ -0,0 +1,151 @@
// @ts-check
'use strict'
const assert = require('check-types').assert
const FileData = require('./')
const CommentList = require('./comment_list')
const TrackedChangeList = require('./tracked_change_list')
/**
* @import { StringFileRawData, RawFileData, BlobStore, CommentRawData } from "../types"
* @import { TrackedChangeRawData, RangesBlob } from "../types"
* @import EditOperation from "../operation/edit_operation"
*/
class StringFileData extends FileData {
  /**
   * Eager text file data: the full content is held in memory together with
   * its comments and tracked changes.
   *
   * @param {string} content
   * @param {CommentRawData[]} [rawComments]
   * @param {TrackedChangeRawData[]} [rawTrackedChanges]
   */
  constructor(content, rawComments = [], rawTrackedChanges = []) {
    super()
    assert.string(content)
    this.content = content
    this.comments = CommentList.fromRaw(rawComments)
    this.trackedChanges = TrackedChangeList.fromRaw(rawTrackedChanges)
  }
  /**
   * @param {StringFileRawData} raw
   * @returns {StringFileData}
   */
  static fromRaw(raw) {
    return new StringFileData(
      raw.content,
      raw.comments || [],
      raw.trackedChanges || []
    )
  }
  /**
   * @inheritdoc
   * @returns {StringFileRawData}
   */
  toRaw() {
    /** @type StringFileRawData */
    const raw = { content: this.content }
    // Comments and tracked changes are only serialized when present.
    if (this.comments.length) {
      raw.comments = this.comments.toRaw()
    }
    if (this.trackedChanges.length) {
      raw.trackedChanges = this.trackedChanges.toRaw()
    }
    return raw
  }
  /** @inheritdoc */
  isEditable() {
    return true
  }
  /**
   * Return the content; with opts.filterTrackedDeletes, text covered by
   * tracked deletions is omitted.
   *
   * @inheritdoc
   * @param {import('../file').FileGetContentOptions} [opts]
   */
  getContent(opts = {}) {
    let content = ''
    let cursor = 0
    if (opts.filterTrackedDeletes) {
      // Copy everything except the spans covered by tracked deletions;
      // asSorted() yields changes in ascending range order.
      for (const tc of this.trackedChanges.asSorted()) {
        if (tc.tracking.type !== 'delete') {
          continue
        }
        if (cursor < tc.range.start) {
          content += this.content.slice(cursor, tc.range.start)
        }
        // skip the tracked change
        cursor = tc.range.end
      }
    }
    // Append the remainder after the last tracked deletion (or the whole
    // content when not filtering).
    if (cursor < this.content.length) {
      content += this.content.slice(cursor)
    }
    return content
  }
  /** @inheritdoc */
  getByteLength() {
    return Buffer.byteLength(this.content)
  }
  /** @inheritdoc */
  getStringLength() {
    return this.content.length
  }
  /**
   * @inheritdoc
   * @param {EditOperation} operation */
  edit(operation) {
    operation.apply(this)
  }
  /** @inheritdoc */
  getComments() {
    return this.comments
  }
  /** @inheritdoc */
  getTrackedChanges() {
    return this.trackedChanges
  }
  /**
   * @inheritdoc
   * @returns {Promise<StringFileData>}
   */
  async toEager() {
    return this
  }
  /** @inheritdoc */
  async toHollow() {
    return FileData.createHollow(this.getByteLength(), this.getStringLength())
  }
  /**
   * @inheritdoc
   * @param {BlobStore} blobStore
   * @return {Promise<RawFileData>}
   */
  async store(blobStore) {
    const blob = await blobStore.putString(this.content)
    // Comments and tracked changes go into a separate "ranges" blob,
    // referenced by its own hash.
    if (this.comments.comments.size || this.trackedChanges.length) {
      /** @type {RangesBlob} */
      const ranges = {
        comments: this.getComments().toRaw(),
        trackedChanges: this.trackedChanges.toRaw(),
      }
      const rangesBlob = await blobStore.putObject(ranges)
      return { hash: blob.getHash(), rangesHash: rangesBlob.getHash() }
    }
    return { hash: blob.getHash() }
  }
}
module.exports = StringFileData

View File

@@ -0,0 +1,89 @@
// @ts-check
const Range = require('../range')
const TrackingProps = require('./tracking_props')
/**
* @import { TrackedChangeRawData } from "../types"
*/
class TrackedChange {
  /**
   * A contiguous range of text together with the tracking metadata
   * describing the change over that range.
   *
   * @param {Range} range
   * @param {TrackingProps} tracking
   */
  constructor(range, tracking) {
    /**
     * @readonly
     * @type {Range}
     */
    this.range = range
    /**
     * @readonly
     * @type {TrackingProps}
     */
    this.tracking = tracking
  }

  /**
   * Build a TrackedChange from its raw form.
   *
   * @param {TrackedChangeRawData} raw
   * @returns {TrackedChange}
   */
  static fromRaw(raw) {
    const range = Range.fromRaw(raw.range)
    const tracking = TrackingProps.fromRaw(raw.tracking)
    return new TrackedChange(range, tracking)
  }

  /**
   * Serialize to raw form.
   *
   * @returns {TrackedChangeRawData}
   */
  toRaw() {
    return {
      range: this.range.toRaw(),
      tracking: this.tracking.toRaw(),
    }
  }

  /**
   * Checks whether the tracked change can be merged with another
   * @param {TrackedChange} other
   * @returns {boolean}
   */
  canMerge(other) {
    if (!(other instanceof TrackedChange)) {
      return false
    }
    // Mergeable only for the same change type and user, with ranges that
    // touch and are themselves mergeable.
    return (
      this.tracking.type === other.tracking.type &&
      this.tracking.userId === other.tracking.userId &&
      this.range.touches(other.range) &&
      this.range.canMerge(other.range)
    )
  }

  /**
   * Merges another tracked change into this, updating the range and tracking
   * timestamp
   * @param {TrackedChange} other
   * @returns {TrackedChange}
   */
  merge(other) {
    if (!this.canMerge(other)) {
      throw new Error('Cannot merge tracked changes')
    }
    // The merged change keeps the newer of the two timestamps.
    const newerTs =
      this.tracking.ts.getTime() > other.tracking.ts.getTime()
        ? this.tracking.ts
        : other.tracking.ts
    return new TrackedChange(
      this.range.merge(other.range),
      new TrackingProps(this.tracking.type, this.tracking.userId, newerTs)
    )
  }
}
module.exports = TrackedChange

View File

@@ -0,0 +1,276 @@
// @ts-check
const Range = require('../range')
const TrackedChange = require('./tracked_change')
const TrackingProps = require('../file_data/tracking_props')
/**
* @import { TrackingDirective, TrackedChangeRawData } from "../types"
*/
class TrackedChangeList {
  /**
   * An ordered list of tracked changes over a single document.
   * Invariant: ranges are non-overlapping and non-empty (enforced by
   * _mergeRanges).
   *
   * @param {TrackedChange[]} trackedChanges
   */
  constructor(trackedChanges) {
    /**
     * @type {TrackedChange[]}
     */
    this._trackedChanges = trackedChanges
  }
  /**
   * Build a TrackedChangeList from raw data.
   *
   * @param {TrackedChangeRawData[]} raw
   * @returns {TrackedChangeList}
   */
  static fromRaw(raw) {
    return new TrackedChangeList(raw.map(TrackedChange.fromRaw))
  }
  /**
   * Converts the tracked changes to a raw object
   * @returns {TrackedChangeRawData[]}
   */
  toRaw() {
    return this._trackedChanges.map(change => change.toRaw())
  }
  // Number of tracked changes in the list.
  get length() {
    return this._trackedChanges.length
  }
  /**
   * Tracked changes in ascending range order, as a defensive copy.
   *
   * @returns {readonly TrackedChange[]}
   */
  asSorted() {
    // NOTE: Once all code dependent on this is typed, we can just return
    // _trackedChanges.
    return Array.from(this._trackedChanges)
  }
  /**
   * Returns the tracked changes that are fully included in the range
   * @param {Range} range
   * @returns {TrackedChange[]}
   */
  inRange(range) {
    return this._trackedChanges.filter(change => range.contains(change.range))
  }
  /**
   * Returns the tracking props for a given range.
   * @param {Range} range
   * @returns {TrackingProps | undefined}
   */
  propsAtRange(range) {
    return this._trackedChanges.find(change => change.range.contains(range))
      ?.tracking
  }
  /**
   * Removes the tracked changes that are fully included in the range
   * @param {Range} range
   */
  removeInRange(range) {
    this._trackedChanges = this._trackedChanges.filter(
      change => !range.contains(change.range)
    )
  }
  /**
   * Adds a tracked change to the list
   * @param {TrackedChange} trackedChange
   */
  add(trackedChange) {
    this._trackedChanges.push(trackedChange)
    this._mergeRanges()
  }
  /**
   * Collapses consecutive (and compatible) ranges
   * @returns {void}
   */
  _mergeRanges() {
    if (this._trackedChanges.length < 2) {
      return
    }
    // ranges are non-overlapping so we can sort based on their first indices
    this._trackedChanges.sort((a, b) => a.range.start - b.range.start)
    const newTrackedChanges = [this._trackedChanges[0]]
    for (let i = 1; i < this._trackedChanges.length; i++) {
      const last = newTrackedChanges[newTrackedChanges.length - 1]
      const current = this._trackedChanges[i]
      // Enforce the class invariants: no overlapping and no empty ranges.
      if (last.range.overlaps(current.range)) {
        throw new Error('Ranges cannot overlap')
      }
      if (current.range.isEmpty()) {
        throw new Error('Tracked changes range cannot be empty')
      }
      if (last.canMerge(current)) {
        newTrackedChanges[newTrackedChanges.length - 1] = last.merge(current)
      } else {
        newTrackedChanges.push(current)
      }
    }
    this._trackedChanges = newTrackedChanges
  }
  /**
   * Update the tracked changes to reflect an insertion of insertedText at
   * cursor. Ranges after the cursor shift right; a range containing the
   * cursor is split. When opts.tracking is set, the inserted text itself
   * becomes a new tracked change.
   *
   * @param {number} cursor
   * @param {string} insertedText
   * @param {{tracking?: TrackingProps}} opts
   */
  applyInsert(cursor, insertedText, opts = {}) {
    const newTrackedChanges = []
    for (const trackedChange of this._trackedChanges) {
      if (
        // The insertion happens before this tracked change (or exactly at
        // its start), so the whole range shifts right by the inserted length
        trackedChange.range.startIsAfter(cursor) ||
        cursor === trackedChange.range.start
      ) {
        newTrackedChanges.push(
          new TrackedChange(
            trackedChange.range.moveBy(insertedText.length),
            trackedChange.tracking
          )
        )
      } else if (cursor === trackedChange.range.end) {
        // The insertion is at the end of the tracked change. So we don't need
        // to move it.
        newTrackedChanges.push(trackedChange)
      } else if (trackedChange.range.containsCursor(cursor)) {
        // The insertion point falls strictly inside this tracked change:
        // split in three chunks. The middle one is added if it is a tracked insertion
        const [firstRange, , thirdRange] = trackedChange.range.insertAt(
          cursor,
          insertedText.length
        )
        const firstPart = new TrackedChange(firstRange, trackedChange.tracking)
        if (!firstPart.range.isEmpty()) {
          newTrackedChanges.push(firstPart)
        }
        // second part will be added at the end if it is a tracked insertion
        const thirdPart = new TrackedChange(thirdRange, trackedChange.tracking)
        if (!thirdPart.range.isEmpty()) {
          newTrackedChanges.push(thirdPart)
        }
      } else {
        // The tracked change is entirely before the insertion point; keep it.
        newTrackedChanges.push(trackedChange)
      }
    }
    if (opts.tracking) {
      // This is a new tracked change
      const newTrackedChange = new TrackedChange(
        new Range(cursor, insertedText.length),
        opts.tracking
      )
      newTrackedChanges.push(newTrackedChange)
    }
    this._trackedChanges = newTrackedChanges
    this._mergeRanges()
  }
  /**
   * Update the tracked changes to reflect a deletion of length characters at
   * cursor. Fully deleted ranges are dropped, partially overlapping ranges
   * are trimmed, and ranges after the deletion shift left.
   *
   * @param {number} cursor
   * @param {number} length
   */
  applyDelete(cursor, length) {
    const newTrackedChanges = []
    for (const trackedChange of this._trackedChanges) {
      const deletedRange = new Range(cursor, length)
      if (deletedRange.contains(trackedChange.range)) {
        // The tracked change is entirely deleted; drop it.
        continue
      } else if (deletedRange.overlaps(trackedChange.range)) {
        // Partial overlap: trim away the deleted part.
        const newRange = trackedChange.range.subtract(deletedRange)
        if (!newRange.isEmpty()) {
          newTrackedChanges.push(
            new TrackedChange(newRange, trackedChange.tracking)
          )
        }
      } else if (trackedChange.range.startIsAfter(cursor)) {
        // The tracked change is after the deletion; shift it left.
        newTrackedChanges.push(
          new TrackedChange(
            trackedChange.range.moveBy(-length),
            trackedChange.tracking
          )
        )
      } else {
        newTrackedChanges.push(trackedChange)
      }
    }
    this._trackedChanges = newTrackedChanges
    this._mergeRanges()
  }
  /**
   * Update the tracked changes to reflect a retain carrying a tracking
   * directive: existing tracking over the retained range is replaced (or
   * cleared, when the directive is not a TrackingProps instance).
   *
   * @param {number} cursor
   * @param {number} length
   * @param {{tracking?: TrackingDirective}} opts
   */
  applyRetain(cursor, length, opts = {}) {
    // If there's no tracking info, leave everything as-is
    if (!opts.tracking) {
      return
    }
    const newTrackedChanges = []
    const retainedRange = new Range(cursor, length)
    for (const trackedChange of this._trackedChanges) {
      if (retainedRange.contains(trackedChange.range)) {
        // Remove the range
      } else if (retainedRange.overlaps(trackedChange.range)) {
        if (trackedChange.range.contains(retainedRange)) {
          // The retain is strictly inside: keep the part on each side.
          const [leftRange, rightRange] = trackedChange.range.splitAt(cursor)
          if (!leftRange.isEmpty()) {
            newTrackedChanges.push(
              new TrackedChange(leftRange, trackedChange.tracking)
            )
          }
          if (!rightRange.isEmpty() && rightRange.length > length) {
            newTrackedChanges.push(
              new TrackedChange(
                rightRange.moveBy(length).shrinkBy(length),
                trackedChange.tracking
              )
            )
          }
        } else if (retainedRange.start <= trackedChange.range.start) {
          // overlaps to the left
          const [, reducedRange] = trackedChange.range.splitAt(
            retainedRange.end
          )
          if (!reducedRange.isEmpty()) {
            newTrackedChanges.push(
              new TrackedChange(reducedRange, trackedChange.tracking)
            )
          }
        } else {
          // overlaps to the right
          const [reducedRange] = trackedChange.range.splitAt(cursor)
          if (!reducedRange.isEmpty()) {
            newTrackedChanges.push(
              new TrackedChange(reducedRange, trackedChange.tracking)
            )
          }
        }
      } else {
        // keep the range
        newTrackedChanges.push(trackedChange)
      }
    }
    if (opts.tracking instanceof TrackingProps) {
      // This is a new tracked change
      const newTrackedChange = new TrackedChange(retainedRange, opts.tracking)
      newTrackedChanges.push(newTrackedChange)
    }
    this._trackedChanges = newTrackedChanges
    this._mergeRanges()
  }
}
module.exports = TrackedChangeList

View File

@@ -0,0 +1,67 @@
// @ts-check
/**
* @import { TrackingPropsRawData, TrackingDirective } from "../types"
*/
class TrackingProps {
  /**
   * Metadata for a tracked change: the kind of change, the user who made it
   * and when.
   *
   * @param {'insert' | 'delete'} type
   * @param {string} userId
   * @param {Date} ts
   */
  constructor(type, userId, ts) {
    /**
     * @readonly
     * @type {'insert' | 'delete'}
     */
    this.type = type
    /**
     * @readonly
     * @type {string}
     */
    this.userId = userId
    /**
     * @readonly
     * @type {Date}
     */
    this.ts = ts
  }

  /**
   * Build from raw form, reviving the timestamp into a Date.
   *
   * @param {TrackingPropsRawData} raw
   * @returns {TrackingProps}
   */
  static fromRaw(raw) {
    return new TrackingProps(raw.type, raw.userId, new Date(raw.ts))
  }

  /**
   * Serialize; the timestamp becomes an ISO 8601 string.
   *
   * @returns {TrackingPropsRawData}
   */
  toRaw() {
    return {
      type: this.type,
      userId: this.userId,
      ts: this.ts.toISOString(),
    }
  }

  /**
   * Value equality: same type, user and timestamp (millisecond precision).
   *
   * @param {TrackingDirective} [other]
   * @returns {boolean}
   */
  equals(other) {
    if (!(other instanceof TrackingProps)) {
      return false
    }
    const sameType = this.type === other.type
    const sameUser = this.userId === other.userId
    const sameTime = this.ts.getTime() === other.ts.getTime()
    return sameType && sameUser && sameTime
  }
}
module.exports = TrackingProps

View File

@@ -0,0 +1,390 @@
// @ts-check
'use strict'
const _ = require('lodash')
const assert = require('check-types').assert
const OError = require('@overleaf/o-error')
const pMap = require('p-map')
const File = require('./file')
const safePathname = require('./safe_pathname')
/**
* @import { RawFile, RawFileMap } from './types'
*
* @typedef {Record<String, File | null>} FileMapData
*/
// Base class for all pathname-related FileMap errors.
class PathnameError extends OError {}
class NonUniquePathnameError extends PathnameError {
  /**
   * Thrown when two entries in a FileMap share the same pathname.
   *
   * @param {string[]} pathnames the offending pathnames
   */
  constructor(pathnames) {
    super('pathnames are not unique', { pathnames })
    this.pathnames = pathnames
  }
}
class BadPathnameError extends PathnameError {
  /**
   * Thrown when a pathname fails validation.
   *
   * @param {string} pathname
   * @param {string} reason
   */
  constructor(pathname, reason) {
    // Truncate long pathnames so the error info stays compact.
    const display =
      pathname.length > 10
        ? pathname.slice(0, 5) + '...' + pathname.slice(-5)
        : pathname
    super('invalid pathname', { reason, pathname: display })
    this.pathname = display
  }
}
class PathnameConflictError extends PathnameError {
  /**
   * Thrown when a pathname would refer to both a file and a directory in
   * the same snapshot.
   *
   * @param {string} pathname
   */
  constructor(pathname) {
    super('pathname conflicts with another file', { pathname })
    this.pathname = pathname
  }
}
class FileNotFoundError extends PathnameError {
  /**
   * Thrown when an operation references a pathname not present in the map.
   *
   * @param {string} pathname
   */
  constructor(pathname) {
    super('file does not exist', { pathname })
    this.pathname = pathname
  }
}
/**
 * A set of {@link File}s. Several properties are enforced on the pathnames:
 *
 * 1. File names and paths are case sensitive and can differ by case alone. This
 * is consistent with most Linux file systems, but it is not consistent with
 * Windows or OS X. Ideally, we would be case-preserving and case insensitive,
 * like they are. And we used to be, but it caused too many incompatibilities
 * with the old system, which was case sensitive. See
 * https://github.com/overleaf/overleaf-ot-prototype/blob/
 * 19ed046c09f5a4d14fa12b3ea813ce0d977af88a/editor/core/lib/file_map.js
 * for an implementation of this map with those properties.
 *
 * 2. Uniqueness: No two pathnames are the same.
 *
 * 3. No type conflicts: A pathname cannot refer to both a file and a directory
 * within the same snapshot. That is, you can't have pathnames `a` and `a/b` in
 * the same file map; {@see FileMap#wouldConflict}.
 */
class FileMap {
  static PathnameError = PathnameError
  static NonUniquePathnameError = NonUniquePathnameError
  static BadPathnameError = BadPathnameError
  static PathnameConflictError = PathnameConflictError
  static FileNotFoundError = FileNotFoundError
  /**
   * Validates pathname uniqueness and conflict-freedom on construction.
   *
   * @param {Record<String, File | null>} files
   */
  constructor(files) {
    // create bare object for use as Map
    // http://ryanmorr.com/true-hash-maps-in-javascript/
    /** @type FileMapData */
    this.files = Object.create(null)
    _.assign(this.files, files)
    checkPathnamesAreUnique(this.files)
    checkPathnamesDoNotConflict(this)
  }
  /**
   * Deserialize a FileMap from its raw form.
   *
   * @param {RawFileMap} raw
   * @returns {FileMap}
   */
  static fromRaw(raw) {
    assert.object(raw, 'bad raw files')
    return new FileMap(_.mapValues(raw, File.fromRaw))
  }
  /**
   * Convert to raw object for serialization.
   *
   * @return {RawFileMap}
   */
  toRaw() {
    /**
     * @param {File} file
     * @return {RawFile}
     */
    function fileToRaw(file) {
      return file.toRaw()
    }
    // TODO(das7pad): refine types to enforce no nulls in FileMapData
    // @ts-ignore
    return _.mapValues(this.files, fileToRaw)
  }
  /**
   * Create the given file. Replaces any existing file at the same pathname.
   *
   * @param {string} pathname
   * @param {File} file
   */
  addFile(pathname, file) {
    checkPathname(pathname)
    assert.object(file, 'bad file')
    // TODO(das7pad): make ignoredPathname argument fully optional
    // @ts-ignore
    checkNewPathnameDoesNotConflict(this, pathname)
    addFile(this.files, pathname, file)
  }
  /**
   * Remove the given file.
   *
   * @param {string} pathname
   * @throws {FileNotFoundError} if no file exists at the pathname
   */
  removeFile(pathname) {
    checkPathname(pathname)
    const key = findPathnameKey(this.files, pathname)
    if (!key) {
      throw new FileMap.FileNotFoundError(pathname)
    }
    delete this.files[key]
  }
  /**
   * Move or remove a file. If the origin file does not exist, or if the old
   * and new paths are identical, this has no effect.
   *
   * @param {string} pathname
   * @param {string} newPathname if a blank string, {@link FileMap#removeFile}
   */
  moveFile(pathname, newPathname) {
    if (pathname === newPathname) return
    if (newPathname === '') return this.removeFile(pathname)
    checkPathname(pathname)
    checkPathname(newPathname)
    // the origin pathname is ignored when checking for conflicts, because the
    // file is about to leave that location
    checkNewPathnameDoesNotConflict(this, newPathname, pathname)
    const key = findPathnameKey(this.files, pathname)
    if (!key) {
      throw new FileMap.FileNotFoundError(pathname)
    }
    const file = this.files[key]
    delete this.files[key]
    addFile(this.files, newPathname, file)
  }
  /**
   * The number of files in the file map.
   *
   * @return {number}
   */
  countFiles() {
    return _.size(this.files)
  }
  /**
   * Get a file by its pathname; undefined when no file exists there.
   *
   * @param {string} pathname
   * @return {File | null | undefined}
   */
  getFile(pathname) {
    const key = findPathnameKey(this.files, pathname)
    if (key) return this.files[key]
  }
  /**
   * Whether the given pathname conflicts with any file in the map.
   *
   * Paths conflict in type if one path is a strict prefix of the other path. For
   * example, 'a/b' conflicts with 'a', because in the former case 'a' is a
   * folder, but in the latter case it is a file. Similarly, the pathname 'a/b/c'
   * conflicts with 'a' and 'a/b', but it does not conflict with 'a/b/c', 'a/x',
   * or 'a/b/x'. (In our case, identical paths don't conflict, because AddFile
   * and MoveFile overwrite existing files.)
   *
   * @param {string} pathname
   * @param {string?} ignoredPathname pretend this pathname does not exist
   */
  wouldConflict(pathname, ignoredPathname) {
    checkPathname(pathname)
    assert.maybe.string(ignoredPathname)
    const pathnames = this.getPathnames()
    const dirname = pathname + '/'
    // Check the filemap to see whether the supplied pathname is a
    // parent of any entry, or any entry is a parent of the pathname.
    for (let i = 0; i < pathnames.length; i++) {
      // First check if pathname is a strict prefix of pathnames[i] (and that
      // pathnames[i] is not ignored)
      if (
        pathnames[i].startsWith(dirname) &&
        !pathnamesEqual(pathnames[i], ignoredPathname)
      ) {
        return true
      }
      // Now make the reverse check, whether pathnames[i] is a strict prefix of
      // pathname. To avoid expensive string concatenation on each pathname we
      // first perform a partial check with a.startsWith(b), and then do the
      // full check for a subsequent '/' if this passes. This saves about 25%
      // of the runtime. Again only return a conflict if pathnames[i] is not
      // ignored.
      if (
        pathname.startsWith(pathnames[i]) &&
        pathname.length > pathnames[i].length &&
        pathname[pathnames[i].length] === '/' &&
        !pathnamesEqual(pathnames[i], ignoredPathname)
      ) {
        return true
      }
    }
    // No conflicts - after excluding ignoredPathname, there were no entries
    // which were a strict prefix of pathname, and pathname was not a strict
    // prefix of any entry.
    return false
  }
  /** @see Snapshot#getFilePathnames */
  getPathnames() {
    return _.keys(this.files)
  }
  /**
   * Map the files in this map to new values.
   * @template T
   * @param {(file: File | null, path: string) => T} iteratee
   * @return {Record<String, T>}
   */
  map(iteratee) {
    return _.mapValues(this.files, iteratee)
  }
  /**
   * Map the files in this map to new values asynchronously, with an optional
   * limit on concurrency.
   * @template T
   * @param {(file: File | null | undefined, path: string, pathnames: string[]) => T} iteratee
   * @param {number} [concurrency] defaults to 1 (serial)
   * @return {Promise<Record<String, T>>}
   */
  async mapAsync(iteratee, concurrency) {
    assert.maybe.number(concurrency, 'bad concurrency')
    const pathnames = this.getPathnames()
    const files = await pMap(
      pathnames,
      file => {
        // NB: `file` here is actually a pathname; the File object is looked up
        return iteratee(this.getFile(file), file, pathnames)
      },
      { concurrency: concurrency || 1 }
    )
    return _.zipObject(pathnames, files)
  }
}
/**
 * Test whether two pathnames refer to the same entry. Kept as a named
 * function so a case-insensitive comparison could be reinstated in one place.
 *
 * @param {string} pathname0
 * @param {string?} pathname1
 * @returns {boolean}
 */
function pathnamesEqual(pathname0, pathname1) {
  if (pathname0 === pathname1) return true
  return false
}
/**
 * Whether all keys of the file map are distinct under `pathnamesEqual`.
 *
 * @param {FileMapData} files
 * @returns {boolean}
 */
function pathnamesAreUnique(files) {
  const pathnames = _.keys(files)
  const deduped = _.uniqWith(pathnames, pathnamesEqual)
  return deduped.length === pathnames.length
}
/**
 * Throw if the file map contains duplicate pathnames.
 *
 * @param {FileMapData} files
 * @throws {NonUniquePathnameError}
 */
function checkPathnamesAreUnique(files) {
  if (pathnamesAreUnique(files)) return
  throw new FileMap.NonUniquePathnameError(_.keys(files))
}
/**
 * Assert that `pathname` is a non-empty, clean pathname.
 *
 * @param {string} pathname
 * @throws {BadPathnameError} with the reason reported by safePathname
 */
function checkPathname(pathname) {
  assert.nonEmptyString(pathname, 'bad pathname')
  const [isClean, reason] = safePathname.isCleanDebug(pathname)
  if (isClean) return
  throw new FileMap.BadPathnameError(pathname, reason)
}
/**
 * Throw if adding `pathname` would conflict with an existing entry.
 *
 * @param {FileMap} fileMap
 * @param {string} pathname
 * @param {string?} ignoredPathname pretend this pathname does not exist
 * @throws {PathnameConflictError}
 */
function checkNewPathnameDoesNotConflict(fileMap, pathname, ignoredPathname) {
  if (!fileMap.wouldConflict(pathname, ignoredPathname)) return
  throw new FileMap.PathnameConflictError(pathname)
}
/**
 * Verify that no pathname in the map is a strict (directory) prefix of
 * another; e.g. having both 'a' and 'a/b' is invalid.
 *
 * @param {FileMap} fileMap
 * @throws {BadPathnameError} if any pathname is invalid
 * @throws {PathnameConflictError} if one pathname is a parent of another
 */
function checkPathnamesDoNotConflict(fileMap) {
  const pathnames = fileMap.getPathnames()
  // check pathnames for validity first
  pathnames.forEach(checkPathname)
  // convert pathnames to candidate directory names
  const dirnames = pathnames.map(pathname => pathname + '/')
  // sort in lexical order; a directory that contains another sorts
  // immediately before it, so only adjacent pairs need comparing
  dirnames.sort()
  for (let i = 0; i < dirnames.length - 1; i++) {
    if (dirnames[i + 1].startsWith(dirnames[i])) {
      // Strip the trailing slash to recover the original pathname.
      // NB: must be slice(0, -1); the previous substr(0, -1) treated -1 as
      // a length and always produced an empty string in the error.
      const conflictPathname = dirnames[i + 1].slice(0, -1)
      throw new FileMap.PathnameConflictError(conflictPathname)
    }
  }
}
/**
 * This function is somewhat vestigial: it was used when this map used
 * case-insensitive pathname comparison. We could probably simplify some of the
 * logic in the callers, but in the hope that we will one day return to
 * case-insensitive semantics, we've just left things as-is for now.
 *
 * TODO(das7pad): In a followup, inline this function and make types stricter.
 *
 * @param {FileMapData} files
 * @param {string} pathname
 * @returns {string | undefined} the matching key, or undefined when absent
 */
function findPathnameKey(files, pathname) {
  // we can check for the key without worrying about properties
  // in the prototype because we are now using a bare object.
  if (pathname in files) return pathname
}
/**
 * Insert `file` at `pathname`, replacing any existing entry there.
 *
 * @param {FileMapData} files
 * @param {string} pathname
 * @param {File?} file
 */
function addFile(files, pathname, file) {
  const existingKey = findPathnameKey(files, pathname)
  if (existingKey) {
    delete files[existingKey]
  }
  files[pathname] = file
}
module.exports = FileMap

View File

@@ -0,0 +1,135 @@
'use strict'
const assert = require('check-types').assert
const pMap = require('p-map')
const Change = require('./change')
const Snapshot = require('./snapshot')
/**
* @import { BlobStore } from "./types"
*/
/**
 * A History is a {@link Snapshot} together with an ordered list of
 * {@link Change}s that can be applied on top of it to produce later
 * snapshots.
 */
class History {
  /**
   * @param {Snapshot} snapshot base snapshot
   * @param {Array.<Change>} changes changes applied on top of the snapshot
   */
  constructor(snapshot, changes) {
    assert.instance(snapshot, Snapshot, 'bad snapshot')
    assert.maybe.array.of.instance(changes, Change, 'bad changes')
    this.snapshot = snapshot
    /** @type {Array<Change>} */
    this.changes = changes || []
  }

  /**
   * Deserialize a History from its raw form.
   */
  static fromRaw(raw) {
    const snapshot = Snapshot.fromRaw(raw.snapshot)
    const changes = raw.changes.map(Change.fromRaw)
    return new History(snapshot, changes)
  }

  /**
   * Serialize this History for transmission.
   */
  toRaw() {
    return {
      snapshot: this.snapshot.toRaw(),
      changes: this.changes.map(change => change.toRaw()),
    }
  }

  /** @return {Snapshot} the base snapshot */
  getSnapshot() {
    return this.snapshot
  }

  /** @return {Array.<Change>} the changes on top of the snapshot */
  getChanges() {
    return this.changes
  }

  /** @return {number} how many changes this history holds */
  countChanges() {
    return this.changes.length
  }

  /**
   * Add changes to this history.
   *
   * @param {Array.<Change>} changes
   */
  pushChanges(changes) {
    for (const change of changes) {
      this.changes.push(change)
    }
  }

  /**
   * If this History references blob hashes, either in the Snapshot or the
   * Changes, add them to the given set.
   *
   * @param {Set.<String>} blobHashes
   */
  findBlobHashes(blobHashes) {
    this.snapshot.findBlobHashes(blobHashes)
    for (const change of this.changes) {
      change.findBlobHashes(blobHashes)
    }
  }

  /**
   * If this History contains any File objects, load them.
   *
   * @param {string} kind see {File#load}
   * @param {BlobStore} blobStore
   * @return {Promise<void>}
   */
  async loadFiles(kind, blobStore) {
    // changes are loaded serially; the snapshot loads in parallel with them
    const loadChangeFiles = async () => {
      for (const change of this.changes) {
        await change.loadFiles(kind, blobStore)
      }
    }
    await Promise.all([
      this.snapshot.loadFiles(kind, blobStore),
      loadChangeFiles(),
    ])
  }

  /**
   * Return a version of this history that is suitable for long term storage.
   * This requires that we store the content of file objects in the provided
   * blobStore.
   *
   * @param {BlobStore} blobStore
   * @param {number} [concurrency] applies separately to files, changes and
   *                               operations
   * @return {Promise<import('overleaf-editor-core/lib/types').RawHistory>}
   */
  async store(blobStore, concurrency) {
    assert.maybe.number(concurrency, 'bad concurrency')
    /** @param {Change} change */
    const storeChange = async change => change.store(blobStore, concurrency)
    const [rawSnapshot, rawChanges] = await Promise.all([
      this.snapshot.store(blobStore, concurrency),
      pMap(this.changes, storeChange, { concurrency: concurrency || 1 }),
    ])
    return { snapshot: rawSnapshot, changes: rawChanges }
  }
}
module.exports = History

View File

@@ -0,0 +1,99 @@
// @ts-check
'use strict'
const assert = require('check-types').assert
/**
* @import { RawLabel } from './types'
*/
/**
 * A user-configurable label that can be attached to a specific change. Labels
 * are not versioned, and they are not stored alongside the Changes in Chunks.
 * They are instead intended to provide external markers into the history of
 * the project.
 */
class Label {
  /**
   * @param {string} text display text of the label
   * @param {number?} authorId id of the creating user, if known
   * @param {Date} timestamp creation time
   * @param {number} version project version the label points at
   */
  constructor(text, authorId, timestamp, version) {
    assert.string(text, 'bad text')
    assert.maybe.integer(authorId, 'bad author id')
    assert.date(timestamp, 'bad timestamp')
    assert.integer(version, 'bad version')
    this.text = text
    this.authorId = authorId
    this.timestamp = timestamp
    this.version = version
  }

  /**
   * Build a Label from its raw (serialized) form.
   *
   * @param {RawLabel} raw
   * @return {Label}
   */
  static fromRaw(raw) {
    const { text, authorId, version } = raw
    return new Label(text, authorId, new Date(raw.timestamp), version)
  }

  /**
   * Serialize this Label for storage or transmission.
   *
   * @return {RawLabel}
   */
  toRaw() {
    return {
      text: this.text,
      authorId: this.authorId,
      timestamp: this.timestamp.toISOString(),
      version: this.version,
    }
  }

  /** @return {string} the label text */
  getText() {
    return this.text
  }

  /**
   * The ID of the author, if any. Note that we now require all saved versions
   * to have an author, but this was not always the case, so we have to allow
   * nulls here for historical reasons.
   *
   * @return {number | null | undefined}
   */
  getAuthorId() {
    return this.authorId
  }

  /** @return {Date} creation time */
  getTimestamp() {
    return this.timestamp
  }

  /** @return {number} project version the label refers to */
  getVersion() {
    return this.version
  }
}
module.exports = Label

View File

@@ -0,0 +1,150 @@
// @ts-check
const core = require('../../index')
const Comment = require('../comment')
const Range = require('../range')
const EditOperation = require('./edit_operation')
/**
* @import DeleteCommentOperation from './delete_comment_operation'
* @import { CommentRawData, RawAddCommentOperation } from '../types'
* @import StringFileData from '../file_data/string_file_data'
*/
/**
* @extends EditOperation
*/
class AddCommentOperation extends EditOperation {
/**
* @param {string} commentId
* @param {ReadonlyArray<Range>} ranges
* @param {boolean} resolved
*/
constructor(commentId, ranges, resolved = false) {
super()
for (const range of ranges) {
if (range.isEmpty()) {
throw new Error("AddCommentOperation can't be built with empty ranges")
}
}
/** @readonly */
this.commentId = commentId
/** @readonly */
this.ranges = ranges
/** @readonly */
this.resolved = resolved
}
/**
*
* @returns {RawAddCommentOperation}
*/
toJSON() {
/** @type RawAddCommentOperation */
const raw = {
commentId: this.commentId,
ranges: this.ranges.map(range => range.toRaw()),
}
if (this.resolved) {
raw.resolved = true
}
return raw
}
/**
* @param {StringFileData} fileData
*/
apply(fileData) {
fileData.comments.add(
new Comment(this.commentId, this.ranges, this.resolved)
)
}
/**
* @inheritdoc
* @param {StringFileData} previousState
* @returns {EditOperation}
*/
invert(previousState) {
const comment = previousState.comments.getComment(this.commentId)
if (!comment) {
return new core.DeleteCommentOperation(this.commentId)
}
return new core.AddCommentOperation(
comment.id,
comment.ranges.slice(),
comment.resolved
)
}
/**
* @inheritdoc
* @param {EditOperation} other
* @returns {boolean}
*/
canBeComposedWith(other) {
return (
(other instanceof AddCommentOperation &&
this.commentId === other.commentId) ||
(other instanceof core.DeleteCommentOperation &&
this.commentId === other.commentId) ||
(other instanceof core.SetCommentStateOperation &&
this.commentId === other.commentId)
)
}
/**
* @inheritdoc
* @param {EditOperation} other
* @returns {EditOperation}
*/
compose(other) {
if (
other instanceof core.DeleteCommentOperation &&
other.commentId === this.commentId
) {
return other
}
if (
other instanceof AddCommentOperation &&
other.commentId === this.commentId
) {
return other
}
if (
other instanceof core.SetCommentStateOperation &&
other.commentId === this.commentId
) {
return new AddCommentOperation(
this.commentId,
this.ranges,
other.resolved
)
}
throw new Error(
`Trying to compose AddCommentOperation with ${other?.constructor?.name}.`
)
}
/**
* @inheritdoc
* @param {RawAddCommentOperation} raw
* @returns {AddCommentOperation}
*/
static fromJSON(raw) {
return new AddCommentOperation(
raw.commentId,
raw.ranges.map(Range.fromRaw),
raw.resolved ?? false
)
}
}
module.exports = AddCommentOperation

View File

@@ -0,0 +1,78 @@
'use strict'
const assert = require('check-types').assert
const File = require('../file')
const Operation = require('./')
/**
 * Adds a new file to a project.
 */
class AddFileOperation extends Operation {
  /**
   * @param {string} pathname where the file is to be created
   * @param {File} file the file to add
   */
  constructor(pathname, file) {
    super()
    assert.string(pathname, 'bad pathname')
    assert.object(file, 'bad file')
    this.pathname = pathname
    this.file = file
  }

  /**
   * @return {String} the destination pathname
   */
  getPathname() {
    return this.pathname
  }

  /**
   * Deserialize an AddFileOperation.
   *
   * @param {Object} raw
   * @return {AddFileOperation}
   */
  static fromRaw(raw) {
    const file = File.fromRaw(raw.file)
    return new AddFileOperation(raw.pathname, file)
  }

  /** @inheritdoc */
  toRaw() {
    return {
      pathname: this.pathname,
      file: this.file.toRaw(),
    }
  }

  /** @inheritdoc */
  getFile() {
    return this.file
  }

  /** @inheritdoc */
  findBlobHashes(blobHashes) {
    const hash = this.file.getHash()
    if (hash) {
      blobHashes.add(hash)
    }
  }

  /** @inheritdoc */
  async loadFiles(kind, blobStore) {
    return await this.file.load(kind, blobStore)
  }

  /**
   * Store the file content in the blob store and return the raw operation.
   */
  async store(blobStore) {
    const rawFile = await this.file.store(blobStore)
    return { pathname: this.pathname, file: rawFile }
  }

  /** @inheritdoc */
  applyTo(snapshot) {
    snapshot.addFile(this.pathname, this.file.clone())
  }
}

View File

@@ -0,0 +1,70 @@
// @ts-check
const core = require('../../index')
const EditNoOperation = require('./edit_no_operation')
const EditOperation = require('./edit_operation')
/**
* @import AddCommentOperation from './add_comment_operation'
* @import StringFileData from '../file_data/string_file_data'
* @import { RawDeleteCommentOperation } from '../types'
*/
/**
 * Removes a comment from a file.
 *
 * @extends EditOperation
 */
class DeleteCommentOperation extends EditOperation {
  /**
   * @param {string} commentId id of the comment to remove
   */
  constructor(commentId) {
    super()
    this.commentId = commentId
  }

  /**
   * @inheritdoc
   * @returns {RawDeleteCommentOperation}
   */
  toJSON() {
    return { deleteComment: this.commentId }
  }

  /**
   * @inheritdoc
   * @param {StringFileData} fileData
   */
  apply(fileData) {
    fileData.comments.delete(this.commentId)
  }

  /**
   * @inheritdoc
   * @param {StringFileData} previousState
   * @returns {AddCommentOperation | EditNoOperation}
   */
  invert(previousState) {
    const comment = previousState.comments.getComment(this.commentId)
    if (!comment) {
      // deleting a non-existent comment had no effect, so neither does undo
      return new EditNoOperation()
    }
    return new core.AddCommentOperation(
      comment.id,
      comment.ranges.slice(),
      comment.resolved
    )
  }

  /**
   * @inheritdoc
   * @param {RawDeleteCommentOperation} raw
   * @returns {DeleteCommentOperation}
   */
  static fromJSON(raw) {
    return new DeleteCommentOperation(raw.deleteComment)
  }
}

View File

@@ -0,0 +1,105 @@
// @ts-check
'use strict'
/**
* @import EditOperation from './edit_operation'
* @import { RawEditFileOperation } from '../types'
* @import Snapshot from "../snapshot"
*/
const Operation = require('./')
const EditOperationBuilder = require('./edit_operation_builder')
/**
 * Edit a file in place. It is a wrapper around a single EditOperation.
 */
class EditFileOperation extends Operation {
  /**
   * @param {string} pathname the file to edit
   * @param {EditOperation} operation the edit to apply
   */
  constructor(pathname, operation) {
    super()
    this.pathname = pathname
    this.operation = operation
  }

  /** @inheritdoc */
  toRaw() {
    const rawOperation = this.operation.toJSON()
    return { pathname: this.pathname, ...rawOperation }
  }

  /**
   * Deserialize an EditFileOperation.
   *
   * @param {RawEditFileOperation} raw
   * @return {EditFileOperation}
   */
  static fromRaw(raw) {
    const operation = EditOperationBuilder.fromJSON(raw)
    return new EditFileOperation(raw.pathname, operation)
  }

  /** @return {string} the edited file's pathname */
  getPathname() {
    return this.pathname
  }

  /** @return {EditOperation} the wrapped edit */
  getOperation() {
    return this.operation
  }

  /**
   * @inheritdoc
   * @param {Snapshot} snapshot
   */
  applyTo(snapshot) {
    // TODO(das7pad): can we teach typescript our polymorphism?
    // @ts-ignore
    snapshot.editFile(this.pathname, this.operation)
  }

  /**
   * @inheritdoc
   * @param {Operation} other
   * @return {boolean}
   */
  canBeComposedWithForUndo(other) {
    if (!this.canBeComposedWith(other)) return false
    return this.operation.canBeComposedWithForUndo(other.operation)
  }

  /**
   * @inheritdoc
   * @param {Operation} other
   * @return {other is EditFileOperation}
   */
  canBeComposedWith(other) {
    // other must be an edit operation targeting the same file
    if (!(other instanceof EditFileOperation)) return false
    if (this.getPathname() !== other.getPathname()) return false
    return this.operation.canBeComposedWith(other.operation)
  }

  /**
   * @inheritdoc
   * @param {EditFileOperation} other
   */
  compose(other) {
    const composed = this.operation.compose(other.operation)
    return new EditFileOperation(this.pathname, composed)
  }
}

View File

@@ -0,0 +1,29 @@
const EditOperation = require('./edit_operation')
/**
* @import { RawEditNoOperation } from '../types'
*/
/**
 * An edit operation that does nothing when applied.
 */
class EditNoOperation extends EditOperation {
  /**
   * @inheritdoc
   * @param {StringFileData} fileData
   */
  apply(fileData) {
    // intentionally a no-op
  }

  /**
   * @inheritdoc
   * @returns {RawEditNoOperation}
   */
  toJSON() {
    return { noOp: true }
  }

  /** @returns {EditNoOperation} */
  static fromJSON() {
    return new EditNoOperation()
  }
}

View File

@@ -0,0 +1,91 @@
// @ts-check
/**
* @import FileData from '../file_data'
* @import { RawEditOperation } from '../types'
*/
class EditOperation {
constructor() {
if (this.constructor === EditOperation) {
throw new Error('Cannot instantiate abstract class')
}
}
/**
* Converts operation into a JSON value.
* @returns {RawEditOperation}
*/
toJSON() {
throw new Error('Abstract method not implemented')
}
/**
* @abstract
* @param {FileData} fileData
*/
apply(fileData) {
throw new Error('Abstract method not implemented')
}
/**
* Determine the effect of this operation on the length of the text.
*
* NB: This is an Overleaf addition to the original OT system.
*
* @param {number} length of the original string; non-negative
* @return {number} length of the new string; non-negative
*/
applyToLength(length) {
return length
}
/**
* Computes the inverse of an operation. The inverse of an operation is the
* operation that reverts the effects of the operation, e.g. when you have an
* operation 'insert("hello "); skip(6);' then the inverse is 'remove("hello ");
* skip(6);'. The inverse should be used for implementing undo.
* @param {FileData} previousState
* @returns {EditOperation}
*/
invert(previousState) {
throw new Error('Abstract method not implemented')
}
/**
*
* @param {EditOperation} other
* @returns {boolean}
*/
canBeComposedWith(other) {
return false
}
/**
* When you use ctrl-z to undo your latest changes, you expect the program not
* to undo every single keystroke but to undo your last sentence you wrote at
* a stretch or the deletion you did by holding the backspace key down. This
* This can be implemented by composing operations on the undo stack. This
* method can help decide whether two operations should be composed. It
* returns true if the operations are consecutive insert operations or both
* operations delete text at the same position. You may want to include other
* factors like the time since the last change in your decision.
* @param {EditOperation} other
*/
canBeComposedWithForUndo(other) {
return false
}
/**
* Compose merges two consecutive operations into one operation, that
* preserves the changes of both. Or, in other words, for each input string S
* and a pair of consecutive operations A and B,
* apply(apply(S, A), B) = apply(S, compose(A, B)) must hold.
* @param {EditOperation} other
* @returns {EditOperation}
*/
compose(other) {
throw new Error('Abstract method not implemented')
}
}
module.exports = EditOperation

View File

@@ -0,0 +1,93 @@
// @ts-check
/**
* @import EditOperation from './edit_operation'
* @import { RawTextOperation, RawAddCommentOperation, RawEditOperation } from '../types'
* @import { RawDeleteCommentOperation, RawSetCommentStateOperation } from '../types'
*/
const DeleteCommentOperation = require('./delete_comment_operation')
const AddCommentOperation = require('./add_comment_operation')
const TextOperation = require('./text_operation')
const SetCommentStateOperation = require('./set_comment_state_operation')
const EditNoOperation = require('./edit_no_operation')
/**
 * Factory that reconstructs a concrete EditOperation from its raw JSON form.
 */
class EditOperationBuilder {
  /**
   * @param {RawEditOperation} raw
   * @returns {EditOperation}
   * @throws {Error} when `raw` matches no known operation shape
   */
  static fromJSON(raw) {
    if (isTextOperation(raw)) return TextOperation.fromJSON(raw)
    if (isRawAddCommentOperation(raw)) return AddCommentOperation.fromJSON(raw)
    if (isRawDeleteCommentOperation(raw)) {
      return DeleteCommentOperation.fromJSON(raw)
    }
    if (isRawSetCommentStateOperation(raw)) {
      return SetCommentStateOperation.fromJSON(raw)
    }
    if (isRawEditNoOperation(raw)) return EditNoOperation.fromJSON()
    throw new Error('Unsupported operation in EditOperationBuilder.fromJSON')
  }
}
/**
 * Type guard for a raw TextOperation.
 *
 * @param {unknown} raw
 * @returns {raw is RawTextOperation}
 */
function isTextOperation(raw) {
  if (raw === null || typeof raw !== 'object') return false
  return 'textOperation' in raw
}
/**
 * Type guard for a raw AddCommentOperation.
 *
 * @param {unknown} raw
 * @returns {raw is RawAddCommentOperation}
 */
function isRawAddCommentOperation(raw) {
  if (raw === null || typeof raw !== 'object') return false
  if (!('commentId' in raw) || !('ranges' in raw)) return false
  return Array.isArray(raw.ranges)
}
/**
 * Type guard for a raw DeleteCommentOperation.
 *
 * @param {unknown} raw
 * @returns {raw is RawDeleteCommentOperation}
 */
function isRawDeleteCommentOperation(raw) {
  if (raw === null) return false
  return typeof raw === 'object' && 'deleteComment' in raw
}
/**
 * Type guard for a raw SetCommentStateOperation.
 *
 * @param {unknown} raw
 * @returns {raw is RawSetCommentStateOperation}
 */
function isRawSetCommentStateOperation(raw) {
  if (raw === null || typeof raw !== 'object') return false
  if (!('commentId' in raw) || !('resolved' in raw)) return false
  return typeof raw.resolved === 'boolean'
}
/**
 * Type guard for a raw EditNoOperation.
 *
 * @param {unknown} raw
 * @returns {raw is RawEditNoOperation}
 */
function isRawEditNoOperation(raw) {
  if (raw === null) return false
  return typeof raw === 'object' && 'noOp' in raw
}
module.exports = EditOperationBuilder

View File

@@ -0,0 +1,162 @@
// @ts-check
const core = require('../..')
const Comment = require('../comment')
const EditNoOperation = require('./edit_no_operation')
const TextOperation = require('./text_operation')
/**
* @import EditOperation from './edit_operation'
*/
class EditOperationTransformer {
  /**
   * Transform two concurrent edit operations a and b into [a', b'] such that
   * applying a then b' yields the same state as applying b then a'.
   *
   * @param {EditOperation} a
   * @param {EditOperation} b
   * @returns {[EditOperation, EditOperation]}
   * @throws {Error} when no transformer exists for the pair of types
   */
  static transform(a, b) {
    // concrete operation classes are looked up on core at call time
    const {
      AddCommentOperation,
      DeleteCommentOperation,
      SetCommentStateOperation,
    } = core
    // a no-op never interacts with anything
    if (a instanceof EditNoOperation || b instanceof EditNoOperation) {
      return [a, b]
    }
    // Pairwise transformers; createTransformer also matches the pair when
    // the two operations arrive in the opposite order.
    const transformers = [
      createTransformer(TextOperation, TextOperation, TextOperation.transform),
      createTransformer(TextOperation, DeleteCommentOperation, noConflict),
      createTransformer(TextOperation, SetCommentStateOperation, noConflict),
      createTransformer(TextOperation, AddCommentOperation, (a, b) => {
        // apply the text operation to the comment
        const originalComment = new Comment(b.commentId, b.ranges, b.resolved)
        const movedComment = originalComment.applyTextOperation(a, b.commentId)
        return [
          a,
          new AddCommentOperation(
            movedComment.id,
            movedComment.ranges,
            movedComment.resolved
          ),
        ]
      }),
      createTransformer(AddCommentOperation, AddCommentOperation, (a, b) => {
        if (a.commentId === b.commentId) {
          return [new EditNoOperation(), b]
        }
        return [a, b]
      }),
      createTransformer(AddCommentOperation, DeleteCommentOperation, (a, b) => {
        if (a.commentId === b.commentId) {
          // delete wins
          return [new EditNoOperation(), b]
        }
        return [a, b]
      }),
      createTransformer(
        AddCommentOperation,
        SetCommentStateOperation,
        (a, b) => {
          if (a.commentId === b.commentId) {
            const newA = new AddCommentOperation(
              a.commentId,
              a.ranges,
              b.resolved
            )
            return [newA, b]
          }
          return [a, b]
        }
      ),
      createTransformer(
        DeleteCommentOperation,
        DeleteCommentOperation,
        (a, b) => {
          if (a.commentId === b.commentId) {
            // if both operations delete the same comment, we can ignore both
            return [new EditNoOperation(), new EditNoOperation()]
          }
          return [a, b]
        }
      ),
      createTransformer(
        DeleteCommentOperation,
        SetCommentStateOperation,
        (a, b) => {
          if (a.commentId === b.commentId) {
            // delete wins
            return [a, new EditNoOperation()]
          }
          return [a, b]
        }
      ),
      createTransformer(
        SetCommentStateOperation,
        SetCommentStateOperation,
        (a, b) => {
          if (a.commentId !== b.commentId) {
            return [a, b]
          }
          if (a.resolved === b.resolved) {
            return [new EditNoOperation(), new EditNoOperation()]
          }
          // the flags differ here, so shouldResolve is always false: the
          // operation that unresolves the comment wins
          const shouldResolve = a.resolved && b.resolved
          if (a.resolved === shouldResolve) {
            return [a, new EditNoOperation()]
          } else {
            return [new EditNoOperation(), b]
          }
        }
      ),
    ]
    for (const transformer of transformers) {
      const result = transformer(a, b)
      if (result) {
        return result
      }
    }
    throw new Error(
      `Transform not implemented for ${a.constructor.name}${b.constructor.name}`
    )
  }
}
/**
 * Build a transform function for one specific pair of operation types. The
 * returned function matches the pair in either argument order, swapping the
 * results back when the arguments arrived reversed, and returns false when
 * the pair does not match at all.
 *
 * @template {EditOperation} X
 * @template {EditOperation} Y
 * @param {new(...args: any[]) => X} ClassA
 * @param {new(...args: any[]) => Y} ClassB
 * @param {(a: X, b: Y) => [EditOperation, EditOperation]} transformer
 * @returns {(a: EditOperation, b: EditOperation) => [EditOperation, EditOperation] | false}
 */
function createTransformer(ClassA, ClassB, transformer) {
  return (a, b) => {
    if (a instanceof ClassA && b instanceof ClassB) {
      return transformer(a, b)
    }
    if (a instanceof ClassB && b instanceof ClassA) {
      const [bPrime, aPrime] = transformer(b, a)
      return [aPrime, bPrime]
    }
    return false
  }
}
/**
 * Transformer for operation pairs that never affect one another.
 *
 * @param {EditOperation} a
 * @param {EditOperation} b
 * @returns {[EditOperation, EditOperation]}
 */
function noConflict(a, b) {
  // the operations are independent, so both pass through unchanged
  return [a, b]
}
module.exports = EditOperationTransformer

View File

@@ -0,0 +1,462 @@
'use strict'
const _ = require('lodash')
const assert = require('check-types').assert
const EditOperationTransformer = require('./edit_operation_transformer')
// Dependencies are loaded at the bottom of the file to mitigate circular
// dependency
let NoOperation = null
let AddFileOperation = null
let MoveFileOperation = null
let EditFileOperation = null
let SetFileMetadataOperation = null
/**
* @import { BlobStore } from "../types"
* @import Snapshot from "../snapshot"
*/
/**
* An `Operation` changes a `Snapshot` when it is applied. See the
* {@tutorial OT} tutorial for background.
*/
class Operation {
/**
* Deserialize an Operation.
*
* @param {Object} raw
* @return {Operation} one of the subclasses
*/
static fromRaw(raw) {
if ('file' in raw) {
return AddFileOperation.fromRaw(raw)
}
if (
'textOperation' in raw ||
'commentId' in raw ||
'deleteComment' in raw
) {
return EditFileOperation.fromRaw(raw)
}
if ('newPathname' in raw) {
return new MoveFileOperation(raw.pathname, raw.newPathname)
}
if ('metadata' in raw) {
return new SetFileMetadataOperation(raw.pathname, raw.metadata)
}
if (_.isEmpty(raw)) {
return new NoOperation()
}
throw new Error('invalid raw operation ' + JSON.stringify(raw))
}
/**
* Serialize an Operation.
*
* @return {Object}
*/
toRaw() {
return {}
}
/**
* Whether this operation does nothing when applied.
*
* @return {Boolean}
*/
isNoOp() {
return false
}
/**
* If this Operation references blob hashes, add them to the given Set.
*
* @param {Set.<String>} blobHashes
*/
findBlobHashes(blobHashes) {}
/**
* If this operation references any files, load the files.
*
* @param {string} kind see {File#load}
* @param {BlobStore} blobStore
* @return {Promise<void>}
*/
async loadFiles(kind, blobStore) {}
/**
* Return a version of this operation that is suitable for long term storage.
* In most cases, we just need to convert the operation to raw form, but if
* the operation involves File objects, we may need to store their content.
*
* @param {BlobStore} blobStore
* @return {Promise.<Object>}
*/
async store(blobStore) {
return this.toRaw()
}
/**
* Apply this Operation to a snapshot.
*
* The snapshot is modified in place.
*
* @param {Snapshot} snapshot
*/
applyTo(snapshot) {
assert.object(snapshot, 'bad snapshot')
}
/**
* Whether this operation can be composed with another operation to produce a
* single operation of the same type as this one, while keeping the composed
* operation small and logical enough to be used in the undo stack.
*
* @param {Operation} other
* @return {Boolean}
*/
canBeComposedWithForUndo(other) {
return false
}
/**
* Whether this operation can be composed with another operation to produce a
* single operation of the same type as this one.
*
* TODO Moves can be composed. For example, if you rename a to b and then decide
* shortly after that actually you want to call it c, we could compose the two
* to get a -> c). Edits can also be composed --- see rules in TextOperation.
* We also need to consider the Change --- we will need to consider both time
* and author(s) when composing changes. I guess that AddFile can also be
* composed in some cases --- if you upload a file and then decide it was the
* wrong one and upload a new one, we could drop the one in the middle, but
* that seems like a pretty rare case.
*
* @param {Operation} other
* @return {Boolean}
*/
canBeComposedWith(other) {
return false
}
/**
* Compose this operation with another operation to produce a single operation
* of the same type as this one.
*
* @param {Operation} other
* @return {Operation}
*/
compose(other) {
throw new Error('not implemented')
}
/**
 * Transform takes two operations A and B that happened concurrently and
 * produces two operations A' and B' (in an array) such that
 * `apply(apply(S, A), B') = apply(apply(S, B), A')`.
 *
 * That is, if one client applies A and then B', they get the same result as
 * another client who applies B and then A'.
 *
 * @param {Operation} a
 * @param {Operation} b
 * @return {Operation[]} operations `[a', b']`
 */
static transform(a, b) {
  // No-ops transform trivially.
  if (a.isNoOp() || b.isNoOp()) return [a, b]

  // Each pairwise helper below is written for a fixed argument order;
  // transpose flips the arguments and swaps the returned pair so every
  // (typeA, typeB) combination only needs one helper.
  function transpose(transformer) {
    return transformer(b, a).reverse()
  }

  const bIsAddFile = b instanceof AddFileOperation
  const bIsEditFile = b instanceof EditFileOperation
  const bIsMoveFile = b instanceof MoveFileOperation
  const bIsSetFileMetadata = b instanceof SetFileMetadataOperation

  // Dispatch on the concrete types of (a, b).
  if (a instanceof AddFileOperation) {
    if (bIsAddFile) return transformAddFileAddFile(a, b)
    if (bIsMoveFile) return transformAddFileMoveFile(a, b)
    if (bIsEditFile) return transformAddFileEditFile(a, b)
    if (bIsSetFileMetadata) return transformAddFileSetFileMetadata(a, b)
    throw new Error('bad op b')
  }
  if (a instanceof MoveFileOperation) {
    if (bIsAddFile) return transpose(transformAddFileMoveFile)
    if (bIsMoveFile) return transformMoveFileMoveFile(a, b)
    if (bIsEditFile) return transformMoveFileEditFile(a, b)
    if (bIsSetFileMetadata) return transformMoveFileSetFileMetadata(a, b)
    throw new Error('bad op b')
  }
  if (a instanceof EditFileOperation) {
    if (bIsAddFile) return transpose(transformAddFileEditFile)
    if (bIsMoveFile) return transpose(transformMoveFileEditFile)
    if (bIsEditFile) return transformEditFileEditFile(a, b)
    if (bIsSetFileMetadata) return transformEditFileSetFileMetadata(a, b)
    throw new Error('bad op b')
  }
  if (a instanceof SetFileMetadataOperation) {
    if (bIsAddFile) return transpose(transformAddFileSetFileMetadata)
    if (bIsMoveFile) return transpose(transformMoveFileSetFileMetadata)
    if (bIsEditFile) return transpose(transformEditFileSetFileMetadata)
    if (bIsSetFileMetadata) return transformSetFileMetadatas(a, b)
    throw new Error('bad op b')
  }
  throw new Error('bad op a')
}
/**
* Transform each operation in `a` by each operation in `b` and save the primed
* operations in place.
*
* @param {Array.<Operation>} as - modified in place
* @param {Array.<Operation>} bs - modified in place
*/
static transformMultiple(as, bs) {
for (let i = 0; i < as.length; ++i) {
for (let j = 0; j < bs.length; ++j) {
const primes = Operation.transform(as[i], bs[j])
as[i] = primes[0]
bs[j] = primes[1]
}
}
}
/** Create an operation that adds `file` at `pathname`. */
static addFile(pathname, file) {
  return new AddFileOperation(pathname, file)
}

/** Create an operation that applies `editOperation` to the file at `pathname`. */
static editFile(pathname, editOperation) {
  return new EditFileOperation(pathname, editOperation)
}

/** Create an operation that moves/renames a file. */
static moveFile(pathname, newPathname) {
  return new MoveFileOperation(pathname, newPathname)
}

/** Removal is modelled as a move to the empty pathname. */
static removeFile(pathname) {
  return new MoveFileOperation(pathname, '')
}

/** Create an operation that replaces the metadata of the file at `pathname`. */
static setFileMetadata(pathname, metadata) {
  return new SetFileMetadataOperation(pathname, metadata)
}
}
//
// Transform
//
// The way to read these transform functions is that
// 1. return_value[0] is the op to be applied after arguments[1], and
// 2. return_value[1] is the op to be applied after arguments[0],
// in order to arrive at the same project state.
//
// Two concurrent adds of the same pathname conflict: by convention the
// second add wins and the first becomes a no-op.
function transformAddFileAddFile(add1, add2) {
  const samePath = add1.getPathname() === add2.getPathname()
  return samePath ? [Operation.NO_OP, add2] : [add1, add2]
}
function transformAddFileMoveFile(add, move) {
  // Re-target the add to the move's destination, preserving the added
  // file's content.
  function relocateAddFile() {
    return new AddFileOperation(move.getNewPathname(), add.getFile().clone())
  }
  if (add.getPathname() === move.getPathname()) {
    if (move.isRemoveFile()) {
      // Concurrent remove of the added path: the add wins, the remove
      // becomes a no-op.
      return [add, Operation.NO_OP]
    }
    // Concurrent rename of the added path: the add follows the rename.
    return [
      relocateAddFile(),
      new MoveFileOperation(add.getPathname(), move.getNewPathname()),
    ]
  }
  if (add.getPathname() === move.getNewPathname()) {
    // The move lands on the added path: the add wins, so the move's source
    // file is removed instead.
    return [relocateAddFile(), new MoveFileOperation(move.getPathname(), '')]
  }
  return [add, move]
}
// A concurrent add replaces the file wholesale, so any concurrent edit of
// the same path is discarded.
function transformAddFileEditFile(add, edit) {
  if (add.getPathname() !== edit.getPathname()) {
    return [add, edit]
  }
  return [add, Operation.NO_OP] // the add wins
}
// A concurrent metadata set on the added path is folded into the add, so
// the added file carries the metadata directly.
function transformAddFileSetFileMetadata(add, set) {
  if (add.getPathname() !== set.getPathname()) {
    return [add, set]
  }
  const fileWithMetadata = add.getFile().clone()
  fileWithMetadata.setMetadata(set.getMetadata())
  return [new AddFileOperation(add.getPathname(), fileWithMetadata), set]
}
//
// This is one of the trickier ones. There are 15 possible equivalence
// relationships between our four variables:
//
// path1, newPath1, path2, newPath2 --- "same move" (all equal)
//
// path1, newPath1, path2 | newPath2 --- "no-ops" (1)
// path1, newPath1, newPath2 | path2 --- "no-ops" (1)
// path1, path2, newPath2 | newPath1 --- "no-ops" (2)
// newPath1, path2, newPath2 | path1 --- "no-ops" (2)
//
// path1, newPath1 | path2, newPath2 --- "no-ops" (1 and 2)
// path1, path2 | newPath1, newPath2 --- "same move"
// path1, newPath2 | newPath1, path2 --- "opposite moves"
//
// path1, newPath1 | path2 | newPath2 --- "no-ops" (1)
// path1, path2 | newPath1 | newPath2 --- "divergent moves"
// path1, newPath2 | newPath1 | path2 --- "transitive move"
// newPath1, path2 | path1 | newPath2 --- "transitive move"
// newPath1, newPath2 | path1 | path2 --- "convergent move"
// path2, newPath2 | path1 | newPath1 --- "no-ops" (2)
//
// path1 | newPath1 | path2 | newPath2 --- "no conflict"
//
function transformMoveFileMoveFile(move1, move2) {
  const path1 = move1.getPathname()
  const path2 = move2.getPathname()
  const newPath1 = move1.getNewPathname()
  const newPath2 = move2.getNewPathname()

  // See the equivalence-relation table above for the full case analysis.
  // The order of these checks matters: earlier cases subsume later ones.

  // the same move
  if (path1 === path2 && newPath1 === newPath2) {
    return [Operation.NO_OP, Operation.NO_OP]
  }

  // no-ops (a move whose source equals its destination does nothing)
  if (path1 === newPath1 && path2 === newPath2) {
    return [Operation.NO_OP, Operation.NO_OP]
  }
  if (path1 === newPath1) {
    return [Operation.NO_OP, move2]
  }
  if (path2 === newPath2) {
    return [move1, Operation.NO_OP]
  }

  // opposite moves (foo -> bar, bar -> foo)
  if (path1 === newPath2 && path2 === newPath1) {
    // We can't handle this very well: if we wanted move2 (say) to win, move2'
    // would have to be addFile(foo) with the content of bar, but we don't have
    // the content of bar available here. So, we just destroy both files.
    return [Operation.removeFile(path1), Operation.removeFile(path2)]
  }

  // divergent moves (foo -> bar, foo -> baz); convention: move2 wins
  if (path1 === path2 && newPath1 !== newPath2) {
    return [Operation.NO_OP, Operation.moveFile(newPath1, newPath2)]
  }

  // convergent move (foo -> baz, bar -> baz); convention: move2 wins,
  // so move1's already-moved file is destroyed.
  if (newPath1 === newPath2 && path1 !== path2) {
    return [Operation.removeFile(path1), move2]
  }

  // transitive move:
  // 1: foo -> baz, 2: bar -> foo (result: bar -> baz) or
  // 1: foo -> bar, 2: bar -> baz (result: foo -> baz)
  if (path1 === newPath2 && newPath1 !== path2) {
    return [
      Operation.moveFile(newPath2, newPath1),
      Operation.moveFile(path2, newPath1),
    ]
  }
  if (newPath1 === path2 && path1 !== newPath2) {
    return [
      Operation.moveFile(path1, newPath2),
      Operation.moveFile(newPath1, newPath2),
    ]
  }

  // no conflict
  return [move1, move2]
}
function transformMoveFileEditFile(move, edit) {
  if (move.getPathname() === edit.getPathname()) {
    if (move.isRemoveFile()) {
      // let the remove win
      return [move, Operation.NO_OP]
    }
    // The edited file was renamed: re-target the edit to its new pathname.
    return [
      move,
      Operation.editFile(move.getNewPathname(), edit.getOperation()),
    ]
  }
  if (move.getNewPathname() === edit.getPathname()) {
    // The move overwrites the file being edited; let the move win.
    return [move, Operation.NO_OP]
  }
  return [move, edit]
}
function transformMoveFileSetFileMetadata(move, set) {
  if (move.getPathname() === set.getPathname()) {
    // The target file was moved: re-target the metadata to its new path.
    // NOTE(review): when `move` is a removal, the primed set targets '' —
    // presumably harmless since no file exists there; confirm.
    return [
      move,
      Operation.setFileMetadata(move.getNewPathname(), set.getMetadata()),
    ]
  }
  // A: mv foo -> bar
  // B: set bar.x
  //
  // A': mv foo -> bar
  // B': nothing
  if (move.getNewPathname() === set.getPathname()) {
    return [move, Operation.NO_OP] // let the move win
  }
  return [move, set]
}
// Concurrent edits of different files never conflict; edits of the same
// file are delegated to the edit-operation transformer.
function transformEditFileEditFile(edit1, edit2) {
  if (edit1.getPathname() !== edit2.getPathname()) {
    return [edit1, edit2]
  }
  const [prime1, prime2] = EditOperationTransformer.transform(
    edit1.getOperation(),
    edit2.getOperation()
  )
  return [
    Operation.editFile(edit1.getPathname(), prime1),
    Operation.editFile(edit2.getPathname(), prime2),
  ]
}
// Editing content and setting metadata never conflict, even on the same file.
function transformEditFileSetFileMetadata(edit, set) {
  return [edit, set]
}
// Concurrent metadata sets on the same path conflict: by convention the
// second set wins and the first becomes a no-op.
function transformSetFileMetadatas(set1, set2) {
  const samePath = set1.getPathname() === set2.getPathname()
  return samePath ? [Operation.NO_OP, set2] : [set1, set2]
}
module.exports = Operation

// Work around circular imports: these modules require this file, so they are
// only loaded after Operation has been exported above.
NoOperation = require('./no_operation')
AddFileOperation = require('./add_file_operation')
MoveFileOperation = require('./move_file_operation')
EditFileOperation = require('./edit_file_operation')
SetFileMetadataOperation = require('./set_file_metadata_operation')

// Shared no-op singleton used by the transform functions above.
Operation.NO_OP = new NoOperation()

View File

@@ -0,0 +1,54 @@
'use strict'
const Operation = require('./')
/**
* Moves or removes a file from a project.
*/
class MoveFileOperation extends Operation {
  /**
   * @param {string} pathname - current path of the file
   * @param {string} newPathname - destination path, or '' to remove the file
   */
  constructor(pathname, newPathname) {
    super()
    this.pathname = pathname
    this.newPathname = newPathname
  }

  getPathname() {
    return this.pathname
  }

  getNewPathname() {
    return this.newPathname
  }

  /**
   * Whether this operation is a MoveFile operation that deletes the file.
   * Removal is encoded as a move to the empty pathname.
   *
   * @return {boolean}
   */
  isRemoveFile() {
    return this.getNewPathname() === ''
  }

  /**
   * @inheritdoc
   */
  toRaw() {
    return { pathname: this.pathname, newPathname: this.newPathname }
  }

  /**
   * @inheritdoc
   */
  applyTo(snapshot) {
    snapshot.moveFile(this.getPathname(), this.getNewPathname())
  }
}
module.exports = MoveFileOperation

View File

@@ -0,0 +1,20 @@
'use strict'
const Operation = require('./')
/**
* An explicit no-operation.
*
* There are several no-ops, such as moving a file to itself, but it's useful
* to have a generic no-op as well.
*/
class NoOperation extends Operation {
  /**
   * @inheritdoc
   */
  isNoOp() {
    // Always true: this class exists purely to represent "no change".
    return true
  }
}
module.exports = NoOperation

View File

@@ -0,0 +1,457 @@
// @ts-check
const { containsNonBmpChars } = require('../util')
const {
ApplyError,
InvalidInsertionError,
UnprocessableError,
} = require('../errors')
const ClearTrackingProps = require('../file_data/clear_tracking_props')
const TrackingProps = require('../file_data/tracking_props')
/**
* @import { RawScanOp, RawInsertOp, RawRetainOp, RawRemoveOp, TrackingDirective } from '../types'
*
* @typedef {{ length: number, inputCursor: number, readonly inputLength: number}} LengthApplyContext
*/
/**
 * Abstract base class for the three scan operations (retain / insert /
 * remove) that make up a TextOperation.
 */
class ScanOp {
  constructor() {
    // Guard against direct instantiation: only subclasses may be created.
    if (this.constructor === ScanOp) {
      throw new Error('Cannot instantiate abstract class')
    }
  }

  /**
   * Applies an operation to a length
   * @param {LengthApplyContext} current
   * @returns {LengthApplyContext}
   */
  applyToLength(current) {
    throw new Error('abstract method')
  }

  /**
   * @returns {RawScanOp}
   */
  toJSON() {
    throw new Error('abstract method')
  }

  /**
   * Build the concrete ScanOp subclass instance for a raw op.
   *
   * @param {RawScanOp} raw
   * @returns {ScanOp}
   * @throws {UnprocessableError} if the raw op matches no known shape
   */
  static fromJSON(raw) {
    if (isRetain(raw)) {
      return RetainOp.fromJSON(raw)
    } else if (isInsert(raw)) {
      return InsertOp.fromJSON(raw)
    } else if (isRemove(raw)) {
      return RemoveOp.fromJSON(raw)
    }
    throw new UnprocessableError(`Invalid ScanOp ${JSON.stringify(raw)}`)
  }

  /**
   * Tests whether two ScanOps are equal. Subclasses refine this; the base
   * implementation is conservatively false.
   * @param {ScanOp} _other
   * @returns {boolean}
   */
  equals(_other) {
    return false
  }

  /**
   * Tests whether two ScanOps can be merged into a single operation
   * @param {ScanOp} other
   * @returns
   */
  canMergeWith(other) {
    return false
  }

  /**
   * Merge two ScanOps into a single operation
   * @param {ScanOp} _other
   * @returns {void}
   */
  mergeWith(_other) {
    throw new Error('abstract method')
  }

  toString() {
    // BUG FIX: this was previously a bare expression statement ('ScanOp')
    // with no `return`, so toString() returned undefined instead of a string.
    return 'ScanOp'
  }
}
class InsertOp extends ScanOp {
  /**
   * @param {string} insertion
   * @param {TrackingProps | undefined} tracking
   * @param {string[] | undefined} commentIds
   */
  constructor(insertion, tracking = undefined, commentIds = undefined) {
    super()
    if (typeof insertion !== 'string') {
      throw new InvalidInsertionError('insertion must be a string')
    }
    if (containsNonBmpChars(insertion)) {
      throw new InvalidInsertionError('insertion contains non-BMP characters')
    }
    /** @type {string} */
    this.insertion = insertion
    /** @type {TrackingProps | undefined} */
    this.tracking = tracking
    /** @type {string[] | undefined} */
    this.commentIds = commentIds
  }

  /**
   * Parse a raw insert op: either a bare string or an object of the shape
   * `{ i, tracking?, commentIds? }`.
   * @param {RawInsertOp} op
   * @returns {InsertOp}
   */
  static fromJSON(op) {
    if (typeof op === 'string') {
      return new InsertOp(op)
    }
    // It must be an object with an 'i' property.
    if (typeof op.i !== 'string') {
      throw new InvalidInsertionError(
        'insert operation must have a string property'
      )
    }
    const tracking = op.tracking && TrackingProps.fromRaw(op.tracking)
    return new InsertOp(op.i, tracking, op.commentIds)
  }

  /**
   * @inheritdoc
   * @param {LengthApplyContext} current
   * @returns {LengthApplyContext}
   */
  applyToLength(current) {
    // Inserted text lengthens the output without consuming input.
    current.length += this.insertion.length
    return current
  }

  /**
   * Two inserts are equal when they insert the same text with the same
   * tracking info and the same set of comment ids.
   * @inheritdoc
   * @param {ScanOp} other
   */
  equals(other) {
    return (
      other instanceof InsertOp &&
      this.insertion === other.insertion &&
      this.canMergeWith(other)
    )
  }

  /**
   * Inserts merge when their tracking info and comment ids match exactly.
   * @param {ScanOp} other
   * @return {other is InsertOp}
   */
  canMergeWith(other) {
    if (!(other instanceof InsertOp)) {
      return false
    }
    if (this.tracking) {
      if (!this.tracking.equals(other.tracking)) {
        return false
      }
    } else if (other.tracking) {
      return false
    }
    if (!this.commentIds) {
      return !other.commentIds
    }
    return (
      this.commentIds.length === other.commentIds?.length &&
      this.commentIds.every(id => other.commentIds?.includes(id))
    )
  }

  /**
   * @param {ScanOp} other
   */
  mergeWith(other) {
    if (!this.canMergeWith(other)) {
      throw new Error('Cannot merge with incompatible operation')
    }
    // Tracking info and commentIds are known identical, so only the text
    // needs concatenating.
    this.insertion += other.insertion
  }

  /**
   * @returns {RawInsertOp}
   */
  toJSON() {
    // The compact string form is used when there is nothing but text.
    if (!this.tracking && !this.commentIds) {
      return this.insertion
    }
    /** @type RawInsertOp */
    const raw = { i: this.insertion }
    if (this.tracking) {
      raw.tracking = this.tracking.toRaw()
    }
    if (this.commentIds) {
      raw.commentIds = this.commentIds
    }
    return raw
  }

  toString() {
    return `insert '${this.insertion}'`
  }
}
class RetainOp extends ScanOp {
  /**
   * @param {number} length
   * @param {TrackingDirective | undefined} tracking
   */
  constructor(length, tracking = undefined) {
    super()
    if (length < 0) {
      throw new Error('length must be non-negative')
    }
    /** @type {number} */
    this.length = length
    /** @type {TrackingDirective | undefined} */
    this.tracking = tracking
  }

  /**
   * Parse a raw retain op: either a bare number or an object of the shape
   * `{ r, tracking? }`.
   * @param {RawRetainOp} op
   * @returns {RetainOp}
   */
  static fromJSON(op) {
    if (typeof op === 'number') {
      return new RetainOp(op)
    }
    // It must be an object with a 'r' property.
    if (typeof op.r !== 'number') {
      throw new Error('retain operation must have a number property')
    }
    if (!op.tracking) {
      return new RetainOp(op.r)
    }
    // A 'none' directive clears tracking; anything else carries real props.
    const tracking =
      op.tracking.type === 'none'
        ? new ClearTrackingProps()
        : TrackingProps.fromRaw(op.tracking)
    return new RetainOp(op.r, tracking)
  }

  /**
   * @inheritdoc
   * @param {LengthApplyContext} current
   * @returns {LengthApplyContext}
   */
  applyToLength(current) {
    if (current.inputCursor + this.length > current.inputLength) {
      throw new ApplyError(
        "Operation can't retain more chars than are left in the string.",
        this.toJSON(),
        current.inputLength
      )
    }
    // Retained characters appear in both input and output.
    current.length += this.length
    current.inputCursor += this.length
    return current
  }

  /**
   * Two retains are equal when they have the same length and tracking.
   * @inheritdoc
   * @param {ScanOp} other
   */
  equals(other) {
    return (
      other instanceof RetainOp &&
      this.length === other.length &&
      this.canMergeWith(other)
    )
  }

  /**
   * Retains merge when their tracking directives match.
   * @param {ScanOp} other
   * @return {other is RetainOp}
   */
  canMergeWith(other) {
    if (!(other instanceof RetainOp)) {
      return false
    }
    if (this.tracking) {
      return this.tracking.equals(other.tracking)
    }
    return !other.tracking
  }

  /**
   * @param {ScanOp} other
   */
  mergeWith(other) {
    if (!this.canMergeWith(other)) {
      throw new Error('Cannot merge with incompatible operation')
    }
    this.length += other.length
  }

  /**
   * @returns {RawRetainOp}
   */
  toJSON() {
    // The compact numeric form is used when there is no tracking directive.
    if (!this.tracking) {
      return this.length
    }
    return { r: this.length, tracking: this.tracking.toRaw() }
  }

  toString() {
    return `retain ${this.length}`
  }
}
class RemoveOp extends ScanOp {
  /**
   * @param {number} length - number of characters removed; non-negative
   */
  constructor(length) {
    super()
    if (length < 0) {
      throw new Error('length must be non-negative')
    }
    /** @type {number} */
    this.length = length
  }

  /**
   * Parse a raw remove op: a non-positive number whose magnitude is the
   * number of characters removed.
   * @param {RawRemoveOp} op
   * @returns {RemoveOp}
   */
  static fromJSON(op) {
    if (typeof op !== 'number' || op > 0) {
      throw new Error('delete operation must be a negative number')
    }
    return new RemoveOp(-op)
  }

  /**
   * @inheritdoc
   * @param {LengthApplyContext} current
   * @returns {LengthApplyContext}
   */
  applyToLength(current) {
    // Removed characters consume input but contribute no output length.
    current.inputCursor += this.length
    return current
  }

  /**
   * @inheritdoc
   * @param {ScanOp} other
   * @return {boolean}
   */
  equals(other) {
    return this.canMergeWith(other) && this.length === other.length
  }

  /**
   * Any two removes can be merged; lengths simply add up.
   * @param {ScanOp} other
   * @return {other is RemoveOp}
   */
  canMergeWith(other) {
    return other instanceof RemoveOp
  }

  /**
   * @param {ScanOp} other
   */
  mergeWith(other) {
    if (!this.canMergeWith(other)) {
      throw new Error('Cannot merge with incompatible operation')
    }
    this.length += other.length
  }

  /**
   * @returns {RawRemoveOp}
   */
  toJSON() {
    return -this.length
  }

  toString() {
    return `remove ${this.length}`
  }
}
/**
* @param {RawScanOp} op
* @returns {op is RawRetainOp}
*/
function isRetain(op) {
  // Either a bare positive number, or an object form with a positive `r`.
  if (typeof op === 'number') {
    return op > 0
  }
  return (
    typeof op === 'object' && 'r' in op && typeof op.r === 'number' && op.r > 0
  )
}
/**
* @param {RawScanOp} op
* @returns {op is RawInsertOp}
*/
function isInsert(op) {
  // Either a bare string, or an object form with a string `i`.
  if (typeof op === 'string') {
    return true
  }
  return typeof op === 'object' && 'i' in op && typeof op.i === 'string'
}
/**
* @param {RawScanOp} op
* @returns {op is RawRemoveOp}
*/
function isRemove(op) {
  // Removes are encoded only as bare negative numbers.
  if (typeof op !== 'number') {
    return false
  }
  return op < 0
}
module.exports = {
ScanOp,
InsertOp,
RetainOp,
RemoveOp,
isRetain,
isInsert,
isRemove,
}

View File

@@ -0,0 +1,112 @@
// @ts-check
const core = require('../../index')
const Comment = require('../comment')
const EditNoOperation = require('./edit_no_operation')
const EditOperation = require('./edit_operation')
/**
* @import DeleteCommentOperation from './delete_comment_operation'
* @import { CommentRawData } from '../types'
* @import { RawSetCommentStateOperation } from '../types'
* @import StringFileData from '../file_data/string_file_data'
*/
/**
* @extends EditOperation
*/
class SetCommentStateOperation extends EditOperation {
  /**
   * @param {string} commentId
   * @param {boolean} resolved
   */
  constructor(commentId, resolved) {
    super()
    this.commentId = commentId
    this.resolved = resolved
  }

  /**
   * @returns {RawSetCommentStateOperation}
   */
  toJSON() {
    return {
      resolved: this.resolved,
      commentId: this.commentId,
    }
  }

  /**
   * Replace the target comment with a copy carrying the new resolved state.
   * Unknown comment ids are silently ignored.
   * @param {StringFileData} fileData
   */
  apply(fileData) {
    const existing = fileData.comments.getComment(this.commentId)
    if (!existing) {
      return
    }
    fileData.comments.add(
      new Comment(existing.id, existing.ranges, this.resolved)
    )
  }

  /**
   * Produce the operation restoring the comment's previous resolved state,
   * or a no-op when the comment did not exist.
   * @param {StringFileData} previousState
   * @returns {SetCommentStateOperation | EditNoOperation}
   */
  invert(previousState) {
    const existing = previousState.comments.getComment(this.commentId)
    if (!existing) {
      return new EditNoOperation()
    }
    return new SetCommentStateOperation(this.commentId, existing.resolved)
  }

  /**
   * Composable with a later state change or deletion of the same comment.
   * @inheritdoc
   * @param {EditOperation} other
   * @returns {boolean}
   */
  canBeComposedWith(other) {
    const targetsSameComment =
      (other instanceof SetCommentStateOperation ||
        other instanceof core.DeleteCommentOperation) &&
      this.commentId === other.commentId
    return targetsSameComment
  }

  /**
   * @inheritdoc
   * @param {EditOperation} other
   * @returns {SetCommentStateOperation | core.DeleteCommentOperation}
   */
  compose(other) {
    if (this.canBeComposedWith(other)) {
      // The later operation fully supersedes this one.
      return other
    }
    throw new Error(
      `Trying to compose SetCommentStateOperation with ${other?.constructor?.name}.`
    )
  }

  /**
   * @inheritdoc
   * @param {RawSetCommentStateOperation} raw
   * @returns {SetCommentStateOperation}
   */
  static fromJSON(raw) {
    return new SetCommentStateOperation(raw.commentId, raw.resolved)
  }
}
module.exports = SetCommentStateOperation

View File

@@ -0,0 +1,53 @@
'use strict'
const _ = require('lodash')
const assert = require('check-types').assert
const Operation = require('./')
/**
 * Sets metadata on a file in a project.
 */
class SetFileMetadataOperation extends Operation {
  /**
   * @param {string} pathname
   * @param {Object} metadata
   */
  constructor(pathname, metadata) {
    super()
    assert.string(pathname, 'SetFileMetadataOperation: bad pathname')
    assert.object(metadata, 'SetFileMetadataOperation: bad metadata')
    this.pathname = pathname
    this.metadata = metadata
  }

  getPathname() {
    return this.pathname
  }

  getMetadata() {
    return this.metadata
  }

  /**
   * @inheritdoc
   */
  toRaw() {
    // Deep-clone so later mutation of this.metadata cannot leak into the
    // raw form (and vice versa).
    return {
      pathname: this.pathname,
      metadata: _.cloneDeep(this.metadata),
    }
  }

  /**
   * @inheritdoc
   */
  applyTo(snapshot) {
    const target = snapshot.getFile(this.pathname)
    if (!target) {
      return
    }
    target.setMetadata(this.metadata)
  }
}
module.exports = SetFileMetadataOperation

View File

@@ -0,0 +1,929 @@
// @ts-check
/**
* The text operation from OT.js with some minor cosmetic changes.
*
* Specifically, this is based on
* https://github.com/Operational-Transformation/ot.js/
* blob/298825f58fb51fefb352e7df5ddbc668f4d5646f/lib/text-operation.js
* from 18 Mar 2013.
*/
'use strict'
const containsNonBmpChars = require('../util').containsNonBmpChars
const EditOperation = require('./edit_operation')
const {
RetainOp,
InsertOp,
RemoveOp,
isRetain,
isInsert,
isRemove,
} = require('./scan_op')
const {
UnprocessableError,
ApplyError,
InvalidInsertionError,
TooLongError,
} = require('../errors')
const Range = require('../range')
const ClearTrackingProps = require('../file_data/clear_tracking_props')
const TrackingProps = require('../file_data/tracking_props')
/**
* @import StringFileData from '../file_data/string_file_data'
* @import { RawTextOperation, TrackingDirective } from '../types'
* @import { ScanOp } from '../operation/scan_op'
* @import TrackedChangeList from '../file_data/tracked_change_list'
*
* @typedef {{tracking?: TrackingProps, commentIds?: string[]}} InsertOptions
*/
/**
* Create an empty text operation.
* @extends EditOperation
*/
class TextOperation extends EditOperation {
/**
* Length of the longest file that we'll attempt to edit, in characters.
*
* @type {number}
*/
// 2 MiB of UTF-16 code units.
static MAX_STRING_LENGTH = 2 * Math.pow(1024, 2)
// The error classes are re-exported as statics so callers can reference
// them as e.g. TextOperation.ApplyError without importing ../errors.
static UnprocessableError = UnprocessableError
static ApplyError = ApplyError
static InvalidInsertionError = InvalidInsertionError
static TooLongError = TooLongError

constructor() {
  super()
  /**
   * When an operation is applied to an input string, you can think of this as
   * if an imaginary cursor runs over the entire string and skips over some
   * parts, removes some parts and inserts characters at some positions. These
   * actions (skip/remove/insert) are stored as an array in the "ops" property.
   * @type {ScanOp[]}
   */
  this.ops = []
  /**
   * An operation's baseLength is the length of every string the operation
   * can be applied to.
   */
  this.baseLength = 0
  /**
   * The targetLength is the length of every string that results from applying
   * the operation on a valid input string.
   */
  this.targetLength = 0
  /**
   * The expected content hash after this operation is applied
   *
   * @type {string | null}
   */
  this.contentHash = null
}
/**
* @param {TextOperation} other
* @return {boolean}
*/
equals(other) {
if (this.baseLength !== other.baseLength) {
return false
}
if (this.targetLength !== other.targetLength) {
return false
}
if (this.ops.length !== other.ops.length) {
return false
}
for (let i = 0; i < this.ops.length; i++) {
if (!this.ops[i].equals(other.ops[i])) {
return false
}
}
return true
}
// After an operation is constructed, the user of the library can specify the
// actions of an operation (skip/insert/remove) with these three builder
// methods. They all return the operation for convenient chaining.

/**
 * Skip over a given number of characters.
 * @param {number | {r: number}} n - count, or a raw retain object
 * @param {{tracking?: TrackingDirective}} opts
 * @returns {TextOperation}
 */
retain(n, opts = {}) {
  if (n === 0) {
    return this
  }
  if (!isRetain(n)) {
    throw new Error('retain expects an integer or a retain object')
  }
  const newOp = RetainOp.fromJSON(n)
  // NOTE(review): this overwrites any tracking parsed from a raw retain
  // object with opts.tracking (possibly undefined) — confirm intended.
  newOp.tracking = opts.tracking
  if (newOp.length === 0) {
    return this
  }
  // Retained characters appear in both the input and the output string.
  this.baseLength += newOp.length
  this.targetLength += newOp.length
  const lastOperation = this.ops[this.ops.length - 1]
  if (lastOperation?.canMergeWith(newOp)) {
    // The last op is a retain op => we can merge them into one op.
    lastOperation.mergeWith(newOp)
  } else {
    // Create a new op.
    this.ops.push(newOp)
  }
  return this
}
/**
 * Insert a string at the current position.
 * @param {string | {i: string}} insertValue - text, or a raw insert object
 * @param {InsertOptions} opts
 * @returns {TextOperation}
 */
insert(insertValue, opts = {}) {
  if (!isInsert(insertValue)) {
    throw new Error('insert expects a string or an insert object')
  }
  const newOp = InsertOp.fromJSON(insertValue)
  // NOTE(review): tracking/commentIds parsed from a raw insert object are
  // overwritten by opts (possibly undefined) — confirm intended.
  newOp.tracking = opts.tracking
  newOp.commentIds = opts.commentIds
  if (newOp.insertion === '') {
    return this
  }
  // Inserted characters appear only in the output string.
  this.targetLength += newOp.insertion.length
  const ops = this.ops
  const lastOp = this.ops[this.ops.length - 1]
  if (lastOp?.canMergeWith(newOp)) {
    // Merge insert op.
    lastOp.mergeWith(newOp)
  } else if (lastOp instanceof RemoveOp) {
    // It doesn't matter when an operation is applied whether the operation
    // is remove(3), insert("something") or insert("something"), remove(3).
    // Here we enforce that in this case, the insert op always comes first.
    // This makes all operations that have the same effect when applied to
    // a document of the right length equal in respect to the `equals` method.
    const secondToLastOp = ops[ops.length - 2]
    if (secondToLastOp?.canMergeWith(newOp)) {
      secondToLastOp.mergeWith(newOp)
    } else {
      // Shift the trailing remove right by one slot and place the insert
      // just before it.
      ops[ops.length] = ops[ops.length - 1]
      ops[ops.length - 2] = newOp
    }
  } else {
    ops.push(newOp)
  }
  return this
}
/**
* Remove a string at the current position.
* @param {number | string} n
* @returns {TextOperation}
*/
remove(n) {
if (typeof n === 'string') {
n = n.length
}
if (typeof n !== 'number') {
throw new Error('remove expects an integer or a string')
}
if (n === 0) {
return this
}
if (n > 0) {
n = -n
}
const newOp = RemoveOp.fromJSON(n)
this.baseLength -= n
const lastOp = this.ops[this.ops.length - 1]
if (lastOp?.canMergeWith(newOp)) {
lastOp.mergeWith(newOp)
} else {
this.ops.push(newOp)
}
return this
}
/**
* Tests whether this operation has no effect.
*/
isNoop() {
return (
this.ops.length === 0 ||
(this.ops.length === 1 && this.ops[0] instanceof RetainOp)
)
}
/**
* Pretty printing.
*/
toString() {
return this.ops.map(op => op.toString()).join(', ')
}
/**
* @inheritdoc
* @returns {RawTextOperation}
*/
toJSON() {
/** @type {RawTextOperation} */
const json = { textOperation: this.ops.map(op => op.toJSON()) }
if (this.contentHash != null) {
json.contentHash = this.contentHash
}
return json
}
/**
 * Converts a plain JS object into an operation and validates it.
 *
 * Rebuilding through the retain/insert/remove builders re-validates each op
 * and normalizes the op list (e.g. merges adjacent compatible ops).
 *
 * @param {RawTextOperation} obj
 * @returns {TextOperation}
 */
static fromJSON = function ({ textOperation: ops, contentHash }) {
  const o = new TextOperation()
  for (const op of ops) {
    if (isRetain(op)) {
      const retain = RetainOp.fromJSON(op)
      o.retain(retain.length, { tracking: retain.tracking })
    } else if (isInsert(op)) {
      const insert = InsertOp.fromJSON(op)
      o.insert(insert.insertion, {
        commentIds: insert.commentIds,
        tracking: insert.tracking,
      })
    } else if (isRemove(op)) {
      const remove = RemoveOp.fromJSON(op)
      // RemoveOp stores a positive length; the builder expects a negative count.
      o.remove(-remove.length)
    } else {
      throw new UnprocessableError('unknown operation: ' + JSON.stringify(op))
    }
  }
  if (contentHash != null) {
    o.contentHash = contentHash
  }
  return o
}
/**
 * Apply an operation to a string, returning a new string. Throws an error if
 * there's a mismatch between the input string and the operation.
 *
 * Also updates the file's tracked changes and comment ranges as a side
 * effect of each op.
 *
 * @override
 * @inheritdoc
 * @param {StringFileData} file
 */
apply(file) {
  const str = file.getContent()
  const operation = this
  if (containsNonBmpChars(str)) {
    throw new TextOperation.ApplyError(
      'The string contains non BMP characters.',
      operation,
      str
    )
  }
  if (str.length !== operation.baseLength) {
    throw new TextOperation.ApplyError(
      "The operation's base length must be equal to the string's length.",
      operation,
      str
    )
  }
  const ops = this.ops
  let inputCursor = 0
  let result = ''
  for (const op of ops) {
    if (op instanceof RetainOp) {
      if (inputCursor + op.length > str.length) {
        throw new ApplyError(
          "Operation can't retain more chars than are left in the string.",
          op.toJSON(),
          str
        )
      }
      // A retain may still change tracking info on the retained span.
      file.trackedChanges.applyRetain(result.length, op.length, {
        tracking: op.tracking,
      })
      result += str.slice(inputCursor, inputCursor + op.length)
      inputCursor += op.length
    } else if (op instanceof InsertOp) {
      if (containsNonBmpChars(op.insertion)) {
        throw new InvalidInsertionError(str, op.toJSON())
      }
      file.trackedChanges.applyInsert(result.length, op.insertion, {
        tracking: op.tracking,
      })
      file.comments.applyInsert(
        new Range(result.length, op.insertion.length),
        { commentIds: op.commentIds }
      )
      result += op.insertion
    } else if (op instanceof RemoveOp) {
      // Removed characters consume input but are not copied to the output.
      file.trackedChanges.applyDelete(result.length, op.length)
      file.comments.applyDelete(new Range(result.length, op.length))
      inputCursor += op.length
    } else {
      throw new UnprocessableError('Unknown ScanOp type during apply')
    }
  }
  // Every input character must be accounted for by the operation.
  if (inputCursor !== str.length) {
    throw new TextOperation.ApplyError(
      "The operation didn't operate on the whole string.",
      operation,
      str
    )
  }
  if (result.length > TextOperation.MAX_STRING_LENGTH) {
    throw new TextOperation.TooLongError(operation, result.length)
  }
  file.content = result
}
/**
* @inheritdoc
* @param {number} length of the original string; non-negative
* @return {number} length of the new string; non-negative
*/
applyToLength(length) {
const operation = this
if (length !== operation.baseLength) {
throw new TextOperation.ApplyError(
"The operation's base length must be equal to the string's length.",
operation,
length
)
}
const { length: newLength, inputCursor } = this.ops.reduce(
(intermediate, op) => op.applyToLength(intermediate),
{ length: 0, inputCursor: 0, inputLength: length }
)
if (inputCursor !== length) {
throw new TextOperation.ApplyError(
"The operation didn't operate on the whole string.",
operation,
length
)
}
if (newLength > TextOperation.MAX_STRING_LENGTH) {
throw new TextOperation.TooLongError(operation, newLength)
}
return newLength
}
/**
 * Build the operation that undoes this one, restoring the content as well
 * as the tracked-change and comment info recorded in `previousState`.
 * @inheritdoc
 * @param {StringFileData} previousState
 */
invert(previousState) {
  const str = previousState.getContent()
  let strIndex = 0
  const inverse = new TextOperation()
  const ops = this.ops
  for (let i = 0, l = ops.length; i < l; i++) {
    const op = ops[i]
    if (op instanceof RetainOp) {
      // Where we need to end up after the retains
      const target = strIndex + op.length
      // A previous retain could have overriden some tracking info. Now we
      // need to restore it.
      const previousRanges = previousState.trackedChanges.inRange(
        new Range(strIndex, op.length)
      )

      let removeTrackingInfoIfNeeded
      if (op.tracking) {
        // This retain changed tracking; outside the previously tracked
        // ranges, the inverse must clear it again.
        removeTrackingInfoIfNeeded = new ClearTrackingProps()
      }

      for (const trackedChange of previousRanges) {
        if (strIndex < trackedChange.range.start) {
          // Gap before the tracked range: clear tracking there if needed.
          inverse.retain(trackedChange.range.start - strIndex, {
            tracking: removeTrackingInfoIfNeeded,
          })
          strIndex = trackedChange.range.start
        }
        if (trackedChange.range.end < strIndex + op.length) {
          // Tracked range ends within this retain: restore its tracking.
          inverse.retain(trackedChange.range.length, {
            tracking: trackedChange.tracking,
          })
          strIndex = trackedChange.range.end
        }
        if (trackedChange.range.end !== strIndex) {
          // No need to split the range at the end
          const [left] = trackedChange.range.splitAt(strIndex)
          inverse.retain(left.length, { tracking: trackedChange.tracking })
          strIndex = left.end
        }
      }
      if (strIndex < target) {
        // Remainder of the retain past the last tracked range.
        inverse.retain(target - strIndex, {
          tracking: removeTrackingInfoIfNeeded,
        })
        strIndex = target
      }
    } else if (op instanceof InsertOp) {
      // Undo an insert by removing exactly what was inserted.
      inverse.remove(op.insertion.length)
    } else if (op instanceof RemoveOp) {
      // Undo a remove by re-inserting the original text, split into
      // segments so tracking/comment info can be reattached per segment.
      const segments = calculateTrackingCommentSegments(
        strIndex,
        op.length,
        previousState.comments,
        previousState.trackedChanges
      )
      for (const segment of segments) {
        inverse.insert(str.slice(strIndex, strIndex + segment.length), {
          tracking: segment.tracking,
          commentIds: segment.commentIds,
        })
        strIndex += segment.length
      }
    } else {
      throw new UnprocessableError('unknown scanop during inversion')
    }
  }
  return inverse
}
/**
* @inheritdoc
* @param {EditOperation} other
*/
canBeComposedWithForUndo(other) {
if (!(other instanceof TextOperation)) {
return false
}
if (this.isNoop() || other.isNoop()) {
return true
}
const startA = getStartIndex(this)
const startB = getStartIndex(other)
const simpleA = getSimpleOp(this)
const simpleB = getSimpleOp(other)
if (!simpleA || !simpleB) {
return false
}
if (simpleA instanceof InsertOp && simpleB instanceof InsertOp) {
return startA + simpleA.insertion.length === startB
}
if (simpleA instanceof RemoveOp && simpleB instanceof RemoveOp) {
// there are two possibilities to delete: with backspace and with the
// delete key.
return startB + simpleB.length === startA || startA === startB
}
return false
}
/**
* @inheritdoc
* @param {EditOperation} other
*/
canBeComposedWith(other) {
if (!(other instanceof TextOperation)) {
return false
}
return this.targetLength === other.baseLength
}
  /**
   * Compose this operation (applied first) with `operation2` (applied
   * second), producing a single operation with the combined effect.
   *
   * Precondition: `operation2` must be a TextOperation whose base length
   * equals this operation's target length.
   *
   * @inheritdoc
   * @param {EditOperation} operation2
   * @returns {TextOperation} the combined operation
   */
  compose(operation2) {
    if (!(operation2 instanceof TextOperation)) {
      throw new Error(
        `Trying to compose TextOperation with ${operation2?.constructor?.name}.`
      )
    }
    const operation1 = this
    if (operation1.targetLength !== operation2.baseLength) {
      throw new Error(
        'The base length of the second operation has to be the ' +
          'target length of the first operation'
      )
    }

    const operation = new TextOperation() // the combined operation
    const ops1 = operation1.ops
    const ops2 = operation2.ops // for fast access
    let i1 = 0
    let i2 = 0 // current index into ops1 respectively ops2
    let op1 = ops1[i1++]
    let op2 = ops2[i2++] // current ops
    for (;;) {
      // Dispatch on the type of op1 and op2
      if (typeof op1 === 'undefined' && typeof op2 === 'undefined') {
        // end condition: both ops1 and ops2 have been processed
        break
      }

      // Removes in operation1 act on text operation2 never saw; copy them
      // through to the result unchanged.
      if (op1 instanceof RemoveOp) {
        operation.remove(-op1.length)
        op1 = ops1[i1++]
        continue
      }
      // Inserts in operation2 are new text operation1 never saw; copy them
      // through to the result unchanged.
      if (op2 instanceof InsertOp) {
        operation.insert(op2.insertion, {
          tracking: op2.tracking,
          commentIds: op2.commentIds,
        })
        op2 = ops2[i2++]
        continue
      }

      if (typeof op1 === 'undefined') {
        throw new Error(
          'Cannot compose operations: first operation is too short.'
        )
      }
      if (typeof op2 === 'undefined') {
        throw new Error(
          'Cannot compose operations: first operation is too long.'
        )
      }

      if (op1 instanceof RetainOp && op2 instanceof RetainOp) {
        // If both have tracking info, use the latter one. Otherwise use the
        // tracking info from the former.
        const tracking = op2.tracking ?? op1.tracking
        // Emit a retain covering the shorter op; the remainder of the longer
        // op is carried over into the next iteration.
        if (op1.length > op2.length) {
          operation.retain(op2.length, {
            tracking,
          })
          op1 = new RetainOp(op1.length - op2.length, op1.tracking)
          op2 = ops2[i2++]
        } else if (op1.length === op2.length) {
          operation.retain(op1.length, {
            tracking,
          })
          op1 = ops1[i1++]
          op2 = ops2[i2++]
        } else {
          operation.retain(op1.length, {
            tracking,
          })
          op2 = new RetainOp(op2.length - op1.length, op2.tracking)
          op1 = ops1[i1++]
        }
      } else if (op1 instanceof InsertOp && op2 instanceof RemoveOp) {
        // Text inserted by operation1 and removed again by operation2
        // cancels out: nothing is emitted.
        if (op1.insertion.length > op2.length) {
          op1 = new InsertOp(
            op1.insertion.slice(op2.length),
            op1.tracking,
            op1.commentIds
          )
          op2 = ops2[i2++]
        } else if (op1.insertion.length === op2.length) {
          op1 = ops1[i1++]
          op2 = ops2[i2++]
        } else {
          op2 = RemoveOp.fromJSON(op1.insertion.length - op2.length)
          op1 = ops1[i1++]
        }
      } else if (op1 instanceof InsertOp && op2 instanceof RetainOp) {
        /** @type InsertOptions */
        const opts = {
          commentIds: op1.commentIds,
        }
        if (op2.tracking instanceof TrackingProps) {
          // Prefer the tracking info on the second operation
          opts.tracking = op2.tracking
        } else if (!(op2.tracking instanceof ClearTrackingProps)) {
          // The second operation does not cancel the first operation's tracking
          opts.tracking = op1.tracking
        }
        if (op1.insertion.length > op2.length) {
          operation.insert(op1.insertion.slice(0, op2.length), opts)
          op1 = new InsertOp(
            op1.insertion.slice(op2.length),
            op1.tracking,
            op1.commentIds
          )
          op2 = ops2[i2++]
        } else if (op1.insertion.length === op2.length) {
          operation.insert(op1.insertion, opts)
          op1 = ops1[i1++]
          op2 = ops2[i2++]
        } else {
          operation.insert(op1.insertion, opts)
          op2 = new RetainOp(op2.length - op1.insertion.length, op2.tracking)
          op1 = ops1[i1++]
        }
      } else if (op1 instanceof RetainOp && op2 instanceof RemoveOp) {
        if (op1.length > op2.length) {
          operation.remove(-op2.length)
          op1 = new RetainOp(op1.length - op2.length, op1.tracking)
          op2 = ops2[i2++]
        } else if (op1.length === op2.length) {
          operation.remove(-op2.length)
          op1 = ops1[i1++]
          op2 = ops2[i2++]
        } else {
          // NOTE(review): this branch passes a positive length to remove()
          // while the sibling branches negate it — presumably remove()
          // normalises the sign; confirm against TextOperation.remove.
          operation.remove(op1.length)
          op2 = RemoveOp.fromJSON(op1.length - op2.length)
          op1 = ops1[i1++]
        }
      } else {
        throw new Error(
          "This shouldn't happen: op1: " +
            JSON.stringify(op1) +
            ', op2: ' +
            JSON.stringify(op2)
        )
      }
    }
    return operation
  }
  /**
   * Transform takes two operations A and B that happened concurrently and
   * produces two operations A' and B' (in an array) such that
   * `apply(apply(S, A), B') = apply(apply(S, B), A')`. This function is the
   * heart of OT.
   *
   * Both operations must share the same base length (i.e. apply to the same
   * document state). Ties between concurrent inserts are broken in favour
   * of operation1.
   *
   * @param {TextOperation} operation1
   * @param {TextOperation} operation2
   * @returns {[TextOperation, TextOperation]} [operation1prime, operation2prime]
   */
  static transform(operation1, operation2) {
    if (operation1.baseLength !== operation2.baseLength) {
      throw new Error('Both operations have to have the same base length')
    }

    const operation1prime = new TextOperation()
    const operation2prime = new TextOperation()
    const ops1 = operation1.ops
    const ops2 = operation2.ops
    let i1 = 0
    let i2 = 0
    let op1 = ops1[i1++]
    let op2 = ops2[i2++]
    for (;;) {
      // At every iteration of the loop, the imaginary cursor that both
      // operation1 and operation2 have that operates on the input string must
      // have the same position in the input string.

      if (typeof op1 === 'undefined' && typeof op2 === 'undefined') {
        // end condition: both ops1 and ops2 have been processed
        break
      }

      // next two cases: one or both ops are insert ops
      // => insert the string in the corresponding prime operation, skip it in
      // the other one. If both op1 and op2 are insert ops, prefer op1.
      if (op1 instanceof InsertOp) {
        operation1prime.insert(op1.insertion, {
          tracking: op1.tracking,
          commentIds: op1.commentIds,
        })
        operation2prime.retain(op1.insertion.length)
        op1 = ops1[i1++]
        continue
      }
      if (op2 instanceof InsertOp) {
        operation1prime.retain(op2.insertion.length)
        operation2prime.insert(op2.insertion, {
          tracking: op2.tracking,
          commentIds: op2.commentIds,
        })
        op2 = ops2[i2++]
        continue
      }

      if (typeof op1 === 'undefined') {
        throw new Error(
          'Cannot compose operations: first operation is too short.'
        )
      }
      if (typeof op2 === 'undefined') {
        throw new Error(
          'Cannot compose operations: first operation is too long.'
        )
      }

      let minl
      if (op1 instanceof RetainOp && op2 instanceof RetainOp) {
        // Simple case: retain/retain
        // If both have tracking info, we use the one from op1
        /** @type {TrackingProps | ClearTrackingProps | undefined} */
        let operation1primeTracking
        /** @type {TrackingProps | ClearTrackingProps | undefined} */
        let operation2primeTracking
        if (op1.tracking) {
          operation1primeTracking = op1.tracking
        } else {
          operation2primeTracking = op2.tracking
        }
        // Consume the shorter retain; the remainder of the longer one is
        // carried over to the next iteration.
        if (op1.length > op2.length) {
          minl = op2.length
          op1 = new RetainOp(op1.length - op2.length, op1.tracking)
          op2 = ops2[i2++]
        } else if (op1.length === op2.length) {
          minl = op2.length
          op1 = ops1[i1++]
          op2 = ops2[i2++]
        } else {
          minl = op1.length
          op2 = new RetainOp(op2.length - op1.length, op2.tracking)
          op1 = ops1[i1++]
        }
        operation1prime.retain(minl, { tracking: operation1primeTracking })
        operation2prime.retain(minl, { tracking: operation2primeTracking })
      } else if (op1 instanceof RemoveOp && op2 instanceof RemoveOp) {
        // Both operations remove the same string at the same position. We don't
        // need to produce any operations, we just skip over the remove ops and
        // handle the case that one operation removes more than the other.
        if (op1.length > op2.length) {
          op1 = RemoveOp.fromJSON(op2.length - op1.length)
          op2 = ops2[i2++]
        } else if (op1.length === op2.length) {
          op1 = ops1[i1++]
          op2 = ops2[i2++]
        } else {
          op2 = RemoveOp.fromJSON(op1.length - op2.length)
          op1 = ops1[i1++]
        }
        // next two cases: remove/retain and retain/remove
      } else if (op1 instanceof RemoveOp && op2 instanceof RetainOp) {
        // op1 removes text that op2 merely retained, so only
        // operation1prime emits a remove.
        if (op1.length > op2.length) {
          minl = op2.length
          op1 = RemoveOp.fromJSON(op2.length - op1.length)
          op2 = ops2[i2++]
        } else if (op1.length === op2.length) {
          minl = op2.length
          op1 = ops1[i1++]
          op2 = ops2[i2++]
        } else {
          minl = op1.length
          op2 = new RetainOp(op2.length - op1.length, op2.tracking)
          op1 = ops1[i1++]
        }
        operation1prime.remove(minl)
      } else if (op1 instanceof RetainOp && op2 instanceof RemoveOp) {
        // Mirror image of the previous case.
        if (op1.length > op2.length) {
          minl = op2.length
          op1 = new RetainOp(op1.length - op2.length, op1.tracking)
          op2 = ops2[i2++]
        } else if (op1.length === op2.length) {
          minl = op1.length
          op1 = ops1[i1++]
          op2 = ops2[i2++]
        } else {
          minl = op1.length
          op2 = RemoveOp.fromJSON(op1.length - op2.length)
          op1 = ops1[i1++]
        }
        operation2prime.remove(minl)
      } else {
        throw new Error("The two operations aren't compatible")
      }
    }

    return [operation1prime, operation2prime]
  }
}
// Operations are essentially lists of ops. There are three types of ops:
//
// * Retain ops: Advance the cursor position by a given number of characters.
//   Represented by positive ints.
// * Insert ops: Insert a given string at the current cursor position.
//   Represented by strings.
// * Remove ops: Remove the next n characters. Represented by negative ints.

/**
 * Return the single "interesting" op of a simple operation — one that
 * consists of exactly one op, optionally surrounded by retains — or null
 * when the operation has no such shape.
 *
 * @param {TextOperation} operation
 * @returns {ScanOp | null}
 */
function getSimpleOp(operation) {
  const ops = operation.ops
  if (ops.length === 1) {
    return ops[0]
  }
  if (ops.length === 2) {
    if (ops[0] instanceof RetainOp) {
      return ops[1]
    }
    if (ops[1] instanceof RetainOp) {
      return ops[0]
    }
    return null
  }
  if (
    ops.length === 3 &&
    ops[0] instanceof RetainOp &&
    ops[2] instanceof RetainOp
  ) {
    return ops[1]
  }
  return null
}
/**
 * The position at which an operation first touches the document: the length
 * of its leading retain, if any, otherwise 0.
 *
 * @param {TextOperation} operation
 * @return {number}
 */
function getStartIndex(operation) {
  const first = operation.ops[0]
  return first instanceof RetainOp ? first.length : 0
}
/**
 * Partition the span `[cursor, cursor + length)` into segments such that
 * within each segment the set of covering comments and the tracking props
 * are constant. Each segment reports its own tracking and comment ids.
 *
 *                The quick brown fox jumps over the lazy dog
 * Tracked inserts    ----------               -----
 * Tracked deletes               ------
 * Comment 1         -------
 * Comment 2                  ----
 * Comment 3             -----------------
 *
 * Approx. boundaries: |  |  |  ||  |   |     |    |
 *
 * @param {number} cursor start of the span of interest
 * @param {number} length extent of the span of interest
 * @param {import('../file_data/comment_list')} commentsList
 * @param {TrackedChangeList} trackedChangeList
 * @returns {{length: number, commentIds?: string[], tracking?: TrackingProps}[]}
 */
function calculateTrackingCommentSegments(
  cursor,
  length,
  commentsList,
  trackedChangeList
) {
  const opStart = cursor
  const opEnd = cursor + length
  const boundaries = new Set()
  /**
   * Record a boundary, ignoring positions outside the operation's span.
   * @param {number} pos
   */
  const collect = pos => {
    if (pos >= opStart && pos <= opEnd) {
      boundaries.add(pos)
    }
  }
  // Every comment range contributes boundaries at both of its ends.
  for (const comment of commentsList.comments.values()) {
    for (const range of comment.ranges) {
      collect(range.start)
      collect(range.end)
    }
  }
  // So does every tracked change.
  for (const trackedChange of trackedChangeList.asSorted()) {
    collect(trackedChange.range.start)
    collect(trackedChange.range.end)
  }
  // The operation's own endpoints are always boundaries.
  collect(opStart)
  collect(opEnd)
  // Walk the boundaries in ascending numeric order, emitting one segment
  // per gap between consecutive boundaries.
  const orderedBoundaries = [...boundaries].sort((x, y) => x - y)
  const segments = []
  for (let i = 1; i < orderedBoundaries.length; i++) {
    const segmentStart = orderedBoundaries[i - 1]
    const segment = new Range(segmentStart, orderedBoundaries[i] - segmentStart)
    // Comments covering the whole segment belong to it
    const commentIds = commentsList.idsCoveringRange(segment)
    // ... as do the tracking props in effect over the whole segment.
    segments.push({
      length: segment.length,
      commentIds: commentIds.length > 0 ? commentIds : undefined,
      tracking: trackedChangeList.propsAtRange(segment),
    })
  }
  return segments
}
module.exports = TextOperation

View File

@@ -0,0 +1,64 @@
'use strict'
const assert = require('check-types').assert
// Dependencies are loaded at the bottom of the file to mitigate circular
// dependency
let RestoreOrigin = null
let RestoreFileOrigin = null
let RestoreProjectOrigin = null
/**
 * Records the provenance of a {@link Change}. A plain Origin is just a tag
 * (e.g. "it came from rich text mode", "it came from uploading files"); the
 * Restore* subclasses carry extra data for changes produced by restoring an
 * earlier version.
 */
class Origin {
  /**
   * @param {string} kind tag identifying where the change came from
   */
  constructor(kind) {
    assert.string(kind, 'Origin: bad kind')
    this.kind = kind
  }

  /**
   * Deserialise an Origin, dispatching to the subclass whose KIND tag
   * matches; unknown kinds produce a plain Origin.
   *
   * @param {Object} [raw]
   * @return {Origin | null}
   */
  static fromRaw(raw) {
    if (!raw) {
      return null
    }
    switch (raw.kind) {
      case RestoreOrigin.KIND:
        return RestoreOrigin.fromRaw(raw)
      case RestoreFileOrigin.KIND:
        return RestoreFileOrigin.fromRaw(raw)
      case RestoreProjectOrigin.KIND:
        return RestoreProjectOrigin.fromRaw(raw)
      default:
        return new Origin(raw.kind)
    }
  }

  /**
   * Serialise for storage or transmission.
   *
   * @return {Object}
   */
  toRaw() {
    return { kind: this.kind }
  }

  /**
   * The origin's tag.
   * @return {string}
   */
  getKind() {
    return this.kind
  }
}
module.exports = Origin
RestoreOrigin = require('./restore_origin')
RestoreFileOrigin = require('./restore_file_origin')
RestoreProjectOrigin = require('./restore_project_origin')

View File

@@ -0,0 +1,62 @@
'use strict'
const assert = require('check-types').assert
const Origin = require('.')
/**
 * Origin for a {@link Change} produced by restoring a single file to the
 * state it had at an earlier version.
 */
class RestoreFileOrigin extends Origin {
  /**
   * @param {number} version that was restored
   * @param {string} path that was restored
   * @param {Date} timestamp from the restored version
   */
  constructor(version, path, timestamp) {
    assert.integer(version, 'RestoreFileOrigin: bad version')
    assert.string(path, 'RestoreFileOrigin: bad path')
    assert.date(timestamp, 'RestoreFileOrigin: bad timestamp')
    super(RestoreFileOrigin.KIND)
    this.version = version
    this.path = path
    this.timestamp = timestamp
  }

  /**
   * Deserialise from raw form.
   * @param {Object} raw
   * @return {RestoreFileOrigin}
   */
  static fromRaw(raw) {
    const timestamp = new Date(raw.timestamp)
    return new RestoreFileOrigin(raw.version, raw.path, timestamp)
  }

  /** @inheritdoc */
  toRaw() {
    return {
      kind: RestoreFileOrigin.KIND,
      version: this.version,
      path: this.path,
      timestamp: this.timestamp.toISOString(),
    }
  }

  /**
   * The version the file was restored from.
   * @return {number}
   */
  getVersion() {
    return this.version
  }

  /**
   * The pathname that was restored.
   * @return {string}
   */
  getPath() {
    return this.path
  }

  /**
   * The restored version's timestamp.
   * @return {Date}
   */
  getTimestamp() {
    return this.timestamp
  }
}

RestoreFileOrigin.KIND = 'file-restore'
module.exports = RestoreFileOrigin

View File

@@ -0,0 +1,62 @@
'use strict'
const assert = require('check-types').assert
const Origin = require('./')
/**
 * When a {@link Change} is generated by restoring a previous version, this
 * records the original version. The timestamp of the restored version is
 * stored as well, purely for display: it is technically redundant (it could
 * be recovered via the version ID), but recovering every referenced version
 * would be very expensive, and the restored version's change may no longer
 * exist at all — it could have been merged with other changes or deleted.
 *
 * @see Origin
 */
class RestoreOrigin extends Origin {
  /**
   * @param {number} version that was restored
   * @param {Date} timestamp from the restored version
   */
  constructor(version, timestamp) {
    assert.integer(version, 'RestoreOrigin: bad version')
    assert.date(timestamp, 'RestoreOrigin: bad timestamp')
    super(RestoreOrigin.KIND)
    this.version = version
    this.timestamp = timestamp
  }

  /**
   * Deserialise from raw form.
   * @param {Object} raw
   * @return {RestoreOrigin}
   */
  static fromRaw(raw) {
    const timestamp = new Date(raw.timestamp)
    return new RestoreOrigin(raw.version, timestamp)
  }

  /** @inheritdoc */
  toRaw() {
    return {
      kind: RestoreOrigin.KIND,
      version: this.version,
      timestamp: this.timestamp.toISOString(),
    }
  }

  /**
   * The version that was restored.
   * @return {number}
   */
  getVersion() {
    return this.version
  }

  /**
   * The restored version's timestamp.
   * @return {Date}
   */
  getTimestamp() {
    return this.timestamp
  }
}

RestoreOrigin.KIND = 'restore'
module.exports = RestoreOrigin

View File

@@ -0,0 +1,51 @@
'use strict'
const assert = require('check-types').assert
const Origin = require('.')
/**
 * Origin for a {@link Change} produced by restoring a whole project to an
 * earlier version.
 */
class RestoreProjectOrigin extends Origin {
  /**
   * @param {number} version that was restored
   * @param {Date} timestamp from the restored version
   */
  constructor(version, timestamp) {
    assert.integer(version, 'RestoreProjectOrigin: bad version')
    assert.date(timestamp, 'RestoreProjectOrigin: bad timestamp')
    super(RestoreProjectOrigin.KIND)
    this.version = version
    this.timestamp = timestamp
  }

  /**
   * Deserialise from raw form.
   * @param {Object} raw
   * @return {RestoreProjectOrigin}
   */
  static fromRaw(raw) {
    const timestamp = new Date(raw.timestamp)
    return new RestoreProjectOrigin(raw.version, timestamp)
  }

  /** @inheritdoc */
  toRaw() {
    return {
      kind: RestoreProjectOrigin.KIND,
      version: this.version,
      timestamp: this.timestamp.toISOString(),
    }
  }

  /**
   * The version the project was restored from.
   * @return {number}
   */
  getVersion() {
    return this.version
  }

  /**
   * The restored version's timestamp.
   * @return {Date}
   */
  getTimestamp() {
    return this.timestamp
  }
}

RestoreProjectOrigin.KIND = 'project-restore'
module.exports = RestoreProjectOrigin

View File

@@ -0,0 +1,239 @@
'use strict'
const _ = require('lodash')
const ChangeNote = require('./change_note')
const ChangeRequest = require('./change_request')
const Chunk = require('./chunk')
const Operation = require('./operation')
/**
 * Operational Transformation client.
 *
 * Implements a small state machine (DISCONNECTED -> LOADING -> READY <->
 * WAITING) over a socket: local operations are sent one change at a time,
 * further edits queue up until the server acknowledges the in-flight
 * change, and buffered remote changes are applied strictly in version
 * order.
 *
 * See OT.md for explanation.
 */
class OtClient {
  constructor(_projectId, _editor, _blobStore, _socket) {
    // NOTE(review): _blobStore is never referenced in this class body —
    // presumably kept for interface compatibility; confirm before removing.
    const STATE_DISCONNECTED = 0
    const STATE_LOADING = 1
    const STATE_READY = 2
    const STATE_WAITING = 3

    let _version = null // latest server version we can reconstruct locally
    let _state = STATE_DISCONNECTED
    const _buffer = [] // remote ChangeNotes that are not yet applicable
    let _ackVersion = null // base version of our in-flight change, once acked
    let _outstanding = [] // operations sent to the server, awaiting ack
    let _pending = [] // local operations queued while a change is in flight
    const _waiting = [] // sparse map: version -> array of promise resolvers

    // Begin authentication; only valid from the DISCONNECTED state.
    this.connect = function otClientConnect() {
      switch (_state) {
        case STATE_DISCONNECTED:
          _state = STATE_LOADING
          _socket.emit('authenticate', {
            projectId: _projectId,
            token: 'letmein',
          })
          break
        default:
          throw new Error('connect in state ' + _state)
      }
    }

    /**
     * The latest project version number for which the client can construct the
     * project content.
     *
     * @return {number} non-negative
     */
    this.getVersion = function () {
      return _version
    }

    _socket.on('load', function otClientOnLoad(data) {
      switch (_state) {
        case STATE_LOADING: {
          // Materialise the project content from the chunk's snapshot plus
          // its changes, then hand the result to the editor.
          const chunk = Chunk.fromRaw(data)
          const snapshot = chunk.getSnapshot()
          snapshot.applyAll(chunk.getChanges(), { strict: true })
          _version = chunk.getEndVersion()

          // TODO: we can get remote changes here, so it's not correct to wait for
          // the editor to load before transitioning to the READY state
          _editor.load(snapshot).then(function () {
            _state = STATE_READY
          })
          break
        }
        default:
          throw new Error('loaded in state ' + _state)
      }
    })

    //
    // Local Operations
    //

    // Send the outstanding operations as a single change based on the
    // current version, then wait for the server's ack.
    function sendOutstandingChange() {
      const changeRequest = new ChangeRequest(_version, _outstanding)
      _socket.emit('change', changeRequest.toRaw())
      _state = STATE_WAITING
    }

    function sendLocalOperation(operation) {
      _outstanding.push(operation)
      sendOutstandingChange()
    }

    // Queue the operation until the in-flight change is acknowledged.
    function queueLocalOperation(operation) {
      _pending.push(operation)
    }

    this.handleLocalOperation = function otClientHandleLocalOperation(
      operation
    ) {
      switch (_state) {
        case STATE_READY:
          sendLocalOperation(operation)
          break
        case STATE_WAITING:
          queueLocalOperation(operation)
          break
        default:
          throw new Error('local operation in state ' + _state)
      }
    }

    /**
     * A promise that resolves when the project reaches the given version.
     *
     * @param {number} version non-negative
     * @return {Promise}
     */
    this.waitForVersion = function otClientWaitForVersion(version) {
      if (!_waiting[version]) _waiting[version] = []
      return new Promise(function (resolve, reject) {
        // NOTE(review): these promises are never rejected, not even on
        // disconnect — callers can end up waiting forever.
        _waiting[version].push(resolve)
      })
    }

    function resolveWaitingPromises() {
      for (const version in _waiting) {
        if (!Object.prototype.hasOwnProperty.call(_waiting, version)) continue
        // `version` is a string key here; the relational comparison coerces
        // it back to a number.
        if (version > _version) continue
        _waiting[version].forEach(function (resolve) {
          resolve()
        })
        delete _waiting[version]
      }
    }

    //
    // Messages from Server
    //

    // Consume the buffered ack and/or remote changes that are contiguous
    // with the current version, recursing until a gap remains.
    function advanceIfReady() {
      if (_ackVersion !== null && _version === _ackVersion) {
        _version += 1
        _ackVersion = null
        handleAckReady()
        advanceIfReady()
        return
      }
      const changeNotes = _.remove(_buffer, function (changeNote) {
        return changeNote.getBaseVersion() === _version
      })
      if (changeNotes.length === 1) {
        handleRemoteChangeReady(changeNotes[0].getChange())
        _version += 1
        advanceIfReady()
        return
      }
      if (changeNotes.length !== 0) {
        throw new Error('multiple remote changes in client version ' + _version)
      }
    }

    function bufferRemoteChangeNote(changeNote) {
      const version = changeNote.getBaseVersion()
      // NOTE(review): `_.find(collection, 'prop', value)` is the lodash 3
      // matchesProperty shorthand; in lodash 4 the third argument means
      // `fromIndex` instead — confirm the lodash version in use.
      if (_.find(_buffer, 'baseVersion', version)) {
        throw new Error('multiple changes in version ' + version)
      }
      if (version === _ackVersion) {
        throw new Error('received change that was acked in ' + _ackVersion)
      }
      _buffer.push(changeNote)
    }

    function handleAckReady() {
      // console.log('handleAckReady')
      if (_outstanding.length === 0) {
        throw new Error('ack complete without outstanding change')
      }
      if (_state !== STATE_WAITING) {
        throw new Error('ack complete in state ' + _state)
      }
      _editor.handleChangeAcknowledged()
      resolveWaitingPromises()
      // If edits queued up while waiting, immediately send them as the next
      // change; otherwise return to READY.
      if (_pending.length > 0) {
        _outstanding = _pending
        _pending = []
        sendOutstandingChange()
      } else {
        _outstanding = []
        _state = STATE_READY
      }
    }

    function handleRemoteChangeReady(change) {
      if (_pending.length > 0) {
        if (_outstanding.length === 0) {
          throw new Error('pending change without outstanding change')
        }
      }
      // Rebase our unacknowledged local work over the incoming remote change.
      Operation.transformMultiple(_outstanding, change.getOperations())
      Operation.transformMultiple(_pending, change.getOperations())

      _editor.applyRemoteChange(change)
    }

    _socket.on('ack', function otClientOnAck(data) {
      switch (_state) {
        case STATE_WAITING: {
          const changeNote = ChangeNote.fromRaw(data)
          _ackVersion = changeNote.getBaseVersion()
          advanceIfReady()
          break
        }
        default:
          throw new Error('ack in state ' + _state)
      }
    })

    _socket.on('change', function otClientOnChange(data) {
      switch (_state) {
        case STATE_READY:
        case STATE_WAITING:
          bufferRemoteChangeNote(ChangeNote.fromRaw(data))
          advanceIfReady()
          break
        default:
          throw new Error('remote change in state ' + _state)
      }
    })

    //
    // Connection State
    // TODO: socket.io error handling
    //
    _socket.on('disconnect', function () {
      _state = STATE_DISCONNECTED
      // eslint-disable-next-line no-console
      console.log('disconnected') // TODO: how do we handle disconnect?
    })
  }
}
module.exports = OtClient

View File

@@ -0,0 +1,232 @@
// @ts-check
const OError = require('@overleaf/o-error')
/**
* @import { RawRange } from './types'
*/
/**
 * A contiguous span of a document, described by a start position (`pos`) and
 * a non-negative `length`. Instances are immutable: every operation that
 * "changes" a range returns a new Range.
 */
class Range {
  /**
   * @param {number} pos zero-based start position; must be >= 0
   * @param {number} length extent of the range; must be >= 0
   */
  constructor(pos, length) {
    if (pos < 0 || length < 0) {
      throw new OError('Invalid range', { pos, length })
    }
    /** @readonly */
    this.pos = pos
    /** @readonly */
    this.length = length
  }

  /**
   * First position covered by the range.
   * @return {number}
   */
  get start() {
    return this.pos
  }

  /**
   * Position one past the last covered position.
   * @return {number}
   */
  get end() {
    return this.pos + this.length
  }

  /**
   * Structural equality: same position and same length.
   *
   * @param {Range} other
   * @returns {boolean}
   */
  equals(other) {
    return other.pos === this.pos && other.length === this.length
  }

  /**
   * Does this range begin at or beyond the end of `range`?
   * @param {Range} range
   * @returns {boolean}
   */
  startsAfter(range) {
    return range.end <= this.start
  }

  /**
   * Does this range begin strictly after position `pos`?
   * @param {number} pos
   * @returns {boolean}
   */
  startIsAfter(pos) {
    return pos < this.start
  }

  /**
   * A range of length zero covers nothing.
   * @returns {boolean}
   */
  isEmpty() {
    return this.length === 0
  }

  /**
   * Does this range fully enclose the given range?
   * @param {Range} range
   */
  contains(range) {
    return range.start >= this.start && range.end <= this.end
  }

  /**
   * Does the cursor fall within this range? Both endpoints count as inside.
   * @param {number} cursor
   */
  containsCursor(cursor) {
    return cursor >= this.start && cursor <= this.end
  }

  /**
   * Do the two ranges share at least one position?
   * @param {Range} range
   */
  overlaps(range) {
    return range.start < this.end && range.end > this.start
  }

  /**
   * Are the two ranges adjacent without overlapping?
   * @param {Range} range
   */
  touches(range) {
    return this.start === range.end || this.end === range.start
  }

  /**
   * Remove the intersection with `range` from this range.
   * @param {Range} range
   * @returns {Range}
   */
  subtract(range) {
    if (this.contains(range)) {
      // Fully enclosed: just shorten by the enclosed length.
      return this.shrinkBy(range.length)
    }
    if (range.contains(this)) {
      // Fully swallowed: nothing remains.
      return new Range(this.pos, 0)
    }
    if (range.overlaps(this)) {
      if (range.start < this.start) {
        // Partial overlap on our left edge.
        const overlapLength = range.end - this.start
        return new Range(range.pos, this.length - overlapLength)
      }
      // Partial overlap on our right edge.
      const overlapLength = this.end - range.start
      return new Range(this.pos, this.length - overlapLength)
    }
    // Disjoint: return an unchanged copy.
    return new Range(this.pos, this.length)
  }

  /**
   * Two ranges can merge when they overlap or touch.
   * @param {Range} range
   * @returns {boolean}
   */
  canMerge(range) {
    return this.touches(range) || this.overlaps(range)
  }

  /**
   * The smallest range covering both this range and `range`; throws when
   * the two are neither overlapping nor adjacent.
   * @param {Range} range
   */
  merge(range) {
    if (!this.canMerge(range)) {
      throw new Error('Ranges cannot be merged')
    }
    const mergedStart = Math.min(this.pos, range.pos)
    const mergedEnd = Math.max(this.end, range.end)
    return new Range(mergedStart, mergedEnd - mergedStart)
  }

  /**
   * Shift the range by `length` positions (negative shifts left).
   * @param {number} length
   */
  moveBy(length) {
    return new Range(this.pos + length, this.length)
  }

  /**
   * Grow the range by `extensionLength` positions at its end.
   * @param {number} extensionLength
   */
  extendBy(extensionLength) {
    return new Range(this.pos, this.length + extensionLength)
  }

  /**
   * Shorten the range by `shrinkLength`; throws if that would make the
   * length negative.
   * @param {number} shrinkLength
   */
  shrinkBy(shrinkLength) {
    const remaining = this.length - shrinkLength
    if (remaining < 0) {
      throw new Error('Cannot shrink range by more than its length')
    }
    return new Range(this.pos, remaining)
  }

  /**
   * Split this range at `cursor` and wedge a new range of the given length
   * in between the two halves.
   * @param {number} cursor
   * @param {number} length
   * @returns {[Range, Range, Range]} [before, inserted, after]
   */
  insertAt(cursor, length) {
    if (!this.containsCursor(cursor)) {
      throw new Error('The cursor must be contained in the range')
    }
    const head = new Range(this.pos, cursor - this.pos)
    const inserted = new Range(cursor, length)
    const tail = new Range(cursor + length, this.length - head.length)
    return [head, inserted, tail]
  }

  /**
   * Plain-object representation suitable for serialisation.
   */
  toRaw() {
    return {
      pos: this.pos,
      length: this.length,
    }
  }

  /**
   * Rebuild a Range from its raw form.
   * @param {RawRange} raw
   * @return {Range}
   */
  static fromRaw(raw) {
    return new Range(raw.pos, raw.length)
  }

  /**
   * Split this range into the parts before and after `cursor`.
   * @param {number} cursor
   * @returns {[Range, Range]} [before, after]
   */
  splitAt(cursor) {
    if (!this.containsCursor(cursor)) {
      throw new Error('The cursor must be contained in the range')
    }
    const head = new Range(this.pos, cursor - this.pos)
    const tail = new Range(cursor, this.length - head.length)
    return [head, tail]
  }
}
module.exports = Range

View File

@@ -0,0 +1,142 @@
// @ts-check
'use strict'
const path = require('path-browserify')
/**
* Regular expressions for Overleaf v2 taken from
* https://github.com/overleaf/internal/blob/f7b287b6a07354000a6b463ca3a5828104e4a811/services/web/app/src/Features/Project/SafePath.js
*/
//
// Regex of characters that are invalid in filenames: the path separator "/",
// the wildcard "*", C0 control characters, DEL, C1 control characters, and
// unpaired UTF-16 surrogate code units.
//
// eslint-disable-next-line no-control-regex
const BAD_CHAR_RX = /[/*\u0000-\u001F\u007F\u0080-\u009F\uD800-\uDFFF]/g

//
// Regex of filename patterns that are invalid ("." ".." and leading/trailing
// whitespace). Matches are replaced character-for-character, preserving the
// filename's length.
//
const BAD_FILE_RX = /(^\.$)|(^\.\.$)|(^\s+)|(\s+$)/g

//
// Put a block on filenames which match javascript property names, as they
// can cause exceptions where the code puts filenames into a hash. This is a
// temporary workaround until the code in other places is made safe against
// property names.
//
// See https://github.com/overleaf/write_latex/wiki/Using-javascript-Objects-as-Maps
//
const BLOCKED_FILE_RX =
  /^(prototype|constructor|toString|toLocaleString|valueOf|hasOwnProperty|isPrototypeOf|propertyIsEnumerable|__defineGetter__|__lookupGetter__|__defineSetter__|__lookupSetter__|__proto__)$/

//
// Maximum path length, in characters. This is fairly arbitrary.
//
const MAX_PATH = 1024
/**
 * Sanitise a single path component: substitute underscores for characters
 * that are invalid in filenames, and blank out reserved names ("." and "..")
 * and leading/trailing whitespace, preserving the component's length.
 * @param {string} filename
 */
function cleanPart(filename) {
  return filename
    .replace(BAD_CHAR_RX, '_')
    .replace(BAD_FILE_RX, match => '_'.repeat(match.length))
}
/**
* All pathnames in a Snapshot must be clean. We want pathnames that:
*
* 1. are unambiguous (e.g. no `.`s or redundant path separators)
* 2. do not allow directory traversal attacks (e.g. no `..`s or absolute paths)
* 3. do not contain leading/trailing space
* 4. do not contain the character '*' in filenames
*
* We normalise the pathname, split it by the separator and then clean each part
* as a filename
*
* @param {string} pathname
* @return {String}
*/
exports.clean = function (pathname) {
return exports.cleanDebug(pathname)[0]
}
/**
* See clean
* @param {string} pathname
* @return {[string,string]}
*/
exports.cleanDebug = function (pathname) {
let prev = pathname
let reason = ''
/**
* @param {string} label
*/
function recordReasonIfChanged(label) {
if (pathname === prev) return
if (reason) reason += ','
reason += label
prev = pathname
}
pathname = path.normalize(pathname)
recordReasonIfChanged('normalize')
pathname = pathname.replace(/\\/g, '/')
recordReasonIfChanged('workaround for IE')
pathname = pathname.replace(/\/+/g, '/')
recordReasonIfChanged('no multiple slashes')
pathname = pathname.replace(/^(\/.*)$/, '_$1')
recordReasonIfChanged('no leading /')
pathname = pathname.replace(/^(.+)\/$/, '$1')
recordReasonIfChanged('no trailing /')
pathname = pathname.replace(/^ *(.*)$/, '$1')
recordReasonIfChanged('no leading spaces')
pathname = pathname.replace(/^(.*[^ ]) *$/, '$1')
recordReasonIfChanged('no trailing spaces')
if (pathname.length === 0) pathname = '_'
recordReasonIfChanged('empty')
pathname = pathname.split('/').map(cleanPart).join('/')
recordReasonIfChanged('cleanPart')
pathname = pathname.replace(BLOCKED_FILE_RX, '@$1')
recordReasonIfChanged('BLOCKED_FILE_RX')
return [pathname, reason]
}
/**
* A pathname is clean (see clean) and not too long.
*
* @param {string} pathname
* @return {Boolean}
*/
exports.isClean = function pathnameIsClean(pathname) {
return exports.isCleanDebug(pathname)[0]
}
/**
* A pathname is clean (see clean) and not too long.
*
* @param {string} pathname
* @return {[boolean,string]}
*/
exports.isCleanDebug = function (pathname) {
if (pathname.length > MAX_PATH) return [false, 'MAX_PATH']
if (pathname.length === 0) return [false, 'empty']
const [cleanPathname, reason] = exports.cleanDebug(pathname)
if (cleanPathname !== pathname) return [false, reason]
return [true, '']
}

View File

@@ -0,0 +1,284 @@
// @ts-check
'use strict'
const assert = require('check-types').assert
const OError = require('@overleaf/o-error')
const FileMap = require('./file_map')
const V2DocVersions = require('./v2_doc_versions')
const FILE_LOAD_CONCURRENCY = 50
/**
* @import { BlobStore, RawSnapshot, ReadonlyBlobStore } from "./types"
* @import Change from "./change"
* @import TextOperation from "./operation/text_operation"
* @import File from "./file"
*/
/**
 * Error exposed as `Snapshot.EditMissingFileError`, signalling that an edit
 * referred to a pathname not present in the snapshot.
 */
class EditMissingFileError extends OError {}
/**
 * The complete state of a {@link Project} at one version: a map from
 * pathnames to files, together with optional version metadata.
 */
class Snapshot {
  static PROJECT_VERSION_RX_STRING = '^[0-9]+\\.[0-9]+$'
  static PROJECT_VERSION_RX = new RegExp(Snapshot.PROJECT_VERSION_RX_STRING)
  static EditMissingFileError = EditMissingFileError

  /**
   * Deserialise a snapshot from its raw (storage/wire) form.
   *
   * @param {RawSnapshot} raw
   * @return {Snapshot}
   */
  static fromRaw(raw) {
    assert.object(raw.files, 'bad raw.files')
    const fileMap = FileMap.fromRaw(raw.files)
    const docVersions = V2DocVersions.fromRaw(raw.v2DocVersions)
    return new Snapshot(fileMap, raw.projectVersion, docVersions)
  }

  /**
   * Serialise to raw form; the optional fields are omitted when unset.
   *
   * @return {RawSnapshot}
   */
  toRaw() {
    /** @type RawSnapshot */
    const raw = { files: this.fileMap.toRaw() }
    if (this.projectVersion) raw.projectVersion = this.projectVersion
    if (this.v2DocVersions) raw.v2DocVersions = this.v2DocVersions.toRaw()
    return raw
  }

  /**
   * @param {FileMap} [fileMap] defaults to an empty file map
   * @param {string} [projectVersion]
   * @param {V2DocVersions} [v2DocVersions]
   */
  constructor(fileMap, projectVersion, v2DocVersions) {
    assert.maybe.instance(fileMap, FileMap, 'bad fileMap')
    this.fileMap = fileMap || new FileMap({})
    this.projectVersion = projectVersion
    this.v2DocVersions = v2DocVersions
  }

  /**
   * The recorded project version, if any.
   *
   * @return {string | null | undefined}
   */
  getProjectVersion() {
    return this.projectVersion
  }

  /**
   * Record the project version; must match PROJECT_VERSION_RX when present.
   *
   * @param {string} projectVersion
   */
  setProjectVersion(projectVersion) {
    assert.maybe.match(
      projectVersion,
      Snapshot.PROJECT_VERSION_RX,
      'Snapshot: bad projectVersion'
    )
    this.projectVersion = projectVersion
  }

  /**
   * The recorded per-doc versions, if any.
   *
   * @return {V2DocVersions | null | undefined}
   */
  getV2DocVersions() {
    return this.v2DocVersions
  }

  /**
   * @param {V2DocVersions} v2DocVersions
   */
  setV2DocVersions(v2DocVersions) {
    assert.maybe.instance(
      v2DocVersions,
      V2DocVersions,
      'Snapshot: bad v2DocVersions'
    )
    this.v2DocVersions = v2DocVersions
  }

  /**
   * Merge new v2DocVersions into this snapshot's existing ones.
   *
   * @param {V2DocVersions} v2DocVersions
   */
  updateV2DocVersions(v2DocVersions) {
    v2DocVersions.applyTo(this)
  }

  /**
   * The underlying file map.
   * @return {FileMap}
   */
  getFileMap() {
    return this.fileMap
  }

  /**
   * The pathnames of all of the files.
   *
   * @return {Array.<string>} in no particular order
   */
  getFilePathnames() {
    return this.fileMap.getPathnames()
  }

  /**
   * Get a File by its pathname.
   * @see FileMap#getFile
   * @param {string} pathname
   */
  getFile(pathname) {
    return this.fileMap.getFile(pathname)
  }

  /**
   * Add the given file to the snapshot.
   * @see FileMap#addFile
   * @param {string} pathname
   * @param {File} file
   */
  addFile(pathname, file) {
    this.fileMap.addFile(pathname, file)
  }

  /**
   * Move or remove a file, keeping per-doc version bookkeeping in step.
   * @see FileMap#moveFile
   * @param {string} pathname
   * @param {string} newPathname
   */
  moveFile(pathname, newPathname) {
    this.fileMap.moveFile(pathname, newPathname)
    if (this.v2DocVersions) this.v2DocVersions.moveFile(pathname, newPathname)
  }

  /**
   * The number of files in the snapshot.
   *
   * @return {number}
   */
  countFiles() {
    return this.fileMap.countFiles()
  }

  /**
   * Edit the content of an editable file.
   *
   * @param {string} pathname
   * @param {TextOperation} textOperation
   * @throws {EditMissingFileError} if no file exists at the given pathname
   */
  editFile(pathname, textOperation) {
    const file = this.fileMap.getFile(pathname)
    if (!file) {
      throw new Snapshot.EditMissingFileError(
        `can't find file for editing: ${pathname}`
      )
    }
    file.edit(textOperation)
  }

  /**
   * Apply all changes in sequence, mutating this snapshot in place.
   *
   * Recoverable errors (caused by historical bad data) are ignored unless
   * opts.strict is set.
   *
   * @param {Change[]} changes
   * @param {object} [opts]
   * @param {boolean} opts.strict - do not ignore recoverable errors
   */
  applyAll(changes, opts) {
    for (const change of changes) {
      change.applyTo(this, opts)
    }
  }

  /**
   * Add every blob hash referenced by this snapshot's files to the given set.
   *
   * @param {Set.<String>} blobHashes
   */
  findBlobHashes(blobHashes) {
    // TODO(das7pad): refine types to enforce no nulls in FileMapData
    // @ts-ignore
    this.fileMap.map(
      /** @param {File} file */ file => {
        const contentHash = file.getHash()
        const rangesHash = file.getRangesHash()
        if (contentHash) blobHashes.add(contentHash)
        if (rangesHash) blobHashes.add(rangesHash)
      }
    )
  }

  /**
   * Load all of the files in this snapshot.
   *
   * @param {string} kind see {File#load}
   * @param {ReadonlyBlobStore} blobStore
   * @return {Promise<Record<string, File>>} an object where keys are the
   *         pathnames and values are the files in the snapshot
   */
  async loadFiles(kind, blobStore) {
    // TODO(das7pad): refine types to enforce no nulls in FileMapData
    // @ts-ignore
    return await this.fileMap.mapAsync(
      /** @param {File} file */ file => file.load(kind, blobStore),
      FILE_LOAD_CONCURRENCY
    )
  }

  /**
   * Store each of the files in this snapshot and return the raw snapshot for
   * long term storage.
   *
   * @param {BlobStore} blobStore
   * @param {number} [concurrency]
   * @return {Promise.<Object>}
   */
  async store(blobStore, concurrency) {
    assert.maybe.number(concurrency, 'bad concurrency')
    // Capture version metadata before awaiting so concurrent mutation of this
    // snapshot cannot skew the serialised result.
    const projectVersion = this.projectVersion
    const rawV2DocVersions = this.v2DocVersions
      ? this.v2DocVersions.toRaw()
      : undefined
    // TODO(das7pad): refine types to enforce no nulls in FileMapData
    // @ts-ignore
    const rawFiles = await this.fileMap.mapAsync(
      /** @param {File} file */ file => file.store(blobStore),
      concurrency
    )
    return {
      files: rawFiles,
      projectVersion,
      v2DocVersions: rawV2DocVersions,
    }
  }

  /**
   * Create a deep clone of this snapshot via its raw form.
   *
   * @return {Snapshot}
   */
  clone() {
    return Snapshot.fromRaw(this.toRaw())
  }
}
module.exports = Snapshot

View File

@@ -0,0 +1,175 @@
import Blob from './blob'
import TrackingProps from './file_data/tracking_props'
import ClearTrackingProps from './file_data/clear_tracking_props'
/** Read/write interface to content-addressed blob storage. */
export type BlobStore = {
  getBlob(hash: string): Promise<Blob | null>
  getString(hash: string): Promise<string>
  putString(content: string): Promise<Blob>
  putObject(obj: object): Promise<Blob>
  getObject<T = unknown>(hash: string): Promise<T>
}
/** Blob store restricted to the read-only operations. */
export type ReadonlyBlobStore = Pick<BlobStore, 'getString' | 'getObject'>
/** Comments and tracked changes, stored as a blob alongside file content. */
export type RangesBlob = {
  comments: CommentRawData[]
  trackedChanges: TrackedChangeRawData[]
}
/** A span of text: start offset plus length. */
export type RawRange = {
  pos: number
  length: number
}
/** A comment thread and the text ranges it is anchored to. */
export type CommentRawData = {
  id: string
  ranges: RawRange[]
  resolved?: boolean
}
/** One tracked change: the covered range and its tracking metadata. */
export type TrackedChangeRawData = {
  range: RawRange
  tracking: TrackingPropsRawData
}
export type TrackingPropsRawData = {
  type: 'insert' | 'delete'
  userId: string
  ts: string
}
/** Marker that clears tracking on a range. */
export type ClearTrackingPropsRawData = {
  type: 'none'
}
export type TrackingDirective = TrackingProps | ClearTrackingProps
/** Raw form of an eagerly-loaded text file. */
export type StringFileRawData = {
  content: string
  comments?: CommentRawData[]
  trackedChanges?: TrackedChangeRawData[]
}
/** Where a change came from (e.g. an external sync). */
export type RawOrigin = {
  kind: string
}
/** Raw form of a Change: a batch of operations at one point in time. */
export type RawChange = {
  operations: RawOperation[]
  timestamp: string
  // NOTE(review): `authors` is optional but `v2Authors` is not — verify this
  // asymmetry is intended against the Change serialisation code.
  authors?: (number | null)[]
  v2Authors: string[]
  origin: RawOrigin
  projectVersion: string
  v2DocVersions: RawV2DocVersions
}
export type RawOperation =
  | RawEditFileOperation
  // TODO(das7pad): add types for all the other operations
  | object
/** Raw form of a Snapshot; see snapshot.js. */
export type RawSnapshot = {
  files: RawFileMap
  projectVersion?: string
  v2DocVersions?: RawV2DocVersions | null
}
/** A snapshot plus the changes applied on top of it. */
export type RawHistory = {
  snapshot: RawSnapshot
  changes: RawChange[]
}
/** A section of history starting at a given version. */
export type RawChunk = {
  history: RawHistory
  startVersion: number
}
/** Map from pathname to raw file. */
export type RawFileMap = Record<string, RawFile>
export type RawFile = { metadata?: Object } & RawFileData
/** Union of every raw file representation (hollow/lazy/eager, text/binary). */
export type RawFileData =
  | RawBinaryFileData
  | RawHashFileData
  | RawHollowBinaryFileData
  | RawHollowStringFileData
  | RawLazyStringFileData
  | StringFileRawData
export type RawHashFileData = { hash: string; rangesHash?: string }
export type RawBinaryFileData = { hash: string; byteLength: number }
/** Text file referenced by hash, with pending edit operations. */
export type RawLazyStringFileData = {
  hash: string
  stringLength: number
  rangesHash?: string
  operations?: RawEditOperation[]
}
/** Binary file with no content available, only its size. */
export type RawHollowBinaryFileData = { byteLength: number }
/** Text file with no content available, only its length. */
export type RawHollowStringFileData = { stringLength: number }
/** Map from doc id to its pathname and version counter. */
export type RawV2DocVersions = Record<string, { pathname: string; v: number }>
/** Insert op: bare string, or string with comment/tracking metadata. */
export type RawInsertOp =
  | {
      i: string
      commentIds?: string[]
      tracking?: TrackingPropsRawData
    }
  | string
/** Remove op: number of characters to delete. */
export type RawRemoveOp = number
/** Retain op: bare count, or count with comment/tracking metadata. */
export type RawRetainOp =
  | {
      r: number
      commentIds?: string[]
      tracking?: TrackingPropsRawData | ClearTrackingPropsRawData
    }
  | number
export type RawScanOp = RawInsertOp | RawRemoveOp | RawRetainOp
/** Raw form of a TextOperation: a sequence of scan ops. */
export type RawTextOperation = {
  textOperation: RawScanOp[]
  contentHash?: string
}
export type RawAddCommentOperation = {
  commentId: string
  ranges: RawRange[]
  resolved?: boolean
}
export type RawDeleteCommentOperation = { deleteComment: string }
export type RawSetCommentStateOperation = {
  commentId: string
  resolved: boolean
}
/** Operation that does nothing (placeholder in a change). */
export type RawEditNoOperation = {
  noOp: true
}
export type RawEditFileOperation = RawEditOperation & { pathname: string }
export type RawEditOperation =
  | RawTextOperation
  | RawAddCommentOperation
  | RawDeleteCommentOperation
  | RawSetCommentStateOperation
  | RawEditNoOperation
/** Metadata for files imported from an external provider. */
export type LinkedFileData = {
  importedAt: string
  provider: string
  [other: string]: any
}
/** Raw form of a version label. */
export type RawLabel = {
  text: string
  authorId: number | null
  timestamp: string
  version: number
}

View File

@@ -0,0 +1,14 @@
/*
* Misc functions
*/
'use strict'
/**
* @param {string} str
* @returns {boolean} true if the given string contains non-BMP chars otherwise false
*/
exports.containsNonBmpChars = function utilContainsNonBmpChars(str) {
// check for first (high) surrogate in a non-BMP character
return /[\uD800-\uDBFF]/.test(str)
}

View File

@@ -0,0 +1,83 @@
// @ts-check
'use strict'
const _ = require('lodash')
/**
* @import File from "./file"
* @import Snapshot from "./snapshot"
* @import { RawV2DocVersions } from "./types"
*/
/**
 * Per-doc version counters for a project, keyed by doc id. Each entry records
 * the doc's pathname and its version number.
 */
class V2DocVersions {
  /**
   * @param {RawV2DocVersions} data map of doc id -> { pathname, v }
   */
  constructor(data) {
    this.data = data || {}
  }

  /**
   * @param {RawV2DocVersions?} [raw]
   * @return {V2DocVersions|undefined} undefined when raw is missing
   */
  static fromRaw(raw) {
    if (!raw) return undefined
    return new V2DocVersions(raw)
  }

  /**
   * Convert to raw form for storage or transmission.
   *
   * Note: this is a shallow copy — the per-doc entry objects are shared with
   * this instance, so callers must not mutate them.
   *
   * @return {RawV2DocVersions|null}
   */
  toRaw() {
    if (!this.data) return null
    const raw = _.clone(this.data)
    return raw
  }

  /**
   * Clone this object.
   *
   * Uses a deep copy: the previous implementation cloned via toRaw(), whose
   * shallow _.clone left the per-doc { pathname, v } objects shared between
   * clone and original, so a later moveFile() on either would silently
   * corrupt the other.
   *
   * @return {V2DocVersions|undefined} a new object of the same type
   */
  clone() {
    return V2DocVersions.fromRaw(_.cloneDeep(this.data))
  }

  /**
   * Merge these doc versions into the given snapshot.
   *
   * @param {Snapshot} snapshot
   */
  applyTo(snapshot) {
    // Only update the snapshot versions if we have new versions
    if (!_.size(this.data)) return

    // Create v2DocVersions in snapshot if it does not exist,
    // otherwise merge into it. Deep-copy the entries so the snapshot never
    // shares mutable { pathname, v } objects with this instance.
    if (!snapshot.v2DocVersions) {
      snapshot.v2DocVersions = this.clone()
    } else {
      _.assign(snapshot.v2DocVersions.data, _.cloneDeep(this.data))
    }
  }

  /**
   * Move or remove a doc.
   * Must be called after FileMap#moveFile, which validates the paths.
   *
   * An empty newPathname removes the entry. Only the first entry with a
   * matching pathname is updated — presumably pathnames are unique per doc;
   * TODO confirm with FileMap's invariants.
   *
   * @param {string} pathname
   * @param {string} newPathname
   */
  moveFile(pathname, newPathname) {
    for (const [id, v] of Object.entries(this.data)) {
      if (v.pathname !== pathname) continue

      if (newPathname === '') {
        delete this.data[id]
      } else {
        v.pathname = newPathname
      }
      break
    }
  }
}
module.exports = V2DocVersions