first commit

This commit is contained in:
2025-04-24 13:11:28 +08:00
commit ff9c54d5e4
5960 changed files with 834111 additions and 0 deletions

View File

@@ -0,0 +1,149 @@
'use strict'
const basicAuth = require('basic-auth')
const config = require('config')
const HTTPStatus = require('http-status')
const jwt = require('jsonwebtoken')
const tsscmp = require('tsscmp')
/**
 * Protect the Swagger UI mounted at /docs with HTTP Basic auth.
 *
 * @param {import('express').Application} app - Express app to mount on
 */
function setupBasicHttpAuthForSwaggerDocs(app) {
  app.use('/docs', function (req, res, next) {
    if (!hasValidBasicAuthCredentials(req)) {
      // Challenge the client so browsers show a credentials prompt.
      res.header('WWW-Authenticate', 'Basic realm="Application"')
      res.status(HTTPStatus.UNAUTHORIZED).end()
      return
    }
    next()
  })
}
exports.setupBasicHttpAuthForSwaggerDocs = setupBasicHttpAuthForSwaggerDocs
/**
 * Check the request's HTTP Basic credentials against the configured
 * password (and, during a rotation, the configured old password).
 *
 * @param {import('express').Request} req
 * @return {boolean} true when the credentials are acceptable
 */
function hasValidBasicAuthCredentials(req) {
  const credentials = basicAuth(req)
  if (!credentials) return false
  // No security in the name, so just use straight comparison.
  if (credentials.name !== 'staging') return false
  // Timing-safe comparison (tsscmp) for the password itself.
  const currentPassword = config.get('basicHttpAuth.password')
  if (currentPassword && tsscmp(credentials.pass, currentPassword)) {
    return true
  }
  // Support an old password so we can change the password without downtime.
  if (config.has('basicHttpAuth.oldPassword')) {
    const previousPassword = config.get('basicHttpAuth.oldPassword')
    if (previousPassword && tsscmp(credentials.pass, previousPassword)) {
      return true
    }
  }
  return false
}
/**
 * When the 'httpsOnly' setting is the string 'true', force HTTPS:
 * plain-HTTP GET/HEAD requests are redirected to the https:// URL and
 * every other method is rejected with 403.
 *
 * @param {import('express').Application} app
 */
function setupSSL(app) {
  if (config.get('httpsOnly') !== 'true') return
  // Behind a proxy/load balancer; trust X-Forwarded-* so req.protocol
  // reflects the original scheme.
  app.enable('trust proxy')
  app.use(function (req, res, next) {
    if (req.protocol === 'https') {
      return next()
    }
    const isSafeMethod = req.method === 'GET' || req.method === 'HEAD'
    if (isSafeMethod) {
      res.redirect('https://' + req.headers.host + req.url)
    } else {
      res
        .status(HTTPStatus.FORBIDDEN)
        .send('Please use HTTPS when submitting data to this server.')
    }
  })
}
exports.setupSSL = setupSSL
/**
 * swagger-tools security handler for the 'jwt' and 'token' schemes.
 *
 * As a temporary solution, to make the OT demo still work, valid HTTP
 * Basic credentials are also accepted.
 *
 * The token is taken from the 'token' query parameter (for the 'token'
 * security definition) or from an 'Authorization: Bearer' header.
 * Fails with 401 when the token is missing or invalid, and with 403
 * when the token's project_id does not match the request's.
 */
function handleJWTAuth(req, authOrSecDef, scopesOrApiKey, next) {
  if (hasValidBasicAuthCredentials(req)) {
    return next()
  }
  let token
  if (authOrSecDef.name === 'token') {
    token = req.query.token
  } else {
    const authHeader = req.headers.authorization
    if (authHeader && authHeader.split(' ')[0] === 'Bearer') {
      token = authHeader.split(' ')[1]
    }
  }
  if (!token) {
    const err = new Error('jwt missing')
    err.statusCode = HTTPStatus.UNAUTHORIZED
    err.headers = { 'WWW-Authenticate': 'Bearer' }
    return next(err)
  }
  let decoded
  try {
    decoded = decodeJWT(token)
  } catch (error) {
    const isJwtError =
      error instanceof jwt.JsonWebTokenError ||
      error instanceof jwt.TokenExpiredError
    // Unexpected failures are real errors, not auth failures.
    if (!isJwtError) throw error
    const err = new Error(error.message)
    err.statusCode = HTTPStatus.UNAUTHORIZED
    err.headers = { 'WWW-Authenticate': 'Bearer error="invalid_token"' }
    return next(err)
  }
  if (decoded.project_id.toString() !== req.swagger.params.project_id.value) {
    const err = new Error('Wrong project_id')
    err.statusCode = HTTPStatus.FORBIDDEN
    return next(err)
  }
  next()
}
exports.hasValidBasicAuthCredentials = hasValidBasicAuthCredentials
/**
 * Verify and decode the given JSON Web Token
 *
 * Verification uses the configured key; if that fails and an old key is
 * configured (for zero-downtime key rotation), the old key is tried too.
 *
 * @param {string} token
 * @return {Object} the decoded token payload
 */
function decodeJWT(token) {
  const algorithms = [config.get('jwtAuth.algorithm')]
  try {
    return jwt.verify(token, config.get('jwtAuth.key'), { algorithms })
  } catch (err) {
    // Support an old key so we can change the key without downtime.
    if (!config.has('jwtAuth.oldKey')) throw err
    return jwt.verify(token, config.get('jwtAuth.oldKey'), { algorithms })
  }
}
/**
 * swagger-tools security handler for the HTTP Basic scheme: pass the
 * request through when its credentials are valid, otherwise fail with
 * 401 and a WWW-Authenticate challenge.
 */
function handleBasicAuth(req, authOrSecDef, scopesOrApiKey, next) {
  if (!hasValidBasicAuthCredentials(req)) {
    const error = new Error()
    error.statusCode = HTTPStatus.UNAUTHORIZED
    error.headers = { 'WWW-Authenticate': 'Basic realm="Application"' }
    return next(error)
  }
  return next()
}
/**
 * Build the swagger-tools security handler map.
 *
 * @return {Object} handlers keyed by security definition name
 * @throws {Error} when the required auth settings are not configured
 */
function getSwaggerHandlers() {
  // Fail fast at startup rather than on the first request.
  if (!config.has('jwtAuth.key') || !config.has('basicHttpAuth.password')) {
    throw new Error('missing authentication env vars')
  }
  return {
    jwt: handleJWTAuth,
    basic: handleBasicAuth,
    token: handleJWTAuth,
  }
}
exports.getSwaggerHandlers = getSwaggerHandlers

View File

@@ -0,0 +1,10 @@
/**
 * Turn an async function into an Express middleware
 *
 * Any rejection from the wrapped function is forwarded to next(), so
 * Express error handling sees errors thrown inside async handlers.
 *
 * @param {function(Object, Object, Function): Promise} fn
 * @return {function(Object, Object, Function): void}
 */
function expressify(fn) {
  return function (req, res, next) {
    fn(req, res, next).catch(next)
  }
}
// Export the wrapper as the module's sole value.
module.exports = expressify

View File

@@ -0,0 +1,23 @@
const logger = require('@overleaf/logger')
const expressify = require('./expressify')
const { mongodb } = require('../../storage')
/**
 * Service status endpoint: verifies the MongoDB connection with a ping
 * command and responds 500 when the database is unreachable.
 */
async function status(req, res) {
  let dbIsUp = true
  try {
    await mongodb.db.command({ ping: 1 })
  } catch (err) {
    dbIsUp = false
    logger.warn({ err }, 'Lost connection with MongoDB')
  }
  if (dbIsUp) {
    res.send('history-v1 is up')
  } else {
    res.status(500).send('Lost connection with MongoDB')
  }
}
/**
 * Trivial liveness probe used by load balancers: always responds 'OK'.
 */
function healthCheck(req, res) {
  res.send('OK')
}
// Wrap the async handler so rejections are routed to Express error handling.
module.exports = {
  status: expressify(status),
  healthCheck,
}

View File

@@ -0,0 +1,141 @@
// @ts-check
'use strict'
const { expressify } = require('@overleaf/promise-utils')
const HTTPStatus = require('http-status')
const core = require('overleaf-editor-core')
const Change = core.Change
const Chunk = core.Chunk
const File = core.File
const FileMap = core.FileMap
const Snapshot = core.Snapshot
const TextOperation = core.TextOperation
const logger = require('@overleaf/logger')
const storage = require('../../storage')
const BatchBlobStore = storage.BatchBlobStore
const BlobStore = storage.BlobStore
const chunkStore = storage.chunkStore
const HashCheckBlobStore = storage.HashCheckBlobStore
const persistChanges = storage.persistChanges
const InvalidChangeError = storage.InvalidChangeError
const render = require('./render')
/**
 * Import a full snapshot into a new project.
 *
 * Responds 422 when the snapshot fails to parse, 409 when the project
 * has already been initialized, and 200 with the history id on success.
 */
async function importSnapshot(req, res) {
  const projectId = req.swagger.params.project_id.value
  const rawSnapshot = req.swagger.params.snapshot.value
  let snapshot
  try {
    snapshot = Snapshot.fromRaw(rawSnapshot)
  } catch (err) {
    return render.unprocessableEntity(res)
  }
  let historyId
  try {
    historyId = await chunkStore.initializeProject(projectId, snapshot)
  } catch (err) {
    if (!(err instanceof chunkStore.AlreadyInitialized)) {
      throw err
    }
    return render.conflict(res)
  }
  res.status(HTTPStatus.OK).json({ projectId: historyId })
}
/**
 * Import a batch of changes into a project's history.
 *
 * Parses the raw changes (422 on parse failure), preloads the blobs
 * they reference, then persists them. Rejected/invalid changes yield
 * 422, a missing chunk yields 404, and success yields 201 with either
 * an empty body or (when return_snapshot=hashed) the resulting
 * snapshot stored through a hash-checking blob store.
 */
async function importChanges(req, res, next) {
  const projectId = req.swagger.params.project_id.value
  const rawChanges = req.swagger.params.changes.value
  const endVersion = req.swagger.params.end_version.value
  const returnSnapshot = req.swagger.params.return_snapshot.value || 'none'
  let changes
  try {
    changes = rawChanges.map(Change.fromRaw)
  } catch (err) {
    logger.warn({ err, projectId }, 'failed to parse changes')
    return render.unprocessableEntity(res)
  }
  // Set limits to force us to persist all of the changes.
  const farFuture = new Date()
  farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000)
  const limits = {
    maxChanges: 0,
    minChangeTimestamp: farFuture,
    maxChangeTimestamp: farFuture,
  }
  const blobStore = new BlobStore(projectId)
  const batchBlobStore = new BatchBlobStore(blobStore)
  const hashCheckBlobStore = new HashCheckBlobStore(blobStore)
  // Preload every blob referenced by the changes, then lazily attach
  // file contents to each change.
  async function loadFiles() {
    const blobHashes = new Set()
    for (const change of changes) {
      // This populates the set blobHashes with blobs referred to in the change
      change.findBlobHashes(blobHashes)
    }
    await batchBlobStore.preload(Array.from(blobHashes))
    for (const change of changes) {
      await change.loadFiles('lazy', batchBlobStore)
    }
  }
  // Build the raw snapshot after persistence; falls back to loading the
  // latest chunk when persistChanges did not return one.
  async function buildResultSnapshot(resultChunk) {
    const chunk = resultChunk || (await chunkStore.loadLatest(projectId))
    const snapshot = chunk.getSnapshot()
    snapshot.applyAll(chunk.getChanges())
    const rawSnapshot = await snapshot.store(hashCheckBlobStore)
    return rawSnapshot
  }
  await loadFiles()
  let result
  try {
    result = await persistChanges(projectId, changes, limits, endVersion)
  } catch (err) {
    if (
      err instanceof Chunk.ConflictingEndVersion ||
      err instanceof TextOperation.UnprocessableError ||
      err instanceof File.NotEditableError ||
      err instanceof FileMap.PathnameError ||
      err instanceof Snapshot.EditMissingFileError ||
      err instanceof chunkStore.ChunkVersionConflictError ||
      err instanceof InvalidChangeError
    ) {
      // If we failed to apply operations, that's probably because they were
      // invalid.
      logger.warn({ err, projectId, endVersion }, 'changes rejected by history')
      return render.unprocessableEntity(res)
    } else if (err instanceof Chunk.NotFoundError) {
      logger.warn({ err, projectId }, 'chunk not found')
      return render.notFound(res)
    } else {
      throw err
    }
  }
  if (returnSnapshot === 'none') {
    res.status(HTTPStatus.CREATED).json({})
  } else {
    const rawSnapshot = await buildResultSnapshot(result && result.currentChunk)
    res.status(HTTPStatus.CREATED).json(rawSnapshot)
  }
}
exports.importSnapshot = expressify(importSnapshot)
exports.importChanges = expressify(importChanges)

View File

@@ -0,0 +1,388 @@
'use strict'
const _ = require('lodash')
const Path = require('node:path')
const Stream = require('node:stream')
const HTTPStatus = require('http-status')
const fs = require('node:fs')
const { promisify } = require('node:util')
const config = require('config')
const OError = require('@overleaf/o-error')
const logger = require('@overleaf/logger')
const { Chunk, ChunkResponse, Blob } = require('overleaf-editor-core')
const {
BlobStore,
blobHash,
chunkStore,
HashCheckBlobStore,
ProjectArchive,
zipStore,
chunkBuffer,
} = require('../../storage')
const render = require('./render')
const expressify = require('./expressify')
const withTmpDir = require('./with_tmp_dir')
const StreamSizeLimit = require('./stream_size_limit')
const pipeline = promisify(Stream.pipeline)
/**
 * Initialize a project, optionally with a client-provided id.
 * Responds 200 with the project id, or 409 when it already exists.
 */
async function initializeProject(req, res, next) {
  let projectId = req.swagger.params.body.value.projectId
  try {
    projectId = await chunkStore.initializeProject(projectId)
  } catch (err) {
    if (!(err instanceof chunkStore.AlreadyInitialized)) {
      throw err
    }
    return render.conflict(res)
  }
  res.status(HTTPStatus.OK).json({ projectId })
}
/**
 * Fetch the latest snapshot of a project with all file contents eagerly
 * loaded, and return it as raw JSON.
 */
async function getLatestContent(req, res, next) {
  const projectId = req.swagger.params.project_id.value
  const chunk = await chunkBuffer.loadLatest(projectId)
  const snapshot = chunk.getSnapshot()
  snapshot.applyAll(chunk.getChanges())
  await snapshot.loadFiles('eager', new BlobStore(projectId))
  res.json(snapshot.toRaw())
}
/**
 * Fetch the project snapshot at a specific version, with file contents
 * eagerly loaded, and return it as raw JSON.
 */
async function getContentAtVersion(req, res, next) {
  const projectId = req.swagger.params.project_id.value
  const version = req.swagger.params.version.value
  const snapshot = await getSnapshotAtVersion(projectId, version)
  await snapshot.loadFiles('eager', new BlobStore(projectId))
  res.json(snapshot.toRaw())
}
/**
 * Like getLatestContent, but goes through a hash-checking blob store so
 * the returned raw snapshot's content is verified against its hashes.
 */
async function getLatestHashedContent(req, res, next) {
  const projectId = req.swagger.params.project_id.value
  const blobStore = new HashCheckBlobStore(new BlobStore(projectId))
  const chunk = await chunkBuffer.loadLatest(projectId)
  const snapshot = chunk.getSnapshot()
  snapshot.applyAll(chunk.getChanges())
  await snapshot.loadFiles('eager', blobStore)
  res.json(await snapshot.store(blobStore))
}
/**
 * Return the latest chunk of a project's history, or 404 when the
 * project has no history.
 */
async function getLatestHistory(req, res, next) {
  const projectId = req.swagger.params.project_id.value
  try {
    const chunk = await chunkBuffer.loadLatest(projectId)
    res.json(new ChunkResponse(chunk).toRaw())
  } catch (err) {
    if (!(err instanceof Chunk.NotFoundError)) throw err
    render.notFound(res)
  }
}
/**
 * Return raw metadata (start/end version, end timestamp) for the
 * project's latest chunk; 404 when the project has no history.
 */
async function getLatestHistoryRaw(req, res, next) {
  const projectId = req.swagger.params.project_id.value
  const readOnly = req.swagger.params.readOnly.value
  try {
    const raw = await chunkStore.loadLatestRaw(projectId, { readOnly })
    res.json({
      startVersion: raw.startVersion,
      endVersion: raw.endVersion,
      endTimestamp: raw.endTimestamp,
    })
  } catch (err) {
    if (!(err instanceof Chunk.NotFoundError)) throw err
    render.notFound(res)
  }
}
/**
 * Return the history chunk that contains the given version; 404 when
 * no such chunk exists.
 */
async function getHistory(req, res, next) {
  const projectId = req.swagger.params.project_id.value
  const version = req.swagger.params.version.value
  try {
    const chunk = await chunkStore.loadAtVersion(projectId, version)
    res.json(new ChunkResponse(chunk).toRaw())
  } catch (err) {
    if (!(err instanceof Chunk.NotFoundError)) throw err
    render.notFound(res)
  }
}
/**
 * Return the history chunk current at the given timestamp; 404 when no
 * such chunk exists.
 */
async function getHistoryBefore(req, res, next) {
  const projectId = req.swagger.params.project_id.value
  const timestamp = req.swagger.params.timestamp.value
  try {
    const chunk = await chunkStore.loadAtTimestamp(projectId, timestamp)
    res.json(new ChunkResponse(chunk).toRaw())
  } catch (err) {
    if (!(err instanceof Chunk.NotFoundError)) throw err
    render.notFound(res)
  }
}
/**
 * Get all changes since the beginning of history or since a given version
 *
 * Walks chunks backwards from the latest one until it reaches the chunk
 * containing `since`, prepending each chunk's changes so the result is
 * in version order. Responds 400 when `since` is negative or beyond the
 * latest version.
 */
async function getChanges(req, res, next) {
  const projectId = req.swagger.params.project_id.value
  const since = req.swagger.params.since.value ?? 0
  if (since < 0) {
    // Negative values would cause an infinite loop
    return res.status(400).json({
      error: `Version out of bounds: ${since}`,
    })
  }
  const changes = []
  let chunk = await chunkBuffer.loadLatest(projectId)
  if (since > chunk.getEndVersion()) {
    return res.status(400).json({
      error: `Version out of bounds: ${since}`,
    })
  }
  // Fetch all chunks that come after the chunk that contains the start version
  while (chunk.getStartVersion() > since) {
    const changesInChunk = chunk.getChanges()
    changes.unshift(...changesInChunk)
    chunk = await chunkStore.loadAtVersion(projectId, chunk.getStartVersion())
  }
  // Extract the relevant changes from the chunk that contains the start version
  const changesInChunk = chunk
    .getChanges()
    .slice(since - chunk.getStartVersion())
  changes.unshift(...changesInChunk)
  res.json(changes.map(change => change.toRaw()))
}
/**
 * Stream a zip archive of the project at the given version. The archive
 * is built in a temporary directory and then piped to the response.
 * Responds 404 when the requested version does not exist.
 */
async function getZip(req, res, next) {
  const projectId = req.swagger.params.project_id.value
  const version = req.swagger.params.version.value
  let snapshot
  try {
    snapshot = await getSnapshotAtVersion(projectId, version)
  } catch (err) {
    if (!(err instanceof Chunk.NotFoundError)) throw err
    return render.notFound(res)
  }
  const blobStore = new BlobStore(projectId)
  await withTmpDir('get-zip-', async tmpDir => {
    const zipPath = Path.join(tmpDir, 'project.zip')
    const archive = new ProjectArchive(snapshot)
    await archive.writeZip(blobStore, zipPath)
    res.set('Content-Type', 'application/octet-stream')
    res.set('Content-Disposition', 'attachment; filename=project.zip')
    await pipeline(fs.createReadStream(zipPath), res)
  })
}
/**
 * Start building a zip of the project at the given version and return a
 * signed URL where it will become available. The zip itself is written
 * in the background; 404 when the version does not exist.
 */
async function createZip(req, res, next) {
  const projectId = req.swagger.params.project_id.value
  const version = req.swagger.params.version.value
  try {
    const snapshot = await getSnapshotAtVersion(projectId, version)
    const zipUrl = await zipStore.getSignedUrl(projectId, version)
    // Do not await this; run it in the background.
    zipStore.storeZip(projectId, version, snapshot).catch(err => {
      logger.error({ err, projectId, version }, 'createZip: storeZip failed')
    })
    res.status(HTTPStatus.OK).json({ zipUrl })
  } catch (error) {
    if (!(error instanceof Chunk.NotFoundError)) return next(error)
    render.notFound(res)
  }
}
/**
 * Delete all history data for a project — its chunks and its blobs —
 * in parallel. Responds 204 on success.
 */
async function deleteProject(req, res, next) {
  const projectId = req.swagger.params.project_id.value
  const blobStore = new BlobStore(projectId)
  const deletions = [
    chunkStore.deleteProjectChunks(projectId),
    blobStore.deleteBlobs(),
  ]
  await Promise.all(deletions)
  res.status(HTTPStatus.NO_CONTENT).send()
}
/**
 * Upload a blob for a project. The request body is streamed into a temp
 * file (bounded by maxFileUploadSize → 413), checked against the
 * expected hash (mismatch → 409), stored in the blob store, backed up
 * best-effort, and acknowledged with 201.
 */
async function createProjectBlob(req, res, next) {
  const projectId = req.swagger.params.project_id.value
  const expectedHash = req.swagger.params.hash.value
  const maxUploadSize = parseInt(config.get('maxFileUploadSize'), 10)
  await withTmpDir('blob-', async tmpDir => {
    const contentPath = Path.join(tmpDir, 'content')
    const sizeLimit = new StreamSizeLimit(maxUploadSize)
    await pipeline(req, sizeLimit, fs.createWriteStream(contentPath))
    if (sizeLimit.sizeLimitExceeded) {
      return render.requestEntityTooLarge(res)
    }
    const hash = await blobHash.fromFile(contentPath)
    if (hash !== expectedHash) {
      logger.debug({ hash, expectedHash }, 'Hash mismatch')
      return render.conflict(res, 'File hash mismatch')
    }
    const blobStore = new BlobStore(projectId)
    const newBlob = await blobStore.putFile(contentPath)
    // Backup failures must not fail the upload; log and carry on.
    try {
      const { backupBlob } = await import('../../storage/lib/backupBlob.mjs')
      await backupBlob(projectId, newBlob, contentPath)
    } catch (error) {
      logger.warn({ error, projectId, hash }, 'Failed to backup blob')
    }
    res.status(HTTPStatus.CREATED).end()
  })
}
/**
 * HEAD request for a blob: respond 200 with Content-Length when the
 * blob exists in the project, 404 otherwise.
 */
async function headProjectBlob(req, res) {
  const projectId = req.swagger.params.project_id.value
  const hash = req.swagger.params.hash.value
  const blob = await new BlobStore(projectId).getBlob(hash)
  if (!blob) {
    return res.status(404).end()
  }
  res.set('Content-Length', blob.getByteLength())
  res.status(200).end()
}
// Support simple, singular ranges starting from zero only, up-to 2MB = 2_000_000, 7 digits
const RANGE_HEADER = /^bytes=0-(\d{1,7})$/

/**
 * Parse a Range header of the restricted form 'bytes=0-N'.
 *
 * @param {string} header
 * @return {{}|{start: number, end: number}} byte range, or an empty
 *   options object when the header is absent or unsupported
 * @private
 */
function _getRangeOpts(header) {
  if (!header) return {}
  const match = header.match(RANGE_HEADER)
  if (!match) return {}
  return { start: 0, end: Number.parseInt(match[1], 10) }
}
/**
 * Stream a blob's content to the client, optionally honouring the
 * restricted 'bytes=0-N' Range header (see _getRangeOpts).
 *
 * Responds 404 when the blob does not exist. A premature close of the
 * response stream is treated as a client disconnect, not an error.
 */
async function getProjectBlob(req, res, next) {
  const projectId = req.swagger.params.project_id.value
  const hash = req.swagger.params.hash.value
  const opts = _getRangeOpts(req.swagger.params.range.value || '')
  const blobStore = new BlobStore(projectId)
  logger.debug({ projectId, hash }, 'getProjectBlob started')
  try {
    let stream
    try {
      stream = await blobStore.getStream(hash, opts)
    } catch (err) {
      if (err instanceof Blob.NotFoundError) {
        logger.warn({ projectId, hash }, 'Blob not found')
        return res.status(404).end()
      } else {
        throw err
      }
    }
    res.set('Content-Type', 'application/octet-stream')
    try {
      await pipeline(stream, res)
    } catch (err) {
      // The client went away mid-transfer; just finish the response.
      if (err?.code === 'ERR_STREAM_PREMATURE_CLOSE') {
        res.end()
      } else {
        throw OError.tag(err, 'error transferring stream', { projectId, hash })
      }
    }
  } finally {
    logger.debug({ projectId, hash }, 'getProjectBlob finished')
  }
}
/**
 * Copy a blob from one project to another.
 *
 * Responds 404 when the source blob is missing, 204 when the target
 * already has the blob (including global blobs), 201 after copying.
 */
async function copyProjectBlob(req, res, next) {
  const sourceProjectId = req.swagger.params.copyFrom.value
  const targetProjectId = req.swagger.params.project_id.value
  const blobHash = req.swagger.params.hash.value
  // Check that blob exists in source project
  const sourceBlobStore = new BlobStore(sourceProjectId)
  const targetBlobStore = new BlobStore(targetProjectId)
  // Look up source and target blobs in parallel.
  const [sourceBlob, targetBlob] = await Promise.all([
    sourceBlobStore.getBlob(blobHash),
    targetBlobStore.getBlob(blobHash),
  ])
  if (!sourceBlob) {
    return render.notFound(res)
  }
  // Exit early if the blob exists in the target project.
  // This will also catch global blobs, which always exist.
  if (targetBlob) {
    return res.status(HTTPStatus.NO_CONTENT).end()
  }
  // Otherwise, copy blob from source project to target project
  await sourceBlobStore.copyBlob(sourceBlob, targetProjectId)
  res.status(HTTPStatus.CREATED).end()
}
/**
 * Load the snapshot of a project as it was at the given version: take
 * the chunk containing that version, drop the trailing changes past it,
 * and apply the rest to the chunk's base snapshot.
 *
 * @param {string} projectId
 * @param {number} version
 * @return {Promise<Object>} the reconstructed snapshot
 */
async function getSnapshotAtVersion(projectId, version) {
  const chunk = await chunkStore.loadAtVersion(projectId, version)
  const snapshot = chunk.getSnapshot()
  const numTrailingChanges = chunk.getEndVersion() - version
  const changes = _.dropRight(chunk.getChanges(), numTrailingChanges)
  snapshot.applyAll(changes)
  return snapshot
}
// All handlers are async; expressify routes their rejections to next().
module.exports = {
  initializeProject: expressify(initializeProject),
  getLatestContent: expressify(getLatestContent),
  getContentAtVersion: expressify(getContentAtVersion),
  getLatestHashedContent: expressify(getLatestHashedContent),
  // Legacy alias for getLatestHistory.
  getLatestPersistedHistory: expressify(getLatestHistory),
  getLatestHistory: expressify(getLatestHistory),
  getLatestHistoryRaw: expressify(getLatestHistoryRaw),
  getHistory: expressify(getHistory),
  getHistoryBefore: expressify(getHistoryBefore),
  getChanges: expressify(getChanges),
  getZip: expressify(getZip),
  createZip: expressify(createZip),
  deleteProject: expressify(deleteProject),
  createProjectBlob: expressify(createProjectBlob),
  getProjectBlob: expressify(getProjectBlob),
  headProjectBlob: expressify(headProjectBlob),
  copyProjectBlob: expressify(copyProjectBlob),
}

View File

@@ -0,0 +1,17 @@
'use strict'
const HTTPStatus = require('http-status')
/**
 * Build a renderer that sends the given HTTP status with a JSON message
 * body, defaulting the message to the status's standard reason phrase.
 *
 * @param {number} status
 * @return {function(Object, string=): void}
 */
function makeErrorRenderer(status) {
  return function (res, message) {
    const body = { message: message || HTTPStatus[status] }
    res.status(status).json(body)
  }
}
// One renderer per error status used by the HTTP layer.
module.exports = {
  badRequest: makeErrorRenderer(HTTPStatus.BAD_REQUEST),
  notFound: makeErrorRenderer(HTTPStatus.NOT_FOUND),
  unprocessableEntity: makeErrorRenderer(HTTPStatus.UNPROCESSABLE_ENTITY),
  conflict: makeErrorRenderer(HTTPStatus.CONFLICT),
  requestEntityTooLarge: makeErrorRenderer(HTTPStatus.REQUEST_ENTITY_TOO_LARGE),
}

View File

@@ -0,0 +1,26 @@
const stream = require('node:stream')
/**
 * Transform stream that stops passing bytes through after some threshold has
 * been reached.
 *
 * Bytes past the limit are swallowed (not pushed downstream) and
 * `sizeLimitExceeded` is set so callers can detect the truncation.
 */
class StreamSizeLimit extends stream.Transform {
  constructor(maxSize) {
    super()
    this.maxSize = maxSize
    this.accumulatedSize = 0
    this.sizeLimitExceeded = false
  }

  _transform(chunk, encoding, cb) {
    this.accumulatedSize += chunk.length
    if (this.accumulatedSize <= this.maxSize) {
      this.push(chunk)
    } else {
      this.sizeLimitExceeded = true
    }
    cb()
  }
}
// Export the class as the module's sole value.
module.exports = StreamSizeLimit

View File

@@ -0,0 +1,27 @@
const fs = require('node:fs')
const fsExtra = require('fs-extra')
const logger = require('@overleaf/logger')
const os = require('node:os')
const path = require('node:path')
/**
 * Create a temporary directory before executing a function and cleaning up
 * after.
 *
 * The cleanup is best-effort and not awaited: removal errors are logged
 * and never propagate to the caller; errors thrown by `fn` do propagate.
 *
 * @param {string} prefix - prefix for the temporary directory name
 * @param {Function} fn - async function to call
 */
async function withTmpDir(prefix, fn) {
  const tmpDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), prefix))
  try {
    await fn(tmpDir)
  } finally {
    // Use the stdlib fs.promises.rm instead of fs-extra's remove;
    // force: true makes an already-missing directory a no-op, matching
    // the old ENOENT special case.
    fs.promises.rm(tmpDir, { recursive: true, force: true }).catch(err => {
      logger.error({ err }, 'failed to delete temporary file')
    })
  }
}
// Export the helper as the module's sole value.
module.exports = withTmpDir

View File

@@ -0,0 +1,269 @@
'use strict'
const _ = require('lodash')
// Merge the route maps into a fresh object. The previous
// `_.reduce([a, b], _.extend)` had no initial accumulator, so _.extend
// mutated the first module's exported `paths` object in place.
const paths = _.extend(
  {},
  require('./projects').paths,
  require('./project_import').paths
)
const securityDefinitions = require('./security_definitions')
/**
 * Swagger 2.0 specification for the Overleaf editor history API.
 * Routes come merged from ./projects and ./project_import; JWT is the
 * default security scheme (individual operations may override it).
 */
module.exports = {
  swagger: '2.0',
  info: {
    title: 'Overleaf Editor API',
    description: 'API for the Overleaf editor.',
    version: '1.0',
  },
  produces: ['application/json'],
  basePath: '/api',
  paths,
  securityDefinitions,
  security: [
    {
      jwt: [],
    },
  ],
  // Shared schema definitions referenced by the route specs via $ref.
  definitions: {
    Project: {
      properties: {
        projectId: {
          type: 'string',
        },
      },
      required: ['projectId'],
    },
    File: {
      properties: {
        hash: {
          type: 'string',
        },
        byteLength: {
          type: 'integer',
        },
        stringLength: {
          type: 'integer',
        },
      },
    },
    Label: {
      properties: {
        authorId: {
          type: 'integer',
        },
        text: {
          type: 'string',
        },
        timestamp: {
          type: 'string',
        },
        version: {
          type: 'integer',
        },
      },
    },
    Chunk: {
      properties: {
        history: {
          $ref: '#/definitions/History',
        },
        startVersion: {
          type: 'number',
        },
      },
    },
    ChunkResponse: {
      properties: {
        chunk: {
          $ref: '#/definitions/Chunk',
        },
        authors: {
          type: 'array',
          items: {
            $ref: '#/definitions/Author',
          },
        },
      },
    },
    ChunkResponseRaw: {
      properties: {
        startVersion: {
          type: 'number',
        },
        endVersion: {
          type: 'number',
        },
        endTimestamp: {
          type: 'string',
        },
      },
    },
    History: {
      properties: {
        snapshot: {
          $ref: '#/definitions/Snapshot',
        },
        changes: {
          type: 'array',
          items: {
            $ref: '#/definitions/Change',
          },
        },
      },
    },
    Snapshot: {
      properties: {
        files: {
          type: 'object',
          additionalProperties: {
            $ref: '#/definitions/File',
          },
        },
      },
      required: ['files'],
    },
    Change: {
      properties: {
        timestamp: {
          type: 'string',
        },
        operations: {
          type: 'array',
          items: {
            $ref: '#/definitions/Operation',
          },
        },
        authors: {
          type: 'array',
          items: {
            type: ['integer', 'null'],
          },
        },
        v2Authors: {
          type: 'array',
          items: {
            type: ['string', 'null'],
          },
        },
        projectVersion: {
          type: 'string',
        },
        v2DocVersions: {
          type: 'object',
          additionalProperties: {
            $ref: '#/definitions/V2DocVersions',
          },
        },
      },
      required: ['timestamp', 'operations'],
    },
    V2DocVersions: {
      properties: {
        pathname: {
          type: 'string',
        },
        v: {
          type: 'integer',
        },
      },
    },
    ChangeRequest: {
      properties: {
        baseVersion: {
          type: 'integer',
        },
        untransformable: {
          type: 'boolean',
        },
        operations: {
          type: 'array',
          items: {
            $ref: '#/definitions/Operation',
          },
        },
        authors: {
          type: 'array',
          items: {
            type: ['integer', 'null'],
          },
        },
      },
      required: ['baseVersion', 'operations'],
    },
    ChangeNote: {
      properties: {
        baseVersion: {
          type: 'integer',
        },
        change: {
          $ref: '#/definitions/Change',
        },
      },
      required: ['baseVersion'],
    },
    Operation: {
      properties: {
        pathname: {
          type: 'string',
        },
        newPathname: {
          type: 'string',
        },
        blob: {
          $ref: '#/definitions/Blob',
        },
        textOperation: {
          type: 'array',
          items: {},
        },
        file: {
          $ref: '#/definitions/File',
        },
      },
    },
    Error: {
      properties: {
        message: {
          type: 'string',
        },
      },
      required: ['message'],
    },
    Blob: {
      properties: {
        hash: {
          type: 'string',
        },
      },
      required: ['hash'],
    },
    Author: {
      properties: {
        id: {
          type: 'integer',
        },
        email: {
          type: 'string',
        },
        name: {
          type: 'string',
        },
      },
      required: ['id', 'email', 'name'],
    },
    SyncState: {
      properties: {
        synced: {
          type: 'boolean',
        },
      },
    },
    ZipInfo: {
      properties: {
        zipUrl: {
          type: 'string',
        },
      },
      required: ['zipUrl'],
    },
  },
}

View File

@@ -0,0 +1,147 @@
'use strict'
// Swagger operation: import a full snapshot into a new project.
const importSnapshot = {
  'x-swagger-router-controller': 'project_import',
  operationId: 'importSnapshot',
  tags: ['ProjectImport'],
  description: 'Import a snapshot from the current rails app.',
  consumes: ['application/json'],
  parameters: [
    {
      name: 'project_id',
      in: 'path',
      description: 'project id',
      required: true,
      type: 'string',
    },
    {
      name: 'snapshot',
      in: 'body',
      description: 'Snapshot to import.',
      required: true,
      schema: {
        $ref: '#/definitions/Snapshot',
      },
    },
  ],
  responses: {
    200: {
      description: 'Imported',
    },
    409: {
      description: 'Conflict: project already initialized',
    },
    404: {
      description: 'No such project exists',
    },
  },
  // Import endpoints require HTTP Basic auth instead of the default JWT.
  security: [
    {
      basic: [],
    },
  ],
}
// Swagger operation: append a batch of changes to a project's history.
const importChanges = {
  'x-swagger-router-controller': 'project_import',
  operationId: 'importChanges',
  tags: ['ProjectImport'],
  description: 'Import changes for a project from the current rails app.',
  consumes: ['application/json'],
  parameters: [
    {
      name: 'project_id',
      in: 'path',
      description: 'project id',
      required: true,
      type: 'string',
    },
    {
      name: 'end_version',
      description: 'end_version of latest persisted chunk',
      in: 'query',
      required: true,
      type: 'number',
    },
    {
      name: 'return_snapshot',
      description:
        'optionally, return a snapshot with the latest hashed content',
      in: 'query',
      required: false,
      type: 'string',
      enum: ['hashed', 'none'],
    },
    {
      name: 'changes',
      in: 'body',
      description: 'changes to be imported',
      required: true,
      schema: {
        type: 'array',
        items: {
          $ref: '#/definitions/Change',
        },
      },
    },
  ],
  responses: {
    201: {
      description: 'Created',
      schema: {
        $ref: '#/definitions/Snapshot',
      },
    },
  },
  // Import endpoints require HTTP Basic auth instead of the default JWT.
  security: [
    {
      basic: [],
    },
  ],
}
// Swagger operation: list the changes applied to a project, optionally
// starting from the 'since' version.
const getChanges = {
  'x-swagger-router-controller': 'projects',
  operationId: 'getChanges',
  tags: ['Project'],
  description: 'Get changes applied to a project',
  parameters: [
    {
      name: 'project_id',
      in: 'path',
      description: 'project id',
      required: true,
      type: 'string',
    },
    {
      name: 'since',
      in: 'query',
      description: 'start version',
      required: false,
      type: 'number',
    },
  ],
  responses: {
    200: {
      description: 'Success',
      schema: {
        type: 'array',
        items: {
          $ref: '#/definitions/Change',
        },
      },
    },
  },
  security: [
    {
      basic: [],
    },
  ],
}
// Route table; the legacy_* paths are aliases kept for older clients.
exports.paths = {
  '/projects/{project_id}/import': { post: importSnapshot },
  '/projects/{project_id}/legacy_import': { post: importSnapshot },
  '/projects/{project_id}/changes': { get: getChanges, post: importChanges },
  '/projects/{project_id}/legacy_changes': { post: importChanges },
}

View File

@@ -0,0 +1,588 @@
'use strict'
const Blob = require('overleaf-editor-core').Blob
exports.paths = {
'/projects': {
post: {
'x-swagger-router-controller': 'projects',
operationId: 'initializeProject',
tags: ['Project'],
description: 'Initialize project.',
consumes: ['application/json'],
parameters: [
{
name: 'body',
in: 'body',
schema: {
type: 'object',
properties: {
projectId: { type: 'string' },
},
},
},
],
responses: {
200: {
description: 'Initialized',
schema: {
$ref: '#/definitions/Project',
},
},
},
security: [
{
basic: [],
},
],
},
},
'/projects/{project_id}': {
delete: {
'x-swagger-router-controller': 'projects',
operationId: 'deleteProject',
tags: ['Project'],
description: "Delete a project's history",
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
],
responses: {
204: {
description: 'Success',
},
},
security: [
{
basic: [],
},
],
},
},
'/projects/{project_id}/blobs/{hash}': {
get: {
'x-swagger-router-controller': 'projects',
operationId: 'getProjectBlob',
tags: ['Project'],
description: 'Fetch blob content by its project id and hash.',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
{
name: 'hash',
in: 'path',
description: 'Hexadecimal SHA-1 hash',
required: true,
type: 'string',
pattern: Blob.HEX_HASH_RX_STRING,
},
{
name: 'range',
in: 'header',
description: 'HTTP Range header',
required: false,
type: 'string',
},
],
produces: ['application/octet-stream'],
responses: {
200: {
description: 'Success',
schema: {
type: 'file',
},
},
404: {
description: 'Not Found',
schema: {
$ref: '#/definitions/Error',
},
},
},
security: [{ jwt: [] }, { token: [] }],
},
head: {
'x-swagger-router-controller': 'projects',
operationId: 'headProjectBlob',
tags: ['Project'],
description: 'Fetch blob content-length by its project id and hash.',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
{
name: 'hash',
in: 'path',
description: 'Hexadecimal SHA-1 hash',
required: true,
type: 'string',
pattern: Blob.HEX_HASH_RX_STRING,
},
],
produces: ['application/octet-stream'],
responses: {
200: {
description: 'Success',
schema: {
type: 'file',
},
},
404: {
description: 'Not Found',
schema: {
$ref: '#/definitions/Error',
},
},
},
security: [{ jwt: [] }, { token: [] }],
},
put: {
'x-swagger-router-controller': 'projects',
operationId: 'createProjectBlob',
tags: ['Project'],
description:
'Create blob to be used in a file addition operation when importing a' +
' snapshot or changes',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
{
name: 'hash',
in: 'path',
description: 'Hexadecimal SHA-1 hash',
required: true,
type: 'string',
pattern: Blob.HEX_HASH_RX_STRING,
},
],
responses: {
201: {
description: 'Created',
},
},
},
post: {
'x-swagger-router-controller': 'projects',
operationId: 'copyProjectBlob',
tags: ['Project'],
description:
'Copies a blob from a source project to a target project when duplicating a project',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'target project id',
required: true,
type: 'string',
},
{
name: 'hash',
in: 'path',
description: 'Hexadecimal SHA-1 hash',
required: true,
type: 'string',
pattern: Blob.HEX_HASH_RX_STRING,
},
{
name: 'copyFrom',
in: 'query',
description: 'source project id',
required: true,
type: 'string',
},
],
responses: {
201: {
description: 'Created',
},
},
},
},
'/projects/{project_id}/latest/content': {
get: {
'x-swagger-router-controller': 'projects',
operationId: 'getLatestContent',
tags: ['Project'],
description:
'Get full content of the latest version. Text file ' +
'content is included, but binary files are just linked by hash.',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
],
responses: {
200: {
description: 'Success',
schema: {
$ref: '#/definitions/Snapshot',
},
},
404: {
description: 'Not Found',
schema: {
$ref: '#/definitions/Error',
},
},
},
},
},
'/projects/{project_id}/latest/hashed_content': {
get: {
'x-swagger-router-controller': 'projects',
operationId: 'getLatestHashedContent',
tags: ['Project'],
description:
'Get a snapshot of a project at the latest version ' +
'with the hashes for the contents each file',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
],
responses: {
200: {
description: 'Success',
schema: {
$ref: '#/definitions/Snapshot',
},
},
404: {
description: 'Not Found',
schema: {
$ref: '#/definitions/Error',
},
},
},
security: [
{
basic: [],
},
],
},
},
'/projects/{project_id}/latest/history': {
get: {
'x-swagger-router-controller': 'projects',
operationId: 'getLatestHistory',
tags: ['Project'],
description:
'Get the latest sequence of changes.' +
' TODO probably want a configurable depth.',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
],
responses: {
200: {
description: 'Success',
schema: {
$ref: '#/definitions/ChunkResponse',
},
},
404: {
description: 'Not Found',
schema: {
$ref: '#/definitions/Error',
},
},
},
},
},
'/projects/{project_id}/latest/history/raw': {
get: {
'x-swagger-router-controller': 'projects',
operationId: 'getLatestHistoryRaw',
tags: ['Project'],
description: 'Get the metadata of latest sequence of changes.',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
{
name: 'readOnly',
in: 'query',
description: 'use read only database connection',
required: false,
type: 'boolean',
},
],
responses: {
200: {
description: 'Success',
schema: {
$ref: '#/definitions/ChunkResponseRaw',
},
},
404: {
description: 'Not Found',
schema: {
$ref: '#/definitions/Error',
},
},
},
},
},
'/projects/{project_id}/latest/persistedHistory': {
get: {
'x-swagger-router-controller': 'projects',
operationId: 'getLatestPersistedHistory',
tags: ['Project'],
description: 'Get the latest sequence of changes.',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
],
responses: {
200: {
description: 'Success',
schema: {
$ref: '#/definitions/ChunkResponse',
},
},
404: {
description: 'Not Found',
schema: {
$ref: '#/definitions/Error',
},
},
},
},
},
'/projects/{project_id}/versions/{version}/history': {
get: {
'x-swagger-router-controller': 'projects',
operationId: 'getHistory',
tags: ['Project'],
description:
'Get the sequence of changes that includes the given version.',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
{
name: 'version',
in: 'path',
description: 'numeric version',
required: true,
type: 'number',
},
],
responses: {
200: {
description: 'Success',
schema: {
$ref: '#/definitions/ChunkResponse',
},
},
404: {
description: 'Not Found',
schema: {
$ref: '#/definitions/Error',
},
},
},
},
},
'/projects/{project_id}/versions/{version}/content': {
get: {
'x-swagger-router-controller': 'projects',
operationId: 'getContentAtVersion',
tags: ['Project'],
description: 'Get full content at the given version',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
{
name: 'version',
in: 'path',
description: 'numeric version',
required: true,
type: 'number',
},
],
responses: {
200: {
description: 'Success',
schema: {
$ref: '#/definitions/Snapshot',
},
},
404: {
description: 'Not Found',
schema: {
$ref: '#/definitions/Error',
},
},
},
},
},
'/projects/{project_id}/timestamp/{timestamp}/history': {
get: {
'x-swagger-router-controller': 'projects',
operationId: 'getHistoryBefore',
tags: ['Project'],
description:
'Get the sequence of changes. ' + ' before the given timestamp',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
{
name: 'timestamp',
in: 'path',
description: 'timestamp',
required: true,
type: 'string',
format: 'date-time',
},
],
responses: {
200: {
description: 'Success',
schema: {
$ref: '#/definitions/ChunkResponse',
},
},
404: {
description: 'Not Found',
schema: {
$ref: '#/definitions/Error',
},
},
},
},
},
'/projects/{project_id}/version/{version}/zip': {
get: {
'x-swagger-router-controller': 'projects',
operationId: 'getZip',
tags: ['Project'],
description: 'Download zip with project content',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
{
name: 'version',
in: 'path',
description: 'numeric version',
required: true,
type: 'number',
},
],
produces: ['application/octet-stream'],
responses: {
200: {
description: 'success',
},
404: {
description: 'not found',
},
},
security: [
{
token: [],
},
],
},
post: {
'x-swagger-router-controller': 'projects',
operationId: 'createZip',
tags: ['Project'],
description:
'Create a zip file with project content. Returns a link to be polled.',
parameters: [
{
name: 'project_id',
in: 'path',
description: 'project id',
required: true,
type: 'string',
},
{
name: 'version',
in: 'path',
description: 'numeric version',
required: true,
type: 'number',
},
],
responses: {
200: {
description: 'success',
schema: {
$ref: '#/definitions/ZipInfo',
},
},
404: {
description: 'not found',
},
},
security: [
{
basic: [],
},
],
},
},
}

View File

@@ -0,0 +1,17 @@
'use strict'
// Swagger security schemes referenced by the `security` sections of the
// path definitions: a JWT carried in the `authorization` header, plain
// HTTP basic auth, and a `token` query-string API key.
const securityDefinitions = {
  jwt: { type: 'apiKey', in: 'header', name: 'authorization' },
  basic: { type: 'basic' },
  token: { type: 'apiKey', in: 'query', name: 'token' },
}

module.exports = securityDefinitions