first commit

2025-04-24 13:11:28 +08:00
commit ff9c54d5e4
5960 changed files with 834111 additions and 0 deletions


@@ -0,0 +1,195 @@
const config = require('config')
const fetch = require('node-fetch')
const sinon = require('sinon')
const { expect } = require('chai')
const cleanup = require('../storage/support/cleanup')
const expectResponse = require('./support/expect_response')
const fixtures = require('../storage/support/fixtures')
const HTTPStatus = require('http-status')
const testServer = require('./support/test_server')
describe('auth', function () {
beforeEach(cleanup.everything)
beforeEach(fixtures.create)
beforeEach('Set up stubs', function () {
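    // Stub config.has/get with callThrough so individual settings can be
    // overridden per test via setMockConfig() below, while all other lookups
    // fall through to the real config.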
sinon.stub(config, 'has').callThrough()
sinon.stub(config, 'get').callThrough()
})
afterEach(sinon.restore)
it('renders 401 on swagger docs endpoint without auth', async function () {
const response = await fetch(testServer.url('/docs'))
expect(response.status).to.equal(HTTPStatus.UNAUTHORIZED)
expect(response.headers.get('www-authenticate')).to.match(/^Basic/)
})
it('renders swagger docs endpoint with auth', async function () {
const response = await fetch(testServer.url('/docs'), {
headers: {
Authorization: testServer.basicAuthHeader,
},
})
expect(response.ok).to.be.true
})
it('takes an old basic auth password during a password change', async function () {
setMockConfig('basicHttpAuth.oldPassword', 'foo')
    // The primary password should still work.
const response1 = await fetch(testServer.url('/docs'), {
headers: {
Authorization: testServer.basicAuthHeader,
},
})
expect(response1.ok).to.be.true
// Old password should also work.
const response2 = await fetch(testServer.url('/docs'), {
headers: {
Authorization: 'Basic ' + Buffer.from('staging:foo').toString('base64'),
},
})
expect(response2.ok).to.be.true
// Incorrect password should not work.
const response3 = await fetch(testServer.url('/docs'), {
      headers: {
Authorization: 'Basic ' + Buffer.from('staging:bar').toString('base64'),
},
})
expect(response3.status).to.equal(HTTPStatus.UNAUTHORIZED)
})
it('renders 401 on ProjectImport endpoints', async function () {
const unauthenticatedClient = testServer.client
try {
await unauthenticatedClient.apis.ProjectImport.importSnapshot1({
project_id: '1',
snapshot: { files: {} },
})
expect.fail()
} catch (err) {
expectResponse.unauthorized(err)
expect(err.response.headers['www-authenticate']).to.match(/^Basic/)
}
    // Check that the snapshot was not persisted, even though the response was a 401
const projectClient = await testServer.createClientForProject('1')
try {
await projectClient.apis.Project.getLatestHistory({ project_id: '1' })
expect.fail()
} catch (err) {
expectResponse.notFound(err)
}
})
it('renders 401 for JWT endpoints', function () {
return testServer.client.apis.Project.getLatestHistory({
project_id: '10000',
})
.then(() => {
expect.fail()
})
.catch(err => {
expectResponse.unauthorized(err)
expect(err.response.headers['www-authenticate']).to.equal('Bearer')
})
})
it('accepts basic auth in place of JWT (for now)', function () {
const projectId = fixtures.docs.initializedProject.id
return testServer.pseudoJwtBasicAuthClient.apis.Project.getLatestHistory({
project_id: projectId,
}).then(response => {
expect(response.obj.chunk).to.exist
})
})
it('uses JWT', function () {
const projectId = fixtures.docs.initializedProject.id
return testServer
.createClientForProject(projectId)
.then(client => {
return client.apis.Project.getLatestHistory({
project_id: projectId,
})
})
.then(response => {
expect(response.obj.chunk).to.exist
})
})
it('checks for project id', function () {
return testServer
.createClientForProject('1')
.then(client => {
return client.apis.Project.getLatestHistory({
project_id: '2',
})
})
.then(() => {
expect.fail()
})
.catch(expectResponse.forbidden)
})
  it('does not accept JWT for ProjectImport endpoints', function () {
return testServer.createClientForProject('1').then(client => {
return client.apis.ProjectImport.importSnapshot1({
project_id: '1',
snapshot: {},
})
.then(() => {
expect.fail()
})
.catch(expectResponse.unauthorized)
})
})
describe('when an old JWT key is defined', function () {
beforeEach(function () {
setMockConfig('jwtAuth.oldKey', 'old-secret')
})
it('accepts the old key', async function () {
const projectId = fixtures.docs.initializedProject.id
const client = await testServer.createClientForProject(projectId, {
jwtKey: 'old-secret',
})
const response = await client.apis.Project.getLatestHistory({
project_id: projectId,
})
expect(response.obj.chunk).to.exist
})
it('accepts the new key', async function () {
const projectId = fixtures.docs.initializedProject.id
const client = await testServer.createClientForProject(projectId)
const response = await client.apis.Project.getLatestHistory({
project_id: projectId,
})
expect(response.obj.chunk).to.exist
})
it('rejects other keys', async function () {
const projectId = fixtures.docs.initializedProject.id
const client = await testServer.createClientForProject(projectId, {
jwtKey: 'bad-secret',
})
try {
await client.apis.Project.getLatestHistory({
project_id: projectId,
})
expect.fail()
} catch (err) {
expectResponse.unauthorized(err)
}
})
})
})
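/**
 * Make the stubbed config report a value for the given setting.
 * @param {string} path
 * @param {*} value
 */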
function setMockConfig(path, value) {
config.has.withArgs(path).returns(true)
config.get.withArgs(path).returns(value)
}


@@ -0,0 +1,244 @@
// @ts-check
import cleanup from '../storage/support/cleanup.js'
import fetch from 'node-fetch'
import testServer from './support/test_backup_deletion_server.mjs'
import { expect } from 'chai'
import testProjects from './support/test_projects.js'
import { db } from '../../../../storage/lib/mongodb.js'
import { ObjectId } from 'mongodb'
import {
backupPersistor,
projectBlobsBucket,
chunksBucket,
} from '../../../../storage/lib/backupPersistor.mjs'
import { makeProjectKey } from '../../../../storage/lib/blob_store/index.js'
import config from 'config'
import Stream from 'stream'
import projectKey from '../../../../storage/lib/project_key.js'
/**
* @typedef {import("node-fetch").Response} Response
*/
const { deksBucket } = config.get('backupStore')
const deletedProjectsCollection = db.collection('deletedProjects')
/**
* @param {string} bucket
* @param {string} prefix
* @return {Promise<Array<string>>}
*/
async function listS3Bucket(bucket, prefix) {
// @ts-ignore access to internal library helper
const client = backupPersistor._getClientForBucket(bucket)
const response = await client
.listObjectsV2({ Bucket: bucket, Prefix: prefix })
.promise()
return (response.Contents || []).map(item => item.Key || '')
}
/**
* @param {ObjectId} projectId
* @return {Promise<Response>}
*/
async function deleteProject(projectId) {
return await fetch(testServer.testUrl(`/project/${projectId}/backup`), {
method: 'DELETE',
headers: { Authorization: testServer.basicAuthHeader },
})
}
/**
* @param {number|ObjectId} historyId
* @return {Promise<void>}
*/
async function expectToHaveBackup(historyId) {
const prefix = projectKey.format(historyId.toString()) + '/'
expect(await listS3Bucket(deksBucket, prefix)).to.have.length(1)
expect(await listS3Bucket(chunksBucket, prefix)).to.have.length(2)
expect(await listS3Bucket(projectBlobsBucket, prefix)).to.have.length(2)
}
/**
* @param {number|ObjectId} historyId
* @return {Promise<void>}
*/
async function expectToHaveNoBackup(historyId) {
const prefix = projectKey.format(historyId.toString()) + '/'
expect(await listS3Bucket(deksBucket, prefix)).to.have.length(0)
expect(await listS3Bucket(chunksBucket, prefix)).to.have.length(0)
expect(await listS3Bucket(projectBlobsBucket, prefix)).to.have.length(0)
}
describe('backupDeletion', function () {
beforeEach(cleanup.everything)
beforeEach('create health check projects', async function () {
await testProjects.createEmptyProject('42')
await testProjects.createEmptyProject('000000000000000000000042')
})
beforeEach(testServer.listenOnRandomPort)
it('renders 200 on /status', async function () {
const response = await fetch(testServer.testUrl('/status'))
expect(response.status).to.equal(200)
})
it('renders 200 on /health_check', async function () {
const response = await fetch(testServer.testUrl('/health_check'))
expect(response.status).to.equal(200)
})
describe('DELETE /project/:projectId', function () {
const postgresHistoryId = 1
const projectIdPostgres = new ObjectId('000000000000000000000001')
const projectIdMongoDB = new ObjectId('000000000000000000000002')
const projectIdNonDeleted = new ObjectId('000000000000000000000003')
const projectIdNonExpired = new ObjectId('000000000000000000000004')
const projectIdWithChunks = new ObjectId('000000000000000000000005')
const projectIdNoHistoryId = new ObjectId('000000000000000000000006')
beforeEach('populate mongo', async function () {
await deletedProjectsCollection.insertMany([
{
_id: new ObjectId(),
deleterData: {
deletedProjectId: projectIdPostgres,
deletedAt: new Date('2024-01-01T00:00:00Z'),
deletedProjectOverleafHistoryId: postgresHistoryId,
},
},
{
_id: new ObjectId(),
deleterData: {
deletedProjectId: projectIdNonExpired,
deletedAt: new Date(),
deletedProjectOverleafHistoryId: projectIdNonExpired.toString(),
},
},
{
_id: new ObjectId(),
deleterData: {
deletedProjectId: projectIdNoHistoryId,
deletedAt: new Date('2024-01-01T00:00:00Z'),
},
},
...[projectIdMongoDB, projectIdWithChunks].map(projectId => {
return {
_id: new ObjectId(),
deleterData: {
deletedProjectId: projectId,
deletedAt: new Date('2024-01-01T00:00:00Z'),
deletedProjectOverleafHistoryId: projectId.toString(),
},
}
}),
])
})
beforeEach('initialize history', async function () {
await testProjects.createEmptyProject(projectIdWithChunks.toString())
})
beforeEach('create a file in s3', async function () {
const historyIds = [
postgresHistoryId,
projectIdMongoDB,
projectIdNonDeleted,
projectIdNonExpired,
projectIdWithChunks,
projectIdNoHistoryId,
]
const jobs = []
for (const historyId of historyIds) {
jobs.push(
backupPersistor.sendStream(
projectBlobsBucket,
makeProjectKey(historyId, 'a'.repeat(40)),
Stream.Readable.from(['blob a']),
{ contentLength: 6 }
)
)
jobs.push(
backupPersistor.sendStream(
projectBlobsBucket,
makeProjectKey(historyId, 'b'.repeat(40)),
Stream.Readable.from(['blob b']),
{ contentLength: 6 }
)
)
jobs.push(
backupPersistor.sendStream(
chunksBucket,
projectKey.format(historyId) + '/111',
Stream.Readable.from(['chunk 1']),
{ contentLength: 7 }
)
)
jobs.push(
backupPersistor.sendStream(
chunksBucket,
projectKey.format(historyId) + '/222',
Stream.Readable.from(['chunk 2']),
{ contentLength: 7 }
)
)
}
await Promise.all(jobs)
})
it('renders 401 without auth', async function () {
const response = await fetch(
testServer.testUrl('/project/000000000000000000000042/backup'),
{ method: 'DELETE' }
)
expect(response.status).to.equal(401)
expect(response.headers.get('www-authenticate')).to.match(/^Basic/)
})
it('returns 422 when not deleted', async function () {
const response = await deleteProject(projectIdNonDeleted)
expect(response.status).to.equal(422)
expect(await response.text()).to.equal(
'refusing to delete non-deleted project'
)
await expectToHaveBackup(projectIdNonDeleted)
})
it('returns 422 when not expired', async function () {
const response = await deleteProject(projectIdNonExpired)
expect(response.status).to.equal(422)
expect(await response.text()).to.equal(
'refusing to delete non-expired project'
)
await expectToHaveBackup(projectIdNonExpired)
})
it('returns 422 when live-history not deleted', async function () {
const response = await deleteProject(projectIdWithChunks)
expect(response.status).to.equal(422)
expect(await response.text()).to.equal(
'refusing to delete project with remaining chunks'
)
await expectToHaveBackup(projectIdWithChunks)
})
it('returns 422 when historyId is unknown', async function () {
const response = await deleteProject(projectIdNoHistoryId)
expect(response.status).to.equal(422)
expect(await response.text()).to.equal(
'refusing to delete project with unknown historyId'
)
await expectToHaveBackup(projectIdNoHistoryId)
})
it('should successfully delete postgres id', async function () {
await expectToHaveBackup(postgresHistoryId)
const response = await deleteProject(projectIdPostgres)
expect(response.status).to.equal(204)
await expectToHaveNoBackup(postgresHistoryId)
})
it('should successfully delete mongo id', async function () {
await expectToHaveBackup(projectIdMongoDB)
const response = await deleteProject(projectIdMongoDB)
expect(response.status).to.equal(204)
await expectToHaveNoBackup(projectIdMongoDB)
})
})
})


@@ -0,0 +1,375 @@
// @ts-check
import cleanup from '../storage/support/cleanup.js'
import fetch from 'node-fetch'
import testServer from './support/test_backup_verifier_server.mjs'
import { expect } from 'chai'
import testProjects from './support/test_projects.js'
import {
backupPersistor,
chunksBucket,
projectBlobsBucket,
} from '../../../../storage/lib/backupPersistor.mjs'
import {
BlobStore,
makeProjectKey,
} from '../../../../storage/lib/blob_store/index.js'
import Stream from 'node:stream'
import * as zlib from 'node:zlib'
import { promisify } from 'node:util'
import { execFile } from 'node:child_process'
import { NotFoundError } from '@overleaf/object-persistor/src/Errors.js'
import { chunkStore } from '../../../../storage/index.js'
import { Change, File, Operation } from 'overleaf-editor-core'
import Crypto from 'node:crypto'
import path from 'node:path'
import projectKey from '../../../../storage/lib/project_key.js'
import { historyStore } from '../../../../storage/lib/history_store.js'
/**
* @typedef {import("node-fetch").Response} Response
* @typedef {import("overleaf-editor-core").Blob} Blob
*/
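/**
 * Run the verify_project script against a project and capture its output.
 * @param {string} historyId
 * @param {boolean} [expectFail] suppress logging when a failure is expected
 * @return {Promise<{status: number, stdout: string, stderr: string}>}
 */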
async function verifyProjectScript(historyId, expectFail = true) {
try {
const result = await promisify(execFile)(
process.argv0,
['storage/scripts/verify_project.mjs', `--historyId=${historyId}`],
{
encoding: 'utf-8',
timeout: 5_000,
env: {
...process.env,
LOG_LEVEL: 'warn',
},
}
)
return { status: 0, stdout: result.stdout, stderr: result.stderr }
} catch (err) {
if (
err &&
typeof err === 'object' &&
'stdout' in err &&
'code' in err &&
'stderr' in err
) {
if (!expectFail) {
console.log(err)
}
return {
stdout: typeof err.stdout === 'string' ? err.stdout : '',
status: typeof err.code === 'number' ? err.code : -1,
        stderr: typeof err.stderr === 'string' ? err.stderr : '',
}
}
throw err
}
}
/**
* @param {string} historyId
* @param {string} hash
 * @param {boolean} [expectFail] suppress logging when a failure is expected
 * @return {Promise<{stdout: string, status: number}>}
*/
async function verifyBlobScript(historyId, hash, expectFail = true) {
try {
const result = await promisify(execFile)(
process.argv0,
[
'storage/scripts/verify_backup_blob.mjs',
`--historyId=${historyId}`,
hash,
],
{
encoding: 'utf-8',
timeout: 5_000,
env: {
...process.env,
LOG_LEVEL: 'warn',
},
}
)
return { status: 0, stdout: result.stdout }
} catch (err) {
if (err && typeof err === 'object' && 'stdout' in err && 'code' in err) {
if (!expectFail) {
console.log(err)
}
return {
stdout: typeof err.stdout === 'string' ? err.stdout : '',
status: typeof err.code === 'number' ? err.code : -1,
}
}
throw err
}
}
/**
* @param {string} historyId
* @param {string} hash
* @return {Promise<Response>}
*/
async function verifyBlobHTTP(historyId, hash) {
return await fetch(
testServer.testUrl(`/history/${historyId}/blob/${hash}/verify`),
{ method: 'GET' }
)
}
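/**
 * Upload the latest chunk of a project's history to the backup chunks bucket.
 * @param {string} historyId
 * @return {Promise<void>}
 */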
async function backupChunk(historyId) {
const newChunk = await chunkStore.loadLatestRaw(historyId)
const { buffer: chunkBuffer } = await historyStore.loadRawWithBuffer(
historyId,
newChunk.id
)
const md5 = Crypto.createHash('md5').update(chunkBuffer)
await backupPersistor.sendStream(
chunksBucket,
path.join(
projectKey.format(historyId),
projectKey.pad(newChunk.startVersion)
),
Stream.Readable.from([chunkBuffer]),
{
contentType: 'application/json',
contentEncoding: 'gzip',
contentLength: chunkBuffer.byteLength,
sourceMd5: md5.digest('hex'),
}
)
}
const FIFTEEN_MINUTES_IN_MS = 900_000
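/**
 * Push a change that adds a file to the project's latest chunk. The
 * creationDate can be backdated (e.g. by FIFTEEN_MINUTES_IN_MS) so that the
 * change falls outside the backup RPO window checked by the verifier.
 * @param {string} fileContents
 * @param {string} filePath
 * @param {string} historyId
 * @param {Object} options
 * @param {Date} [options.creationDate]
 * @return {Promise<void>}
 */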
async function addFileInNewChunk(
fileContents,
filePath,
historyId,
{ creationDate = new Date() }
) {
const chunk = await chunkStore.loadLatest(historyId)
const operation = Operation.addFile(
`${historyId}.txt`,
File.fromString(fileContents)
)
const changes = [new Change([operation], creationDate, [])]
chunk.pushChanges(changes)
await chunkStore.update(historyId, 0, chunk)
}
/**
* @param {string} historyId
 * @param {Object} [options]
 * @param {boolean} [options.shouldBackupBlob]
 * @param {boolean} [options.shouldBackupChunk]
 * @param {boolean} [options.shouldCreateChunk]
 * @return {Promise<string>} hash of the blob created for the project
*/
async function prepareProjectAndBlob(
historyId,
{ shouldBackupBlob, shouldBackupChunk, shouldCreateChunk } = {
shouldBackupBlob: true,
shouldBackupChunk: true,
shouldCreateChunk: true,
}
) {
await testProjects.createEmptyProject(historyId)
const blobStore = new BlobStore(historyId)
const fileContents = historyId
const blob = await blobStore.putString(fileContents)
if (shouldCreateChunk) {
await addFileInNewChunk(fileContents, `${historyId}.txt`, historyId, {
creationDate: new Date(new Date().getTime() - FIFTEEN_MINUTES_IN_MS),
})
}
if (shouldBackupBlob) {
const gzipped = zlib.gzipSync(Buffer.from(historyId))
await backupPersistor.sendStream(
projectBlobsBucket,
makeProjectKey(historyId, blob.getHash()),
Stream.Readable.from([gzipped]),
{ contentLength: gzipped.byteLength, contentEncoding: 'gzip' }
)
await checkDEKExists(historyId)
}
if (shouldCreateChunk && shouldBackupChunk) {
await backupChunk(historyId)
}
return blob.getHash()
}
/**
* @param {string} historyId
* @return {Promise<void>}
*/
async function checkDEKExists(historyId) {
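  // forProjectRO loads the project's data encryption key; it rejects with
  // NotFoundError when no DEK exists, which the tests below assert.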
await backupPersistor.forProjectRO(
projectBlobsBucket,
makeProjectKey(historyId, '')
)
}
describe('backupVerifier', function () {
const historyIdPostgres = '42'
const historyIdMongo = '000000000000000000000042'
let blobHashPG, blobHashMongo, blobPathPG
beforeEach(cleanup.everything)
beforeEach('create health check projects', async function () {
;[blobHashPG, blobHashMongo] = await Promise.all([
      prepareProjectAndBlob(historyIdPostgres),
      prepareProjectAndBlob(historyIdMongo),
])
blobPathPG = makeProjectKey(historyIdPostgres, blobHashPG)
})
beforeEach(testServer.listenOnRandomPort)
it('renders 200 on /status', async function () {
const response = await fetch(testServer.testUrl('/status'))
expect(response.status).to.equal(200)
})
it('renders 200 on /health_check', async function () {
const response = await fetch(testServer.testUrl('/health_check'))
expect(response.status).to.equal(200)
})
describe('storage/scripts/verify_project.mjs', function () {
describe('when the project is appropriately backed up', function () {
it('should return 0', async function () {
const response = await verifyProjectScript(historyIdPostgres, false)
expect(response.status).to.equal(0)
})
})
describe('when the project chunk is not backed up', function () {
let response
beforeEach(async function () {
await prepareProjectAndBlob('000000000000000000000043', {
shouldBackupChunk: false,
shouldBackupBlob: true,
shouldCreateChunk: true,
})
response = await verifyProjectScript('000000000000000000000043')
})
it('should return 1', async function () {
expect(response.status).to.equal(1)
})
it('should emit an error message referring to a missing chunk', async function () {
const stderr = response.stderr
expect(stderr).to.include('BackupRPOViolationChunkNotBackedUpError')
})
})
describe('when a project blob is not backed up', function () {
let response
beforeEach(async function () {
await prepareProjectAndBlob('43', {
shouldBackupChunk: true,
shouldBackupBlob: false,
shouldCreateChunk: true,
})
response = await verifyProjectScript('43')
})
it('should return 1', function () {
expect(response.status).to.equal(1)
})
it('includes a BackupCorruptedError in stderr', function () {
expect(response.stderr).to.include(
'BackupCorruptedMissingBlobError: missing blob'
)
})
})
})
describe('storage/scripts/verify_backup_blob.mjs', function () {
it('throws and does not create DEK if missing', async function () {
const historyId = '404'
const hash = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
const response = await verifyBlobScript(historyId, hash)
expect(response.status).to.equal(1)
expect(response.stdout).to.include('dek does not exist')
await expect(checkDEKExists(historyId)).to.be.rejectedWith(NotFoundError)
})
it('throws when deleted in db', async function () {
const blobStore = new BlobStore(historyIdPostgres)
await blobStore.deleteBlobs()
const response = await verifyBlobScript(historyIdPostgres, blobHashPG)
expect(response.status).to.equal(1)
expect(response.stdout).to.include(`blob ${blobHashPG} not found`)
})
it('throws when not existing', async function () {
await backupPersistor.deleteObject(projectBlobsBucket, blobPathPG)
const result = await verifyBlobScript(historyIdPostgres, blobHashPG)
expect(result.status).to.equal(1)
expect(result.stdout).to.include('missing blob')
})
it('throws when corrupted', async function () {
await backupPersistor.sendStream(
projectBlobsBucket,
blobPathPG,
Stream.Readable.from(['something else']),
{ contentLength: 14 }
)
const result = await verifyBlobScript(historyIdPostgres, blobHashPG)
expect(result.status).to.equal(1)
expect(result.stdout).to.include('hash mismatch for backed up blob')
})
it('should successfully verify from postgres', async function () {
const result = await verifyBlobScript(
historyIdPostgres,
blobHashPG,
false
)
expect(result.status).to.equal(0)
expect(result.stdout.split('\n')).to.include('OK')
})
it('should successfully verify from mongo', async function () {
const result = await verifyBlobScript(
historyIdMongo,
blobHashMongo,
false
)
expect(result.status).to.equal(0)
expect(result.stdout.split('\n')).to.include('OK')
})
})
describe('GET /history/:historyId/blob/:hash/verify', function () {
it('returns 404 when deleted in db', async function () {
const blobStore = new BlobStore(historyIdPostgres)
await blobStore.deleteBlobs()
const response = await verifyBlobHTTP(historyIdPostgres, blobHashPG)
expect(response.status).to.equal(404)
expect(await response.text()).to.equal(`blob ${blobHashPG} not found`)
})
it('returns 422 and does not create DEK if missing', async function () {
const historyId = '404'
const hash = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
const response = await verifyBlobHTTP(historyId, hash)
expect(response.status).to.equal(422)
expect(await response.text()).to.equal('dek does not exist')
await expect(checkDEKExists(historyId)).to.be.rejectedWith(NotFoundError)
})
it('returns 422 when not existing', async function () {
await backupPersistor.deleteObject(projectBlobsBucket, blobPathPG)
const response = await verifyBlobHTTP(historyIdPostgres, blobHashPG)
expect(response.status).to.equal(422)
expect(await response.text()).to.equal('missing blob')
})
it('returns 422 when corrupted', async function () {
await backupPersistor.sendStream(
projectBlobsBucket,
blobPathPG,
Stream.Readable.from(['something else']),
{ contentLength: 14 }
)
const response = await verifyBlobHTTP(historyIdPostgres, blobHashPG)
expect(response.status).to.equal(422)
expect(await response.text()).to.equal('hash mismatch for backed up blob')
})
it('should successfully verify from postgres', async function () {
const response = await verifyBlobHTTP(historyIdPostgres, blobHashPG)
expect(response.status).to.equal(200)
})
it('should successfully verify from mongo', async function () {
const response = await verifyBlobHTTP(historyIdMongo, blobHashMongo)
expect(response.status).to.equal(200)
})
})
})


@@ -0,0 +1,396 @@
'use strict'
const BPromise = require('bluebird')
const { expect } = require('chai')
const HTTPStatus = require('http-status')
const fetch = require('node-fetch')
const fs = BPromise.promisifyAll(require('node:fs'))
const cleanup = require('../storage/support/cleanup')
const fixtures = require('../storage/support/fixtures')
const testFiles = require('../storage/support/test_files')
const testProjects = require('./support/test_projects')
const testServer = require('./support/test_server')
const core = require('overleaf-editor-core')
const Change = core.Change
const ChunkResponse = core.ChunkResponse
const File = core.File
const Operation = core.Operation
const Snapshot = core.Snapshot
const TextOperation = core.TextOperation
const blobHash = require('../../../../storage').blobHash
describe('overleaf ot', function () {
beforeEach(cleanup.everything)
beforeEach(fixtures.create)
this.timeout(10000) // it takes a while on Docker for Mac
it('can use API', function () {
let client, downloadZipClient
const basicAuthClient = testServer.basicAuthClient
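    // `client` authenticates with a project-scoped JWT, `downloadZipClient`
    // is scoped to the zip download endpoint, and `basicAuthClient` is used
    // for the import endpoints.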
return (
testProjects
.createEmptyProject()
.then(projectId => {
return testServer
.createClientForProject(projectId)
.then(clientForProject => {
client = clientForProject
return testServer.createClientForDownloadZip(projectId)
})
.then(clientForProject => {
downloadZipClient = clientForProject
return projectId
})
})
// the project is currently empty
.then(projectId => {
return client.apis.Project.getLatestContent({
project_id: projectId,
}).then(response => {
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(0)
return projectId
})
})
// upload a blob and add two files using it
.then(projectId => {
return fetch(
testServer.url(
`/api/projects/${projectId}/blobs/${testFiles.GRAPH_PNG_HASH}`,
{ qs: { pathname: 'graph_1.png' } }
),
{
method: 'PUT',
body: fs.createReadStream(testFiles.path('graph.png')),
headers: {
Authorization: testServer.basicAuthHeader,
},
}
)
.then(response => {
expect(response.ok).to.be.true
})
.then(() => {
const testFile = File.fromHash(testFiles.GRAPH_PNG_HASH)
const change = new Change(
[
Operation.addFile('graph_1.png', testFile),
Operation.addFile('graph_2.png', testFile),
],
new Date()
)
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: projectId,
end_version: 0,
return_snapshot: 'hashed',
changes: [change.toRaw()],
})
})
.then(() => projectId)
})
// get the new project state
.then(projectId => {
return client.apis.Project.getLatestContent({
project_id: projectId,
}).then(response => {
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(2)
const file0 = snapshot.getFile('graph_1.png')
expect(file0.getHash()).to.equal(testFiles.GRAPH_PNG_HASH)
const file1 = snapshot.getFile('graph_2.png')
expect(file1.getHash()).to.equal(testFiles.GRAPH_PNG_HASH)
return projectId
})
})
// get the history
.then(projectId => {
return client.apis.Project.getLatestHistory({
project_id: projectId,
}).then(response => {
const chunk = ChunkResponse.fromRaw(response.obj).getChunk()
const changes = chunk.getChanges()
expect(changes.length).to.equal(1)
const change0Timestamp = changes[0].getTimestamp().getTime()
expect(change0Timestamp).to.be.closeTo(Date.now(), 1e4)
return projectId
})
})
// upload an empty file
.then(projectId => {
return fetch(
testServer.url(
`/api/projects/${projectId}/blobs/${File.EMPTY_FILE_HASH}`,
{ qs: { pathname: 'main.tex' } }
),
{
method: 'PUT',
body: fs.createReadStream(testFiles.path('empty.tex')),
headers: {
Authorization: testServer.basicAuthHeader,
},
}
)
.then(response => {
expect(response.ok).to.be.true
})
.then(() => {
const testFile = File.fromHash(File.EMPTY_FILE_HASH)
const change = new Change(
[Operation.addFile('main.tex', testFile)],
new Date()
)
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: projectId,
end_version: 1,
return_snapshot: 'hashed',
changes: [change.toRaw()],
})
})
.then(() => projectId)
})
.then(projectId => {
// Fetch empty file blob
return client.apis.Project.getProjectBlob({
project_id: projectId,
hash: File.EMPTY_FILE_HASH,
})
.then(response => {
expect(response.headers['content-type']).to.equal(
'application/octet-stream'
)
return response.data.arrayBuffer()
})
.then(buffer => {
expect(buffer).to.deep.equal(new ArrayBuffer(0))
return projectId
})
})
// get the history
.then(projectId => {
return client.apis.Project.getLatestHistory({
project_id: projectId,
}).then(response => {
const chunk = ChunkResponse.fromRaw(response.obj).getChunk()
const changes = chunk.getChanges()
expect(changes.length).to.equal(2)
return projectId
})
})
// get the new project state
.then(projectId => {
return client.apis.Project.getLatestContent({
project_id: projectId,
}).then(response => {
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(3)
expect(snapshot.getFile('graph_1.png').getHash()).to.equal(
testFiles.GRAPH_PNG_HASH
)
expect(snapshot.getFile('graph_2.png').getHash()).to.equal(
testFiles.GRAPH_PNG_HASH
)
expect(snapshot.getFile('main.tex').getContent()).to.equal('')
return projectId
})
})
// edit the main file
.then(projectId => {
const change = new Change(
[
Operation.editFile(
'main.tex',
TextOperation.fromJSON({ textOperation: ['hello'] })
),
],
new Date()
)
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: projectId,
changes: [change.toRaw()],
end_version: 2,
return_snapshot: 'hashed',
}).then(response => {
expect(response.status).to.equal(HTTPStatus.CREATED)
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(3)
expect(snapshot.getFile('graph_1.png').getHash()).to.equal(
testFiles.GRAPH_PNG_HASH
)
expect(snapshot.getFile('graph_2.png').getHash()).to.equal(
testFiles.GRAPH_PNG_HASH
)
expect(snapshot.getFile('main.tex').getHash()).to.equal(
blobHash.fromString('hello')
)
return projectId
})
})
// get the new project state
.then(projectId => {
return client.apis.Project.getLatestContent({
project_id: projectId,
}).then(response => {
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(3)
expect(snapshot.getFile('graph_1.png').getHash()).to.equal(
testFiles.GRAPH_PNG_HASH
)
expect(snapshot.getFile('graph_2.png').getHash()).to.equal(
testFiles.GRAPH_PNG_HASH
)
const mainFile = snapshot.getFile('main.tex')
expect(mainFile.getHash()).to.be.null
expect(mainFile.getContent()).to.equal('hello')
return projectId
})
})
// edit the main file again
.then(projectId => {
const change = new Change(
[
Operation.editFile(
'main.tex',
TextOperation.fromJSON({ textOperation: [1, -4, 'i world'] })
),
],
new Date()
)
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: projectId,
changes: [change.toRaw()],
end_version: 3,
return_snapshot: 'hashed',
}).then(response => {
expect(response.status).to.equal(HTTPStatus.CREATED)
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(3)
expect(snapshot.getFile('main.tex').getHash()).to.equal(
blobHash.fromString('hi world')
)
return projectId
})
})
// get the new project state
.then(projectId => {
return client.apis.Project.getLatestContent({
project_id: projectId,
}).then(response => {
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(3)
expect(snapshot.getFile('graph_1.png')).to.exist
expect(snapshot.getFile('graph_2.png')).to.exist
const mainFile = snapshot.getFile('main.tex')
expect(mainFile.getHash()).to.be.null
expect(mainFile.getContent()).to.equal('hi world')
return projectId
})
})
// rename the text file
.then(projectId => {
const change = new Change(
[Operation.moveFile('main.tex', 'intro.tex')],
new Date()
)
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: projectId,
changes: [change.toRaw()],
end_version: 4,
return_snapshot: 'hashed',
}).then(response => {
expect(response.status).to.equal(HTTPStatus.CREATED)
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(3)
expect(snapshot.getFile('intro.tex').getHash()).to.equal(
blobHash.fromString('hi world')
)
return projectId
})
})
// get the new project state
.then(projectId => {
return client.apis.Project.getLatestContent({
project_id: projectId,
}).then(response => {
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(3)
expect(snapshot.getFile('graph_1.png')).to.exist
expect(snapshot.getFile('graph_2.png')).to.exist
const mainFile = snapshot.getFile('intro.tex')
expect(mainFile.getHash()).to.be.null
expect(mainFile.getContent()).to.equal('hi world')
return projectId
})
})
// remove a graph
.then(projectId => {
const change = new Change(
[Operation.removeFile('graph_1.png')],
new Date()
)
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: projectId,
changes: [change.toRaw()],
end_version: 5,
return_snapshot: 'hashed',
}).then(response => {
expect(response.status).to.equal(HTTPStatus.CREATED)
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(2)
return projectId
})
})
// get the new project state
.then(projectId => {
return client.apis.Project.getLatestContent({
project_id: projectId,
}).then(response => {
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(2)
expect(snapshot.getFile('graph_2.png')).to.exist
const mainFile = snapshot.getFile('intro.tex')
expect(mainFile.getHash()).to.be.null
expect(mainFile.getContent()).to.equal('hi world')
return projectId
})
})
// download zip with project content
.then(projectId => {
return downloadZipClient.apis.Project.getZip({
project_id: projectId,
version: 6,
}).then(response => {
expect(response.status).to.equal(HTTPStatus.OK)
const headers = response.headers
expect(headers['content-type']).to.equal('application/octet-stream')
expect(headers['content-disposition']).to.equal(
'attachment; filename=project.zip'
)
})
})
)
})
})


@@ -0,0 +1,251 @@
const { expect } = require('chai')
const config = require('config')
const fs = require('node:fs')
const fetch = require('node-fetch')
const HTTPStatus = require('http-status')
const cleanup = require('../storage/support/cleanup')
const fixtures = require('../storage/support/fixtures')
const testFiles = require('../storage/support/test_files')
const testServer = require('./support/test_server')
const { expectHttpError } = require('./support/expect_response')
const { globalBlobs } = require('../../../../storage/lib/mongodb.js')
const {
loadGlobalBlobs,
} = require('../../../../storage/lib/blob_store/index.js')
describe('Project blobs API', function () {
const projectId = '123'
beforeEach(cleanup.everything)
beforeEach(fixtures.create)
let client
let token
before(async function () {
client = await testServer.createClientForProject(projectId)
token = testServer.createTokenForProject(projectId)
})
it('returns 404 if the blob is not found', async function () {
const testHash = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
await expectHttpError(
client.apis.Project.getProjectBlob({
project_id: projectId,
hash: testHash,
}),
HTTPStatus.NOT_FOUND
)
})
it('checks if file hash matches the hash parameter', async function () {
const testHash = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
const response = await fetch(
testServer.url(`/api/projects/${projectId}/blobs/${testHash}`),
{
method: 'PUT',
headers: { Authorization: `Bearer ${token}` },
body: fs.createReadStream(testFiles.path('hello.txt')),
}
)
expect(response.status).to.equal(HTTPStatus.CONFLICT)
// check that it did not store the file
await expectHttpError(
client.apis.Project.getProjectBlob({
project_id: projectId,
hash: testFiles.HELLO_TXT_HASH,
}),
HTTPStatus.NOT_FOUND
)
})
it('rejects oversized files', async function () {
const testHash = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
const buffer = Buffer.alloc(
parseInt(config.get('maxFileUploadSize'), 10) + 1
)
const response = await fetch(
testServer.url(`/api/projects/${projectId}/blobs/${testHash}`),
{
method: 'PUT',
headers: { Authorization: `Bearer ${token}` },
body: buffer,
}
)
expect(response.status).to.equal(HTTPStatus.REQUEST_ENTITY_TOO_LARGE)
})
  describe('with an existing blob', function () {
let fileContents
beforeEach(async function () {
fileContents = await fs.promises.readFile(testFiles.path('hello.txt'))
const response = await fetch(
testServer.url(
`/api/projects/${projectId}/blobs/${testFiles.HELLO_TXT_HASH}`
),
{
method: 'PUT',
headers: { Authorization: `Bearer ${token}` },
body: fileContents,
}
)
expect(response.ok).to.be.true
})
it('fulfills a request with a JWT header', async function () {
const response = await client.apis.Project.getProjectBlob({
project_id: projectId,
hash: testFiles.HELLO_TXT_HASH,
})
const responseText = await response.data.text()
expect(responseText).to.equal(fileContents.toString())
})
it('fulfills a request with a token parameter', async function () {
const url = new URL(
testServer.url(
`/api/projects/${projectId}/blobs/${testFiles.HELLO_TXT_HASH}`
)
)
url.searchParams.append('token', token)
const response = await fetch(url)
const payload = await response.text()
expect(payload).to.equal(fileContents.toString())
})
it('supports range request', async function () {
const url = new URL(
testServer.url(
`/api/projects/${projectId}/blobs/${testFiles.HELLO_TXT_HASH}`
)
)
url.searchParams.append('token', token)
const response = await fetch(url, { headers: { Range: 'bytes=0-4' } })
const payload = await response.text()
expect(payload).to.equal(fileContents.toString().slice(0, 4))
})
it('supports HEAD request', async function () {
const url = new URL(
testServer.url(
`/api/projects/${projectId}/blobs/${testFiles.HELLO_TXT_HASH}`
)
)
url.searchParams.append('token', token)
const response = await fetch(url, { method: 'HEAD' })
expect(response.headers.get('Content-Length')).to.equal(
testFiles.HELLO_TXT_BYTE_LENGTH.toString()
)
const payload = await response.text()
expect(payload).to.have.length(0)
})
    it('rejects an unauthorized request', async function () {
const response = await fetch(
testServer.url(
`/api/projects/${projectId}/blobs/${testFiles.HELLO_TXT_HASH}`
)
)
expect(response.status).to.equal(HTTPStatus.UNAUTHORIZED)
})
it('copies the blob to another project', async function () {
const targetProjectId = '456'
const targetClient =
await testServer.createClientForProject(targetProjectId)
const targetToken = testServer.createTokenForProject(targetProjectId)
const url = new URL(
testServer.url(
`/api/projects/${targetProjectId}/blobs/${testFiles.HELLO_TXT_HASH}`
)
)
url.searchParams.append('copyFrom', projectId)
const response = await fetch(url, {
method: 'POST',
headers: { Authorization: `Bearer ${targetToken}` },
})
expect(response.status).to.equal(HTTPStatus.CREATED)
const newBlobResponse = await targetClient.apis.Project.getProjectBlob({
project_id: targetProjectId,
hash: testFiles.HELLO_TXT_HASH,
})
const newBlobResponseText = await newBlobResponse.data.text()
expect(newBlobResponseText).to.equal(fileContents.toString())
})
it('skips copying a blob to another project if it already exists', async function () {
const targetProjectId = '456'
const targetClient =
await testServer.createClientForProject(targetProjectId)
const targetToken = testServer.createTokenForProject(targetProjectId)
const fileContents = await fs.promises.readFile(
testFiles.path('hello.txt')
)
const uploadResponse = await fetch(
testServer.url(
`/api/projects/${targetProjectId}/blobs/${testFiles.HELLO_TXT_HASH}`
),
{
method: 'PUT',
headers: { Authorization: `Bearer ${targetToken}` },
body: fileContents,
}
)
expect(uploadResponse.ok).to.be.true
const url = new URL(
testServer.url(
`/api/projects/${targetProjectId}/blobs/${testFiles.HELLO_TXT_HASH}`
)
)
url.searchParams.append('copyFrom', projectId)
const response = await fetch(url, {
method: 'POST',
headers: { Authorization: `Bearer ${targetToken}` },
})
expect(response.status).to.equal(HTTPStatus.NO_CONTENT)
const newBlobResponse = await targetClient.apis.Project.getProjectBlob({
project_id: targetProjectId,
hash: testFiles.HELLO_TXT_HASH,
})
const newBlobResponseText = await newBlobResponse.data.text()
expect(newBlobResponseText).to.equal(fileContents.toString())
})
})
  describe('with a global blob', function () {
before(async function () {
await globalBlobs.insertOne({
_id: testFiles.STRING_A_HASH,
byteLength: 1,
stringLength: 1,
})
await loadGlobalBlobs()
})
it('does not copy global blobs', async function () {
const targetProjectId = '456'
const targetToken = testServer.createTokenForProject(targetProjectId)
const url = new URL(
testServer.url(
`/api/projects/${targetProjectId}/blobs/${testFiles.STRING_A_HASH}`
)
)
url.searchParams.append('copyFrom', projectId)
const response = await fetch(url, {
method: 'POST',
headers: { Authorization: `Bearer ${targetToken}` },
})
expect(response.status).to.equal(HTTPStatus.NO_CONTENT)
})
})
})


@@ -0,0 +1,57 @@
'use strict'
const BPromise = require('bluebird')
const { expect } = require('chai')
const HTTPStatus = require('http-status')
const fetch = require('node-fetch')
const fs = BPromise.promisifyAll(require('node:fs'))
const cleanup = require('../storage/support/cleanup')
const fixtures = require('../storage/support/fixtures')
const testFiles = require('../storage/support/test_files')
const testProjects = require('./support/test_projects')
const testServer = require('./support/test_server')
const { Change, File, Operation } = require('overleaf-editor-core')
describe('project import', function () {
beforeEach(cleanup.everything)
beforeEach(fixtures.create)
it('skips generating the snapshot by default', async function () {
const basicAuthClient = testServer.basicAuthClient
const projectId = await testProjects.createEmptyProject()
// upload an empty file
const response = await fetch(
testServer.url(
`/api/projects/${projectId}/blobs/${File.EMPTY_FILE_HASH}`,
{ qs: { pathname: 'main.tex' } }
),
{
method: 'PUT',
body: fs.createReadStream(testFiles.path('empty.tex')),
headers: {
Authorization: testServer.basicAuthHeader,
},
}
)
expect(response.ok).to.be.true
const testFile = File.fromHash(File.EMPTY_FILE_HASH)
const testChange = new Change(
[Operation.addFile('main.tex', testFile)],
new Date()
)
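    // No return_snapshot option is passed, so the server should skip
    // generating a snapshot and return an empty response body.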
const importResponse =
await basicAuthClient.apis.ProjectImport.importChanges1({
project_id: projectId,
end_version: 0,
changes: [testChange.toRaw()],
})
expect(importResponse.status).to.equal(HTTPStatus.CREATED)
expect(importResponse.obj).to.deep.equal({})
})
})


@@ -0,0 +1,853 @@
const BPromise = require('bluebird')
const { expect } = require('chai')
const fs = BPromise.promisifyAll(require('node:fs'))
const HTTPStatus = require('http-status')
const fetch = require('node-fetch')
const cleanup = require('../storage/support/cleanup')
const fixtures = require('../storage/support/fixtures')
const testFiles = require('../storage/support/test_files')
const expectResponse = require('./support/expect_response')
const testServer = require('./support/test_server')
const core = require('overleaf-editor-core')
const testProjects = require('./support/test_projects')
const Change = core.Change
const ChunkResponse = core.ChunkResponse
const File = core.File
const Operation = core.Operation
const Origin = core.Origin
const Snapshot = core.Snapshot
const TextOperation = core.TextOperation
const V2DocVersions = core.V2DocVersions
const knex = require('../../../../storage').knex
describe('history import', function () {
beforeEach(cleanup.everything)
beforeEach(fixtures.create)
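  // Helpers for building Change objects and serialising them for the import API.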
function changeToRaw(change) {
return change.toRaw()
}
function makeChange(operation) {
return new Change([operation], new Date(), [])
}
let basicAuthClient
let pseudoJwtBasicAuthClient
let clientForProject
before(async function () {
basicAuthClient = testServer.basicAuthClient
pseudoJwtBasicAuthClient = testServer.pseudoJwtBasicAuthClient
clientForProject = await testServer.createClientForProject('1')
})
it('creates blobs and then imports a snapshot and history', function () {
// We need to be able to set the projectId to match an existing doc ID.
const testProjectId = '1'
const testFilePathname = 'main.tex'
const testAuthors = [123, null]
const testTextOperation0 = TextOperation.fromJSON({ textOperation: ['a'] })
const testTextOperation1 = TextOperation.fromJSON({
textOperation: [1, 'b'],
})
let testSnapshot
return fetch(
testServer.url(
`/api/projects/${testProjectId}/blobs/${File.EMPTY_FILE_HASH}`
),
{
method: 'PUT',
body: fs.createReadStream(testFiles.path('empty.tex')),
headers: {
Authorization: testServer.basicAuthHeader,
},
}
)
.then(response => {
expect(response.ok).to.be.true
})
.then(() => {
// Import project
testSnapshot = new Snapshot()
testSnapshot.addFile(
testFilePathname,
File.fromHash(File.EMPTY_FILE_HASH)
)
return basicAuthClient.apis.ProjectImport.importSnapshot1({
project_id: testProjectId,
snapshot: testSnapshot.toRaw(),
})
})
.then(response => {
// Check project is valid
expect(response.obj.projectId).to.equal(testProjectId)
})
.then(() => {
// Try importing the project again
return basicAuthClient.apis.ProjectImport.importSnapshot1({
project_id: testProjectId,
snapshot: testSnapshot.toRaw(),
})
})
.then(() => {
// Check that importing a duplicate fails
expect.fail()
})
.catch(expectResponse.conflict)
.then(() => {
// Get project history
return clientForProject.apis.Project.getLatestHistory({
project_id: testProjectId,
})
})
.then(response => {
// Check that the imported history is valid
const chunk = ChunkResponse.fromRaw(response.obj).getChunk()
const snapshot = chunk.getSnapshot()
expect(snapshot.countFiles()).to.equal(1)
const file = snapshot.getFile(testFilePathname)
expect(file.getHash()).to.eql(File.EMPTY_FILE_HASH)
expect(chunk.getChanges().length).to.equal(0)
expect(chunk.getEndVersion()).to.equal(0)
})
.then(() => {
// Import changes with an end version
const changes = [
makeChange(Operation.editFile(testFilePathname, testTextOperation0)),
makeChange(Operation.editFile(testFilePathname, testTextOperation1)),
]
changes[0].setAuthors(testAuthors)
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: testProjectId,
changes: changes.map(changeToRaw),
end_version: 0,
return_snapshot: 'hashed',
})
})
.then(response => {
expect(response.status).to.equal(HTTPStatus.CREATED)
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(1)
expect(snapshot.getFile('main.tex').getHash()).to.equal(
testFiles.STRING_AB_HASH
)
})
.then(() => {
// Get project history
return clientForProject.apis.Project.getLatestHistory({
project_id: testProjectId,
})
})
.then(response => {
// Check that the history is valid
const chunkResponse = ChunkResponse.fromRaw(response.obj)
const chunk = chunkResponse.getChunk()
const snapshot = chunk.getSnapshot()
expect(snapshot.countFiles()).to.equal(1)
const file = snapshot.getFile(testFilePathname)
expect(file.getHash()).to.equal(File.EMPTY_FILE_HASH)
expect(chunk.getChanges().length).to.equal(2)
const changeWithAuthors = chunk.getChanges()[0]
expect(changeWithAuthors.getAuthors().length).to.equal(2)
expect(changeWithAuthors.getAuthors()).to.deep.equal(testAuthors)
expect(chunk.getStartVersion()).to.equal(0)
expect(chunk.getEndVersion()).to.equal(2)
})
.then(() => {
return clientForProject.apis.Project.getLatestHistory({
project_id: testProjectId,
})
})
.then(response => {
// it should retrieve the same chunk
const chunkResponse = ChunkResponse.fromRaw(response.obj)
const chunk = chunkResponse.getChunk()
expect(chunk.getChanges().length).to.equal(2)
expect(chunk.getStartVersion()).to.equal(0)
expect(chunk.getEndVersion()).to.equal(2)
})
.then(() => {
// Get project's latest content
return clientForProject.apis.Project.getLatestContent({
project_id: testProjectId,
})
})
.then(response => {
// Check that the content is valid
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(1)
const file = snapshot.getFile(testFilePathname)
expect(file.getContent()).to.equal('ab')
})
})
it('rejects invalid changes in history', function () {
const testProjectId = '1'
const testFilePathname = 'main.tex'
const testTextOperation = TextOperation.fromJSON({
textOperation: ['a', 10],
})
let testSnapshot
return fetch(
testServer.url(
`/api/projects/${testProjectId}/blobs/${File.EMPTY_FILE_HASH}`
),
{
method: 'PUT',
body: fs.createReadStream(testFiles.path('empty.tex')),
headers: {
Authorization: testServer.basicAuthHeader,
},
}
)
.then(response => {
expect(response.ok).to.be.true
})
.then(() => {
// Import project
testSnapshot = new Snapshot()
testSnapshot.addFile(
testFilePathname,
File.fromHash(File.EMPTY_FILE_HASH)
)
return basicAuthClient.apis.ProjectImport.importSnapshot1({
project_id: testProjectId,
snapshot: testSnapshot.toRaw(),
})
})
.then(response => {
// Check project is valid
expect(response.obj.projectId).to.equal(testProjectId)
})
.then(() => {
// Import invalid changes
const changes = [
makeChange(Operation.editFile(testFilePathname, testTextOperation)),
]
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: testProjectId,
end_version: 0,
return_snapshot: 'hashed',
changes: changes.map(changeToRaw),
})
})
.then(() => {
// Check that this fails
expect.fail()
})
.catch(expectResponse.unprocessableEntity)
.then(() => {
// Get the latest content
return clientForProject.apis.Project.getLatestContent({
project_id: testProjectId,
})
})
.then(response => {
// Check that no changes have been stored
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(1)
const file = snapshot.getFile(testFilePathname)
expect(file.getContent()).to.equal('')
})
.then(() => {
// Send a change with the wrong end version that is not conflicting
// with the latest snapshot
const changes = [makeChange(Operation.removeFile(testFilePathname))]
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: testProjectId,
end_version: 10000,
        changes: changes.map(changeToRaw),
})
})
.then(() => {
// Check that this fails
expect.fail()
})
.catch(expectResponse.unprocessableEntity)
.then(() => {
// Get the latest project history
return clientForProject.apis.Project.getLatestHistory({
project_id: testProjectId,
})
})
.then(response => {
// Check that no changes have been stored
const chunkResponse = ChunkResponse.fromRaw(response.obj)
const changes = chunkResponse.getChunk().getChanges()
expect(changes).to.have.length(0)
})
})
it('creates and edits a file using changes', function () {
const testProjectId = '1'
const mainFilePathname = 'main.tex'
const testFilePathname = 'test.tex'
const testTextOperation = TextOperation.fromJSON({ textOperation: ['a'] })
const inexistentAuthors = [1234, 5678]
const projectVersion = '12345.0'
const v2DocVersions = new V2DocVersions({
'random-doc-id': { pathname: 'doc-path.tex', v: 123 },
})
const testLabelOrigin = Origin.fromRaw({
kind: 'saved ver',
})
const testRestoreOrigin = Origin.fromRaw({
kind: 'restore',
timestamp: '2016-01-01T00:00:00',
version: 1,
})
let testSnapshot
return fetch(
testServer.url(
`/api/projects/${testProjectId}/blobs/${File.EMPTY_FILE_HASH}`
),
{
method: 'PUT',
body: fs.createReadStream(testFiles.path('empty.tex')),
headers: {
Authorization: testServer.basicAuthHeader,
},
}
)
.then(response => {
expect(response.ok).to.be.true
})
.then(() => {
// Import a project
testSnapshot = new Snapshot()
testSnapshot.addFile(
mainFilePathname,
File.fromHash(File.EMPTY_FILE_HASH)
)
return basicAuthClient.apis.ProjectImport.importSnapshot1({
project_id: testProjectId,
snapshot: testSnapshot.toRaw(),
})
})
.then(response => {
// Check that the project is valid
expect(response.obj.projectId).to.equal(testProjectId)
})
.then(() => {
// Import changes
const testFile = File.fromHash(File.EMPTY_FILE_HASH)
const changes = [
makeChange(Operation.addFile(testFilePathname, testFile)),
makeChange(Operation.editFile(testFilePathname, testTextOperation)),
]
changes[0].setProjectVersion(projectVersion)
changes[1].setAuthors(inexistentAuthors)
changes[1].setV2DocVersions(v2DocVersions)
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: testProjectId,
end_version: 0,
return_snapshot: 'hashed',
changes: changes.map(changeToRaw),
})
})
.then(response => {
expect(response.status).to.equal(HTTPStatus.CREATED)
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(2)
expect(snapshot.getFile('main.tex').getHash()).to.equal(
File.EMPTY_FILE_HASH
)
expect(snapshot.getFile('test.tex').getHash()).to.equal(
testFiles.STRING_A_HASH
)
})
.then(() => {
// Get the project history
return clientForProject.apis.Project.getLatestHistory({
project_id: testProjectId,
})
})
.then(response => {
        // It should not fail when some of the authors no longer exist
const chunkResponse = ChunkResponse.fromRaw(response.obj)
const changes = chunkResponse.getChunk().getChanges()
expect(changes.length).to.equal(2)
const changeWithAuthor = changes[1]
expect(changeWithAuthor.getAuthors()).to.deep.equal(inexistentAuthors)
})
.then(() => {
// it should retrieve the latest snapshot when the changes set is empty
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: testProjectId,
end_version: 0,
return_snapshot: 'hashed',
changes: [],
})
})
.then(response => {
// Check latest snapshot
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(2)
expect(snapshot.getFile('main.tex').getHash()).to.equal(
File.EMPTY_FILE_HASH
)
expect(snapshot.getFile('test.tex').getHash()).to.equal(
testFiles.STRING_A_HASH
)
expect(snapshot.getProjectVersion()).to.equal(projectVersion)
expect(snapshot.getV2DocVersions()).to.deep.equal(v2DocVersions)
})
.then(() => {
// Import changes with origin
const testFile = File.fromHash(File.EMPTY_FILE_HASH)
const changes = [
makeChange(Operation.removeFile(testFilePathname)),
makeChange(Operation.addFile(testFilePathname, testFile)),
]
changes[0].setOrigin(testLabelOrigin)
changes[1].setOrigin(testRestoreOrigin)
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: testProjectId,
end_version: 0,
changes: changes.map(changeToRaw),
})
})
.then(() => {
// Get the latest history
return clientForProject.apis.Project.getLatestHistory({
project_id: testProjectId,
})
})
.then(response => {
// Check that the origin is stored
const chunkResponse = ChunkResponse.fromRaw(response.obj)
const changes = chunkResponse.getChunk().getChanges()
expect(changes).to.have.length(4)
expect(changes[2].getOrigin()).to.deep.equal(testLabelOrigin)
expect(changes[3].getOrigin()).to.deep.equal(testRestoreOrigin)
})
.then(() => {
// Import invalid changes
const testFile = File.fromHash(File.EMPTY_FILE_HASH)
const changes = [makeChange(Operation.addFile('../../a.tex', testFile))]
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: testProjectId,
end_version: 0,
changes: changes.map(changeToRaw),
})
})
.then(() => {
// Check that this fails and returns a 422
expect.fail()
})
.catch(expectResponse.unprocessableEntity)
})
it('rejects text operations on binary files', function () {
const testProjectId = '1'
const testFilePathname = 'main.tex'
const testTextOperation = TextOperation.fromJSON({ textOperation: ['bb'] })
let testSnapshot
return fetch(
testServer.url(
`/api/projects/${testProjectId}/blobs/${testFiles.NON_BMP_TXT_HASH}`
),
{
method: 'PUT',
body: fs.createReadStream(testFiles.path('non_bmp.txt')),
headers: {
Authorization: testServer.basicAuthHeader,
},
}
)
.then(response => {
expect(response.ok).to.be.true
})
.then(() => {
// Import a project
testSnapshot = new Snapshot()
testSnapshot.addFile(
testFilePathname,
File.fromHash(testFiles.NON_BMP_TXT_HASH)
)
return basicAuthClient.apis.ProjectImport.importSnapshot1({
project_id: testProjectId,
snapshot: testSnapshot.toRaw(),
})
})
.then(response => {
// Check that the project is valid
expect(response.obj.projectId).to.equal(testProjectId)
})
.then(() => {
// Import invalid changes
const changes = [
makeChange(Operation.editFile(testFilePathname, testTextOperation)),
]
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: testProjectId,
end_version: 0,
changes: changes.map(changeToRaw),
})
})
.then(() => {
// Expect invalid changes to fail
expect.fail()
})
.catch(expectResponse.unprocessableEntity)
.then(() => {
// Get latest content
return clientForProject.apis.Project.getLatestContent({
project_id: testProjectId,
})
})
.then(response => {
// Check that no changes were stored
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(1)
expect(snapshot.getFile(testFilePathname).getHash()).to.equal(
testFiles.NON_BMP_TXT_HASH
)
})
})
  it('accepts text operations on files with null characters if stringLength is present', function () {
const testProjectId = '1'
const mainFilePathname = 'main.tex'
const testTextOperation = TextOperation.fromJSON({
textOperation: [3, 'a'],
})
let testSnapshot
function importChanges() {
const changes = [
makeChange(Operation.editFile(mainFilePathname, testTextOperation)),
]
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: testProjectId,
end_version: 0,
changes: changes.map(changeToRaw),
})
}
function getLatestContent() {
return clientForProject.apis.Project.getLatestContent({
project_id: testProjectId,
})
}
return fetch(
testServer.url(
`/api/projects/${testProjectId}/blobs/${testFiles.NULL_CHARACTERS_TXT_HASH}`
),
{
method: 'PUT',
body: fs.createReadStream(testFiles.path('null_characters.txt')),
headers: {
Authorization: testServer.basicAuthHeader,
},
}
)
.then(response => {
expect(response.ok).to.be.true
})
.then(() => {
// Import project
testSnapshot = new Snapshot()
testSnapshot.addFile(
mainFilePathname,
File.fromHash(testFiles.NULL_CHARACTERS_TXT_HASH)
)
return basicAuthClient.apis.ProjectImport.importSnapshot1({
project_id: testProjectId,
snapshot: testSnapshot.toRaw(),
})
})
.then(importChanges)
.then(() => {
// Expect the changes to be rejected while string_length is unknown
expect.fail()
})
.catch(expectResponse.unprocessableEntity)
.then(getLatestContent)
.then(response => {
// Check that no changes were made
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(1)
expect(snapshot.getFile(mainFilePathname).getHash()).to.equal(
testFiles.NULL_CHARACTERS_TXT_HASH
)
})
.then(() => {
// Set string_length so that text operations on the file are accepted
return knex('project_blobs').update(
'string_length',
testFiles.NULL_CHARACTERS_TXT_BYTE_LENGTH
)
})
.then(importChanges)
.then(getLatestContent)
.then(response => {
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(1)
expect(snapshot.getFile(mainFilePathname).getContent()).to.equal(
'\x00\x00\x00a'
)
})
})
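// Note: as the test above shows, text operations on a file containing null
// characters are rejected until the blob's string_length is recorded; once it
// is set, the same change is accepted.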
it('returns 404 when chunk is not found in bucket', function () {
const testProjectId = '1'
const fooChange = makeChange(Operation.removeFile('foo.tex'))
return knex('chunks')
.insert({
doc_id: testProjectId,
start_version: 0,
end_version: 100,
end_timestamp: null,
})
.then(() => {
// Import changes
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: testProjectId,
end_version: 100,
changes: [fooChange.toRaw()],
})
})
.then(() => {
// Expect the import to fail because the chunk is missing from the bucket
expect.fail()
})
.catch(expectResponse.notFound)
})
it('creates and returns changes with v2 author ids', function () {
const testFilePathname = 'test.tex'
const testTextOperation = TextOperation.fromJSON({ textOperation: ['a'] })
const v2Authors = ['5a296963ad5e82432674c839', null]
let testProjectId
return testProjects
.createEmptyProject()
.then(projectId => {
testProjectId = projectId
expect(testProjectId).to.be.a('string')
})
.then(() => {
return fetch(
testServer.url(
`/api/projects/${testProjectId}/blobs/${File.EMPTY_FILE_HASH}`
),
{
method: 'PUT',
body: fs.createReadStream(testFiles.path('empty.tex')),
headers: {
Authorization: testServer.basicAuthHeader,
},
}
)
})
.then(response => {
expect(response.ok).to.be.true
})
.then(() => {
// Import changes
const testFile = File.fromHash(File.EMPTY_FILE_HASH)
const changes = [
makeChange(Operation.addFile(testFilePathname, testFile)),
makeChange(Operation.editFile(testFilePathname, testTextOperation)),
]
changes[1].setV2Authors(v2Authors)
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: testProjectId,
end_version: 0,
return_snapshot: 'hashed',
changes: changes.map(changeToRaw),
})
})
.then(response => {
expect(response.status).to.equal(HTTPStatus.CREATED)
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(1)
expect(snapshot.getFile('test.tex').getHash()).to.equal(
testFiles.STRING_A_HASH
)
})
.then(() => {
// Get project history
return pseudoJwtBasicAuthClient.apis.Project.getLatestHistory({
project_id: testProjectId,
})
})
.then(response => {
// It should not fail when some of the authors no longer exist
const chunkResponse = ChunkResponse.fromRaw(response.obj)
const changes = chunkResponse.getChunk().getChanges()
expect(changes.length).to.equal(2)
const changeWithAuthor = changes[1]
expect(changeWithAuthor.getV2Authors()).to.deep.equal(v2Authors)
})
})
it('should reject invalid v2 author ids', function () {
const testFilePathname = 'test.tex'
const v2Authors = ['not-a-v2-id']
let testProjectId
return testProjects
.createEmptyProject()
.then(projectId => {
testProjectId = projectId
expect(testProjectId).to.be.a('string')
})
.then(() => {
return fetch(
testServer.url(
`/api/projects/${testProjectId}/blobs/${File.EMPTY_FILE_HASH}`
),
{
method: 'PUT',
body: fs.createReadStream(testFiles.path('empty.tex')),
headers: {
Authorization: testServer.basicAuthHeader,
},
}
)
})
.then(response => {
expect(response.ok).to.be.true
})
.then(() => {
// Import changes
const testFile = File.fromHash(File.EMPTY_FILE_HASH)
const changes = [
makeChange(Operation.addFile(testFilePathname, testFile)),
]
changes[0].v2Authors = v2Authors
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: testProjectId,
end_version: 0,
changes: changes.map(changeToRaw),
})
})
.then(() => {
// Check that invalid changes fail
expect.fail()
})
.catch(expectResponse.unprocessableEntity)
})
it('should reject changes with both v1 and v2 author ids', function () {
const testFilePathname = 'test.tex'
const v1Authors = [456]
const v2Authors = ['5a296963ad5e82432674c839', null]
let testProjectId
return testProjects
.createEmptyProject()
.then(projectId => {
testProjectId = projectId
expect(testProjectId).to.be.a('string')
})
.then(() => {
return fetch(
testServer.url(
`/api/projects/${testProjectId}/blobs/${File.EMPTY_FILE_HASH}`
),
{
method: 'PUT',
body: fs.createReadStream(testFiles.path('empty.tex')),
headers: {
Authorization: testServer.basicAuthHeader,
},
}
)
})
.then(response => {
expect(response.ok).to.be.true
})
.then(() => {
// Import changes
const testFile = File.fromHash(File.EMPTY_FILE_HASH)
const changes = [
makeChange(Operation.addFile(testFilePathname, testFile)),
]
changes[0].authors = v1Authors
changes[0].v2Authors = v2Authors
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: testProjectId,
end_version: 0,
changes: changes.map(changeToRaw),
})
})
.then(() => {
// Check that invalid changes fail
expect.fail()
})
.catch(expectResponse.unprocessableEntity)
})
it("returns unprocessable if end_version isn't provided", function () {
return testProjects
.createEmptyProject()
.then(projectId => {
expect(projectId).to.be.a('string')
return projectId
})
.then(projectId => {
// Import changes
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: projectId,
changes: [],
})
})
.then(() => {
// Check that the request fails without end_version
expect.fail()
})
.catch(error => {
expect(error.message).to.equal(
'Required parameter end_version is not provided'
)
})
})
it('returns unprocessable if return_snapshot is invalid', function () {
return testProjects
.createEmptyProject()
.then(projectId => {
// Import changes
return basicAuthClient.apis.ProjectImport.importChanges1({
project_id: projectId,
changes: [],
end_version: 0,
return_snapshot: 'not_a_valid_value',
})
})
.then(() => {
// Check that the request fails with an invalid return_snapshot
expect.fail()
})
.catch(error => {
expect(error.status).to.equal(HTTPStatus.UNPROCESSABLE_ENTITY)
expect(error.response.body.message).to.equal(
'invalid enum value: return_snapshot'
)
})
})
})

View File

@@ -0,0 +1,333 @@
'use strict'
const { expect } = require('chai')
const fs = require('node:fs')
const HTTPStatus = require('http-status')
const fetch = require('node-fetch')
const sinon = require('sinon')
const cleanup = require('../storage/support/cleanup')
const fixtures = require('../storage/support/fixtures')
const testFiles = require('../storage/support/test_files')
const { zipStore, persistChanges } = require('../../../../storage')
const { expectHttpError } = require('./support/expect_response')
const testServer = require('./support/test_server')
const { createEmptyProject } = require('./support/test_projects')
const {
File,
Snapshot,
Change,
AddFileOperation,
EditFileOperation,
TextOperation,
} = require('overleaf-editor-core')
const testProjects = require('./support/test_projects')
describe('project controller', function () {
beforeEach(cleanup.everything)
beforeEach(fixtures.create)
describe('initializeProject', function () {
it('can initialize a new project', async function () {
const projectId = await testProjects.createEmptyProject()
expect(projectId).to.be.a('string')
})
})
describe('createZip', function () {
let importSnapshot
let createZip
before(function () {
importSnapshot =
testServer.basicAuthClient.apis.ProjectImport.importSnapshot1
createZip = testServer.basicAuthClient.apis.Project.createZip
})
beforeEach(function () {
// Don't start the work in the background in this test; it makes the test flaky.
sinon.stub(zipStore, 'storeZip').resolves()
})
afterEach(function () {
zipStore.storeZip.restore()
})
it('creates a URL to a zip file', async function () {
// Create a test blob.
const testProjectId = fixtures.docs.uninitializedProject.id
const response = await fetch(
testServer.url(
`/api/projects/${testProjectId}/blobs/${testFiles.HELLO_TXT_HASH}`
),
{
method: 'PUT',
body: fs.createReadStream(testFiles.path('hello.txt')),
headers: {
Authorization: testServer.basicAuthHeader,
},
}
)
expect(response.ok).to.be.true
// Import a project with the test blob.
const testFilePathname = 'hello.txt'
const testSnapshot = new Snapshot()
testSnapshot.addFile(
testFilePathname,
File.fromHash(testFiles.HELLO_TXT_HASH)
)
const importResponse = await importSnapshot({
project_id: testProjectId,
snapshot: testSnapshot.toRaw(),
})
expect(importResponse.obj.projectId).to.equal(testProjectId)
const createZipResponse = await createZip({
project_id: testProjectId,
version: 0,
})
expect(createZipResponse.status).to.equal(HTTPStatus.OK)
const zipInfo = createZipResponse.obj
expect(zipInfo.zipUrl).to.match(
/^http:\/\/gcs:9090\/download\/storage\/v1\/b\/overleaf-test-zips/
)
expect(zipStore.storeZip.calledOnce).to.be.true
})
})
// eslint-disable-next-line mocha/no-skipped-tests
describe.skip('getLatestContent', function () {
// TODO: remove this endpoint entirely, see
// https://github.com/overleaf/write_latex/pull/5120#discussion_r244291862
})
describe('project with changes', function () {
let projectId
beforeEach(async function () {
// Provide far-future limits to force all of the changes to be persisted.
const farFuture = new Date()
farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000)
const limits = {
minChangeTimestamp: farFuture,
maxChangeTimestamp: farFuture,
}
const changes = [
new Change(
[new AddFileOperation('test.tex', File.fromString('ab'))],
new Date(),
[]
),
new Change(
[new AddFileOperation('other.tex', File.fromString('hello'))],
new Date(),
[]
),
]
projectId = await createEmptyProject()
await persistChanges(projectId, changes, limits, 0)
})
describe('getLatestHashedContent', function () {
it('returns a snapshot', async function () {
const response =
await testServer.basicAuthClient.apis.Project.getLatestHashedContent({
project_id: projectId,
})
expect(response.status).to.equal(HTTPStatus.OK)
const snapshot = Snapshot.fromRaw(response.obj)
expect(snapshot.countFiles()).to.equal(2)
expect(snapshot.getFile('test.tex').getHash()).to.equal(
testFiles.STRING_AB_HASH
)
})
})
describe('getChanges', function () {
it('returns all changes when not given a limit', async function () {
const response =
await testServer.basicAuthClient.apis.Project.getChanges({
project_id: projectId,
})
expect(response.status).to.equal(HTTPStatus.OK)
const changes = response.obj
expect(changes.length).to.equal(2)
const filenames = changes
.flatMap(change => change.operations)
.map(operation => operation.pathname)
expect(filenames).to.deep.equal(['test.tex', 'other.tex'])
})
it('returns only requested changes', async function () {
const response =
await testServer.basicAuthClient.apis.Project.getChanges({
project_id: projectId,
since: 1,
})
expect(response.status).to.equal(HTTPStatus.OK)
const changes = response.obj
expect(changes.length).to.equal(1)
const filenames = changes
.flatMap(change => change.operations)
.map(operation => operation.pathname)
expect(filenames).to.deep.equal(['other.tex'])
})
it('rejects negative versions', async function () {
await expect(
testServer.basicAuthClient.apis.Project.getChanges({
project_id: projectId,
since: -1,
})
).to.be.rejectedWith('Bad Request')
})
it('rejects out of bounds versions', async function () {
await expect(
testServer.basicAuthClient.apis.Project.getChanges({
project_id: projectId,
since: 20,
})
).to.be.rejectedWith('Bad Request')
})
})
})
describe('project with many chunks', function () {
let projectId
beforeEach(async function () {
// Provide far-future limits to force all of the changes to be persisted.
const farFuture = new Date()
farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000)
const limits = {
minChangeTimestamp: farFuture,
maxChangeTimestamp: farFuture,
maxChunkChanges: 5,
}
const changes = [
new Change(
[new AddFileOperation('test.tex', File.fromString(''))],
new Date(),
[]
),
]
for (let i = 0; i < 20; i++) {
const textOperation = new TextOperation()
textOperation.retain(i)
textOperation.insert('x')
changes.push(
new Change(
[new EditFileOperation('test.tex', textOperation)],
new Date(),
[]
)
)
}
projectId = await createEmptyProject()
await persistChanges(projectId, changes, limits, 0)
})
it('returns all changes when not given a limit', async function () {
const response = await testServer.basicAuthClient.apis.Project.getChanges(
{
project_id: projectId,
}
)
expect(response.status).to.equal(HTTPStatus.OK)
const changes = response.obj
expect(changes.length).to.equal(21)
expect(changes[10].operations[0].textOperation).to.deep.equal([9, 'x'])
})
it('returns only requested changes', async function () {
const response = await testServer.basicAuthClient.apis.Project.getChanges(
{
project_id: projectId,
since: 10,
}
)
expect(response.status).to.equal(HTTPStatus.OK)
const changes = response.obj
expect(changes.length).to.equal(11)
expect(changes[2].operations[0].textOperation).to.deep.equal([11, 'x'])
})
})
describe('getLatestHistoryRaw', function () {
it('should handle read-only requests', async function () {
const projectId = fixtures.docs.initializedProject.id
const response =
await testServer.pseudoJwtBasicAuthClient.apis.Project.getLatestHistoryRaw(
{
project_id: projectId,
readOnly: 'true',
}
)
expect(response.body).to.deep.equal({
startVersion: 0,
endVersion: 1,
endTimestamp: '2032-01-01T00:00:00.000Z',
})
})
})
describe('deleteProject', function () {
it('deletes the project chunks', async function () {
const projectId = fixtures.docs.initializedProject.id
const historyResponse =
await testServer.pseudoJwtBasicAuthClient.apis.Project.getLatestHistory(
{
project_id: projectId,
}
)
expect(historyResponse.status).to.equal(HTTPStatus.OK)
expect(historyResponse.body).to.have.property('chunk')
const deleteResponse =
await testServer.basicAuthClient.apis.Project.deleteProject({
project_id: projectId,
})
expect(deleteResponse.status).to.equal(HTTPStatus.NO_CONTENT)
await expectHttpError(
testServer.pseudoJwtBasicAuthClient.apis.Project.getLatestHistory({
project_id: projectId,
}),
HTTPStatus.NOT_FOUND
)
})
it('deletes the project blobs', async function () {
const projectId = fixtures.docs.initializedProject.id
const token = testServer.createTokenForProject(projectId)
const authHeaders = { Authorization: `Bearer ${token}` }
const hash = testFiles.HELLO_TXT_HASH
const fileContents = await fs.promises.readFile(
testFiles.path('hello.txt')
)
const blobUrl = testServer.url(`/api/projects/${projectId}/blobs/${hash}`)
const response1 = await fetch(blobUrl, {
method: 'PUT',
headers: authHeaders,
body: fileContents,
})
expect(response1.ok).to.be.true
const response2 = await fetch(blobUrl, { headers: authHeaders })
const payload = await response2.text()
expect(payload).to.equal(fileContents.toString())
const deleteResponse =
await testServer.basicAuthClient.apis.Project.deleteProject({
project_id: projectId,
})
expect(deleteResponse.status).to.equal(HTTPStatus.NO_CONTENT)
const response3 = await fetch(blobUrl, { headers: authHeaders })
expect(response3.status).to.equal(HTTPStatus.NOT_FOUND)
})
})
})

View File

@@ -0,0 +1,53 @@
'use strict'
const { expect } = require('chai')
const HTTPStatus = require('http-status')
function expectStatus(err, expected) {
const httpStatus = err.status || err.statusCode
if (httpStatus === undefined) {
throw err
} else {
expect(httpStatus).to.equal(expected)
}
}
async function expectHttpError(promise, expectedStatusCode) {
try {
await promise
} catch (err) {
const statusCode = err.status || err.statusCode
if (statusCode === undefined) {
throw err
} else {
expect(statusCode).to.equal(expectedStatusCode)
return
}
}
expect.fail('expected HTTP request to return with an error response')
}
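// Usage sketch, as in the deleteProject tests: pass the pending client call
// and the status it is expected to fail with (`client` stands for any of the
// test server's Swagger clients).
//
//   await expectHttpError(
//     client.apis.Project.getLatestHistory({ project_id: projectId }),
//     HTTPStatus.NOT_FOUND
//   )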
exports.expectHttpError = expectHttpError
exports.notFound = function (err) {
expectStatus(err, HTTPStatus.NOT_FOUND)
}
exports.unprocessableEntity = function (err) {
expectStatus(err, HTTPStatus.UNPROCESSABLE_ENTITY)
}
exports.conflict = function (err) {
expectStatus(err, HTTPStatus.CONFLICT)
}
exports.unauthorized = function (err) {
expectStatus(err, HTTPStatus.UNAUTHORIZED)
}
exports.forbidden = function (err) {
expectStatus(err, HTTPStatus.FORBIDDEN)
}
exports.requestEntityTooLarge = function (err) {
expectStatus(err, HTTPStatus.REQUEST_ENTITY_TOO_LARGE)
}

View File

@@ -0,0 +1,51 @@
// @ts-check
import config from 'config'
import { startApp } from '../../../../../backup-deletion-app.mjs'
/** @type {import("http").Server} */
let server
/**
* @param {string} pathname
* @return {string}
*/
function testUrl(pathname) {
const url = new URL('http://127.0.0.1')
const addr = server.address()
if (addr && typeof addr === 'object') {
url.port = addr.port.toString()
}
url.pathname = pathname
return url.toString()
}
const basicAuthHeader =
'Basic ' +
Buffer.from(`staging:${config.get('basicHttpAuth.password')}`).toString(
'base64'
)
async function listenOnRandomPort() {
if (server) return // already running
for (let i = 0; i < 10; i++) {
try {
server = await startApp(0)
return
} catch {
// Startup can fail transiently (e.g. the random port is taken); retry.
}
}
// Final attempt: let any startup error propagate to the caller.
server = await startApp(0)
}
after('close server', function (done) {
if (server) {
server.close(done)
} else {
done()
}
})
export default {
testUrl,
basicAuthHeader,
listenOnRandomPort,
}
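// Usage sketch from a test (the pathname is hypothetical; the real routes
// live in backup-deletion-app.mjs):
//
//   await testServer.listenOnRandomPort()
//   const response = await fetch(testServer.testUrl('/project/123/backup'), {
//     method: 'DELETE',
//     headers: { Authorization: testServer.basicAuthHeader },
//   })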

View File

@@ -0,0 +1,43 @@
// @ts-check
import { startApp } from '../../../../../backup-verifier-app.mjs'
/** @type {import("http").Server} */
let server
/**
* @param {string} pathname
* @return {string}
*/
function testUrl(pathname) {
const url = new URL('http://127.0.0.1')
const addr = server.address()
if (addr && typeof addr === 'object') {
url.port = addr.port.toString()
}
url.pathname = pathname
return url.toString()
}
async function listenOnRandomPort() {
if (server) return // already running
for (let i = 0; i < 10; i++) {
try {
server = await startApp(0)
return
} catch {
// Startup can fail transiently (e.g. the random port is taken); retry.
}
}
// Final attempt: let any startup error propagate to the caller.
server = await startApp(0, false)
}
after('close server', function (done) {
if (server) {
server.close(done)
} else {
done()
}
})
export default {
testUrl,
listenOnRandomPort,
}

View File

@@ -0,0 +1,26 @@
const BPromise = require('bluebird')
const { expect } = require('chai')
const HTTPStatus = require('http-status')
const assert = require('../../../../../storage/lib/assert')
const testServer = require('./test_server')
/**
* Without a provided history id, a new one will get generated.
* The history id can be either a mongo id or a postgres id.
*
* @param {string} [existingHistoryId]
* @return {Promise<string>}
*/
exports.createEmptyProject = function (existingHistoryId) {
return BPromise.resolve(
testServer.basicAuthClient.apis.Project.initializeProject({
body: { projectId: existingHistoryId },
})
).then(response => {
expect(response.status).to.equal(HTTPStatus.OK)
const { projectId } = response.obj
assert.projectId(projectId, 'bad projectId')
return projectId
})
}
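// Usage sketch: the no-argument form generates a fresh history id, as in
// `const projectId = await testProjects.createEmptyProject()` elsewhere in
// these tests; per the JSDoc above, an existing mongo- or postgres-style id
// may also be passed, e.g. createEmptyProject('5a296963ad5e82432674c839')
// (illustrative id).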

View File

@@ -0,0 +1,133 @@
/**
* @file
* Create a test server. For performance reasons, there is only one test server,
* and it is shared between all of the tests.
*
* This uses the mocha's "root-level hooks" to start and clean up the server.
*/
const BPromise = require('bluebird')
const config = require('config')
const http = require('node:http')
const jwt = require('jsonwebtoken')
const Swagger = require('swagger-client')
const app = require('../../../../../app')
function testUrl(pathname, opts = {}) {
const url = new URL('http://127.0.0.1')
url.port = exports.server.address().port
url.pathname = pathname
if (opts.qs) {
// URL#searchParams is read-only; set the serialized query via URL#search.
url.search = new URLSearchParams(opts.qs).toString()
}
return url.toString()
}
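// Usage sketch (the query key and value are illustrative):
//
//   testUrl(`/api/projects/${projectId}/blobs/${hash}`)
//   testUrl('/api-docs', { qs: { format: 'json' } })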
exports.url = testUrl
function createClient(options) {
// The Swagger client returns native Promises; we use Bluebird promises. Just
// wrapping the client creation is enough in many (but not all) cases to
// get Bluebird into the chain.
return BPromise.resolve(new Swagger(testUrl('/api-docs'), options))
}
function createTokenForProject(projectId, opts = {}) {
const jwtKey = opts.jwtKey || config.get('jwtAuth.key')
const jwtAlgorithm = config.get('jwtAuth.algorithm')
return jwt.sign({ project_id: projectId }, jwtKey, {
algorithm: jwtAlgorithm,
})
}
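// The token is sent as a bearer token, as in the deleteProject blob test:
//
//   const token = createTokenForProject(projectId)
//   const authHeaders = { Authorization: `Bearer ${token}` }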
exports.createTokenForProject = createTokenForProject
function createClientForProject(projectId, opts = {}) {
const token = createTokenForProject(projectId, opts)
return createClient({ authorizations: { jwt: `Bearer ${token}` } })
}
exports.createClientForProject = createClientForProject
function createClientForDownloadZip(projectId) {
const token = createTokenForProject(projectId)
return createClient({ authorizations: { token } })
}
exports.createClientForDownloadZip = createClientForDownloadZip
function createBasicAuthClient() {
return createClient({
authorizations: {
basic: {
username: 'staging',
password: config.get('basicHttpAuth.password'),
},
},
})
}
function createPseudoJwtBasicAuthClient() {
// HACK: The history service will accept HTTP basic auth for any endpoint that
// is expecting a JWT. If / when we fix that, we will need to fix this.
const jwt =
'Basic ' +
Buffer.from(`staging:${config.get('basicHttpAuth.password')}`).toString(
'base64'
)
return createClient({ authorizations: { jwt } })
}
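// Because of the hack above, this client can call JWT-protected endpoints
// using basic auth, e.g. pseudoJwtBasicAuthClient.apis.Project.getLatestHistory
// in the auth tests.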
exports.basicAuthHeader =
'Basic ' +
Buffer.from(`staging:${config.get('basicHttpAuth.password')}`).toString(
'base64'
)
function createServer() {
const server = http.createServer(app)
return app.setup().then(() => {
exports.server = server
return server
})
}
function createDefaultUnauthenticatedClient() {
return createClient().then(client => {
exports.client = client
})
}
function createDefaultBasicAuthClient() {
return createBasicAuthClient().then(client => {
exports.basicAuthClient = client
})
}
function createDefaultPseudoJwtBasicAuthClient() {
return createPseudoJwtBasicAuthClient().then(client => {
exports.pseudoJwtBasicAuthClient = client
})
}
before(function () {
function listenOnRandomPort(server) {
const listen = BPromise.promisify(server.listen, { context: server })
return listen(0).catch(err => {
if (err.code !== 'EADDRINUSE' && err.code !== 'EACCES') throw err
return listenOnRandomPort(server)
})
}
return createServer()
.then(listenOnRandomPort)
.then(createDefaultUnauthenticatedClient)
.then(createDefaultBasicAuthClient)
.then(createDefaultPseudoJwtBasicAuthClient)
})
after(function () {
exports.server.close()
})
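// Tests consume the shared server through the exports populated above, e.g.:
//
//   const testServer = require('./support/test_server')
//   const response = await testServer.basicAuthClient.apis.Project.getChanges({
//     project_id: projectId,
//   })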