first commit
services/web/modules/server-ce-scripts/index.js (new file)
@@ -0,0 +1,6 @@
/** @import { WebModule } from "../../types/web-module" */

/** @type {WebModule} */
const ServerCeScriptsModule = {}

module.exports = ServerCeScriptsModule
services/web/modules/server-ce-scripts/scripts/change-compile-timeout.mjs (new file)
@@ -0,0 +1,44 @@
import minimist from 'minimist'
import { db, ObjectId } from '../../../app/src/infrastructure/mongodb.js'

async function main() {
  const argv = minimist(process.argv.slice(2), {
    string: ['user-id', 'compile-timeout'],
  })

  const { 'user-id': userId, 'compile-timeout': rawCompileTimeout } = argv
  const compileTimeout = parseInt(rawCompileTimeout, 10)
  if (
    !userId ||
    !ObjectId.isValid(userId) ||
    !rawCompileTimeout ||
    Number.isNaN(compileTimeout)
  ) {
    console.error(
      `Usage: node ${import.meta.filename} --user-id=5a9414f259776c7900b300e6 --compile-timeout=90`
    )
    process.exit(101)
  }

  if (compileTimeout < 1 || compileTimeout > 600) {
    console.error(
      `The compile timeout must be a positive number of seconds, below 10 minutes (600).`
    )
    process.exit(101)
  }

  await db.users.updateOne(
    { _id: new ObjectId(userId) },
    { $set: { 'features.compileTimeout': compileTimeout } }
  )
}

main()
  .then(() => {
    console.error('Done.')
    process.exit(0)
  })
  .catch(err => {
    console.error(err)
    process.exit(1)
  })
services/web/modules/server-ce-scripts/scripts/check-mongodb.mjs (new file)
@@ -0,0 +1,64 @@
import mongodb from 'mongodb-legacy'
import {
  connectionPromise,
  db,
} from '../../../app/src/infrastructure/mongodb.js'

const { ObjectId } = mongodb

const MIN_MONGO_VERSION = [5, 0]

async function main() {
  let mongoClient
  try {
    mongoClient = await connectionPromise
  } catch (err) {
    console.error('Cannot connect to mongodb')
    throw err
  }

  await checkMongoVersion(mongoClient)

  try {
    await testTransactions(mongoClient)
  } catch (err) {
    console.error("Mongo instance doesn't support transactions")
    throw err
  }
}
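
// Running a single operation inside withTransaction() is enough to verify
// support: transactions require a replica set (or sharded cluster), so a
// standalone mongod fails here. The findOne on a fresh ObjectId matches
// nothing and is therefore a cheap no-op.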
async function testTransactions(mongoClient) {
  const session = mongoClient.startSession()
  try {
    await session.withTransaction(async () => {
      await db.users.findOne({ _id: new ObjectId() }, { session })
    })
  } finally {
    await session.endSession()
  }
}

async function checkMongoVersion(mongoClient) {
  const buildInfo = await mongoClient.db().admin().buildInfo()
  const [major, minor] = buildInfo.versionArray
  const [minMajor, minMinor] = MIN_MONGO_VERSION

  if (major < minMajor || (major === minMajor && minor < minMinor)) {
    const version = buildInfo.version
    const minVersion = MIN_MONGO_VERSION.join('.')
    console.error(
      `The MongoDB server has version ${version}, but Overleaf requires at least version ${minVersion}. Aborting.`
    )
    process.exit(1)
  }
}

main()
  .then(() => {
    console.error('Mongodb is up.')
    process.exit(0)
  })
  .catch(err => {
    console.error(err)
    process.exit(1)
  })
services/web/modules/server-ce-scripts/scripts/check-redis.mjs (new file)
@@ -0,0 +1,18 @@
import RedisWrapper from '../../../app/src/infrastructure/RedisWrapper.js'
const rclient = RedisWrapper.client('health_check')
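// Handle client-level errors too: if redis is unreachable, the healthCheck
// callback may never fire, so exit explicitly from the 'error' listener.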
rclient.on('error', err => {
  console.error('Cannot connect to redis.')
  console.error(err)
  process.exit(1)
})

rclient.healthCheck(err => {
  if (err) {
    console.error('Cannot connect to redis.')
    console.error(err)
    process.exit(1)
  } else {
    console.error('Redis is up.')
    process.exit(0)
  }
})
services/web/modules/server-ce-scripts/scripts/check-texlive-images.mjs (new file)
@@ -0,0 +1,101 @@
import { db } from '../../../app/src/infrastructure/mongodb.js'

async function readImagesInUse() {
  const projectCount = await db.projects.countDocuments()
  if (projectCount === 0) {
    return []
  }
  const images = await db.projects.distinct('imageName')

  if (!images || images.length === 0 || images.includes(null)) {
    console.error(`'project.imageName' is not set for some projects`)
    console.error(
      `Set SKIP_TEX_LIVE_CHECK=true in config/variables.env, restart the instance and run 'bin/run-script scripts/backfill_project_image_name.mjs' to initialise TexLive image in existing projects.`
    )
    console.error(
      `After running the script, remove SKIP_TEX_LIVE_CHECK from config/variables.env and restart the instance.`
    )
    process.exit(1)
  }
  return images
}

function checkIsServerPro() {
  if (process.env.OVERLEAF_IS_SERVER_PRO !== 'true') {
    console.log('Running Overleaf Community Edition, skipping TexLive checks')
    process.exit(0)
  }
}

function checkSandboxedCompilesAreEnabled() {
  if (process.env.SANDBOXED_COMPILES !== 'true') {
    console.log('Sandboxed compiles disabled, skipping TexLive checks')
    process.exit(0)
  }
}

function checkTexLiveEnvVariablesAreProvided() {
  if (
    !process.env.TEX_LIVE_DOCKER_IMAGE ||
    !process.env.ALL_TEX_LIVE_DOCKER_IMAGES
  ) {
    console.error(
      'Sandboxed compiles require TEX_LIVE_DOCKER_IMAGE and ALL_TEX_LIVE_DOCKER_IMAGES to be set.'
    )
    process.exit(1)
  }
}

async function main() {
  if (process.env.SKIP_TEX_LIVE_CHECK === 'true') {
    console.log(`SKIP_TEX_LIVE_CHECK=true, skipping TexLive images check`)
    process.exit(0)
  }

  checkIsServerPro()
  checkSandboxedCompilesAreEnabled()
  checkTexLiveEnvVariablesAreProvided()

  const allTexLiveImages = process.env.ALL_TEX_LIVE_DOCKER_IMAGES.split(',')

  if (!allTexLiveImages.includes(process.env.TEX_LIVE_DOCKER_IMAGE)) {
    console.error(
      `TEX_LIVE_DOCKER_IMAGE must be included in ALL_TEX_LIVE_DOCKER_IMAGES`
    )
    process.exit(1)
  }

  const currentImages = await readImagesInUse()
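
  // A "dangling" image is one still referenced by some project's imageName but
  // no longer listed in ALL_TEX_LIVE_DOCKER_IMAGES; refuse to start until those
  // projects are migrated to a supported image.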
  const danglingImages = []
  for (const image of currentImages) {
    if (!allTexLiveImages.includes(image)) {
      danglingImages.push(image)
    }
  }
  if (danglingImages.length > 0) {
    danglingImages.forEach(image =>
      console.error(
        `${image} is currently in use but it's not included in ALL_TEX_LIVE_DOCKER_IMAGES`
      )
    )
    console.error(
      `Set SKIP_TEX_LIVE_CHECK=true in config/variables.env, restart the instance and run 'bin/run-script scripts/update_project_image_name.js <dangling_image> <new_image>' to update projects to a new image.`
    )
    console.error(
      `After running the script, remove SKIP_TEX_LIVE_CHECK from config/variables.env and restart the instance.`
    )
    process.exit(1)
  }

  console.log('Done.')
}

main()
  .then(() => {
    process.exit(0)
  })
  .catch(err => {
    console.error(err)
    process.exit(1)
  })
services/web/modules/server-ce-scripts/scripts/create-user.js (new file)
@@ -0,0 +1,21 @@
/**
 * WARNING
 * This file has been replaced by create-user.mjs. It is left in place for backwards compatibility with previous versions of Overleaf.
 * It is used by the e2e tests that check the upgrade from older versions; if those tests are updated or removed,
 * this file can be removed as well.
 */

async function main() {
  const { default: createUser } = await import('./create-user.mjs')
  await createUser()
}

main()
  .then(() => {
    console.error('Done.')
    process.exit(0)
  })
  .catch(err => {
    console.error(err)
    process.exit(1)
  })
services/web/modules/server-ce-scripts/scripts/create-user.mjs (new file)
@@ -0,0 +1,50 @@
import minimist from 'minimist'
import { db } from '../../../app/src/infrastructure/mongodb.js'
import UserRegistrationHandler from '../../../app/src/Features/User/UserRegistrationHandler.js'
import { fileURLToPath } from 'url'

const filename = fileURLToPath(import.meta.url)

export default async function main() {
  const argv = minimist(process.argv.slice(2), {
    string: ['email'],
    boolean: ['admin'],
  })

  const { admin, email } = argv
  if (!email) {
    console.error(`Usage: node ${filename} [--admin] --email=joe@example.com`)
    process.exit(1)
  }

  await new Promise((resolve, reject) => {
    UserRegistrationHandler.registerNewUserAndSendActivationEmail(
      email,
      (error, user, setNewPasswordUrl) => {
        if (error) {
          return reject(error)
        }
        db.users.updateOne(
          { _id: user._id },
          { $set: { isAdmin: admin } },
          error => {
            if (error) {
              return reject(error)
            }

            console.log('')
            console.log(`\
Successfully created ${email} as ${admin ? 'an admin' : 'a'} user.

Please visit the following URL to set a password for ${email} and log in:

${setNewPasswordUrl}

`)
            resolve()
          }
        )
      }
    )
  })
}
services/web/modules/server-ce-scripts/scripts/delete-user.mjs (new file)
@@ -0,0 +1,47 @@
import UserGetter from '../../../app/src/Features/User/UserGetter.js'
import UserDeleter from '../../../app/src/Features/User/UserDeleter.js'
import { fileURLToPath } from 'url'

const filename = fileURLToPath(import.meta.url)

async function main() {
  const email = (process.argv.slice(2).pop() || '').replace(/^--email=/, '')
  if (!email) {
    console.error(`Usage: node ${filename} --email=joe@example.com`)
    process.exit(1)
  }

  await new Promise((resolve, reject) => {
    UserGetter.getUser({ email }, { _id: 1 }, function (error, user) {
      if (error) {
        return reject(error)
      }
      if (!user) {
        console.log(
          `user ${email} not in database, potentially already deleted`
        )
        return resolve()
      }
      const options = {
        ipAddress: '0.0.0.0',
        force: true,
      }
      UserDeleter.deleteUser(user._id, options, function (err) {
        if (err) {
          return reject(err)
        }
        resolve()
      })
    })
  })
}

main()
  .then(() => {
    console.error('Done.')
    process.exit(0)
  })
  .catch(err => {
    console.error(err)
    process.exit(1)
  })
services/web/scripts/export-user-projects.js (new file)
@@ -0,0 +1,309 @@
const minimist = require('minimist')
const {
  mkdirSync,
  createWriteStream,
  existsSync,
  unlinkSync,
  renameSync,
} = require('fs')
const mongodb = require('../app/src/infrastructure/mongodb')
const DocumentUpdaterHandler = require('../app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js')
const ProjectZipStreamManager = require('../app/src/Features/Downloads/ProjectZipStreamManager.js')
const logger = require('logger-sharelatex')
const { Project } = require('../app/src/models/Project.js')
const { User } = require('../app/src/models/User.js')
const readline = require('readline')

function parseArgs() {
  return minimist(process.argv.slice(2), {
    boolean: ['help', 'list', 'export-all'],
    string: ['user-id', 'output', 'project-id', 'output-dir', 'log-level'],
    alias: { help: 'h' },
    default: {
      'log-level': 'error',
    },
  })
}

function showUsage() {
  console.log(`
Usage: node scripts/export-user-projects.mjs [options]
  --help, -h      Show help
  --user-id       The user ID (required unless using --export-all or --project-id)
  --project-id    Export a single project (cannot be used with --user-id or --export-all)
  --list          List user's projects (cannot be used with --output)
  --output        Output zip file (for single export operations)
  --export-all    Export all users' projects (requires --output-dir)
  --output-dir    Directory for storing all users' export files
  --log-level     Log level (trace|debug|info|warn|error|fatal) [default: error]
`)
}

function findAllUsers(callback) {
  User.find({}, 'email', callback)
}

function findUserProjects(userId, callback) {
  Project.find({ owner_ref: userId }, 'name', callback)
}

function listProjects(userId, callback) {
  findUserProjects(userId, function (err, projects) {
    if (err) return callback(err)
    projects.forEach(function (p) {
      console.log(`${p._id} - ${p.name}`)
    })
    callback()
  })
}

function updateProgress(current, total) {
  if (!process.stdout.isTTY) return
  const width = 40
  const progress = Math.floor((current / total) * width)
  const SOLID_BLOCK = '\u2588' // Unicode "Full Block"
  const LIGHT_SHADE = '\u2591' // Unicode "Light Shade"
  const bar =
    SOLID_BLOCK.repeat(progress) + LIGHT_SHADE.repeat(width - progress)
  const percentage = Math.floor((current / total) * 100)
  readline.clearLine(process.stdout, 0)
  readline.cursorTo(process.stdout, 0)
  process.stdout.write(
    `Progress: [${bar}] ${percentage}% (${current}/${total} projects)`
  )
}

function exportUserProjectsToZip(userId, output, callback) {
  findUserProjects(userId, function (err, projects) {
    if (err) return callback(err)
    const allIds = projects.map(p => p._id)
    if (allIds.length === 0) {
      console.log('No projects found for user')
      return callback()
    }

    console.log('Flushing projects to MongoDB...')
    let completed = 0

    function flushNext() {
      if (completed >= allIds.length) {
        createZip()
        return
      }

      DocumentUpdaterHandler.flushProjectToMongoAndDelete(
        allIds[completed],
        function (err) {
          if (err) return callback(err)
          updateProgress(completed + 1, allIds.length)
          completed++
          flushNext()
        }
      )
    }

    function createZip() {
      console.log('\nAll projects flushed, creating zip...')
      console.log(
        `Exporting ${allIds.length} projects for user ${userId} to ${output}`
      )

      ProjectZipStreamManager.createZipStreamForMultipleProjects(
        allIds,
        function (err, zipStream) {
          if (err) return callback(err)

          zipStream.on('progress', progress => {
            updateProgress(progress.entries.total, allIds.length)
          })

          writeStreamToFileAtomically(zipStream, output, function (err) {
            if (err) return callback(err)
            readline.clearLine(process.stdout, 0)
            readline.cursorTo(process.stdout, 0)
            console.log(
              `Successfully exported ${allIds.length} projects to ${output}`
            )
            callback()
          })
        }
      )
    }

    flushNext()
  })
}
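
// Write to a unique temp file and rename() it into place at the end, so a
// crash mid-write never leaves a truncated zip at the final path. The 'wx'
// flag makes createWriteStream fail instead of clobbering an existing file,
// and rename() is atomic only within a filesystem, hence the temp file lives
// next to the destination.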
function writeStreamToFileAtomically(stream, finalPath, callback) {
  const tmpPath = `${finalPath}-${Date.now()}.tmp`
  const outStream = createWriteStream(tmpPath, { flags: 'wx' })

  stream.pipe(outStream)

  outStream.on('error', function (err) {
    try {
      unlinkSync(tmpPath)
    } catch {
      console.log('Leaving behind tmp file, please cleanup manually:', tmpPath)
    }
    callback(err)
  })

  outStream.on('finish', function () {
    try {
      renameSync(tmpPath, finalPath)
      callback()
    } catch (err) {
      try {
        unlinkSync(tmpPath)
      } catch {
        console.log(
          'Leaving behind tmp file, please cleanup manually:',
          tmpPath
        )
      }
      callback(err)
    }
  })
}

function exportSingleProject(projectId, output, callback) {
  console.log('Flushing project to MongoDB...')
  DocumentUpdaterHandler.flushProjectToMongoAndDelete(
    projectId,
    function (err) {
      if (err) return callback(err)

      console.log(`Exporting project ${projectId} to ${output}`)
      ProjectZipStreamManager.createZipStreamForProject(
        projectId,
        function (err, zipStream) {
          if (err) return callback(err)
          writeStreamToFileAtomically(zipStream, output, function (err) {
            if (err) return callback(err)
            console.log('Exported project to', output)
            callback()
          })
        }
      )
    }
  )
}

function exportAllUsersProjects(outputDir, callback) {
  findAllUsers(function (err, users) {
    if (err) return callback(err)

    console.log(`Found ${users.length} users to process`)
    mkdirSync(outputDir, { recursive: true })

    let userIndex = 0
    function processNextUser() {
      if (userIndex >= users.length) {
        return callback()
      }

      const user = users[userIndex]
      const safeEmail = user.email.toLowerCase().replace(/[^a-z0-9]/g, '_')
      const outputFile = `${outputDir}/${user._id}_${safeEmail}_projects.zip`

      if (existsSync(outputFile)) {
        console.log(`Skipping ${user._id} - file already exists`)
        userIndex++
        return processNextUser()
      }

      console.log(
        `Processing user ${userIndex + 1}/${users.length} (${user._id})`
      )
      exportUserProjectsToZip(user._id, outputFile, function (err) {
        if (err) return callback(err)
        userIndex++
        processNextUser()
      })
    }

    processNextUser()
  })
}

function main() {
  const argv = parseArgs()

  if (argv.help) {
    showUsage()
    process.exit(0)
  }

  if (argv['log-level']) {
    logger.logger.level(argv['log-level'])
  }

  if (argv.list && argv.output) {
    console.error('Cannot use both --list and --output together')
    process.exit(1)
  }

  if (
    [argv['user-id'], argv['project-id'], argv['export-all']].filter(Boolean)
      .length > 1
  ) {
    console.error('Can only use one of: --user-id, --project-id, --export-all')
    process.exit(1)
  }

  function cleanup(err) {
    // Allow the script to finish gracefully then exit
    setTimeout(() => {
      if (err) {
        logger.error({ err }, 'Error in export-user-projects script')
        process.exit(1)
      } else {
        console.log('Done.')
        process.exit(0)
      }
    }, 1000)
  }

  if (argv.list) {
    if (!argv['user-id']) {
      console.error('--list requires --user-id')
      process.exit(1)
    }
    listProjects(argv['user-id'], cleanup)
    return
  }

  if (argv['export-all']) {
    if (!argv['output-dir']) {
      console.error('--export-all requires --output-dir')
      process.exit(1)
    }
    exportAllUsersProjects(argv['output-dir'], cleanup)
    return
  }

  if (!argv.output) {
    console.error('Please specify an --output zip file')
    process.exit(1)
  }

  if (argv['project-id']) {
    exportSingleProject(argv['project-id'], argv.output, cleanup)
  } else if (argv['user-id']) {
    exportUserProjectsToZip(argv['user-id'], argv.output, cleanup)
  } else {
    console.error(
      'Please specify either --user-id, --project-id, or --export-all'
    )
    process.exit(1)
  }
}

mongodb
  .waitForDb()
  .then(main)
  .catch(err => {
    console.error('Failed to connect to MongoDB:', err)
    process.exit(1)
  })
services/web/modules/server-ce-scripts/scripts/export-user-projects.mjs (new file)
@@ -0,0 +1,232 @@
import minimist from 'minimist'
import {
  mkdirSync,
  createWriteStream,
  existsSync,
  unlinkSync,
  renameSync,
} from 'fs'
import { pipeline } from 'stream/promises'
import DocumentUpdaterHandler from '../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js'
import ProjectZipStreamManager from '../../../app/src/Features/Downloads/ProjectZipStreamManager.mjs'
import logger from '@overleaf/logger'
import { promisify } from '@overleaf/promise-utils'
import { gracefulShutdown } from '../../../app/src/infrastructure/GracefulShutdown.js'
import { Project } from '../../../app/src/models/Project.js'
import { User } from '../../../app/src/models/User.js'
import readline from 'readline'

function parseArgs() {
  return minimist(process.argv.slice(2), {
    boolean: ['help', 'list', 'export-all'],
    string: ['user-id', 'output', 'project-id', 'output-dir', 'log-level'],
    alias: { help: 'h' },
    default: {
      'log-level': 'error',
    },
  })
}

function showUsage() {
  console.log(`
Usage: node scripts/export-user-projects.mjs [options]
  --help, -h      Show help
  --user-id       The user ID (required unless using --export-all or --project-id)
  --project-id    Export a single project (cannot be used with --user-id or --export-all)
  --list          List user's projects (cannot be used with --output)
  --output        Output zip file (for single export operations)
  --export-all    Export all users' projects (requires --output-dir)
  --output-dir    Directory for storing all users' export files
  --log-level     Log level (trace|debug|info|warn|error|fatal) [default: error]
`)
}

async function findAllUsers() {
  const users = await User.find({}, 'email').exec()
  return users
}

async function findUserProjects(userId) {
  const ownedProjects = await Project.find({ owner_ref: userId }, 'name').exec()
  return ownedProjects
}

async function listProjects(userId) {
  const projects = await findUserProjects(userId)
  for (const p of projects) {
    console.log(`${p._id} - ${p.name}`)
  }
}
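
// promisify() wraps the callback-style zip-stream helpers; .bind() keeps
// `this` pointing at ProjectZipStreamManager when the wrapped method runs.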
const createZipStreamForMultipleProjectsAsync = promisify(
  ProjectZipStreamManager.createZipStreamForMultipleProjects
).bind(ProjectZipStreamManager)

function updateProgress(current, total) {
  if (!process.stdout.isTTY) return
  const width = 40
  const progress = Math.floor((current / total) * width)
  const SOLID_BLOCK = '\u2588' // Unicode "Full Block"
  const LIGHT_SHADE = '\u2591' // Unicode "Light Shade"
  const bar =
    SOLID_BLOCK.repeat(progress) + LIGHT_SHADE.repeat(width - progress)
  const percentage = Math.floor((current / total) * 100)
  readline.clearLine(process.stdout, 0)
  readline.cursorTo(process.stdout, 0)
  process.stdout.write(
    `Progress: [${bar}] ${percentage}% (${current}/${total} projects)`
  )
}

async function exportUserProjectsToZip(userId, output) {
  const projects = await findUserProjects(userId)
  const allIds = projects.map(p => p._id)
  if (allIds.length === 0) {
    console.log('No projects found for user')
    return
  }
  console.log('Flushing projects to MongoDB...')
  for (const [index, id] of allIds.entries()) {
    await DocumentUpdaterHandler.promises.flushProjectToMongoAndDelete(id)
    updateProgress(index + 1, allIds.length)
  }
  console.log('\nAll projects flushed, creating zip...')

  console.log(
    `Exporting ${allIds.length} projects for user ${userId} to ${output}`
  )

  const zipStream = await createZipStreamForMultipleProjectsAsync(allIds)

  zipStream.on('progress', progress => {
    updateProgress(progress.entries.total, allIds.length)
  })

  await writeStreamToFileAtomically(zipStream, output)
  readline.clearLine(process.stdout, 0)
  readline.cursorTo(process.stdout, 0)
  console.log(`Successfully exported ${allIds.length} projects to ${output}`)
}

async function writeStreamToFileAtomically(stream, finalPath) {
  const tmpPath = `${finalPath}-${Date.now()}.tmp`
  const outStream = createWriteStream(tmpPath, { flags: 'wx' })
  try {
    await pipeline(stream, outStream)
    renameSync(tmpPath, finalPath)
  } catch (err) {
    try {
      unlinkSync(tmpPath)
    } catch {
      console.log('Leaving behind tmp file, please cleanup manually:', tmpPath)
    }
    throw err
  }
}

const createZipStreamForProjectAsync = promisify(
  ProjectZipStreamManager.createZipStreamForProject
).bind(ProjectZipStreamManager)

async function exportSingleProject(projectId, output) {
  console.log('Flushing project to MongoDB...')
  await DocumentUpdaterHandler.promises.flushProjectToMongoAndDelete(projectId)
  console.log(`Exporting project ${projectId} to ${output}`)
  const zipStream = await createZipStreamForProjectAsync(projectId)
  await writeStreamToFileAtomically(zipStream, output)
  console.log('Exported project to', output)
}

async function exportAllUsersProjects(outputDir) {
  const users = await findAllUsers()
  console.log(`Found ${users.length} users to process`)

  mkdirSync(outputDir, { recursive: true })

  for (let i = 0; i < users.length; i++) {
    const user = users[i]
    const safeEmail = user.email.toLowerCase().replace(/[^a-z0-9]/g, '_')
    const outputFile = `${outputDir}/${user._id}_${safeEmail}_projects.zip`

    if (existsSync(outputFile)) {
      console.log(`Skipping ${user._id} - file already exists`)
      continue
    }

    console.log(`Processing user ${i + 1}/${users.length} (${user._id})`)
    await exportUserProjectsToZip(user._id, outputFile)
  }
}

async function main() {
  const argv = parseArgs()

  if (argv.help) {
    showUsage()
    process.exit(0)
  }

  if (argv['log-level']) {
    logger.logger.level(argv['log-level'])
  }

  if (argv.list && argv.output) {
    console.error('Cannot use both --list and --output together')
    process.exit(1)
  }

  if (
    [argv['user-id'], argv['project-id'], argv['export-all']].filter(Boolean)
      .length > 1
  ) {
    console.error('Can only use one of: --user-id, --project-id, --export-all')
    process.exit(1)
  }

  try {
    if (argv.list) {
      if (!argv['user-id']) {
        console.error('--list requires --user-id')
        process.exit(1)
      }
      await listProjects(argv['user-id'])
      return
    }

    if (argv['export-all']) {
      if (!argv['output-dir']) {
        console.error('--export-all requires --output-dir')
        process.exit(1)
      }
      await exportAllUsersProjects(argv['output-dir'])
      return
    }

    if (!argv.output) {
      console.error('Please specify an --output zip file')
      process.exit(1)
    }

    if (argv['project-id']) {
      await exportSingleProject(argv['project-id'], argv.output)
    } else if (argv['user-id']) {
      await exportUserProjectsToZip(argv['user-id'], argv.output)
    } else {
      console.error(
        'Please specify either --user-id, --project-id, or --export-all'
      )
      process.exit(1)
    }
  } finally {
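    // Run the app's shutdown hooks so open db/redis connections don't keep
    // the process alive; the stub close() below stands in for the HTTP
    // server a real app would stop.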
    await gracefulShutdown({ close: done => done() })
  }
}

main()
  .then(async () => {
    console.log('Done.')
  })
  .catch(async err => {
    logger.error({ err }, 'Error in export-user-projects script')
    process.exitCode = 1
  })
services/web/modules/server-ce-scripts/scripts/migrate-user-emails.mjs (new file)
@@ -0,0 +1,197 @@
// Script to migrate user emails using a CSV file with the following format:
//
// oldEmail,newEmail
//
// The script will iterate through the CSV file and update the user's email
// address from oldEmail to newEmail, after checking all the email addresses
// for duplicates.
//
// Intended for Server Pro customers migrating user emails from one domain to
// another.
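//
// For example, with hypothetical addresses, a two-user migration file:
//
//   alice@old-domain.example,alice@new-domain.example
//   bob@old-domain.example,bob@new-domain.example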

import minimist from 'minimist'

import os from 'os'
import fs from 'fs'
import * as csv from 'csv/sync'
import { parseEmail } from '../../../app/src/Features/Helpers/EmailHelper.js'
import UserGetter from '../../../app/src/Features/User/UserGetter.js'
import UserUpdater from '../../../app/src/Features/User/UserUpdater.js'
import UserSessionsManager from '../../../app/src/Features/User/UserSessionsManager.js'

const hostname = os.hostname()
const scriptTimestamp = new Date().toISOString()

// support command line option of --commit to actually do the migration
const argv = minimist(process.argv.slice(2), {
  boolean: ['commit', 'ignore-missing'],
  string: ['admin-id'],
  alias: {
    'ignore-missing': 'continue',
  },
  default: {
    commit: false,
    'ignore-missing': false,
    'admin-id': '000000000000000000000000', // use a dummy admin ID for script audit log entries
  },
})

// display usage if no CSV file is provided
if (argv._.length === 0) {
  console.log(
    'Usage: node migrate-user-emails.mjs [--commit] [--continue|--ignore-missing] [--admin-id=ADMIN_USER_ID] <csv_file>'
  )
  console.log(' --commit: actually do the migration (default: false)')
  console.log(
    ' --continue|--ignore-missing: continue on missing or already-migrated users'
  )
  console.log(' --admin-id: admin user ID to use for audit log entries')
  console.log(' <csv_file>: CSV file with old and new email addresses')
  process.exit(1)
}

function filterEmails(rows) {
  // check that emails have a valid format
  const result = []
  const seenOld = new Set()
  const seenNew = new Set()
  for (const [oldEmail, newEmail] of rows) {
    const parsedOld = parseEmail(oldEmail)
    const parsedNew = parseEmail(newEmail)
    if (!parsedOld) {
      throw new Error(`invalid old email "${oldEmail}"`)
    }
    if (!parsedNew) {
      throw new Error(`invalid new email "${newEmail}"`)
    }
    // Check for duplicates and overlaps
    if (seenOld.has(parsedOld)) {
      throw new Error(`Duplicate old emails found in CSV file ${oldEmail}.`)
    }
    if (seenNew.has(parsedNew)) {
      throw new Error(`Duplicate new emails found in CSV file ${newEmail}.`)
    }
    if (seenOld.has(parsedNew) || seenNew.has(parsedOld)) {
      throw new Error(
        `Old and new emails cannot overlap ${oldEmail} ${newEmail}`
      )
    }
    seenOld.add(parsedOld)
    seenNew.add(parsedNew)
    result.push([parsedOld, parsedNew])
  }
  return result
}

async function checkEmailsAgainstDb(emails) {
  const result = []
  for (const [oldEmail, newEmail] of emails) {
    const userWithEmail = await UserGetter.promises.getUserByMainEmail(
      oldEmail,
      {
        _id: 1,
      }
    )
    if (!userWithEmail) {
      if (argv['ignore-missing']) {
        console.log(
          `User with email "${oldEmail}" not found, skipping update to "${newEmail}"`
        )
        continue
      } else {
        throw new Error(`no user found with email "${oldEmail}"`)
      }
    }
    const userWithNewEmail = await UserGetter.promises.getUserByAnyEmail(
      newEmail,
      {
        _id: 1,
      }
    )
    if (userWithNewEmail) {
      throw new Error(
        `new email "${newEmail}" already exists for user ${userWithNewEmail._id}`
      )
    }
    result.push([oldEmail, newEmail])
  }
  return result
}

async function doMigration(emails) {
  let success = 0
  let failure = 0
  let skipped = 0
  for (const [oldEmail, newEmail] of emails) {
    const userWithEmail = await UserGetter.promises.getUserByMainEmail(
      oldEmail,
      {
        _id: 1,
      }
    )
    if (!userWithEmail) {
      if (argv['ignore-missing']) {
        continue
      } else {
        throw new Error(`no user found with email "${oldEmail}"`)
      }
    }
    if (argv.commit) {
      console.log(
        `Updating user ${userWithEmail._id} email "${oldEmail}" to "${newEmail}"\n`
      )
      try {
        // log out all the user's sessions before changing the email address
        await UserSessionsManager.promises.removeSessionsFromRedis(
          userWithEmail
        )

        await UserUpdater.promises.migrateDefaultEmailAddress(
          userWithEmail._id,
          oldEmail,
          newEmail,
          {
            initiatorId: argv['admin-id'],
            ipAddress: hostname,
            extraInfo: {
              script: 'migrate-user-emails.mjs',
              runAt: scriptTimestamp,
            },
          }
        )
        success++
      } catch (err) {
        console.log(err)
        failure++
      }
    } else {
      console.log(`Dry run, skipping update from ${oldEmail} to ${newEmail}`)
      skipped++
    }
  }
  console.log('Success: ', success, 'Failure: ', failure, 'Skipped: ', skipped)
  if (failure > 0) {
    throw new Error('Some email migrations failed')
  }
}

async function migrateEmails() {
  console.log('Starting email migration script')
  const csvFilePath = argv._[0]
  const csvFile = fs.readFileSync(csvFilePath, 'utf8')
  const rows = csv.parse(csvFile)
  console.log('Number of users to migrate: ', rows.length)
  const emails = filterEmails(rows)
  const existingUserEmails = await checkEmailsAgainstDb(emails)
  await doMigration(existingUserEmails)
}

migrateEmails()
  .then(() => {
    console.log('Done.')
    process.exit(0)
  })
  .catch(error => {
    console.error(error)
    process.exit(1)
  })
services/web/modules/server-ce-scripts/scripts/rename-tag.mjs (new file)
@@ -0,0 +1,34 @@
import minimist from 'minimist'
import { db } from '../../../app/src/infrastructure/mongodb.js'
import { fileURLToPath } from 'url'

const filename = fileURLToPath(import.meta.url)

async function main() {
  const argv = minimist(process.argv.slice(2), {
    string: ['user-id', 'old-name', 'new-name'],
  })

  const { 'user-id': userId, 'old-name': oldName, 'new-name': newName } = argv
  if (!userId || !oldName || !newName) {
    console.error(
      `Usage: node ${filename} --user-id=5a9414f259776c7900b300e6 --old-name=my-folder --new-name=my-folder-renamed`
    )
    process.exit(101)
  }

  await db.tags.updateOne(
    { name: oldName, user_id: userId },
    { $set: { name: newName } }
  )
}

main()
  .then(() => {
    console.error('Done.')
    process.exit(0)
  })
  .catch(err => {
    console.error(err)
    process.exit(1)
  })
services/web/modules/server-ce-scripts/scripts/upgrade-user-features.mjs (new file)
@@ -0,0 +1,61 @@
import Settings from '@overleaf/settings'
import logger from '@overleaf/logger'
import { db } from '../../../app/src/infrastructure/mongodb.js'
import {
  mergeFeatures,
  compareFeatures,
} from '../../../app/src/Features/Subscription/FeaturesHelper.js'
import { fileURLToPath } from 'url'
const DRY_RUN = !process.argv.includes('--dry-run=false')

async function main(DRY_RUN, defaultFeatures) {
  logger.info({ defaultFeatures }, 'default features')

  const cursor = db.users.find(
    {},
    { projection: { _id: 1, email: 1, features: 1 } }
  )
  for await (const user of cursor) {
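    // mergeFeatures keeps the better value of each feature, so users with a
    // custom upgrade (e.g. a higher compileTimeout) are left untouched, while
    // users below the current defaults are brought up to them.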
    const newFeatures = mergeFeatures(user.features, defaultFeatures)
    const diff = compareFeatures(newFeatures, user.features)
    if (Object.keys(diff).length > 0) {
      logger.warn(
        {
          userId: user._id,
          email: user.email,
          oldFeatures: user.features,
          newFeatures,
        },
        'user features upgraded'
      )

      if (!DRY_RUN) {
        await db.users.updateOne(
          { _id: user._id },
          { $set: { features: newFeatures } }
        )
      }
    }
  }
}

export default main

const filename = fileURLToPath(import.meta.url)

if (filename === process.argv[1]) {
  if (DRY_RUN) {
    console.error('---')
    console.error('Dry-run enabled, use --dry-run=false to commit changes')
    console.error('---')
  }
  main(DRY_RUN, Settings.defaultFeatures)
    .then(() => {
      console.log('Done.')
      process.exit(0)
    })
    .catch(error => {
      console.error({ error })
      process.exit(1)
    })
}
@@ -0,0 +1,7 @@
const base = require(process.env.BASE_CONFIG)

module.exports = base.mergeWith({
  test: {
    counterInit: 160000,
  },
})
@@ -0,0 +1,14 @@
import '../../../../../test/acceptance/src/helpers/InitApp.mjs'
import MockProjectHistoryApi from '../../../../../test/acceptance/src/mocks/MockProjectHistoryApi.mjs'
import MockDocstoreApi from '../../../../../test/acceptance/src/mocks/MockDocstoreApi.mjs'
import MockDocUpdaterApi from '../../../../../test/acceptance/src/mocks/MockDocUpdaterApi.mjs'
import MockV1Api from '../../../../admin-panel/test/acceptance/src/mocks/MockV1Api.mjs'

const mockOpts = {
  debug: ['1', 'true', 'TRUE'].includes(process.env.DEBUG_MOCKS),
}

MockDocstoreApi.initialize(23016, mockOpts)
MockDocUpdaterApi.initialize(23003, mockOpts)
MockProjectHistoryApi.initialize(23054, mockOpts)
MockV1Api.initialize(25000, mockOpts)
services/web/modules/server-ce-scripts/test/acceptance/src/ServerCEScripts.test.mjs (new file)
@@ -0,0 +1,713 @@
import { execSync } from 'node:child_process'
import fs from 'node:fs'
import Settings from '@overleaf/settings'
import { expect } from 'chai'
import { db } from '../../../../../app/src/infrastructure/mongodb.js'
import UserHelper from '../../../../../test/acceptance/src/helpers/User.mjs'

const { promises: User } = UserHelper

/**
 * @param {string} cmd
 * @return {string}
 */
function run(cmd) {
  // https://nodejs.org/docs/latest-v12.x/api/child_process.html#child_process_child_process_execsync_command_options
  // > stderr by default will be output to the parent process' stderr
  // > unless stdio is specified.
  // https://nodejs.org/docs/latest-v12.x/api/child_process.html#child_process_options_stdio
  // Pipe stdin from /dev/null, store stdout, and capture stderr for assertions.
  return execSync(cmd, {
    stdio: ['ignore', 'pipe', 'pipe'],
    env: {
      ...process.env,
      LOG_LEVEL: 'warn',
    },
  }).toString()
}

function runAndExpectError(cmd, errorMessages) {
  try {
    run(cmd)
  } catch (error) {
    expect(error.status).to.equal(1)
    if (errorMessages) {
      errorMessages.forEach(errorMessage =>
        expect(error.stderr.toString()).to.include(errorMessage)
      )
    }
    return
  }
  expect.fail('command should have failed')
}

async function getUser(email) {
  return db.users.findOne({ email }, { projection: { _id: 0, isAdmin: 1 } })
}

describe('ServerCEScripts', function () {
  describe('check-mongodb', function () {
    it('should exit with code 0 on success', function () {
      run('node modules/server-ce-scripts/scripts/check-mongodb.mjs')
    })

    it('should exit with code 1 on error', function () {
      try {
        run(
          'MONGO_SERVER_SELECTION_TIMEOUT=1 ' +
            'MONGO_CONNECTION_STRING=mongodb://127.0.0.1:4242 ' +
            'node modules/server-ce-scripts/scripts/check-mongodb.mjs'
        )
      } catch (e) {
        expect(e.status).to.equal(1)
        return
      }
      expect.fail('command should have failed')
    })
  })

  describe('check-redis', function () {
    it('should exit with code 0 on success', function () {
      run('node modules/server-ce-scripts/scripts/check-redis.mjs')
    })

    it('should exit with code 1 on error', function () {
      try {
        run(
          'REDIS_PORT=42 node modules/server-ce-scripts/scripts/check-redis.mjs'
        )
      } catch (e) {
        expect(e.status).to.equal(1)
        return
      }
      expect.fail('command should have failed')
    })
  })

  describe('create-user', function () {
    it('should exit with code 0 on success', function () {
      const out = run(
        'node modules/server-ce-scripts/scripts/create-user.js --email=foo@bar.com'
      )
      expect(out).to.include('/user/activate?token=')
    })

    it('should create a regular user by default', async function () {
      run(
        'node modules/server-ce-scripts/scripts/create-user.js --email=foo@bar.com'
      )
      expect(await getUser('foo@bar.com')).to.deep.equal({ isAdmin: false })
    })

    it('should create an admin user with --admin flag', async function () {
      run(
        'node modules/server-ce-scripts/scripts/create-user.js --admin --email=foo@bar.com'
      )
      expect(await getUser('foo@bar.com')).to.deep.equal({ isAdmin: true })
    })

    it('should exit with code 1 on missing email', function () {
      try {
        run('node modules/server-ce-scripts/scripts/create-user.js')
      } catch (e) {
        expect(e.status).to.equal(1)
        return
      }
      expect.fail('command should have failed')
    })
  })

  describe('delete-user', function () {
    let user
    beforeEach(async function () {
      user = new User()
      await user.login()
    })

    it('should log missing user', function () {
      const email = 'does-not-exist@example.com'
      const out = run(
        'node modules/server-ce-scripts/scripts/delete-user.mjs --email=' +
          email
      )
      expect(out).to.include('not in database, potentially already deleted')
    })

    it('should exit with code 0 on success', function () {
      const email = user.email
      run(
        'node modules/server-ce-scripts/scripts/delete-user.mjs --email=' +
          email
      )
    })

    it('should have deleted the user on success', async function () {
      const email = user.email
      run(
        'node modules/server-ce-scripts/scripts/delete-user.mjs --email=' +
          email
      )
      const dbEntry = await user.get()
      expect(dbEntry).to.not.exist
      const softDeletedEntry = await db.deletedUsers.findOne({
        'user.email': email,
      })
      expect(softDeletedEntry).to.exist
      expect(softDeletedEntry.deleterData.deleterIpAddress).to.equal('0.0.0.0')
    })

    it('should exit with code 1 on missing email', function () {
      try {
        run('node modules/server-ce-scripts/scripts/delete-user.mjs')
      } catch (e) {
        expect(e.status).to.equal(1)
        return
      }
      expect.fail('command should have failed')
    })
  })

  describe('migrate-user-emails', function () {
    let usersToMigrate
    let otherUsers
    let csv
    let csvfail
    beforeEach(async function () {
      // set up some users to migrate and others to leave alone
      usersToMigrate = []
      otherUsers = []
      for (let i = 0; i < 2; i++) {
        const user = new User()
        await user.login()
        usersToMigrate.push(user)
      }
      for (let i = 0; i < 2; i++) {
        const user = new User()
        await user.login()
        otherUsers.push(user)
      }
      // write the migration csv to a temporary file
      const id = usersToMigrate[0]._id
      csv = `/tmp/migration-${id}.csv`
      const rows = []
      for (const user of usersToMigrate) {
        rows.push(`${user.email},new-${user.email}`)
      }
      fs.writeFileSync(csv, rows.join('\n'))
      // also write a csv with a user that doesn't exist
      csvfail = `/tmp/migration-fail-${id}.csv`
      fs.writeFileSync(
        csvfail,
        [
          'nouser@example.com,nouser@other.example.com',
          ...rows,
          'foo@example.com,bar@example.com',
        ].join('\n')
      )
    })

    afterEach(function () {
      // clean up the temporary files
      fs.unlinkSync(csv)
      fs.unlinkSync(csvfail)
    })

    it('should do a dry run by default', async function () {
      run(
        `node modules/server-ce-scripts/scripts/migrate-user-emails.mjs ${csv}`
      )
      for (const user of usersToMigrate) {
        const dbEntry = await user.get()
        expect(dbEntry.email).to.equal(user.email)
      }
      for (const user of otherUsers) {
        const dbEntry = await user.get()
        expect(dbEntry.email).to.equal(user.email)
      }
    })

    it('should exit with code 0 when successfully migrating user emails', function () {
      run(
        `node modules/server-ce-scripts/scripts/migrate-user-emails.mjs --commit ${csv}`
      )
    })

    it('should migrate the user emails with the --commit option', async function () {
      run(
        `node modules/server-ce-scripts/scripts/migrate-user-emails.mjs --commit ${csv}`
      )
      for (const user of usersToMigrate) {
        const dbEntry = await user.get()
        expect(dbEntry.email).to.equal(`new-${user.email}`)
        expect(dbEntry.emails).to.have.lengthOf(1)
        expect(dbEntry.emails[0].email).to.equal(`new-${user.email}`)
        expect(dbEntry.emails[0].reversedHostname).to.equal('moc.elpmaxe')
        expect(dbEntry.emails[0].createdAt).to.eql(user.emails[0].createdAt)
      }
    })

    it('should leave other user emails unchanged', async function () {
      run(
        `node modules/server-ce-scripts/scripts/migrate-user-emails.mjs --commit ${csv}`
      )
      for (const user of otherUsers) {
        const dbEntry = await user.get()
        expect(dbEntry.email).to.equal(user.email)
      }
    })

    it('should exit with code 1 when there are failures migrating user emails', function () {
      try {
        run(
          `node modules/server-ce-scripts/scripts/migrate-user-emails.mjs --commit ${csvfail}`
        )
      } catch (e) {
        expect(e.status).to.equal(1)
        return
      }
      expect.fail('command should have failed')
    })

    it('should migrate other users when there are failures with the --continue option', async function () {
      try {
        run(
          `node modules/server-ce-scripts/scripts/migrate-user-emails.mjs --commit ${csvfail}`
        )
      } catch (e) {
        expect(e.status).to.equal(1)
        run(
          `node modules/server-ce-scripts/scripts/migrate-user-emails.mjs --commit --continue ${csvfail}`
        )
        for (const user of usersToMigrate) {
          const dbEntry = await user.get()
          expect(dbEntry.email).to.equal(`new-${user.email}`)
          expect(dbEntry.emails).to.have.lengthOf(1)
          expect(dbEntry.emails[0].email).to.equal(`new-${user.email}`)
          expect(dbEntry.emails[0].reversedHostname).to.equal('moc.elpmaxe')
          expect(dbEntry.emails[0].createdAt).to.eql(user.emails[0].createdAt)
        }
        return
      }
      expect.fail('command should have failed')
    })
  })

  describe('rename-tag', function () {
    let user
    beforeEach(async function () {
      user = new User()
      await user.login()
    })

    async function createTag(name) {
      await user.doRequest('POST', { url: '/tag', json: { name } })
    }

    async function getTagNames() {
      const { body } = await user.doRequest('GET', { url: '/tag', json: true })
      return body.map(tag => tag.name)
    }

    it('should rename a tag', async function () {
      const oldName = 'before'
      const newName = 'after'
      await createTag(oldName)

      expect(await getTagNames()).to.deep.equal([oldName])

      run(
        `node modules/server-ce-scripts/scripts/rename-tag.mjs --user-id=${user.id} --old-name=${oldName} --new-name=${newName}`
      )

      expect(await getTagNames()).to.deep.equal([newName])
    })
  })

  describe('change-compile-timeout', function () {
    let userA, userB
    beforeEach('login', async function () {
      userA = new User()
      await userA.login()

      userB = new User()
      await userB.login()
    })

    async function getCompileTimeout(user) {
      const { compileTimeout } = await user.getFeatures()
      return compileTimeout
    }

    let userATimeout, userBTimeout
    beforeEach('fetch current state', async function () {
      userATimeout = await getCompileTimeout(userA)
      userBTimeout = await getCompileTimeout(userB)
    })

    describe('happy path', function () {
      let newUserATimeout
      beforeEach('run script on user a', function () {
        newUserATimeout = userATimeout - 1
        run(
          `node modules/server-ce-scripts/scripts/change-compile-timeout.mjs --user-id=${userA.id} --compile-timeout=${newUserATimeout}`
        )
      })

      it('should change the timeout for user a', async function () {
        const actual = await getCompileTimeout(userA)
        expect(actual).to.not.equal(userATimeout)
        expect(actual).to.equal(newUserATimeout)
      })

      it('should leave the timeout for user b as is', async function () {
        expect(await getCompileTimeout(userB)).to.equal(userBTimeout)
      })
    })

    describe('bad options', function () {
      it('should reject zero timeout', async function () {
        try {
          run(
            `node modules/server-ce-scripts/scripts/change-compile-timeout.mjs --user-id=${userA.id} --compile-timeout=0`
          )
          expect.fail('should error out')
        } catch (err) {
          expect(err.stderr.toString()).to.include('positive number of seconds')
        }
        expect(await getCompileTimeout(userA)).to.equal(userATimeout)
        expect(await getCompileTimeout(userB)).to.equal(userBTimeout)
      })

      it('should reject a 20min timeout', async function () {
        try {
          run(
            `node modules/server-ce-scripts/scripts/change-compile-timeout.mjs --user-id=${userA.id} --compile-timeout=1200`
          )
          expect.fail('should error out')
        } catch (err) {
          expect(err.stderr.toString()).to.include('below 10 minutes')
        }
        expect(await getCompileTimeout(userA)).to.equal(userATimeout)
        expect(await getCompileTimeout(userB)).to.equal(userBTimeout)
      })
    })
  })

  describe('upgrade-user-features', function () {
    let userLatest, userSP1, userCustomTimeoutLower, userCustomTimeoutHigher
    beforeEach('create users', async function () {
      userLatest = new User()
      userSP1 = new User()
      userCustomTimeoutLower = new User()
      userCustomTimeoutHigher = new User()

      await Promise.all([
        userLatest.ensureUserExists(),
        userSP1.ensureUserExists(),
        userCustomTimeoutLower.ensureUserExists(),
        userCustomTimeoutHigher.ensureUserExists(),
      ])
    })

    const serverPro1Features = {
      collaborators: -1,
      dropbox: true,
      versioning: true,
      compileTimeout: 180,
      compileGroup: 'standard',
      references: true,
      trackChanges: true,
    }

    beforeEach('downgrade userSP1', async function () {
      await userSP1.mongoUpdate({ $set: { features: serverPro1Features } })
    })

    beforeEach('downgrade userCustomTimeoutLower', async function () {
      run(
        `node modules/server-ce-scripts/scripts/change-compile-timeout.mjs --user-id=${userCustomTimeoutLower.id} --compile-timeout=42`
      )
    })

    beforeEach('upgrade userCustomTimeoutHigher', async function () {
      run(
        `node modules/server-ce-scripts/scripts/change-compile-timeout.mjs --user-id=${userCustomTimeoutHigher.id} --compile-timeout=360`
      )
    })

    async function getFeatures() {
      return [
        await userLatest.getFeatures(),
        await userSP1.getFeatures(),
        await userCustomTimeoutLower.getFeatures(),
        await userCustomTimeoutHigher.getFeatures(),
      ]
    }

    let initialFeatures
    beforeEach('collect initial features', async function () {
      initialFeatures = await getFeatures()
    })

    it('should have prepared the right features', async function () {
      expect(initialFeatures).to.deep.equal([
        Settings.defaultFeatures,
        serverPro1Features,
        Object.assign({}, Settings.defaultFeatures, {
          compileTimeout: 42,
        }),
        Object.assign({}, Settings.defaultFeatures, {
          compileTimeout: 360,
        }),
      ])
    })

    describe('dry-run', function () {
      let output
      beforeEach('run script', function () {
        output = run(
          `node modules/server-ce-scripts/scripts/upgrade-user-features.mjs`
        )
      })

      it('should update SP1 features', function () {
        expect(output).to.include(userSP1.id)
      })

      it('should update lowerTimeout features', function () {
        expect(output).to.include(userCustomTimeoutLower.id)
      })

      it('should not update latest features', function () {
        expect(output).to.not.include(userLatest.id)
      })

      it('should not update higherTimeout features', function () {
        expect(output).to.not.include(userCustomTimeoutHigher.id)
      })

      it('should not change any features in the db', async function () {
        expect(await getFeatures()).to.deep.equal(initialFeatures)
      })
    })

    describe('live run', function () {
      let output
      beforeEach('run script', function () {
        output = run(
          `node modules/server-ce-scripts/scripts/upgrade-user-features.mjs --dry-run=false`
        )
      })

      it('should update SP1 features', function () {
        expect(output).to.include(userSP1.id)
      })

      it('should update lowerTimeout features', function () {
        expect(output).to.include(userCustomTimeoutLower.id)
      })

      it('should not update latest features', function () {
        expect(output).to.not.include(userLatest.id)
      })

      it('should not update higherTimeout features', function () {
        expect(output).to.not.include(userCustomTimeoutHigher.id)
      })

      it('should update features in the db', async function () {
        expect(await getFeatures()).to.deep.equal([
          Settings.defaultFeatures,
          Settings.defaultFeatures,
          Settings.defaultFeatures,
          Object.assign({}, Settings.defaultFeatures, {
            compileTimeout: 360,
          }),
        ])
      })
    })
  })

  describe('check-texlive-images', function () {
    const TEST_TL_IMAGE = 'sharelatex/texlive:2023'
    const TEST_TL_IMAGE_LIST =
      'sharelatex/texlive:2021,sharelatex/texlive:2022,sharelatex/texlive:2023'

    let output

    function buildCheckTexLiveCmd({
      SANDBOXED_COMPILES,
      TEX_LIVE_DOCKER_IMAGE,
      ALL_TEX_LIVE_DOCKER_IMAGES,
      OVERLEAF_IS_SERVER_PRO = true,
    }) {
      let cmd = `SANDBOXED_COMPILES=${SANDBOXED_COMPILES ? 'true' : 'false'}`
      if (TEX_LIVE_DOCKER_IMAGE) {
        cmd += ` TEX_LIVE_DOCKER_IMAGE='${TEX_LIVE_DOCKER_IMAGE}'`
      }
      if (ALL_TEX_LIVE_DOCKER_IMAGES) {
        cmd += ` ALL_TEX_LIVE_DOCKER_IMAGES='${ALL_TEX_LIVE_DOCKER_IMAGES}'`
      }
      if (OVERLEAF_IS_SERVER_PRO === true) {
        cmd += ` OVERLEAF_IS_SERVER_PRO=${OVERLEAF_IS_SERVER_PRO}`
      }
      return (
        cmd + ' node modules/server-ce-scripts/scripts/check-texlive-images.mjs'
      )
    }

    beforeEach(async function () {
      const user = new User()
      await user.ensureUserExists()
      await user.login()
      await user.createProject('test-project')
    })

    describe('when running in CE', function () {
      beforeEach('run script', function () {
        output = run(buildCheckTexLiveCmd({ OVERLEAF_IS_SERVER_PRO: false }))
      })

      it('should skip checks', function () {
        expect(output).to.include(
          'Running Overleaf Community Edition, skipping TexLive checks'
        )
      })
    })

    describe('when sandboxed compiles are disabled', function () {
      beforeEach('run script', function () {
        output = run(buildCheckTexLiveCmd({ SANDBOXED_COMPILES: false }))
      })

      it('should skip checks', function () {
        expect(output).to.include(
          'Sandboxed compiles disabled, skipping TexLive checks'
        )
      })
    })

    describe('when texlive configuration is incorrect', function () {
      it('should fail when TEX_LIVE_DOCKER_IMAGE is not set', function () {
        runAndExpectError(
          buildCheckTexLiveCmd({
            SANDBOXED_COMPILES: true,
            ALL_TEX_LIVE_DOCKER_IMAGES: TEST_TL_IMAGE_LIST,
          }),
          [
            'Sandboxed compiles require TEX_LIVE_DOCKER_IMAGE and ALL_TEX_LIVE_DOCKER_IMAGES to be set',
          ]
        )
      })

      it('should fail when ALL_TEX_LIVE_DOCKER_IMAGES is not set', function () {
        runAndExpectError(
          buildCheckTexLiveCmd({
            SANDBOXED_COMPILES: true,
            TEX_LIVE_DOCKER_IMAGE: TEST_TL_IMAGE,
          }),
          [
            'Sandboxed compiles require TEX_LIVE_DOCKER_IMAGE and ALL_TEX_LIVE_DOCKER_IMAGES to be set',
          ]
        )
      })

      it('should fail when TEX_LIVE_DOCKER_IMAGE is not defined in ALL_TEX_LIVE_DOCKER_IMAGES', function () {
        runAndExpectError(
          buildCheckTexLiveCmd({
            SANDBOXED_COMPILES: true,
            TEX_LIVE_DOCKER_IMAGE: 'tl-1',
            ALL_TEX_LIVE_DOCKER_IMAGES: 'tl-2,tl-3',
          }),
          [
            'TEX_LIVE_DOCKER_IMAGE must be included in ALL_TEX_LIVE_DOCKER_IMAGES',
          ]
        )
      })
    })

    describe(`when projects don't have 'imageName' set`, function () {
      beforeEach(async function () {
        await db.projects.updateMany({}, { $unset: { imageName: 1 } })
      })

      it('should fail and suggest running backfilling scripts', function () {
        runAndExpectError(
          buildCheckTexLiveCmd({
            SANDBOXED_COMPILES: true,
            TEX_LIVE_DOCKER_IMAGE: TEST_TL_IMAGE,
            ALL_TEX_LIVE_DOCKER_IMAGES: TEST_TL_IMAGE_LIST,
          }),
          [
            `'project.imageName' is not set for some projects`,
            `Set SKIP_TEX_LIVE_CHECK=true in config/variables.env, restart the instance and run 'bin/run-script scripts/backfill_project_image_name.mjs' to initialise TexLive image in existing projects`,
          ]
        )
      })
    })

    describe(`when projects have a null 'imageName'`, function () {
      beforeEach(async function () {
        await db.projects.updateMany({}, { $set: { imageName: null } })
      })

      it('should fail and suggest running backfilling scripts', function () {
        runAndExpectError(
          buildCheckTexLiveCmd({
            SANDBOXED_COMPILES: true,
            TEX_LIVE_DOCKER_IMAGE: TEST_TL_IMAGE,
            ALL_TEX_LIVE_DOCKER_IMAGES: TEST_TL_IMAGE_LIST,
          }),
          [
            `'project.imageName' is not set for some projects`,
            `Set SKIP_TEX_LIVE_CHECK=true in config/variables.env, restart the instance and run 'bin/run-script scripts/backfill_project_image_name.mjs' to initialise TexLive image in existing projects`,
          ]
        )
      })
    })

    describe('when TexLive ALL_TEX_LIVE_DOCKER_IMAGES are upgraded and used images are no longer available', function () {
      it('should suggest running a fixing script', async function () {
        await db.projects.updateMany({}, { $set: { imageName: TEST_TL_IMAGE } })
        runAndExpectError(
          buildCheckTexLiveCmd({
            SANDBOXED_COMPILES: true,
            TEX_LIVE_DOCKER_IMAGE: 'tl-1',
            ALL_TEX_LIVE_DOCKER_IMAGES: 'tl-1,tl-2',
          }),
          [
            `Set SKIP_TEX_LIVE_CHECK=true in config/variables.env, restart the instance and run 'bin/run-script scripts/update_project_image_name.js <dangling_image> <new_image>' to update projects to a new image`,
          ]
        )
      })
    })

    describe('success scenarios', function () {
      beforeEach(async function () {
        await db.projects.updateMany({}, { $set: { imageName: TEST_TL_IMAGE } })
      })

      it('should succeed when there are no changes to the TexLive images', function () {
        const output = run(
          buildCheckTexLiveCmd({
            SANDBOXED_COMPILES: true,
            TEX_LIVE_DOCKER_IMAGE: TEST_TL_IMAGE,
            ALL_TEX_LIVE_DOCKER_IMAGES: TEST_TL_IMAGE_LIST,
          })
        )
        expect(output).to.include('Done.')
      })

      it('should succeed when there are valid changes to the TexLive images', function () {
        const output = run(
          buildCheckTexLiveCmd({
            SANDBOXED_COMPILES: true,
            TEX_LIVE_DOCKER_IMAGE: 'new-image',
            ALL_TEX_LIVE_DOCKER_IMAGES: TEST_TL_IMAGE_LIST + ',new-image',
          })
        )
        expect(output).to.include('Done.')
      })
    })
  })
})
services/web/modules/server-ce-scripts/tsconfig.json (new file)
@@ -0,0 +1 @@
{ "extends": "../../../../tsconfig.backend.json" }