first commit
This commit is contained in:
@@ -0,0 +1,44 @@
|
||||
import minimist from 'minimist'
|
||||
import { db, ObjectId } from '../../../app/src/infrastructure/mongodb.js'
|
||||
|
||||
/**
 * Set `features.compileTimeout` (seconds) for a single user.
 *
 * Flags: --user-id=<mongo ObjectId>  --compile-timeout=<1..600>
 * Exits 101 on bad arguments, 0 on success, 1 on unexpected error.
 */
async function main() {
  const argv = minimist(process.argv.slice(2), {
    string: ['user-id', 'compile-timeout'],
  })

  const { 'user-id': userId, 'compile-timeout': rawCompileTimeout } = argv
  const compileTimeout = parseInt(rawCompileTimeout, 10)
  if (
    !userId ||
    !ObjectId.isValid(userId) ||
    !rawCompileTimeout ||
    Number.isNaN(compileTimeout)
  ) {
    // BUG FIX: the usage line advertised `--timeout=`, but the flag that is
    // actually parsed above is `--compile-timeout`.
    console.error(
      `Usage: node ${import.meta.filename} --user-id=5a9414f259776c7900b300e6 --compile-timeout=90`
    )
    process.exit(101)
  }

  if (compileTimeout < 1 || compileTimeout > 600) {
    console.error(
      `The compile timeout must be positive number of seconds, below 10 minutes (600).`
    )
    process.exit(101)
  }

  await db.users.updateOne(
    { _id: new ObjectId(userId) },
    { $set: { 'features.compileTimeout': compileTimeout } }
  )
}

main()
  .then(() => {
    console.error('Done.')
    process.exit(0)
  })
  .catch(err => {
    console.error(err)
    process.exit(1)
  })
|
@@ -0,0 +1,64 @@
|
||||
import mongodb from 'mongodb-legacy'
|
||||
import {
|
||||
connectionPromise,
|
||||
db,
|
||||
} from '../../../app/src/infrastructure/mongodb.js'
|
||||
|
||||
const { ObjectId } = mongodb

// Minimum supported MongoDB server version, as [major, minor].
const MIN_MONGO_VERSION = [5, 0]

/**
 * Health check for the MongoDB deployment: reachable, recent enough, and
 * transaction-capable. Logs a short diagnostic and rethrows on failure.
 */
async function main() {
  const mongoClient = await connectionPromise.catch(err => {
    console.error('Cannot connect to mongodb')
    throw err
  })

  await checkMongoVersion(mongoClient)

  await testTransactions(mongoClient).catch(err => {
    console.error("Mongo instance doesn't support transactions")
    throw err
  })
}
|
||||
|
||||
/**
 * Probe transaction support by running a trivial read inside a
 * transaction. The session is always closed, even on failure.
 */
async function testTransactions(mongoClient) {
  const session = mongoClient.startSession()
  const probe = async () => {
    await db.users.findOne({ _id: new ObjectId() }, { session })
  }
  try {
    await session.withTransaction(probe)
  } finally {
    await session.endSession()
  }
}
|
||||
|
||||
/**
 * Abort (exit 1) when the connected MongoDB server is older than
 * MIN_MONGO_VERSION; only major/minor are compared.
 */
async function checkMongoVersion(mongoClient) {
  const buildInfo = await mongoClient.db().admin().buildInfo()
  const [major, minor] = buildInfo.versionArray
  const [minMajor, minMinor] = MIN_MONGO_VERSION

  const tooOld =
    major < minMajor || (major === minMajor && minor < minMinor)
  if (!tooOld) return

  const version = buildInfo.version
  const minVersion = MIN_MONGO_VERSION.join('.')
  console.error(
    `The MongoDB server has version ${version}, but Overleaf requires at least version ${minVersion}. Aborting.`
  )
  process.exit(1)
}
|
||||
|
||||
// Run the health check and translate the outcome into an exit code.
// Status goes to stderr so stdout stays clean for tooling.
main()
  .then(() => {
    console.error('Mongodb is up.')
    process.exit(0)
  })
  .catch(err => {
    console.error(err)
    process.exit(1)
  })
|
@@ -0,0 +1,18 @@
|
||||
import RedisWrapper from '../../../app/src/infrastructure/RedisWrapper.js'
|
||||
const rclient = RedisWrapper.client('health_check')

// Shared failure path: report the error and exit non-zero.
function fail(err) {
  console.error('Cannot connect to redis.')
  console.error(err)
  process.exit(1)
}

// Connection-level errors (e.g. refused connection) arrive via 'error'.
rclient.on('error', fail)

rclient.healthCheck(err => {
  if (err) return fail(err)
  console.error('Redis is up.')
  process.exit(0)
})
|
@@ -0,0 +1,101 @@
|
||||
import { db } from '../../../app/src/infrastructure/mongodb.js'
|
||||
|
||||
/**
 * Return the distinct TexLive image names referenced by existing projects.
 * Exits 1 with remediation instructions when any project lacks an
 * `imageName` (i.e. the backfill script has not been run yet).
 */
async function readImagesInUse() {
  // No projects yet → nothing can reference an image.
  const projectCount = await db.projects.countDocuments()
  if (projectCount === 0) {
    return []
  }

  const images = await db.projects.distinct('imageName')
  const incomplete = !images || images.length === 0 || images.includes(null)
  if (incomplete) {
    console.error(`'project.imageName' is not set for some projects`)
    console.error(
      `Set SKIP_TEX_LIVE_CHECK=true in config/variables.env, restart the instance and run 'bin/run-script scripts/backfill_project_image_name.mjs' to initialise TexLive image in existing projects.`
    )
    console.error(
      `After running the script, remove SKIP_TEX_LIVE_CHECK from config/variables.env and restart the instance.`
    )
    process.exit(1)
  }
  return images
}
|
||||
|
||||
// Exit early (successfully) unless this deployment is Server Pro;
// Community Edition does not run the TexLive checks.
function checkIsServerPro() {
  const isServerPro = process.env.OVERLEAF_IS_SERVER_PRO === 'true'
  if (isServerPro) return
  console.log('Running Overleaf Community Edition, skipping TexLive checks')
  process.exit(0)
}
|
||||
|
||||
// Exit early (successfully) when sandboxed compiles are turned off —
// the TexLive image configuration is only relevant with sandboxing.
function checkSandboxedCompilesAreEnabled() {
  const enabled = process.env.SANDBOXED_COMPILES === 'true'
  if (enabled) return
  console.log('Sandboxed compiles disabled, skipping TexLive checks')
  process.exit(0)
}
|
||||
|
||||
// Both TexLive image variables must be configured for sandboxed compiles;
// abort with exit 1 when either is missing or empty.
function checkTexLiveEnvVariablesAreProvided() {
  const { TEX_LIVE_DOCKER_IMAGE, ALL_TEX_LIVE_DOCKER_IMAGES } = process.env
  if (TEX_LIVE_DOCKER_IMAGE && ALL_TEX_LIVE_DOCKER_IMAGES) return
  console.error(
    'Sandboxed compiles require TEX_LIVE_DOCKER_IMAGE and ALL_TEX_LIVE_DOCKER_IMAGES being set.'
  )
  process.exit(1)
}
|
||||
|
||||
// Validate the TexLive image configuration against the projects collection.
// Exits 0 when checks pass or are skipped, 1 with remediation steps on
// misconfiguration. Order matters: the skip/edition/env guards each call
// process.exit before later checks run.
async function main() {
  // Operator escape hatch, e.g. while running the backfill/update scripts.
  if (process.env.SKIP_TEX_LIVE_CHECK === 'true') {
    console.log(`SKIP_TEX_LIVE_CHECK=true, skipping TexLive images check`)
    process.exit(0)
  }

  checkIsServerPro()
  checkSandboxedCompilesAreEnabled()
  checkTexLiveEnvVariablesAreProvided()

  // ALL_TEX_LIVE_DOCKER_IMAGES is a comma-separated list of image names.
  const allTexLiveImages = process.env.ALL_TEX_LIVE_DOCKER_IMAGES.split(',')

  // The default image must be one of the allowed images.
  if (!allTexLiveImages.includes(process.env.TEX_LIVE_DOCKER_IMAGE)) {
    console.error(
      `TEX_LIVE_DOCKER_IMAGE must be included in ALL_TEX_LIVE_DOCKER_IMAGES`
    )
    process.exit(1)
  }

  const currentImages = await readImagesInUse()

  // "Dangling" = referenced by some project but no longer offered.
  const danglingImages = []
  for (const image of currentImages) {
    if (!allTexLiveImages.includes(image)) {
      danglingImages.push(image)
    }
  }
  if (danglingImages.length > 0) {
    danglingImages.forEach(image =>
      console.error(
        `${image} is currently in use but it's not included in ALL_TEX_LIVE_DOCKER_IMAGES`
      )
    )
    console.error(
      `Set SKIP_TEX_LIVE_CHECK=true in config/variables.env, restart the instance and run 'bin/run-script scripts/update_project_image_name.js <dangling_image> <new_image>' to update projects to a new image.`
    )
    console.error(
      `After running the script, remove SKIP_TEX_LIVE_CHECK from config/variables.env and restart the instance.`
    )
    process.exit(1)
  }

  console.log('Done.')
}

// Any rejection (e.g. MongoDB unavailable) is fatal for the check.
main()
  .then(() => {
    process.exit(0)
  })
  .catch(err => {
    console.error(err)
    process.exit(1)
  })
|
@@ -0,0 +1,21 @@
|
||||
/**
 * WARNING
 * This file has been replaced by create-user.mjs. It is left in place for backwards compatibility with previous versions of Overleaf.
 * This will be used by the e2e tests that check the upgrade from the older versions, if these tests are updated or removed,
 * this file can be removed as well.
 */

// Thin shim: lazily load the replacement module and run its default export.
async function main() {
  const mod = await import('./create-user.mjs')
  const createUser = mod.default
  await createUser()
}

main()
  .then(() => {
    console.error('Done.')
    process.exit(0)
  })
  .catch(err => {
    console.error(err)
    process.exit(1)
  })
|
@@ -0,0 +1,50 @@
|
||||
import minimist from 'minimist'
|
||||
import { db } from '../../../app/src/infrastructure/mongodb.js'
|
||||
import UserRegistrationHandler from '../../../app/src/Features/User/UserRegistrationHandler.js'
|
||||
import { fileURLToPath } from 'url'
|
||||
|
||||
const filename = fileURLToPath(import.meta.url)

/**
 * Register a new user (optionally as an admin) and print the URL they must
 * visit to set their initial password.
 *
 * Flags: --email=<address> (required), --admin (boolean).
 * Exported as the default so the legacy create-user.js wrapper can reuse it.
 */
export default async function main() {
  const argv = minimist(process.argv.slice(2), {
    string: ['email'],
    boolean: ['admin'],
  })

  const { admin, email } = argv
  if (!email) {
    // BUG FIX: interpolate the script path — `filename` was computed above
    // but never used, and the usage line did not show the script name.
    console.error(`Usage: node ${filename} [--admin] --email=joe@example.com`)
    process.exit(1)
  }

  await new Promise((resolve, reject) => {
    UserRegistrationHandler.registerNewUserAndSendActivationEmail(
      email,
      (error, user, setNewPasswordUrl) => {
        if (error) {
          return reject(error)
        }
        // Registration has no admin flag, so set isAdmin afterwards.
        db.users.updateOne(
          { _id: user._id },
          { $set: { isAdmin: admin } },
          error => {
            if (error) {
              return reject(error)
            }

            console.log('')
            console.log(`\
Successfully created ${email} as ${admin ? 'an admin' : 'a'} user.

Please visit the following URL to set a password for ${email} and log in:

${setNewPasswordUrl}

`)
            resolve()
          }
        )
      }
    )
  })
}
|
@@ -0,0 +1,47 @@
|
||||
import UserGetter from '../../../app/src/Features/User/UserGetter.js'
|
||||
import UserDeleter from '../../../app/src/Features/User/UserDeleter.js'
|
||||
import { fileURLToPath } from 'url'
|
||||
|
||||
const filename = fileURLToPath(import.meta.url)

/**
 * Hard-delete the user identified by --email (or a bare email argument).
 * A missing user is treated as already deleted and reported, not fatal.
 */
async function main() {
  const email = (process.argv.slice(2).pop() || '').replace(/^--email=/, '')
  if (!email) {
    // BUG FIX: interpolate the script path — `filename` was computed above
    // but never used, and the usage line did not show the script name.
    console.error(`Usage: node ${filename} --email=joe@example.com`)
    process.exit(1)
  }

  await new Promise((resolve, reject) => {
    UserGetter.getUser({ email }, { _id: 1 }, function (error, user) {
      if (error) {
        return reject(error)
      }
      if (!user) {
        console.log(
          `user ${email} not in database, potentially already deleted`
        )
        return resolve()
      }
      const options = {
        ipAddress: '0.0.0.0',
        force: true,
      }
      UserDeleter.deleteUser(user._id, options, function (err) {
        if (err) {
          return reject(err)
        }
        resolve()
      })
    })
  })
}

main()
  .then(() => {
    console.error('Done.')
    process.exit(0)
  })
  .catch(err => {
    console.error(err)
    process.exit(1)
  })
|
@@ -0,0 +1,309 @@
|
||||
const minimist = require('minimist')
|
||||
const {
|
||||
mkdirSync,
|
||||
createWriteStream,
|
||||
existsSync,
|
||||
unlinkSync,
|
||||
renameSync,
|
||||
} = require('fs')
|
||||
const mongodb = require('../app/src/infrastructure/mongodb')
|
||||
const DocumentUpdaterHandler = require('../app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js')
|
||||
const ProjectZipStreamManager = require('../app/src/Features/Downloads/ProjectZipStreamManager.js')
|
||||
const logger = require('logger-sharelatex')
|
||||
const { Project } = require('../app/src/models/Project.js')
|
||||
const { User } = require('../app/src/models/User.js')
|
||||
const readline = require('readline')
|
||||
|
||||
/**
 * Parse the CLI flags for this script; returns the minimist argv object.
 */
function parseArgs() {
  const spec = {
    boolean: ['help', 'list', 'export-all'],
    string: ['user-id', 'output', 'project-id', 'output-dir', 'log-level'],
    alias: { help: 'h' },
    default: { 'log-level': 'error' },
  }
  return minimist(process.argv.slice(2), spec)
}
|
||||
|
||||
// Print CLI usage for the export script.
function showUsage() {
  console.log(`
Usage: node scripts/export-user-projects.mjs [options]
  --help, -h      Show help
  --user-id       The user ID (required unless using --export-all or --project-id)
  --project-id    Export a single project (cannot be used with --user-id or --export-all)
  --list          List user's projects (cannot be used with --output)
  --output        Output zip file (for single export operations)
  --export-all    Export all users' projects (requires --output-dir)
  --output-dir    Directory for storing all users' export files
  --log-level     Log level (trace|debug|info|warn|error|fatal) [default: error]
`)
}

// Fetch every user (email field only); callback-style Mongoose query.
function findAllUsers(callback) {
  User.find({}, 'email', callback)
}

// Fetch projects owned by userId (name field only); callback-style.
function findUserProjects(userId, callback) {
  Project.find({ owner_ref: userId }, 'name', callback)
}
|
||||
|
||||
// Print "<id> - <name>" for each of the user's projects, then callback.
function listProjects(userId, callback) {
  findUserProjects(userId, (err, projects) => {
    if (err) {
      return callback(err)
    }
    for (const project of projects) {
      console.log(`${project._id} - ${project.name}`)
    }
    callback()
  })
}
|
||||
|
||||
/**
 * Render a 40-column progress bar on stdout. No-op when stdout is not a
 * TTY (e.g. piped output) since the line rewriting would garble logs.
 */
function updateProgress(current, total) {
  if (!process.stdout.isTTY) return

  const WIDTH = 40
  const FULL = '\u2588' // Unicode "Full Block"
  const EMPTY = '\u2591' // Unicode "Light Shade"

  const fraction = current / total
  const filled = Math.floor(fraction * WIDTH)
  const bar = FULL.repeat(filled) + EMPTY.repeat(WIDTH - filled)
  const percentage = Math.floor(fraction * 100)

  readline.clearLine(process.stdout, 0)
  readline.cursorTo(process.stdout, 0)
  process.stdout.write(
    `Progress: [${bar}] ${percentage}% (${current}/${total} projects)`
  )
}
|
||||
|
||||
// Export all projects owned by `userId` into one zip at `output`.
// Each project is flushed out of the document-updater first so the zip
// reflects the latest edits. Callback-style; any error aborts immediately.
function exportUserProjectsToZip(userId, output, callback) {
  findUserProjects(userId, function (err, projects) {
    if (err) return callback(err)
    const allIds = projects.map(p => p._id)
    if (allIds.length === 0) {
      console.log('No projects found for user')
      return callback()
    }

    console.log('Flushing projects to MongoDB...')
    let completed = 0

    // Flush projects one at a time (serial recursion) so the
    // document-updater is not hammered; when done, build the zip.
    function flushNext() {
      if (completed >= allIds.length) {
        createZip()
        return
      }

      DocumentUpdaterHandler.flushProjectToMongoAndDelete(
        allIds[completed],
        function (err) {
          if (err) return callback(err)
          updateProgress(completed + 1, allIds.length)
          completed++
          flushNext()
        }
      )
    }

    // Stream a combined zip of all projects atomically to `output`.
    function createZip() {
      console.log('\nAll projects flushed, creating zip...')
      console.log(
        `Exporting ${allIds.length} projects for user ${userId} to ${output}`
      )

      ProjectZipStreamManager.createZipStreamForMultipleProjects(
        allIds,
        function (err, zipStream) {
          if (err) return callback(err)

          // The archiver emits 'progress' as entries are appended.
          zipStream.on('progress', progress => {
            updateProgress(progress.entries.total, allIds.length)
          })

          writeStreamToFileAtomically(zipStream, output, function (err) {
            if (err) return callback(err)
            // Clear the progress bar line before the final message.
            readline.clearLine(process.stdout, 0)
            readline.cursorTo(process.stdout, 0)
            console.log(
              `Successfully exported ${allIds.length} projects to ${output}`
            )
            callback()
          })
        }
      )
    }

    flushNext()
  })
}
|
||||
|
||||
/**
 * Stream `stream` into `finalPath` atomically: write to a unique tmp file
 * first, then rename into place. On failure the tmp file is removed
 * (best-effort) and the error is passed to `callback`.
 *
 * BUG FIX: errors emitted by the *source* stream are now handled —
 * `stream.pipe(dest)` does not forward them, so previously the callback
 * never fired and the tmp file leaked. A guard also prevents the callback
 * from being invoked more than once.
 */
function writeStreamToFileAtomically(stream, finalPath, callback) {
  const tmpPath = `${finalPath}-${Date.now()}.tmp`
  // 'wx' fails rather than overwriting an existing tmp file.
  const outStream = createWriteStream(tmpPath, { flags: 'wx' })

  let done = false
  function finish(err) {
    if (done) return
    done = true
    callback(err)
  }

  function removeTmp() {
    try {
      unlinkSync(tmpPath)
    } catch {
      console.log('Leaving behind tmp file, please cleanup manually:', tmpPath)
    }
  }

  stream.on('error', function (err) {
    outStream.destroy()
    removeTmp()
    finish(err)
  })

  stream.pipe(outStream)

  outStream.on('error', function (err) {
    removeTmp()
    finish(err)
  })

  outStream.on('finish', function () {
    try {
      renameSync(tmpPath, finalPath)
      finish()
    } catch (err) {
      removeTmp()
      finish(err)
    }
  })
}
|
||||
|
||||
// Export one project (flushed from the document-updater first) to `output`.
function exportSingleProject(projectId, output, callback) {
  console.log('Flushing project to MongoDB...')
  DocumentUpdaterHandler.flushProjectToMongoAndDelete(
    projectId,
    function (err) {
      if (err) return callback(err)

      console.log(`Exporting project ${projectId} to ${output}`)
      ProjectZipStreamManager.createZipStreamForProject(
        projectId,
        function (err, zipStream) {
          if (err) return callback(err)
          writeStreamToFileAtomically(zipStream, output, function (err) {
            if (err) return callback(err)
            console.log('Exported project to', output)
            callback()
          })
        }
      )
    }
  )
}

// Export every user's projects, one zip per user, into outputDir.
// Resumable: users whose output file already exists are skipped.
function exportAllUsersProjects(outputDir, callback) {
  findAllUsers(function (err, users) {
    if (err) return callback(err)

    console.log(`Found ${users.length} users to process`)
    mkdirSync(outputDir, { recursive: true })

    let userIndex = 0
    // Serial recursion over users; any error aborts the whole run.
    function processNextUser() {
      if (userIndex >= users.length) {
        return callback()
      }

      const user = users[userIndex]
      // Sanitise the email for use in a filename.
      const safeEmail = user.email.toLowerCase().replace(/[^a-z0-9]/g, '_')
      const outputFile = `${outputDir}/${user._id}_${safeEmail}_projects.zip`

      if (existsSync(outputFile)) {
        console.log(`Skipping ${user._id} - file already exists`)
        userIndex++
        return processNextUser()
      }

      console.log(
        `Processing user ${userIndex + 1}/${users.length} (${user._id})`
      )
      exportUserProjectsToZip(user._id, outputFile, function (err) {
        if (err) return callback(err)
        userIndex++
        processNextUser()
      })
    }

    processNextUser()
  })
}

// CLI entry point: validates flag combinations and dispatches to the
// list/export helpers. `cleanup` is the shared completion callback.
function main() {
  const argv = parseArgs()

  if (argv.help) {
    showUsage()
    process.exit(0)
  }

  if (argv['log-level']) {
    logger.logger.level(argv['log-level'])
  }

  if (argv.list && argv.output) {
    console.error('Cannot use both --list and --output together')
    process.exit(1)
  }

  // The three selection modes are mutually exclusive.
  if (
    [argv['user-id'], argv['project-id'], argv['export-all']].filter(Boolean)
      .length > 1
  ) {
    console.error('Can only use one of: --user-id, --project-id, --export-all')
    process.exit(1)
  }

  function cleanup(err) {
    // Allow the script to finish gracefully then exit
    setTimeout(() => {
      if (err) {
        logger.error({ err }, 'Error in export-user-projects script')
        process.exit(1)
      } else {
        console.log('Done.')
        process.exit(0)
      }
    }, 1000)
  }

  if (argv.list) {
    if (!argv['user-id']) {
      console.error('--list requires --user-id')
      process.exit(1)
    }
    listProjects(argv['user-id'], cleanup)
    return
  }

  if (argv['export-all']) {
    if (!argv['output-dir']) {
      console.error('--export-all requires --output-dir')
      process.exit(1)
    }
    exportAllUsersProjects(argv['output-dir'], cleanup)
    return
  }

  if (!argv.output) {
    console.error('Please specify an --output zip file')
    process.exit(1)
  }

  if (argv['project-id']) {
    exportSingleProject(argv['project-id'], argv.output, cleanup)
  } else if (argv['user-id']) {
    exportUserProjectsToZip(argv['user-id'], argv.output, cleanup)
  } else {
    console.error(
      'Please specify either --user-id, --project-id, or --export-all'
    )
    process.exit(1)
  }
}

// Wait for the MongoDB connection before running any commands.
mongodb
  .waitForDb()
  .then(main)
  .catch(err => {
    console.error('Failed to connect to MongoDB:', err)
    process.exit(1)
  })
|
@@ -0,0 +1,232 @@
|
||||
import minimist from 'minimist'
|
||||
import {
|
||||
mkdirSync,
|
||||
createWriteStream,
|
||||
existsSync,
|
||||
unlinkSync,
|
||||
renameSync,
|
||||
} from 'fs'
|
||||
import { pipeline } from 'stream/promises'
|
||||
import DocumentUpdaterHandler from '../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js'
|
||||
import ProjectZipStreamManager from '../../../app/src/Features/Downloads/ProjectZipStreamManager.mjs'
|
||||
import logger from '@overleaf/logger'
|
||||
import { promisify } from '@overleaf/promise-utils'
|
||||
import { gracefulShutdown } from '../../../app/src/infrastructure/GracefulShutdown.js'
|
||||
import { Project } from '../../../app/src/models/Project.js'
|
||||
import { User } from '../../../app/src/models/User.js'
|
||||
import readline from 'readline'
|
||||
|
||||
/**
 * Parse CLI flags for the export script; returns the minimist argv object.
 */
function parseArgs() {
  const options = {
    boolean: ['help', 'list', 'export-all'],
    string: ['user-id', 'output', 'project-id', 'output-dir', 'log-level'],
    alias: { help: 'h' },
    default: { 'log-level': 'error' },
  }
  return minimist(process.argv.slice(2), options)
}
|
||||
|
||||
// Print CLI usage for the export script.
function showUsage() {
  console.log(`
Usage: node scripts/export-user-projects.mjs [options]
  --help, -h      Show help
  --user-id       The user ID (required unless using --export-all or --project-id)
  --project-id    Export a single project (cannot be used with --user-id or --export-all)
  --list          List user's projects (cannot be used with --output)
  --output        Output zip file (for single export operations)
  --export-all    Export all users' projects (requires --output-dir)
  --output-dir    Directory for storing all users' export files
  --log-level     Log level (trace|debug|info|warn|error|fatal) [default: error]
`)
}
|
||||
|
||||
// Fetch every user, selecting only the email field.
async function findAllUsers() {
  return await User.find({}, 'email').exec()
}

// Fetch all projects owned by userId, selecting only the name field.
async function findUserProjects(userId) {
  return await Project.find({ owner_ref: userId }, 'name').exec()
}
|
||||
|
||||
// Print "<id> - <name>" for each of the user's projects.
async function listProjects(userId) {
  const projects = await findUserProjects(userId)
  projects.forEach(p => {
    console.log(`${p._id} - ${p.name}`)
  })
}
|
||||
|
||||
// Promise adapter for the callback-style multi-project zip API; bound so
// `this` inside the manager still refers to ProjectZipStreamManager.
const createZipStreamForMultipleProjectsAsync = promisify(
  ProjectZipStreamManager.createZipStreamForMultipleProjects
).bind(ProjectZipStreamManager)
|
||||
|
||||
/**
 * Draw a 40-column progress bar on stdout, rewriting the current line.
 * Skipped entirely when stdout is not a TTY (piped/redirected output).
 */
function updateProgress(current, total) {
  if (!process.stdout.isTTY) return

  const barWidth = 40
  const fullBlock = '\u2588' // Unicode "Full Block"
  const lightShade = '\u2591' // Unicode "Light Shade"

  const done = Math.floor((current / total) * barWidth)
  const bar = fullBlock.repeat(done) + lightShade.repeat(barWidth - done)
  const percentage = Math.floor((current / total) * 100)

  readline.clearLine(process.stdout, 0)
  readline.cursorTo(process.stdout, 0)
  process.stdout.write(
    `Progress: [${bar}] ${percentage}% (${current}/${total} projects)`
  )
}
|
||||
|
||||
// Export all projects owned by `userId` into one zip at `output`.
// Flushes each project out of the document-updater first so the zip
// reflects the latest edits; writes the archive atomically.
async function exportUserProjectsToZip(userId, output) {
  const projects = await findUserProjects(userId)
  const allIds = projects.map(p => p._id)
  if (allIds.length === 0) {
    console.log('No projects found for user')
    return
  }
  console.log('Flushing projects to MongoDB...')
  // Sequential on purpose: avoid hammering the document-updater.
  for (const [index, id] of allIds.entries()) {
    await DocumentUpdaterHandler.promises.flushProjectToMongoAndDelete(id)
    updateProgress(index + 1, allIds.length)
  }
  console.log('\nAll projects flushed, creating zip...')

  console.log(
    `Exporting ${allIds.length} projects for user ${userId} to ${output}`
  )

  const zipStream = await createZipStreamForMultipleProjectsAsync(allIds)

  // The archiver emits 'progress' as entries are appended.
  zipStream.on('progress', progress => {
    updateProgress(progress.entries.total, allIds.length)
  })

  await writeStreamToFileAtomically(zipStream, output)
  // Clear the progress-bar line before the final message.
  readline.clearLine(process.stdout, 0)
  readline.cursorTo(process.stdout, 0)
  console.log(`Successfully exported ${allIds.length} projects to ${output}`)
}
|
||||
|
||||
/**
 * Stream `stream` into `finalPath` atomically: write to a unique tmp file,
 * then rename into place. On any failure the tmp file is removed
 * (best-effort) and the error is rethrown.
 */
async function writeStreamToFileAtomically(stream, finalPath) {
  const tmpPath = `${finalPath}-${Date.now()}.tmp`
  // 'wx' fails rather than overwriting an existing tmp file.
  const sink = createWriteStream(tmpPath, { flags: 'wx' })

  const discardTmp = () => {
    try {
      unlinkSync(tmpPath)
    } catch {
      console.log('Leaving behind tmp file, please cleanup manually:', tmpPath)
    }
  }

  try {
    await pipeline(stream, sink)
    renameSync(tmpPath, finalPath)
  } catch (err) {
    discardTmp()
    throw err
  }
}
|
||||
|
||||
// Promise adapter for the single-project zip API; bound so `this` inside
// the manager still refers to ProjectZipStreamManager.
const createZipStreamForProjectAsync = promisify(
  ProjectZipStreamManager.createZipStreamForProject
).bind(ProjectZipStreamManager)

// Export one project (flushed from the document-updater first) to `output`.
async function exportSingleProject(projectId, output) {
  console.log('Flushing project to MongoDB...')
  await DocumentUpdaterHandler.promises.flushProjectToMongoAndDelete(projectId)
  console.log(`Exporting project ${projectId} to ${output}`)
  const zipStream = await createZipStreamForProjectAsync(projectId)
  await writeStreamToFileAtomically(zipStream, output)
  console.log('Exported project to', output)
}
|
||||
|
||||
/**
 * Export every user's projects, one zip per user, into `outputDir` as
 * `<userId>_<sanitised email>_projects.zip`. Existing files are skipped,
 * so interrupted runs can be resumed.
 */
async function exportAllUsersProjects(outputDir) {
  const users = await findAllUsers()
  console.log(`Found ${users.length} users to process`)

  mkdirSync(outputDir, { recursive: true })

  for (const [index, user] of users.entries()) {
    const safeEmail = user.email.toLowerCase().replace(/[^a-z0-9]/g, '_')
    const outputFile = `${outputDir}/${user._id}_${safeEmail}_projects.zip`

    if (existsSync(outputFile)) {
      console.log(`Skipping ${user._id} - file already exists`)
      continue
    }

    console.log(`Processing user ${index + 1}/${users.length} (${user._id})`)
    await exportUserProjectsToZip(user._id, outputFile)
  }
}
|
||||
|
||||
// CLI entry point: validates flag combinations and dispatches to the
// list/export helpers. Always runs gracefulShutdown afterwards so open
// connections do not keep the process alive.
async function main() {
  const argv = parseArgs()

  if (argv.help) {
    showUsage()
    process.exit(0)
  }

  if (argv['log-level']) {
    logger.logger.level(argv['log-level'])
  }

  if (argv.list && argv.output) {
    console.error('Cannot use both --list and --output together')
    process.exit(1)
  }

  // The three selection modes are mutually exclusive.
  if (
    [argv['user-id'], argv['project-id'], argv['export-all']].filter(Boolean)
      .length > 1
  ) {
    console.error('Can only use one of: --user-id, --project-id, --export-all')
    process.exit(1)
  }

  try {
    if (argv.list) {
      if (!argv['user-id']) {
        console.error('--list requires --user-id')
        process.exit(1)
      }
      await listProjects(argv['user-id'])
      return
    }

    if (argv['export-all']) {
      if (!argv['output-dir']) {
        console.error('--export-all requires --output-dir')
        process.exit(1)
      }
      await exportAllUsersProjects(argv['output-dir'])
      return
    }

    if (!argv.output) {
      console.error('Please specify an --output zip file')
      process.exit(1)
    }

    if (argv['project-id']) {
      await exportSingleProject(argv['project-id'], argv.output)
    } else if (argv['user-id']) {
      await exportUserProjectsToZip(argv['user-id'], argv.output)
    } else {
      console.error(
        'Please specify either --user-id, --project-id, or --export-all'
      )
      process.exit(1)
    }
  } finally {
    // No extra resources of our own to close; gracefulShutdown tears down
    // shared infrastructure (db connections etc.).
    await gracefulShutdown({ close: done => done() })
  }
}

// Uses process.exitCode (not process.exit) so pending I/O can flush.
main()
  .then(async () => {
    console.log('Done.')
  })
  .catch(async err => {
    logger.error({ err }, 'Error in export-user-projects script')
    process.exitCode = 1
  })
|
@@ -0,0 +1,197 @@
|
||||
// Script to migrate user emails using a CSV file with the following format:
|
||||
//
|
||||
// oldEmail,newEmail
|
||||
//
|
||||
// The script will iterate through the CSV file and update the user's email
|
||||
// address from oldEmail to newEmail, after checking all the email addresses
|
||||
// for duplicates.
|
||||
//
|
||||
// Intended for Server Pro customers migrating user emails from one domain to
|
||||
// another.
|
||||
|
||||
import minimist from 'minimist'
|
||||
|
||||
import os from 'os'
|
||||
import fs from 'fs'
|
||||
import * as csv from 'csv/sync'
|
||||
import { parseEmail } from '../../../app/src/Features/Helpers/EmailHelper.js'
|
||||
import UserGetter from '../../../app/src/Features/User/UserGetter.js'
|
||||
import UserUpdater from '../../../app/src/Features/User/UserUpdater.js'
|
||||
import UserSessionsManager from '../../../app/src/Features/User/UserSessionsManager.js'
|
||||
|
||||
// Recorded in audit-log entries so a migration run can be traced back to
// the machine and time it ran.
const hostname = os.hostname()
const scriptTimestamp = new Date().toISOString()

// support command line option of --commit to actually do the migration
const argv = minimist(process.argv.slice(2), {
  boolean: ['commit', 'ignore-missing'],
  string: ['admin-id'],
  alias: {
    'ignore-missing': 'continue',
  },
  default: {
    commit: false,
    'ignore-missing': false,
    'admin-id': '000000000000000000000000', // use a dummy admin ID for script audit log entries
  },
})

// display usage if no CSV file is provided
if (argv._.length === 0) {
  console.log(
    'Usage: node migrate_user_emails.mjs [--commit] [--continue|--ignore-missing] [--admin-id=ADMIN_USER_ID] <csv_file>'
  )
  console.log('  --commit: actually do the migration (default: false)')
  console.log(
    '  --continue|--ignore-missing: continue on missing or already-migrated users'
  )
  console.log('  --admin-id: admin user ID to use for audit log entries')
  console.log('  <csv_file>: CSV file with old and new email addresses')
  process.exit(1)
}
|
||||
|
||||
/**
 * Validate and normalise CSV rows of [oldEmail, newEmail]. Throws on any
 * malformed address, duplicate within a column, or overlap between the
 * two columns. Returns the parsed (normalised) pairs.
 */
function filterEmails(rows) {
  // check that emails have a valid format
  const pairs = []
  const oldSeen = new Set()
  const newSeen = new Set()

  for (const [oldEmail, newEmail] of rows) {
    const from = parseEmail(oldEmail)
    const to = parseEmail(newEmail)

    if (!from) {
      throw new Error(`invalid old email "${oldEmail}"`)
    }
    if (!to) {
      throw new Error(`invalid new email "${newEmail}"`)
    }
    // Check for duplicates and overlaps
    if (oldSeen.has(from)) {
      throw new Error(`Duplicate old emails found in CSV file ${oldEmail}.`)
    }
    if (newSeen.has(to)) {
      throw new Error(`Duplicate new emails found in CSV file ${newEmail}.`)
    }
    if (oldSeen.has(to) || newSeen.has(from)) {
      throw new Error(
        `Old and new emails cannot overlap ${oldEmail} ${newEmail}`
      )
    }

    oldSeen.add(from)
    newSeen.add(to)
    pairs.push([from, to])
  }

  return pairs
}
|
||||
|
||||
/**
 * Cross-check each [oldEmail, newEmail] pair against the database.
 *
 * The old address must belong to an existing user's primary email (unless
 * --ignore-missing is set, in which case the pair is skipped with a log line),
 * and the new address must not be registered to any user.
 *
 * @param {Array<[string, string]>} emails - parsed [oldEmail, newEmail] pairs
 * @returns {Promise<Array<[string, string]>>} pairs safe to migrate
 * @throws {Error} on a missing old-email user or an already-taken new email
 */
async function checkEmailsAgainstDb(emails) {
  const verified = []
  for (const [oldEmail, newEmail] of emails) {
    const existingUser = await UserGetter.promises.getUserByMainEmail(
      oldEmail,
      { _id: 1 }
    )
    if (!existingUser) {
      if (!argv['ignore-missing']) {
        throw new Error(`no user found with email "${oldEmail}"`)
      }
      console.log(
        `User with email "${oldEmail}" not found, skipping update to "${newEmail}"`
      )
      continue
    }
    // The target address must be free across all users (primary or secondary).
    const conflictingUser = await UserGetter.promises.getUserByAnyEmail(
      newEmail,
      { _id: 1 }
    )
    if (conflictingUser) {
      throw new Error(
        `new email "${newEmail}" already exists for user ${conflictingUser._id}`
      )
    }
    verified.push([oldEmail, newEmail])
  }
  return verified
}
|
||||
|
||||
/**
 * Run the email migration for each [oldEmail, newEmail] pair.
 *
 * For each pair the user is re-fetched by their current primary email (the DB
 * may have changed since the pre-flight check). With --commit, the user's
 * sessions are removed from Redis and the default email address is migrated,
 * recording an audit log entry; without --commit this is a dry run that only
 * reports what would change.
 *
 * @param {Array<[string, string]>} emails - [oldEmail, newEmail] pairs
 * @throws {Error} when a user is missing (unless --ignore-missing), or at the
 *   end of the run if any individual migration failed
 */
async function doMigration(emails) {
  let success = 0
  let failure = 0
  let skipped = 0
  for (const [oldEmail, newEmail] of emails) {
    const userWithEmail = await UserGetter.promises.getUserByMainEmail(
      oldEmail,
      {
        _id: 1,
      }
    )
    if (!userWithEmail) {
      if (argv['ignore-missing']) {
        continue
      } else {
        throw new Error(`no user found with email "${oldEmail}"`)
      }
    }
    if (argv.commit) {
      console.log(
        `Updating user ${userWithEmail._id} email "${oldEmail}" to "${newEmail}"\n`
      )
      try {
        // log out all the user's sessions before changing the email address
        await UserSessionsManager.promises.removeSessionsFromRedis(
          userWithEmail
        )

        await UserUpdater.promises.migrateDefaultEmailAddress(
          userWithEmail._id,
          oldEmail,
          newEmail,
          {
            initiatorId: argv['admin-id'],
            ipAddress: hostname,
            extraInfo: {
              script: 'migrate_user_emails.js',
              runAt: scriptTimestamp,
            },
          }
        )
        success++
      } catch (err) {
        // Report the failure on stderr (was console.log, which sent errors to
        // stdout) and keep going so one bad row doesn't abort the whole batch.
        console.error(err)
        failure++
      }
    } else {
      console.log(`Dry run, skipping update from ${oldEmail} to ${newEmail}`)
      skipped++
    }
  }
  console.log('Success: ', success, 'Failure: ', failure, 'Skipped: ', skipped)
  if (failure > 0) {
    throw new Error('Some email migrations failed')
  }
}
|
||||
|
||||
/**
 * Top-level workflow: read the CSV file given as the first positional
 * argument, validate the email pairs locally, cross-check them against the
 * database, then run the migration.
 */
async function migrateEmails() {
  console.log('Starting email migration script')
  const [csvFilePath] = argv._
  const csvContents = fs.readFileSync(csvFilePath, 'utf8')
  const rows = csv.parse(csvContents)
  console.log('Number of users to migrate: ', rows.length)
  const validatedEmails = filterEmails(rows)
  const existingUserEmails = await checkEmailsAgainstDb(validatedEmails)
  await doMigration(existingUserEmails)
}
|
||||
|
||||
// Script entry point: run the migration and map the outcome to an exit code.
;(async () => {
  try {
    await migrateEmails()
    console.log('Done.')
    process.exit(0)
  } catch (error) {
    console.error(error)
    process.exit(1)
  }
})()
|
@@ -0,0 +1,34 @@
|
||||
import minimist from 'minimist'
|
||||
import { db } from '../../../app/src/infrastructure/mongodb.js'
|
||||
import { fileURLToPath } from 'url'
|
||||
|
||||
// Absolute filesystem path of this script, for use in the usage message.
const filename = fileURLToPath(import.meta.url)
|
||||
|
||||
/**
 * Rename one of a user's tags (folders) in place.
 *
 * Usage: node <script> --user-id=<id> --old-name=<name> --new-name=<name>
 *
 * Exits with code 101 when any required argument is missing. The update is a
 * no-op if the user has no tag with the old name.
 */
async function main() {
  const argv = minimist(process.argv.slice(2), {
    // 'new-name' was previously misspelled 'new_name', so --new-name values
    // were not forced to strings and numeric-looking names got coerced.
    string: ['user-id', 'old-name', 'new-name'],
  })

  const { 'user-id': userId, 'old-name': oldName, 'new-name': newName } = argv
  if (!userId || !oldName || !newName) {
    console.error(
      // Interpolate the script path (was the literal `$(unknown)` — a
      // shell-style substitution that never worked in a JS template literal).
      `Usage: node ${filename} --user-id=5a9414f259776c7900b300e6 --old-name=my-folder --new-name=my-folder-renamed`
    )
    process.exit(101)
  }

  await db.tags.updateOne(
    { name: oldName, user_id: userId },
    { $set: { name: newName } }
  )
}
|
||||
|
||||
// Script entry point: run the rename and map the outcome to an exit code.
;(async () => {
  try {
    await main()
    console.error('Done.')
    process.exit(0)
  } catch (err) {
    console.error(err)
    process.exit(1)
  }
})()
|
@@ -0,0 +1,61 @@
|
||||
import Settings from '@overleaf/settings'
|
||||
import logger from '@overleaf/logger'
|
||||
import { db } from '../../../app/src/infrastructure/mongodb.js'
|
||||
import {
|
||||
mergeFeatures,
|
||||
compareFeatures,
|
||||
} from '../../../app/src/Features/Subscription/FeaturesHelper.js'
|
||||
import { fileURLToPath } from 'url'
|
||||
// Dry-run by default; pass --dry-run=false to actually write changes.
const DRY_RUN = !process.argv.includes('--dry-run=false')
|
||||
|
||||
/**
 * Merge the default feature set into every user's features, logging each
 * upgrade and persisting it unless running in dry-run mode.
 *
 * @param {boolean} DRY_RUN - when true, log what would change but do not write
 * @param {Object} defaultFeatures - baseline features merged into each user
 */
async function main(DRY_RUN, defaultFeatures) {
  logger.info({ defaultFeatures }, 'default features')

  const userCursor = db.users.find(
    {},
    { projection: { _id: 1, email: 1, features: 1 } }
  )
  for await (const user of userCursor) {
    const mergedFeatures = mergeFeatures(user.features, defaultFeatures)
    const featureDiff = compareFeatures(mergedFeatures, user.features)
    // Nothing to do when the user's features already cover the defaults.
    if (Object.keys(featureDiff).length === 0) {
      continue
    }

    logger.warn(
      {
        userId: user._id,
        email: user.email,
        oldFeatures: user.features,
        newFeatures: mergedFeatures,
      },
      'user features upgraded'
    )

    if (DRY_RUN) {
      continue
    }
    await db.users.updateOne(
      { _id: user._id },
      { $set: { features: mergedFeatures } }
    )
  }
}
|
||||
|
||||
// Expose main for reuse; the guard below runs it when invoked directly.
export default main
|
||||
|
||||
// Absolute filesystem path of this module, compared against argv[1] below to
// detect direct execution.
const filename = fileURLToPath(import.meta.url)
|
||||
|
||||
// Run only when executed directly (not when imported as a module).
if (filename === process.argv[1]) {
  if (DRY_RUN) {
    console.error('---')
    console.error('Dry-run enabled, use --dry-run=false to commit changes')
    console.error('---')
  }
  ;(async () => {
    try {
      await main(DRY_RUN, Settings.defaultFeatures)
      console.log('Done.')
      process.exit(0)
    } catch (error) {
      console.error({ error })
      process.exit(1)
    }
  })()
}
|
Reference in New Issue
Block a user