first commit
This commit is contained in:
@@ -0,0 +1,28 @@
|
||||
import { fileURLToPath } from 'node:url'
import { db } from '../../app/src/infrastructure/mongodb.js'
import { batchedUpdate } from '@overleaf/mongo-utils/batchedUpdate.js'
|
||||
|
||||
/**
 * Disable the AI error assistant for every user who has Writefull
 * explicitly disabled, keeping the two feature flags in sync.
 */
async function main() {
  // update all applicable user models
  const filter = { 'writefull.enabled': false }
  const update = { $set: { 'aiErrorAssistant.enabled': false } }
  await batchedUpdate(db.users, filter, update)
  console.log('completed syncing writefull state with error assist')
}
|
||||
|
||||
export default main
|
||||
|
||||
// Run the migration only when this file is executed directly. Without
// this guard (present in the sibling migration scripts), importing the
// module via `export default main` would run the update and call
// process.exit as a side effect of the import.
if (fileURLToPath(import.meta.url) === process.argv[1]) {
  try {
    await main()
    process.exit(0)
  } catch (error) {
    console.error({ error })
    process.exit(1)
  }
}
|
@@ -0,0 +1,46 @@
|
||||
import { db } from '../../app/src/infrastructure/mongodb.js'
|
||||
import mongodb from 'mongodb-legacy'
|
||||
import fs from 'node:fs'
|
||||
import { fileURLToPath } from 'node:url'
|
||||
import { chunkArray } from '../helpers/chunkArray.mjs'
|
||||
|
||||
const { ObjectId } = mongodb
|
||||
|
||||
/**
 * Mark the users listed in the file passed as argv[2] (one ObjectId per
 * line) as having an auto-created Writefull account.
 */
async function main() {
  // Input file: newline-separated user ids to transition.
  const inputPath = process.argv[2]
  const rawIds = fs.readFileSync(inputPath, 'utf8')

  // Drop blank lines, then parse each remaining line into an ObjectId.
  const userIdsList = rawIds
    .split('\n')
    .filter(line => line?.length)
    .map(line => new ObjectId(line))

  const chunks = chunkArray(userIdsList)
  console.log(
    `transitioning ${userIdsList.length} users to auto-account-created state in ${chunks.length} chunks`
  )

  // Update each chunk's autoCreatedAccount flag in turn.
  for (const chunkedIds of chunks) {
    console.log('batch update started')
    await db.users.updateMany(
      { _id: { $in: chunkedIds } },
      { $set: { 'writefull.autoCreatedAccount': true } }
    )
    console.log('batch completed')
  }
}
|
||||
|
||||
export default main
|
||||
|
||||
// Execute only when run directly (not when imported as a module).
if (fileURLToPath(import.meta.url) === process.argv[1]) {
  let exitCode = 0
  try {
    await main()
  } catch (error) {
    console.error({ error })
    exitCode = 1
  }
  process.exit(exitCode)
}
|
@@ -0,0 +1,45 @@
|
||||
import { db } from '../../app/src/infrastructure/mongodb.js'
|
||||
import mongodb from 'mongodb-legacy'
|
||||
import fs from 'node:fs'
|
||||
import { fileURLToPath } from 'node:url'
|
||||
|
||||
const { ObjectId } = mongodb
|
||||
|
||||
/**
 * Force-enable Writefull for the users listed in the file passed as
 * argv[2] (one ObjectId per line), putting them in the same state an
 * auto-load account reaches after its first load.
 */
async function main() {
  // Input file: newline-separated user ids to force into auto load.
  const inputPath = process.argv[2]
  const rawIds = fs.readFileSync(inputPath, 'utf8')

  // Drop blank lines, then parse each remaining line into an ObjectId.
  const userIdsList = rawIds
    .split('\n')
    .filter(line => line?.length)
    .map(line => new ObjectId(line))

  console.log(
    `enabling writefull with autoCreatedAccount:false for ${userIdsList.length} users`
  )

  // Set writefull.enabled true and autoCreatedAccount false — the state
  // an auto-load account is placed in after its first load.
  // NOTE: this does NOT call writefull's first-load function for the
  // user's account.
  const update = {
    $set: {
      'writefull.enabled': true,
      'writefull.autoCreatedAccount': false,
    },
  }
  await db.users.updateMany({ _id: { $in: userIdsList } }, update)
}
|
||||
|
||||
export default main
|
||||
|
||||
// Execute only when run directly (not when imported as a module).
if (fileURLToPath(import.meta.url) === process.argv[1]) {
  let exitCode = 0
  try {
    await main()
  } catch (error) {
    console.error({ error })
    exitCode = 1
  }
  process.exit(exitCode)
}
|
@@ -0,0 +1,50 @@
|
||||
import { db } from '../../app/src/infrastructure/mongodb.js'
|
||||
import { batchedUpdate } from '@overleaf/mongo-utils/batchedUpdate.js'
|
||||
import mongodb from 'mongodb-legacy'
|
||||
import fs from 'node:fs'
|
||||
import { fileURLToPath } from 'node:url'
|
||||
import { chunkArray } from '../helpers/chunkArray.mjs'
|
||||
|
||||
const { ObjectId } = mongodb
|
||||
|
||||
/**
 * Reset implicit `writefull.enabled: false` flags to `null` (unset) for
 * all users, then restore an explicit `false` for the users listed in
 * the opt-out file passed as argv[2] (one ObjectId per line).
 */
async function main() {
  // search for file of users who already explicitly opted out first
  const optOutPath = process.argv[2]
  const optedOutFile = fs.readFileSync(optOutPath, 'utf8')

  // Skip blank lines (e.g. the file's trailing newline) before mapping
  // to ObjectId — `new ObjectId('')` throws, which crashed this script
  // before it did any work. Matches the sibling migration scripts.
  const optedOutList = optedOutFile
    .split('\n')
    .filter(id => id?.length)
    .map(id => new ObjectId(id))

  console.log(`preserving opt-outs of ${optedOutList.length} users`)
  // update all applicable user models
  await batchedUpdate(
    db.users,
    { 'writefull.enabled': false }, // and is false
    { $set: { 'writefull.enabled': null } }
  )

  const chunks = chunkArray(optedOutList)

  // then reset any explicit false back to being false
  // Iterate over each chunk and perform the query
  for (const chunkedIds of chunks) {
    console.log('batch update started')
    await db.users.updateMany(
      { _id: { $in: chunkedIds } },
      { $set: { 'writefull.enabled': false } }
    )
    console.log('batch completed')
  }
}
|
||||
|
||||
export default main
|
||||
|
||||
// Execute only when run directly (not when imported as a module).
if (fileURLToPath(import.meta.url) === process.argv[1]) {
  let exitCode = 0
  try {
    await main()
  } catch (error) {
    console.error({ error })
    exitCode = 1
  }
  process.exit(exitCode)
}
|
Reference in New Issue
Block a user