first commit
97 services/web/app/src/Features/Analytics/AccountMappingHelper.js Normal file
@@ -0,0 +1,97 @@
const mappings = new Map([
  ['salesforce_id', generateSubscriptionToSalesforceMapping],
  ['v1_id', generateSubscriptionToV1Mapping],
  ['recurlySubscription_id', generateSubscriptionToRecurlyMapping],
])

/**
 * @typedef {(import('./types.d.ts').AccountMapping)} AccountMapping
 */

/**
 *
 * @param {Object} subscription
 * @param {Object} updatedSubscription
 * @return {Array<AccountMapping>}
 */
function extractAccountMappingsFromSubscription(
  subscription,
  updatedSubscription
) {
  const accountMappings = []
  mappings.forEach((generateMapping, param) => {
    if (updatedSubscription[param] || updatedSubscription[param] === '') {
      if (subscription[param] !== updatedSubscription[param]) {
        accountMappings.push(
          generateMapping(subscription.id, updatedSubscription[param])
        )
      }
    }
  })
  return accountMappings
}

function generateV1Mapping(v1Id, salesforceId, createdAt) {
  return {
    source: 'salesforce',
    sourceEntity: 'account',
    sourceEntityId: salesforceId,
    target: 'v1',
    targetEntity: 'university',
    targetEntityId: v1Id,
    createdAt,
  }
}

function generateSubscriptionToV1Mapping(subscriptionId, v1Id) {
  return {
    source: 'v1',
    sourceEntity: 'university',
    sourceEntityId: v1Id,
    target: 'v2',
    targetEntity: 'subscription',
    targetEntityId: subscriptionId,
    createdAt: new Date().toISOString(),
  }
}

function generateSubscriptionToSalesforceMapping(subscriptionId, salesforceId) {
  return {
    source: 'salesforce',
    sourceEntity: 'account',
    sourceEntityId: salesforceId,
    target: 'v2',
    targetEntity: 'subscription',
    targetEntityId: subscriptionId,
    createdAt: new Date().toISOString(),
  }
}

/**
 *
 * @param {string} subscriptionId
 * @param {string} recurlyId
 * @param {string} [createdAt] - Should be an ISO date
 * @return {AccountMapping}
 */
function generateSubscriptionToRecurlyMapping(
  subscriptionId,
  recurlyId,
  createdAt = new Date().toISOString()
) {
  return {
    source: 'recurly',
    sourceEntity: 'subscription',
    sourceEntityId: recurlyId,
    target: 'v2',
    targetEntity: 'subscription',
    targetEntityId: subscriptionId,
    createdAt,
  }
}

module.exports = {
  extractAccountMappingsFromSubscription,
  generateV1Mapping,
  generateSubscriptionToRecurlyMapping,
}
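
A usage sketch for the helper above, with hypothetical subscription data: only parameters that are present on the update and differ from the current subscription produce a mapping.

// Usage sketch (hypothetical IDs and fields).
const {
  extractAccountMappingsFromSubscription,
} = require('./AccountMappingHelper')

const subscription = { id: 'sub-1', salesforce_id: 'sf-old', v1_id: '42' }
const updatedSubscription = { salesforce_id: 'sf-new', v1_id: '42' }

// salesforce_id changed, v1_id is unchanged, and recurlySubscription_id is
// absent, so exactly one mapping is generated.
const accountMappings = extractAccountMappingsFromSubscription(
  subscription,
  updatedSubscription
)
// accountMappings[0] => { source: 'salesforce', sourceEntityId: 'sf-new',
//                         target: 'v2', targetEntityId: 'sub-1', ... }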
65 services/web/app/src/Features/Analytics/AnalyticsController.mjs Normal file
@@ -0,0 +1,65 @@
import metrics from '@overleaf/metrics'
import AnalyticsManager from './AnalyticsManager.js'
import SessionManager from '../Authentication/SessionManager.js'
import GeoIpLookup from '../../infrastructure/GeoIpLookup.js'
import Features from '../../infrastructure/Features.js'
import { expressify } from '@overleaf/promise-utils'
import AccountMappingHelper from './AccountMappingHelper.js'

async function registerSalesforceMapping(req, res, next) {
  if (!Features.hasFeature('analytics')) {
    return res.sendStatus(202)
  }
  const { createdAt, salesforceId, v1Id } = req.body
  AnalyticsManager.registerAccountMapping(
    AccountMappingHelper.generateV1Mapping(v1Id, salesforceId, createdAt)
  )
  res.sendStatus(202)
}

async function updateEditingSession(req, res, next) {
  if (!Features.hasFeature('analytics')) {
    return res.sendStatus(202)
  }
  const userId = SessionManager.getLoggedInUserId(req.session)
  const { projectId } = req.params
  const segmentation = req.body.segmentation || {}
  let countryCode = null

  if (userId) {
    try {
      const geoDetails = await GeoIpLookup.promises.getDetails(req.ip)
      if (geoDetails && geoDetails.country_code) {
        countryCode = geoDetails.country_code
      }
      AnalyticsManager.updateEditingSession(
        userId,
        projectId,
        countryCode,
        segmentation
      )
    } catch (error) {
      metrics.inc('analytics_geo_ip_lookup_errors')
    }
  }
  res.sendStatus(202)
}

function recordEvent(req, res, next) {
  if (!Features.hasFeature('analytics')) {
    return res.sendStatus(202)
  }
  delete req.body._csrf
  AnalyticsManager.recordEventForSession(
    req.session,
    req.params.event,
    req.body
  )
  res.sendStatus(202)
}

export default {
  registerSalesforceMapping: expressify(registerSalesforceMapping),
  updateEditingSession: expressify(updateEditingSession),
  recordEvent,
}
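
The async handlers above are wrapped with expressify from @overleaf/promise-utils so that a rejected promise reaches Express error handling rather than becoming an unhandled rejection. The real implementation lives in that package; a minimal sketch of the idea:

// Minimal sketch of an expressify-style wrapper (assumption: the actual
// @overleaf/promise-utils implementation may differ in details).
function expressifySketch(handler) {
  return (req, res, next) => {
    // Any rejection is forwarded to next(), so the Express error
    // middleware chain still runs.
    handler(req, res, next).catch(next)
  }
}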
404 services/web/app/src/Features/Analytics/AnalyticsManager.js Normal file
@@ -0,0 +1,404 @@
const SessionManager = require('../Authentication/SessionManager')
const UserAnalyticsIdCache = require('./UserAnalyticsIdCache')
const Settings = require('@overleaf/settings')
const Metrics = require('../../infrastructure/Metrics')
const Queues = require('../../infrastructure/Queues')
const crypto = require('crypto')
const _ = require('lodash')
const { expressify } = require('@overleaf/promise-utils')
const logger = require('@overleaf/logger')

const analyticsEventsQueue = Queues.getQueue('analytics-events')
const analyticsEditingSessionsQueue = Queues.getQueue(
  'analytics-editing-sessions'
)
const analyticsUserPropertiesQueue = Queues.getQueue(
  'analytics-user-properties'
)
const analyticsAccountMappingQueue = Queues.getQueue(
  'analytics-account-mapping'
)

const ONE_MINUTE_MS = 60 * 1000

const UUID_REGEXP = /^[\w]{8}(-[\w]{4}){3}-[\w]{12}$/

function identifyUser(userId, analyticsId, isNewUser) {
  if (!userId || !analyticsId || !analyticsId.toString().match(UUID_REGEXP)) {
    return
  }
  if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) {
    return
  }
  Metrics.analyticsQueue.inc({ status: 'adding', event_type: 'identify' })
  Queues.createScheduledJob(
    'analytics-events',
    {
      name: 'identify',
      data: { userId, analyticsId, isNewUser, createdAt: new Date() },
    },
    ONE_MINUTE_MS
  )
    .then(() => {
      Metrics.analyticsQueue.inc({ status: 'added', event_type: 'identify' })
    })
    .catch(() => {
      Metrics.analyticsQueue.inc({ status: 'error', event_type: 'identify' })
    })
}

async function recordEventForUser(userId, event, segmentation) {
  if (!userId) {
    return
  }
  if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) {
    return
  }
  const analyticsId = await UserAnalyticsIdCache.get(userId)
  if (analyticsId) {
    _recordEvent({ analyticsId, userId, event, segmentation, isLoggedIn: true })
  }
}

function recordEventForUserInBackground(userId, event, segmentation) {
  recordEventForUser(userId, event, segmentation).catch(err => {
    logger.warn(
      { err, userId, event, segmentation },
      'failed to record event for user'
    )
  })
}

function recordEventForSession(session, event, segmentation) {
  const { analyticsId, userId } = getIdsFromSession(session)
  if (!analyticsId) {
    return
  }
  if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) {
    return
  }
  _recordEvent({
    analyticsId,
    userId,
    event,
    segmentation,
    isLoggedIn: !!userId,
    createdAt: new Date(),
  })
}

async function setUserPropertyForUser(userId, propertyName, propertyValue) {
  if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) {
    return
  }

  _checkPropertyValue(propertyValue)

  const analyticsId = await UserAnalyticsIdCache.get(userId)
  if (analyticsId) {
    await _setUserProperty({ analyticsId, propertyName, propertyValue })
  }
}

function setUserPropertyForUserInBackground(userId, property, value) {
  setUserPropertyForUser(userId, property, value).catch(err => {
    logger.warn(
      { err, userId, property, value },
      'failed to set user property for user'
    )
  })
}

async function setUserPropertyForAnalyticsId(
  analyticsId,
  propertyName,
  propertyValue
) {
  if (_isAnalyticsDisabled()) {
    return
  }

  _checkPropertyValue(propertyValue)

  await _setUserProperty({ analyticsId, propertyName, propertyValue })
}

async function setUserPropertyForSession(session, propertyName, propertyValue) {
  const { analyticsId, userId } = getIdsFromSession(session)
  if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) {
    return
  }

  _checkPropertyValue(propertyValue)

  if (analyticsId) {
    await _setUserProperty({ analyticsId, propertyName, propertyValue })
  }
}

function setUserPropertyForSessionInBackground(session, property, value) {
  setUserPropertyForSession(session, property, value).catch(err => {
    const { analyticsId, userId } = getIdsFromSession(session)
    logger.warn(
      { err, analyticsId, userId, property, value },
      'failed to set user property for session'
    )
  })
}

/**
 * @typedef {(import('./types').AccountMapping)} AccountMapping
 */

/**
 * Register mapping between two accounts.
 *
 * @param {AccountMapping} payload - The event payload to send to Analytics
 */
function registerAccountMapping({
  source,
  sourceEntity,
  sourceEntityId,
  target,
  targetEntity,
  targetEntityId,
  createdAt,
}) {
  Metrics.analyticsQueue.inc({
    status: 'adding',
    event_type: 'account-mapping',
  })

  analyticsAccountMappingQueue
    .add('account-mapping', {
      source,
      sourceEntity,
      sourceEntityId,
      target,
      targetEntity,
      targetEntityId,
      createdAt: createdAt ?? new Date(),
    })
    .then(() => {
      Metrics.analyticsQueue.inc({
        status: 'added',
        event_type: 'account-mapping',
      })
    })
    .catch(() => {
      Metrics.analyticsQueue.inc({
        status: 'error',
        event_type: 'account-mapping',
      })
    })
}

function updateEditingSession(userId, projectId, countryCode, segmentation) {
  if (!userId) {
    return
  }
  if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) {
    return
  }
  if (!_isSegmentationValid(segmentation)) {
    logger.info(
      { userId, projectId, segmentation },
      'rejecting analytics editing session due to bad segmentation'
    )
    return
  }
  Metrics.analyticsQueue.inc({
    status: 'adding',
    event_type: 'editing-session',
  })
  analyticsEditingSessionsQueue
    .add('editing-session', {
      userId,
      projectId,
      countryCode,
      segmentation,
      createdAt: new Date(),
    })
    .then(() => {
      Metrics.analyticsQueue.inc({
        status: 'added',
        event_type: 'editing-session',
      })
    })
    .catch(() => {
      Metrics.analyticsQueue.inc({
        status: 'error',
        event_type: 'editing-session',
      })
    })
}

function _recordEvent(
  { analyticsId, userId, event, segmentation, isLoggedIn },
  { delay } = {}
) {
  if (!_isAttributeValid(event)) {
    logger.info(
      { analyticsId, event, segmentation },
      'rejecting analytics event due to bad event name'
    )
    return
  }
  if (!_isSegmentationValid(segmentation)) {
    logger.info(
      { analyticsId, event, segmentation },
      'rejecting analytics event due to bad segmentation'
    )
    return
  }
  logger.debug(
    {
      analyticsId,
      userId,
      event,
      segmentation,
      isLoggedIn: !!userId,
      createdAt: new Date(),
    },
    'queueing analytics event'
  )
  Metrics.analyticsQueue.inc({ status: 'adding', event_type: 'event' })
  analyticsEventsQueue
    .add(
      'event',
      {
        analyticsId,
        userId,
        event,
        segmentation,
        isLoggedIn,
        createdAt: new Date(),
      },
      { delay }
    )
    .then(() => {
      Metrics.analyticsQueue.inc({ status: 'added', event_type: 'event' })
    })
    .catch(() => {
      Metrics.analyticsQueue.inc({ status: 'error', event_type: 'event' })
    })
}

async function _setUserProperty({ analyticsId, propertyName, propertyValue }) {
  if (!_isAttributeValid(propertyName)) {
    logger.info(
      { analyticsId, propertyName, propertyValue },
      'rejecting analytics user property due to bad name'
    )
    return
  }
  if (!_isAttributeValueValid(propertyValue)) {
    logger.info(
      { analyticsId, propertyName, propertyValue },
      'rejecting analytics user property due to bad value'
    )
    return
  }
  Metrics.analyticsQueue.inc({
    status: 'adding',
    event_type: 'user-property',
  })
  await analyticsUserPropertiesQueue
    .add('user-property', {
      analyticsId,
      propertyName,
      propertyValue,
      createdAt: new Date(),
    })
    .then(() => {
      Metrics.analyticsQueue.inc({
        status: 'added',
        event_type: 'user-property',
      })
    })
    .catch(() => {
      Metrics.analyticsQueue.inc({
        status: 'error',
        event_type: 'user-property',
      })
    })
}

function _isSmokeTestUser(userId) {
  const smokeTestUserId = Settings.smokeTest && Settings.smokeTest.userId
  return (
    smokeTestUserId != null &&
    userId != null &&
    userId.toString() === smokeTestUserId
  )
}

function _isAnalyticsDisabled() {
  return !(Settings.analytics && Settings.analytics.enabled)
}

function _checkPropertyValue(propertyValue) {
  if (propertyValue === undefined) {
    throw new Error(
      'propertyValue cannot be undefined, use null to unset a property'
    )
  }
}

function _isAttributeValid(attribute) {
  return !attribute || /^[a-zA-Z0-9-_.:;,/]+$/.test(attribute)
}

function _isAttributeValueValid(attributeValue) {
  return _isAttributeValid(attributeValue) || attributeValue instanceof Date
}

function _isSegmentationValid(segmentation) {
  if (segmentation) {
    for (const key of Object.keys(segmentation)) {
      if (!_isAttributeValid(key)) {
        return false
      }
    }
  }

  return true
}

function getIdsFromSession(session) {
  const analyticsId = _.get(session, ['analyticsId'])
  const userId = SessionManager.getLoggedInUserId(session)
  return { analyticsId, userId }
}

async function analyticsIdMiddleware(req, res, next) {
  const session = req.session
  const sessionUser = SessionManager.getSessionUser(session)

  if (sessionUser) {
    session.analyticsId = await UserAnalyticsIdCache.get(sessionUser._id)
  } else if (!session.analyticsId) {
    // generate an `analyticsId` if needed
    session.analyticsId = crypto.randomUUID()
  }

  res.locals.getSessionAnalyticsId = () => session.analyticsId

  next()
}

module.exports = {
  identifyUser,
  recordEventForSession,
  recordEventForUser,
  recordEventForUserInBackground,
  setUserPropertyForUser,
  setUserPropertyForUserInBackground,
  setUserPropertyForSession,
  setUserPropertyForSessionInBackground,
  setUserPropertyForAnalyticsId,
  updateEditingSession,
  getIdsFromSession,
  registerAccountMapping,
  analyticsIdMiddleware: expressify(analyticsIdMiddleware),
}
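
A usage sketch for the manager above, with illustrative names: event and property names must match the attribute regexp (/^[a-zA-Z0-9-_.:;,/]+$/), and property values may be null but never undefined.

// Usage sketch (illustrative names and IDs).
const AnalyticsManager = require('./AnalyticsManager')

async function example(userId) {
  // Fire-and-forget: failures are logged by the manager, never thrown.
  AnalyticsManager.recordEventForUserInBackground(userId, 'project-opened', {
    projectId: 'abc123', // segmentation keys must pass _isAttributeValid
  })

  // An event named 'has spaces' would be rejected and logged as
  // 'rejecting analytics event due to bad event name'.

  // Property values must not be undefined; pass null to unset.
  await AnalyticsManager.setUserPropertyForUser(userId, 'beta-program', true)
}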
28 services/web/app/src/Features/Analytics/AnalyticsProxy.mjs Normal file
@@ -0,0 +1,28 @@
import settings from '@overleaf/settings'
import Errors from '../Errors/Errors.js'
import httpProxy from 'express-http-proxy'

export default {
  call(basePath) {
    if (!settings.apis.analytics) {
      return (req, res, next) =>
        next(
          new Errors.ServiceNotConfiguredError(
            'Analytics service not configured'
          )
        )
    }

    return httpProxy(settings.apis.analytics.url, {
      proxyReqPathResolver(req) {
        // req.url is the part of the path that comes after the mount point in
        // app.use()
        return `${basePath}${req.url}`
      },
      proxyReqOptDecorator(proxyReqOpts, srcReq) {
        proxyReqOpts.headers = {} // unset all headers
        return proxyReqOpts
      },
    })
  },
}
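
A path-resolution trace for the proxy above, using the mount point that AnalyticsRouter sets up later in this commit (the request URL is hypothetical):

// app.use('/analytics/uniExternalCollaboration',
//   AnalyticsProxy.call('/uniExternalCollaboration'))
//
// Incoming request:         GET /analytics/uniExternalCollaboration/report?week=3
// req.url inside the proxy: /report?week=3   (the part after the mount point)
// Proxied to:               `${settings.apis.analytics.url}/uniExternalCollaboration/report?week=3`
// with all request headers stripped by proxyReqOptDecorator.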
96 services/web/app/src/Features/Analytics/AnalyticsRegistrationSourceHelper.js Normal file
@@ -0,0 +1,96 @@
const AnalyticsManager = require('./AnalyticsManager')
const RequestHelper = require('./RequestHelper')

function clearSource(session) {
  if (session) {
    delete session.required_login_from_product_medium
    delete session.required_login_from_product_source
  }
}

function setInbound(session, url, query, referrer) {
  const inboundSession = {
    referrer: RequestHelper.parseReferrer(referrer, url),
    utm: RequestHelper.parseUtm(query),
  }

  if (inboundSession.referrer || inboundSession.utm) {
    session.inbound = inboundSession
  }
}

function clearInbound(session) {
  if (session) {
    delete session.inbound
  }
}

function addUserProperties(userId, session) {
  if (!session) {
    return
  }

  if (session.required_login_from_product_medium) {
    AnalyticsManager.setUserPropertyForUserInBackground(
      userId,
      `registered-from-product-medium`,
      session.required_login_from_product_medium
    )
    if (session.required_login_from_product_source) {
      AnalyticsManager.setUserPropertyForUserInBackground(
        userId,
        `registered-from-product-source`,
        session.required_login_from_product_source
      )
    }
  } else if (session.referal_id) {
    AnalyticsManager.setUserPropertyForUserInBackground(
      userId,
      `registered-from-bonus-scheme`,
      true
    )
    AnalyticsManager.setUserPropertyForUserInBackground(
      userId,
      `registered-from-product-medium`,
      'bonus-scheme'
    )
  }

  if (session.inbound) {
    if (session.inbound.referrer && session.inbound.referrer.medium) {
      AnalyticsManager.setUserPropertyForUserInBackground(
        userId,
        `registered-from-referrer-medium`,
        `${session.inbound.referrer.medium
          .charAt(0)
          .toUpperCase()}${session.inbound.referrer.medium.slice(1)}`
      )
      if (session.inbound.referrer.source) {
        AnalyticsManager.setUserPropertyForUserInBackground(
          userId,
          `registered-from-referrer-source`,
          session.inbound.referrer.source
        )
      }
    }

    if (session.inbound.utm) {
      for (const utmKey of RequestHelper.UTM_KEYS) {
        if (session.inbound.utm[utmKey]) {
          AnalyticsManager.setUserPropertyForUserInBackground(
            userId,
            `registered-from-${utmKey.replace('_', '-')}`,
            session.inbound.utm[utmKey]
          )
        }
      }
    }
  }
}

module.exports = {
  clearSource,
  setInbound,
  clearInbound,
  addUserProperties,
}
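
A session-shape sketch for the helper above (values hypothetical): setInbound stores the parsed referrer and UTM tags, and addUserProperties later turns them into registered-from-* user properties.

// Session-shape sketch (hypothetical values).
const AnalyticsRegistrationSourceHelper = require('./AnalyticsRegistrationSourceHelper')

const session = {}
AnalyticsRegistrationSourceHelper.setInbound(
  session,
  '/register',
  { utm_source: 'newsletter', utm_medium: 'email' },
  'https://example.com/some-page'
)
// session.inbound is now roughly:
// {
//   referrer: { medium: 'link', source: 'example.com' }, // via referer-parser
//   utm: { utm_source: 'newsletter', utm_medium: 'email' },
// }

// After registration this would set, among others:
//   'registered-from-utm-source'      = 'newsletter'
//   'registered-from-utm-medium'      = 'email'
//   'registered-from-referrer-medium' = 'Link'
AnalyticsRegistrationSourceHelper.addUserProperties('user-id-123', session)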
@@ -0,0 +1,58 @@
const logger = require('@overleaf/logger')
const OError = require('@overleaf/o-error')
const AnalyticsRegistrationSourceHelper = require('./AnalyticsRegistrationSourceHelper')
const SessionManager = require('../../Features/Authentication/SessionManager')

function setSource(medium, source) {
  return function (req, res, next) {
    if (req.session) {
      req.session.required_login_from_product_medium = medium
      if (source) {
        req.session.required_login_from_product_source = source
      }
    }
    next()
  }
}

function clearSource() {
  return function (req, res, next) {
    AnalyticsRegistrationSourceHelper.clearSource(req.session)
    next()
  }
}

function setInbound() {
  return function setInbound(req, res, next) {
    if (req.session.inbound) {
      return next() // don't overwrite referrer
    }

    if (SessionManager.isUserLoggedIn(req.session)) {
      return next() // don't store referrer if user is already logged in
    }

    const referrer = req.header('referrer')
    try {
      AnalyticsRegistrationSourceHelper.setInbound(
        req.session,
        req.url,
        req.query,
        referrer
      )
    } catch (error) {
      // log errors and fail silently
      OError.tag(error, 'failed to parse inbound referrer', {
        referrer,
      })
      logger.warn({ error }, error.message)
    }
    next()
  }
}

module.exports = {
  setSource,
  clearSource,
  setInbound,
}
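
A wiring sketch for these middleware factories (routes and handler names hypothetical; the import is elided since the hunk header does not name the file):

// Hypothetical wiring for the factories above.
webRouter.get(
  '/learn', // hypothetical route that should attribute later sign-ups
  setSource('docs', 'learn-page'), // medium, optional source
  renderLearnPage // hypothetical handler
)

// Capture referrer/UTM data once per anonymous session:
webRouter.use(setInbound())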
51 services/web/app/src/Features/Analytics/AnalyticsRouter.mjs Normal file
@@ -0,0 +1,51 @@
import AuthenticationController from './../Authentication/AuthenticationController.js'
import AnalyticsController from './AnalyticsController.mjs'
import AnalyticsProxy from './AnalyticsProxy.mjs'
import { RateLimiter } from '../../infrastructure/RateLimiter.js'
import RateLimiterMiddleware from '../Security/RateLimiterMiddleware.js'

const rateLimiters = {
  recordEvent: new RateLimiter('analytics-record-event', {
    points: 200,
    duration: 60,
  }),
  updateEditingSession: new RateLimiter('analytics-update-editing-session', {
    points: 20,
    duration: 60,
  }),
  uniExternalCollabProxy: new RateLimiter(
    'analytics-uni-external-collab-proxy',
    { points: 20, duration: 60 }
  ),
}

export default {
  apply(webRouter, privateApiRouter, publicApiRouter) {
    webRouter.post(
      '/event/:event([a-z0-9-_]+)',
      RateLimiterMiddleware.rateLimit(rateLimiters.recordEvent),
      AnalyticsController.recordEvent
    )

    webRouter.put(
      '/editingSession/:projectId',
      RateLimiterMiddleware.rateLimit(rateLimiters.updateEditingSession, {
        params: ['projectId'],
      }),
      AnalyticsController.updateEditingSession
    )

    publicApiRouter.use(
      '/analytics/uniExternalCollaboration',
      AuthenticationController.requirePrivateApiAuth(),
      RateLimiterMiddleware.rateLimit(rateLimiters.uniExternalCollabProxy),
      AnalyticsProxy.call('/uniExternalCollaboration')
    )

    publicApiRouter.post(
      '/analytics/register-v-1-salesforce-mapping',
      AuthenticationController.requirePrivateApiAuth(),
      AnalyticsController.registerSalesforceMapping
    )
  },
}
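
A browser-side sketch of calling the event route above (event name and fields hypothetical): the :event parameter must match [a-z0-9-_]+, the controller strips _csrf, and the remaining body becomes the segmentation.

// Client-side sketch (hypothetical event and fields).
fetch('/event/left-menu-opened', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ _csrf: csrfToken, projectId: 'abc123' }),
})
// -> 202 Accepted in all cases; the event is queued asynchronously,
//    or silently dropped when the 'analytics' feature is disabled.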
@@ -0,0 +1,58 @@
import _ from 'lodash'
import RequestHelper from './RequestHelper.js'
import AnalyticsManager from './AnalyticsManager.js'
import querystring from 'node:querystring'
import { URL } from 'node:url'
import Settings from '@overleaf/settings'
import OError from '@overleaf/o-error'
import logger from '@overleaf/logger'

function recordUTMTags() {
  return function (req, res, next) {
    const query = req.query

    try {
      const utmValues = RequestHelper.parseUtm(query)

      if (utmValues) {
        const path = new URL(req.url, Settings.siteUrl).pathname

        AnalyticsManager.recordEventForSession(req.session, 'page-view', {
          path,
          ...utmValues,
        })

        const propertyValue = `${utmValues.utm_source || 'N/A'};${
          utmValues.utm_medium || 'N/A'
        };${utmValues.utm_campaign || 'N/A'};${
          utmValues.utm_content || utmValues.utm_term || 'N/A'
        }`
        AnalyticsManager.setUserPropertyForSessionInBackground(
          req.session,
          'utm-tags',
          propertyValue
        )

        // redirect to URL without UTM query params
        const queryWithoutUtm = _.omit(query, RequestHelper.UTM_KEYS)
        const queryString =
          Object.keys(queryWithoutUtm).length > 0
            ? '?' + querystring.stringify(queryWithoutUtm)
            : ''
        return res.redirect(path + queryString)
      }
    } catch (error) {
      // log errors and fail silently
      OError.tag(error, 'failed to track UTM tags', {
        query,
      })
      logger.warn({ error }, error.message)
    }

    next()
  }
}

export default {
  recordUTMTags,
}
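
The net effect of recordUTMTags, traced with a hypothetical request:

// GET /blog/post?utm_source=twitter&utm_medium=social&page=2
// 1. records a 'page-view' event with
//    { path: '/blog/post', utm_source: 'twitter', utm_medium: 'social' }
// 2. sets the 'utm-tags' user property to 'twitter;social;N/A;N/A'
// 3. redirects to /blog/post?page=2 (UTM params stripped, others kept)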
56 services/web/app/src/Features/Analytics/RequestHelper.js Normal file
@@ -0,0 +1,56 @@
const RefererParser = require('referer-parser')
const { URL } = require('url')

const UTM_KEYS = [
  'utm_campaign',
  'utm_source',
  'utm_term',
  'utm_content',
  'utm_medium',
  'utm_count',
]

function parseUtm(query) {
  const utmValues = {}
  for (const utmKey of UTM_KEYS) {
    if (query[utmKey]) {
      utmValues[utmKey] = query[utmKey]
    }
  }
  return Object.keys(utmValues).length > 0 ? utmValues : null
}

function parseReferrer(referrer, url) {
  if (!referrer) {
    return {
      medium: 'direct',
    }
  }

  const parsedReferrer = new RefererParser(referrer, url)

  const referrerValues = {
    medium: parsedReferrer.medium,
    source: parsedReferrer.referer || 'other',
  }

  if (referrerValues.medium === 'unknown') {
    try {
      const referrerHostname = new URL(referrer).hostname
      if (referrerHostname) {
        referrerValues.medium = 'link'
        referrerValues.source = referrerHostname
      }
    } catch (error) {
      // ignore referrer parsing errors
    }
  }

  return referrerValues
}

module.exports = {
  UTM_KEYS,
  parseUtm,
  parseReferrer,
}
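
Input/output sketches for the two parsers above (values hypothetical):

const { parseUtm, parseReferrer } = require('./RequestHelper')

// parseUtm keeps only the allow-listed utm_* keys, or returns null.
parseUtm({ utm_source: 'twitter', page: '2' }) // => { utm_source: 'twitter' }
parseUtm({ page: '2' }) // => null

// parseReferrer falls back from 'unknown' to 'link' + hostname.
parseReferrer(undefined, '/register') // => { medium: 'direct' }
parseReferrer('https://example.com/a', '/register')
// => roughly { medium: 'link', source: 'example.com' } (exact values depend
//    on referer-parser's database)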
31 services/web/app/src/Features/Analytics/UserAnalyticsIdCache.js Normal file
@@ -0,0 +1,31 @@
const UserGetter = require('../User/UserGetter')
const { CacheLoader } = require('cache-flow')
const { callbackify } = require('util')

class UserAnalyticsIdCache extends CacheLoader {
  constructor() {
    super('user-analytics-id', {
      expirationTime: 60,
      maxSize: 10000,
    })
  }

  async load(userId) {
    const user = await UserGetter.promises.getUser(userId, { analyticsId: 1 })
    if (user) {
      return user.analyticsId || user._id.toString()
    }
  }

  keyToString(userId) {
    if (userId) {
      return userId.toString()
    }
  }
}

const userAnalyticsIdCache = new UserAnalyticsIdCache()
userAnalyticsIdCache.callbacks = {
  get: callbackify(userAnalyticsIdCache.get).bind(userAnalyticsIdCache),
}
module.exports = userAnalyticsIdCache
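
A usage sketch for the cache (the user ID is hypothetical): the promise-based get comes from cache-flow's CacheLoader base class, and callbacks.get is the shim for legacy callback-style callers.

const UserAnalyticsIdCache = require('./UserAnalyticsIdCache')

async function example() {
  // Promise API, inherited from CacheLoader:
  const analyticsId = await UserAnalyticsIdCache.get('507f191e810c19729de860ea')

  // Legacy callback API via the callbackify shim:
  UserAnalyticsIdCache.callbacks.get('507f191e810c19729de860ea', (err, id) => {
    // id is user.analyticsId, falling back to user._id as a string
  })
}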
9 services/web/app/src/Features/Analytics/types.d.ts vendored Normal file
@@ -0,0 +1,9 @@
export type AccountMapping = {
  source: string
  sourceEntity: string
  sourceEntityId: string
  target: string
  targetEntity: string
  targetEntityId: string
  createdAt: string
}
671 services/web/app/src/Features/Authentication/AuthenticationController.js Normal file
@@ -0,0 +1,671 @@
const AuthenticationManager = require('./AuthenticationManager')
const SessionManager = require('./SessionManager')
const OError = require('@overleaf/o-error')
const LoginRateLimiter = require('../Security/LoginRateLimiter')
const UserUpdater = require('../User/UserUpdater')
const Metrics = require('@overleaf/metrics')
const logger = require('@overleaf/logger')
const querystring = require('querystring')
const Settings = require('@overleaf/settings')
const basicAuth = require('basic-auth')
const tsscmp = require('tsscmp')
const UserHandler = require('../User/UserHandler')
const UserSessionsManager = require('../User/UserSessionsManager')
const Analytics = require('../Analytics/AnalyticsManager')
const passport = require('passport')
const NotificationsBuilder = require('../Notifications/NotificationsBuilder')
const UrlHelper = require('../Helpers/UrlHelper')
const AsyncFormHelper = require('../Helpers/AsyncFormHelper')
const _ = require('lodash')
const UserAuditLogHandler = require('../User/UserAuditLogHandler')
const AnalyticsRegistrationSourceHelper = require('../Analytics/AnalyticsRegistrationSourceHelper')
const {
  acceptsJson,
} = require('../../infrastructure/RequestContentTypeDetection')
const { hasAdminAccess } = require('../Helpers/AdminAuthorizationHelper')
const Modules = require('../../infrastructure/Modules')
const { expressify, promisify } = require('@overleaf/promise-utils')
const { handleAuthenticateErrors } = require('./AuthenticationErrors')
const EmailHelper = require('../Helpers/EmailHelper')

function send401WithChallenge(res) {
  res.setHeader('WWW-Authenticate', 'OverleafLogin')
  res.sendStatus(401)
}

function checkCredentials(userDetailsMap, user, password) {
  const expectedPassword = userDetailsMap.get(user)
  const userExists = userDetailsMap.has(user) && expectedPassword // user exists with a non-null password
  const isValid = userExists && tsscmp(expectedPassword, password)
  if (!isValid) {
    logger.err({ user }, 'invalid login details')
  }
  Metrics.inc('security.http-auth.check-credentials', 1, {
    path: userExists ? 'known-user' : 'unknown-user',
    status: isValid ? 'pass' : 'fail',
  })
  return isValid
}

function reduceStaffAccess(staffAccess) {
  const reducedStaffAccess = {}
  for (const field in staffAccess) {
    if (staffAccess[field]) {
      reducedStaffAccess[field] = true
    }
  }
  return reducedStaffAccess
}

function userHasStaffAccess(user) {
  return user.staffAccess && Object.values(user.staffAccess).includes(true)
}

// TODO: Finish making these methods async
const AuthenticationController = {
  serializeUser(user, callback) {
    if (!user._id || !user.email) {
      const err = new Error('serializeUser called with non-user object')
      logger.warn({ user }, err.message)
      return callback(err)
    }
    const lightUser = {
      _id: user._id,
      first_name: user.first_name,
      last_name: user.last_name,
      email: user.email,
      referal_id: user.referal_id,
      session_created: new Date().toISOString(),
      ip_address: user._login_req_ip,
      must_reconfirm: user.must_reconfirm,
      v1_id: user.overleaf != null ? user.overleaf.id : undefined,
      analyticsId: user.analyticsId || user._id,
      alphaProgram: user.alphaProgram || undefined, // only store if set
      betaProgram: user.betaProgram || undefined, // only store if set
    }
    if (user.isAdmin) {
      lightUser.isAdmin = true
    }
    if (userHasStaffAccess(user)) {
      lightUser.staffAccess = reduceStaffAccess(user.staffAccess)
    }

    callback(null, lightUser)
  },

  deserializeUser(user, cb) {
    cb(null, user)
  },

  passportLogin(req, res, next) {
    // This function is middleware which wraps the passport.authenticate middleware,
    // so we can send back our custom `{message: {text: "", type: ""}}` responses on failure,
    // and send a `{redir: ""}` response on success
    passport.authenticate(
      'local',
      { keepSessionInfo: true },
      async function (err, user, info) {
        if (err) {
          return next(err)
        }
        if (user) {
          // `user` is either a user object or false
          AuthenticationController.setAuditInfo(req, {
            method: 'Password login',
          })

          try {
            // We could investigate whether this can be done together with 'preFinishLogin' instead of being its own hook
            await Modules.promises.hooks.fire(
              'saasLogin',
              { email: user.email },
              req
            )
            await AuthenticationController.promises.finishLogin(user, req, res)
          } catch (err) {
            return next(err)
          }
        } else {
          if (info.redir != null) {
            return res.json({ redir: info.redir })
          } else {
            res.status(info.status || 200)
            delete info.status
            const body = { message: info }
            const { errorReason } = info
            if (errorReason) {
              body.errorReason = errorReason
              delete info.errorReason
            }
            return res.json(body)
          }
        }
      }
    )(req, res, next)
  },

  async _finishLoginAsync(user, req, res) {
    if (user === false) {
      return AsyncFormHelper.redirect(req, res, '/login')
    } // OAuth2 'state' mismatch

    if (user.suspended) {
      return AsyncFormHelper.redirect(req, res, '/account-suspended')
    }

    if (Settings.adminOnlyLogin && !hasAdminAccess(user)) {
      return res.status(403).json({
        message: { type: 'error', text: 'Admin only panel' },
      })
    }

    const auditInfo = AuthenticationController.getAuditInfo(req)

    const anonymousAnalyticsId = req.session.analyticsId
    const isNewUser = req.session.justRegistered || false

    const results = await Modules.promises.hooks.fire(
      'preFinishLogin',
      req,
      res,
      user
    )

    if (results.some(result => result && result.doNotFinish)) {
      return
    }

    if (user.must_reconfirm) {
      return AuthenticationController._redirectToReconfirmPage(req, res, user)
    }

    const redir =
      AuthenticationController.getRedirectFromSession(req) || '/project'

    _loginAsyncHandlers(req, user, anonymousAnalyticsId, isNewUser)
    const userId = user._id

    await UserAuditLogHandler.promises.addEntry(
      userId,
      'login',
      userId,
      req.ip,
      auditInfo
    )

    await _afterLoginSessionSetupAsync(req, user)

    AuthenticationController._clearRedirectFromSession(req)
    AnalyticsRegistrationSourceHelper.clearSource(req.session)
    AnalyticsRegistrationSourceHelper.clearInbound(req.session)
    AsyncFormHelper.redirect(req, res, redir)
  },

  finishLogin(user, req, res, next) {
    AuthenticationController._finishLoginAsync(user, req, res).catch(err =>
      next(err)
    )
  },

  async doPassportLogin(req, username, password, done) {
    let user, info
    try {
      ;({ user, info } = await AuthenticationController._doPassportLogin(
        req,
        username,
        password
      ))
    } catch (error) {
      return done(error)
    }
    return done(undefined, user, info)
  },

  /**
   *
   * @param req
   * @param username
   * @param password
   * @returns {Promise<{ user: any, info: any}>}
   */
  async _doPassportLogin(req, username, password) {
    const email = EmailHelper.parseEmail(username)
    if (!email) {
      Metrics.inc('login_failure_reason', 1, { status: 'invalid_email' })
      return {
        user: null,
        info: {
          status: 400,
          type: 'error',
          text: req.i18n.translate('email_address_is_invalid'),
        },
      }
    }
    AuthenticationController.setAuditInfo(req, { method: 'Password login' })

    const { fromKnownDevice } = AuthenticationController.getAuditInfo(req)
    const auditLog = {
      ipAddress: req.ip,
      info: { method: 'Password login', fromKnownDevice },
    }

    let user, isPasswordReused
    try {
      ;({ user, isPasswordReused } =
        await AuthenticationManager.promises.authenticate(
          { email },
          password,
          auditLog,
          {
            enforceHIBPCheck: !fromKnownDevice,
          }
        ))
    } catch (error) {
      return {
        user: false,
        info: handleAuthenticateErrors(error, req),
      }
    }

    if (user && AuthenticationController.captchaRequiredForLogin(req, user)) {
      Metrics.inc('login_failure_reason', 1, { status: 'captcha_missing' })
      return {
        user: false,
        info: {
          text: req.i18n.translate('cannot_verify_user_not_robot'),
          type: 'error',
          errorReason: 'cannot_verify_user_not_robot',
          status: 400,
        },
      }
    } else if (user) {
      if (
        isPasswordReused &&
        AuthenticationController.getRedirectFromSession(req) == null
      ) {
        AuthenticationController.setRedirectInSession(
          req,
          '/compromised-password'
        )
      }

      // async actions
      return { user, info: undefined }
    } else {
      Metrics.inc('login_failure_reason', 1, { status: 'password_invalid' })
      AuthenticationController._recordFailedLogin()
      logger.debug({ email }, 'failed log in')
      return {
        user: false,
        info: {
          type: 'error',
          key: 'invalid-password-retry-or-reset',
          status: 401,
        },
      }
    }
  },

  captchaRequiredForLogin(req, user) {
    switch (AuthenticationController.getAuditInfo(req).captcha) {
      case 'trusted':
      case 'disabled':
        return false
      case 'solved':
        return false
      case 'skipped': {
        let required = false
        if (user.lastFailedLogin) {
          const requireCaptchaUntil =
            user.lastFailedLogin.getTime() +
            Settings.elevateAccountSecurityAfterFailedLogin
          required = requireCaptchaUntil >= Date.now()
        }
        Metrics.inc('force_captcha_on_login', 1, {
          status: required ? 'yes' : 'no',
        })
        return required
      }
      default:
        throw new Error('captcha middleware missing in handler chain')
    }
  },

  ipMatchCheck(req, user) {
    if (req.ip !== user.lastLoginIp) {
      NotificationsBuilder.ipMatcherAffiliation(user._id).create(
        req.ip,
        () => {}
      )
    }
    return UserUpdater.updateUser(
      user._id.toString(),
      {
        $set: { lastLoginIp: req.ip },
      },
      () => {}
    )
  },

  requireLogin() {
    const doRequest = function (req, res, next) {
      if (next == null) {
        next = function () {}
      }
      if (!SessionManager.isUserLoggedIn(req.session)) {
        if (acceptsJson(req)) return send401WithChallenge(res)
        return AuthenticationController._redirectToLoginOrRegisterPage(req, res)
      } else {
        req.user = SessionManager.getSessionUser(req.session)
        req.logger?.addFields({ userId: req.user._id })
        return next()
      }
    }

    return doRequest
  },

  /**
   * @param {string} scope
   * @return {import('express').Handler}
   */
  requireOauth(scope) {
    if (typeof scope !== 'string' || !scope) {
      throw new Error(
        "requireOauth() expects a non-empty string as 'scope' parameter"
      )
    }

    // require this here because module may not be included in some versions
    const Oauth2Server = require('../../../../modules/oauth2-server/app/src/Oauth2Server')
    const middleware = async (req, res, next) => {
      const request = new Oauth2Server.Request(req)
      const response = new Oauth2Server.Response(res)
      try {
        const token = await Oauth2Server.server.authenticate(
          request,
          response,
          { scope }
        )
        req.oauth = { access_token: token.accessToken }
        req.oauth_token = token
        req.oauth_user = token.user
        next()
      } catch (err) {
        if (
          err.code === 400 &&
          err.message === 'Invalid request: malformed authorization header'
        ) {
          err.code = 401
        }
        // send all other errors
        res
          .status(err.code)
          .json({ error: err.name, error_description: err.message })
      }
    }
    return expressify(middleware)
  },

  _globalLoginWhitelist: [],
  addEndpointToLoginWhitelist(endpoint) {
    return AuthenticationController._globalLoginWhitelist.push(endpoint)
  },

  requireGlobalLogin(req, res, next) {
    if (
      AuthenticationController._globalLoginWhitelist.includes(
        req._parsedUrl.pathname
      )
    ) {
      return next()
    }

    if (req.headers.authorization != null) {
      AuthenticationController.requirePrivateApiAuth()(req, res, next)
    } else if (SessionManager.isUserLoggedIn(req.session)) {
      next()
    } else {
      logger.debug(
        { url: req.url },
        'user trying to access endpoint not in global whitelist'
      )
      if (acceptsJson(req)) return send401WithChallenge(res)
      AuthenticationController.setRedirectInSession(req)
      res.redirect('/login')
    }
  },

  validateAdmin(req, res, next) {
    const adminDomains = Settings.adminDomains
    if (
      !adminDomains ||
      !(Array.isArray(adminDomains) && adminDomains.length)
    ) {
      return next()
    }
    const user = SessionManager.getSessionUser(req.session)
    if (!hasAdminAccess(user)) {
      return next()
    }
    const email = user.email
    if (email == null) {
      return next(
        new OError('[ValidateAdmin] Admin user without email address', {
          userId: user._id,
        })
      )
    }
    if (!adminDomains.find(domain => email.endsWith(`@${domain}`))) {
      return next(
        new OError('[ValidateAdmin] Admin user with invalid email domain', {
          email,
          userId: user._id,
        })
      )
    }
    return next()
  },

  checkCredentials,

  requireBasicAuth: function (userDetails) {
    const userDetailsMap = new Map(Object.entries(userDetails))
    return function (req, res, next) {
      const credentials = basicAuth(req)
      if (
        !credentials ||
        !checkCredentials(userDetailsMap, credentials.name, credentials.pass)
      ) {
        send401WithChallenge(res)
        Metrics.inc('security.http-auth', 1, { status: 'reject' })
      } else {
        Metrics.inc('security.http-auth', 1, { status: 'accept' })
        next()
      }
    }
  },

  requirePrivateApiAuth() {
    return AuthenticationController.requireBasicAuth(Settings.httpAuthUsers)
  },

  setAuditInfo(req, info) {
    if (!req.__authAuditInfo) {
      req.__authAuditInfo = {}
    }
    Object.assign(req.__authAuditInfo, info)
  },

  getAuditInfo(req) {
    return req.__authAuditInfo || {}
  },

  setRedirectInSession(req, value) {
    if (value == null) {
      value =
        Object.keys(req.query).length > 0
          ? `${req.path}?${querystring.stringify(req.query)}`
          : `${req.path}`
    }
    if (
      req.session != null &&
      !/^\/(socket.io|js|stylesheets|img)\/.*$/.test(value) &&
      !/^.*\.(png|jpeg|svg)$/.test(value)
    ) {
      const safePath = UrlHelper.getSafeRedirectPath(value)
      return (req.session.postLoginRedirect = safePath)
    }
  },

  _redirectToLoginOrRegisterPage(req, res) {
    if (
      req.query.zipUrl != null ||
      req.session.sharedProjectData ||
      req.path === '/user/subscription/new'
    ) {
      AuthenticationController._redirectToRegisterPage(req, res)
    } else {
      AuthenticationController._redirectToLoginPage(req, res)
    }
  },

  _redirectToLoginPage(req, res) {
    logger.debug(
      { url: req.url },
      'user not logged in so redirecting to login page'
    )
    AuthenticationController.setRedirectInSession(req)
    const url = `/login?${querystring.stringify(req.query)}`
    res.redirect(url)
    Metrics.inc('security.login-redirect')
  },

  _redirectToReconfirmPage(req, res, user) {
    logger.debug(
      { url: req.url },
      'user needs to reconfirm so redirecting to reconfirm page'
    )
    req.session.reconfirm_email = user != null ? user.email : undefined
    const redir = '/user/reconfirm'
    AsyncFormHelper.redirect(req, res, redir)
  },

  _redirectToRegisterPage(req, res) {
    logger.debug(
      { url: req.url },
      'user not logged in so redirecting to register page'
    )
    AuthenticationController.setRedirectInSession(req)
    const url = `/register?${querystring.stringify(req.query)}`
    res.redirect(url)
    Metrics.inc('security.login-redirect')
  },

  _recordSuccessfulLogin(userId, callback) {
    if (callback == null) {
      callback = function () {}
    }
    UserUpdater.updateUser(
      userId.toString(),
      {
        $set: { lastLoggedIn: new Date() },
        $inc: { loginCount: 1 },
      },
      function (error) {
        if (error != null) {
          callback(error)
        }
        Metrics.inc('user.login.success')
        callback()
      }
    )
  },

  _recordFailedLogin(callback) {
    Metrics.inc('user.login.failed')
    if (callback) callback()
  },

  getRedirectFromSession(req) {
    let safePath
    const value = _.get(req, ['session', 'postLoginRedirect'])
    if (value) {
      safePath = UrlHelper.getSafeRedirectPath(value)
    }
    return safePath || null
  },

  _clearRedirectFromSession(req) {
    if (req.session != null) {
      delete req.session.postLoginRedirect
    }
  },
}

function _afterLoginSessionSetup(req, user, callback) {
  req.login(user, { keepSessionInfo: true }, function (err) {
    if (err) {
      OError.tag(err, 'error from req.login', {
        user_id: user._id,
      })
      return callback(err)
    }
    delete req.session.__tmp
    delete req.session.csrfSecret
    req.session.save(function (err) {
      if (err) {
        OError.tag(err, 'error saving regenerated session after login', {
          user_id: user._id,
        })
        return callback(err)
      }
      UserSessionsManager.trackSession(user, req.sessionID, function () {})
      if (!req.deviceHistory) {
        // Captcha disabled or SSO-based login.
        return callback()
      }
      req.deviceHistory.add(user.email)
      req.deviceHistory
        .serialize(req.res)
        .catch(err => {
          logger.err({ err }, 'cannot serialize deviceHistory')
        })
        .finally(() => callback())
    })
  })
}

const _afterLoginSessionSetupAsync = promisify(_afterLoginSessionSetup)

function _loginAsyncHandlers(req, user, anonymousAnalyticsId, isNewUser) {
  UserHandler.populateTeamInvites(user, err => {
    if (err != null) {
      logger.warn({ err }, 'error setting up login data')
    }
  })
  LoginRateLimiter.recordSuccessfulLogin(user.email, () => {})
  AuthenticationController._recordSuccessfulLogin(user._id, () => {})
  AuthenticationController.ipMatchCheck(req, user)
  Analytics.recordEventForUserInBackground(user._id, 'user-logged-in', {
    source: req.session.saml
      ? 'saml'
      : req.user_info?.auth_provider || 'email-password',
  })
  Analytics.identifyUser(user._id, anonymousAnalyticsId, isNewUser)

  logger.debug(
    { email: user.email, userId: user._id.toString() },
    'successful log in'
  )

  req.session.justLoggedIn = true
  // capture the request ip for use when creating the session
  return (user._login_req_ip = req.ip)
}

AuthenticationController.promises = {
  finishLogin: AuthenticationController._finishLoginAsync,
}

module.exports = AuthenticationController
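
A configuration sketch for the basic-auth helpers in the controller above (credentials and route hypothetical): requireBasicAuth takes a plain username-to-password map, and checkCredentials compares with tsscmp to avoid timing leaks.

// Hypothetical credentials; in this codebase requirePrivateApiAuth() feeds
// Settings.httpAuthUsers into the same helper.
const requireMonitoringAuth = AuthenticationController.requireBasicAuth({
  monitoring: 'some-long-random-password',
})
privateApiRouter.get('/status', requireMonitoringAuth, statusHandler) // hypothetical route/handler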
58 services/web/app/src/Features/Authentication/AuthenticationErrors.js Normal file
@@ -0,0 +1,58 @@
const Metrics = require('@overleaf/metrics')
const OError = require('@overleaf/o-error')
const Settings = require('@overleaf/settings')
const Errors = require('../Errors/Errors')

class InvalidEmailError extends Errors.BackwardCompatibleError {}
class InvalidPasswordError extends Errors.BackwardCompatibleError {}
class ParallelLoginError extends Errors.BackwardCompatibleError {}
class PasswordMustBeDifferentError extends Errors.BackwardCompatibleError {}
class PasswordReusedError extends Errors.BackwardCompatibleError {}

function handleAuthenticateErrors(error, req) {
  if (error.message === 'password is too long') {
    Metrics.inc('login_failure_reason', 1, {
      status: 'password_is_too_long',
    })
    return {
      status: 422,
      type: 'error',
      key: 'password-too-long',
      text: req.i18n.translate('password_too_long_please_reset'),
    }
  }
  if (error instanceof ParallelLoginError) {
    Metrics.inc('login_failure_reason', 1, { status: 'parallel_login' })
    return { status: 429 }
  }
  if (error instanceof PasswordReusedError) {
    Metrics.inc('login_failure_reason', 1, {
      status: 'password_compromised',
    })
    const text = `${req.i18n
      .translate('password_compromised_try_again_or_use_known_device_or_reset')
      .replace('<0>', '')
      .replace('</0>', ' (https://haveibeenpwned.com/passwords)')
      .replace('<1>', '')
      .replace('</1>', ` (${Settings.siteUrl}/user/password/reset)`)}.`
    return {
      status: 400,
      type: 'error',
      key: 'password-compromised',
      text,
    }
  }
  Metrics.inc('login_failure_reason', 1, {
    status: error instanceof OError ? error.name : 'error',
  })
  throw error
}

module.exports = {
  InvalidEmailError,
  InvalidPasswordError,
  ParallelLoginError,
  PasswordMustBeDifferentError,
  PasswordReusedError,
  handleAuthenticateErrors,
}
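
How _doPassportLogin in AuthenticationController (above) consumes this helper, restated as a sketch: known error classes become structured info objects for the login response, and anything unrecognised is re-thrown after the metric increment.

// Sketch of the call-site pattern (see _doPassportLogin above).
async function loginSketch(req, email, password, auditLog) {
  try {
    const { user } = await AuthenticationManager.promises.authenticate(
      { email },
      password,
      auditLog,
      { enforceHIBPCheck: true }
    )
    return { user, info: undefined }
  } catch (error) {
    // 'password is too long'  -> { status: 422, key: 'password-too-long', ... }
    // ParallelLoginError      -> { status: 429 }
    // PasswordReusedError     -> { status: 400, key: 'password-compromised', ... }
    // anything else           -> re-thrown (after the metric increment)
    return { user: false, info: handleAuthenticateErrors(error, req) }
  }
}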
@@ -0,0 +1,477 @@
|
||||
const Settings = require('@overleaf/settings')
|
||||
const { User } = require('../../models/User')
|
||||
const { db, ObjectId } = require('../../infrastructure/mongodb')
|
||||
const bcrypt = require('bcrypt')
|
||||
const EmailHelper = require('../Helpers/EmailHelper')
|
||||
const {
|
||||
InvalidEmailError,
|
||||
InvalidPasswordError,
|
||||
ParallelLoginError,
|
||||
PasswordMustBeDifferentError,
|
||||
PasswordReusedError,
|
||||
} = require('./AuthenticationErrors')
|
||||
const {
|
||||
callbackify,
|
||||
callbackifyMultiResult,
|
||||
} = require('@overleaf/promise-utils')
|
||||
const HaveIBeenPwned = require('./HaveIBeenPwned')
|
||||
const UserAuditLogHandler = require('../User/UserAuditLogHandler')
|
||||
const logger = require('@overleaf/logger')
|
||||
const DiffHelper = require('../Helpers/DiffHelper')
|
||||
const Metrics = require('@overleaf/metrics')
|
||||
|
||||
const BCRYPT_ROUNDS = Settings.security.bcryptRounds || 12
|
||||
const BCRYPT_MINOR_VERSION = Settings.security.bcryptMinorVersion || 'a'
|
||||
const MAX_SIMILARITY = 0.7
|
||||
|
||||
function _exceedsMaximumLengthRatio(password, maxSimilarity, value) {
|
||||
const passwordLength = password.length
|
||||
const lengthBoundSimilarity = (maxSimilarity / 2) * passwordLength
|
||||
const valueLength = value.length
|
||||
return (
|
||||
passwordLength >= 10 * valueLength && valueLength < lengthBoundSimilarity
|
||||
)
|
||||
}
|
||||

const _checkWriteResult = function (result) {
  // for MongoDB
  return !!(result && result.modifiedCount === 1)
}

function _validatePasswordNotTooLong(password) {
  // bcrypt has a hard limit of 72 characters.
  if (password.length > 72) {
    return new InvalidPasswordError({
      message: 'password is too long',
      info: { code: 'too_long' },
    })
  }
  return null
}

function _metricsForSuccessfulPasswordMatch(password) {
  const validationResult = AuthenticationManager.validatePassword(password)
  const status =
    validationResult === null ? 'success' : validationResult?.info?.code
  Metrics.inc('check-password', { status })
  return null
}

const AuthenticationManager = {
  async _checkUserPassword(query, password) {
    // Using Mongoose for legacy reasons here. The returned User instance
    // gets serialized into the session and there may be subtle differences
    // between the user returned by Mongoose vs mongodb (such as default values)
    const user = await User.findOne(query).exec()

    if (!user || !user.hashedPassword) {
      return { user: null, match: null }
    }

    let rounds = 0
    try {
      rounds = bcrypt.getRounds(user.hashedPassword)
    } catch (err) {
      let prefix, suffix, length
      if (typeof user.hashedPassword === 'string') {
        length = user.hashedPassword.length
        if (user.hashedPassword.length > 50) {
          // A full bcrypt hash is 60 characters long.
          prefix = user.hashedPassword.slice(0, '$2a$12$x'.length)
          suffix = user.hashedPassword.slice(-4)
        } else if (user.hashedPassword.length > 20) {
          prefix = user.hashedPassword.slice(0, 4)
          suffix = user.hashedPassword.slice(-4)
        } else {
          prefix = user.hashedPassword.slice(0, 4)
        }
      }
      logger.warn(
        {
          err,
          userId: user._id,
          hashedPassword: {
            type: typeof user.hashedPassword,
            length,
            prefix,
            suffix,
          },
        },
        'unexpected user.hashedPassword value'
      )
    }
    Metrics.inc('bcrypt', 1, {
      method: 'compare',
      path: rounds,
    })

    const match = await bcrypt.compare(password, user.hashedPassword)

    if (match) {
      _metricsForSuccessfulPasswordMatch(password)
    }

    return { user, match }
  },

  async authenticate(query, password, auditLog, { enforceHIBPCheck = true }) {
    const { user, match } = await AuthenticationManager._checkUserPassword(
      query,
      password
    )

    if (!user) {
      return { user: null }
    }

    const update = { $inc: { loginEpoch: 1 } }
    if (!match) {
      update.$set = { lastFailedLogin: new Date() }
    }

    const result = await User.updateOne(
      { _id: user._id, loginEpoch: user.loginEpoch },
      update,
      {}
    ).exec()

    if (result.modifiedCount !== 1) {
      throw new ParallelLoginError()
    }

    if (!match) {
      if (!auditLog) {
        return { user: null }
      } else {
        try {
          await UserAuditLogHandler.promises.addEntry(
            user._id,
            'failed-password-match',
            user._id,
            auditLog.ipAddress,
            auditLog.info
          )
        } catch (err) {
          logger.error(
            { userId: user._id, err, info: auditLog.info },
            'Error while adding AuditLog entry for failed-password-match'
          )
        }
        return { user: null }
      }
    }
    await AuthenticationManager.checkRounds(user, user.hashedPassword, password)

    let isPasswordReused
    try {
      isPasswordReused =
        await HaveIBeenPwned.promises.checkPasswordForReuse(password)
    } catch (err) {
      logger.err({ err }, 'cannot check password for re-use')
    }

    if (isPasswordReused && enforceHIBPCheck) {
      throw new PasswordReusedError()
    }

    return { user, isPasswordReused }
  },

  validateEmail(email) {
    const parsed = EmailHelper.parseEmail(email)
    if (!parsed) {
      return new InvalidEmailError({ message: 'email not valid' })
    }
    return null
  },

  // validates a password based on a similar set of rules previously used by `passfield.js` on the frontend
  // note that `passfield.js` enforced more rules than this, but these are the most commonly set.
  // returns null on success, or an error object.
  validatePassword(password, email) {
    if (password == null) {
      return new InvalidPasswordError({
        message: 'password not set',
        info: { code: 'not_set' },
      })
    }

    Metrics.inc('try-validate-password')

    let allowAnyChars, min, max
    if (Settings.passwordStrengthOptions) {
      allowAnyChars = Settings.passwordStrengthOptions.allowAnyChars === true
      if (Settings.passwordStrengthOptions.length) {
        min = Settings.passwordStrengthOptions.length.min
        max = Settings.passwordStrengthOptions.length.max
      }
    }
    allowAnyChars = !!allowAnyChars
    min = min || 8
    max = max || 72

    // we don't support passwords > 72 characters in length, because bcrypt truncates them
    if (max > 72) {
      max = 72
    }

    if (password.length < min) {
      return new InvalidPasswordError({
        message: 'password is too short',
        info: { code: 'too_short' },
      })
    }
    if (password.length > max) {
      return new InvalidPasswordError({
        message: 'password is too long',
        info: { code: 'too_long' },
      })
    }
    const passwordLengthError = _validatePasswordNotTooLong(password)
    if (passwordLengthError) {
      return passwordLengthError
    }
    if (
      !allowAnyChars &&
      !AuthenticationManager._passwordCharactersAreValid(password)
    ) {
      return new InvalidPasswordError({
        message: 'password contains an invalid character',
        info: { code: 'invalid_character' },
      })
    }
    if (typeof email === 'string' && email !== '') {
      const startOfEmail = email.split('@')[0]
      if (
        password.includes(email) ||
        password.includes(startOfEmail) ||
        email.includes(password)
      ) {
        return new InvalidPasswordError({
          message: 'password contains part of email address',
          info: { code: 'contains_email' },
        })
      }
      try {
        const passwordTooSimilarError =
          AuthenticationManager._validatePasswordNotTooSimilar(password, email)
        if (passwordTooSimilarError) {
          Metrics.inc('password-too-similar-to-email')
          return new InvalidPasswordError({
            message: 'password is too similar to email address',
            info: { code: 'too_similar' },
          })
        }
      } catch (error) {
        logger.error(
          { error },
          'error while checking password similarity to email'
        )
      }
      // TODO: remove this check once the password-too-similar checks are active?
    }
    return null
  },
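
  // Editor's note: a sketch of the `Settings.passwordStrengthOptions` shape
  // that the validators in this file read. The structure is inferred from the
  // property accesses above and in _passwordCharactersAreValid below; the
  // values shown are illustrative only (they match the in-code fallbacks):
  //
  //   passwordStrengthOptions: {
  //     allowAnyChars: false,
  //     length: { min: 8, max: 72 },
  //     chars: {
  //       digits: '1234567890',
  //       letters: 'abcdefghijklmnopqrstuvwxyz',
  //       letters_up: 'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
  //       symbols: '@#$%^&*()-_=+[]{};:<>/?!£€.,',
  //     },
  //   }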

  async setUserPassword(user, password) {
    return await AuthenticationManager.setUserPasswordInV2(user, password)
  },

  async checkRounds(user, hashedPassword, password) {
    // Temporarily disable this function, TODO: re-enable this
    if (Settings.security.disableBcryptRoundsUpgrades) {
      Metrics.inc('bcrypt_check_rounds', 1, { status: 'disabled' })
      return
    }
    // check current number of rounds and rehash if necessary
    const currentRounds = bcrypt.getRounds(hashedPassword)
    if (currentRounds < BCRYPT_ROUNDS) {
      Metrics.inc('bcrypt_check_rounds', 1, { status: 'upgrade' })
      return await AuthenticationManager._setUserPasswordInMongo(user, password)
    } else {
      Metrics.inc('bcrypt_check_rounds', 1, { status: 'success' })
    }
  },

  async hashPassword(password) {
    // Double-check the size to avoid truncating in bcrypt.
    const error = _validatePasswordNotTooLong(password)
    if (error) {
      throw error
    }

    const salt = await bcrypt.genSalt(BCRYPT_ROUNDS, BCRYPT_MINOR_VERSION)

    Metrics.inc('bcrypt', 1, {
      method: 'hash',
      path: BCRYPT_ROUNDS,
    })
    return await bcrypt.hash(password, salt)
  },

  async setUserPasswordInV2(user, password) {
    if (!user || !user.email || !user._id) {
      throw new Error('invalid user object')
    }
    const validationError = this.validatePassword(password, user.email)
    if (validationError) {
      throw validationError
    }
    // check if we can log in with this password. In which case we should reject it,
    // because it is the same as the existing password.
    const { match } = await AuthenticationManager._checkUserPassword(
      { _id: user._id },
      password
    )

    if (match) {
      throw new PasswordMustBeDifferentError()
    }

    let isPasswordReused
    try {
      isPasswordReused =
        await HaveIBeenPwned.promises.checkPasswordForReuse(password)
    } catch (error) {
      logger.err({ error }, 'cannot check password for re-use')
    }

    if (isPasswordReused) {
      throw new PasswordReusedError()
    }

    // password is strong enough or the validation with the service did not happen
    return await this._setUserPasswordInMongo(user, password)
  },

  async _setUserPasswordInMongo(user, password) {
    const hash = await this.hashPassword(password)
    const result = await db.users.updateOne(
      { _id: new ObjectId(user._id.toString()) },
      {
        $set: {
          hashedPassword: hash,
        },
        $unset: {
          password: true,
        },
      }
    )

    return _checkWriteResult(result)
  },

  _passwordCharactersAreValid(password) {
    let digits, letters, lettersUp, symbols
    if (
      Settings.passwordStrengthOptions &&
      Settings.passwordStrengthOptions.chars
    ) {
      digits = Settings.passwordStrengthOptions.chars.digits
      letters = Settings.passwordStrengthOptions.chars.letters
      lettersUp = Settings.passwordStrengthOptions.chars.letters_up
      symbols = Settings.passwordStrengthOptions.chars.symbols
    }
    digits = digits || '1234567890'
    letters = letters || 'abcdefghijklmnopqrstuvwxyz'
    lettersUp = lettersUp || 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
    symbols = symbols || '@#$%^&*()-_=+[]{};:<>/?!£€.,'

    for (let charIndex = 0; charIndex <= password.length - 1; charIndex++) {
      if (
        digits.indexOf(password[charIndex]) === -1 &&
        letters.indexOf(password[charIndex]) === -1 &&
        lettersUp.indexOf(password[charIndex]) === -1 &&
        symbols.indexOf(password[charIndex]) === -1
      ) {
        return false
      }
    }
    return true
  },

  /**
   * Check if the password is similar to (parts of) the email address.
   * For now, this merely sends a metric when the password and
   * email address are deemed to be too similar to each other.
   * Later we will reject passwords that fail this check.
   *
   * This logic was borrowed from the django project:
   * https://github.com/django/django/blob/fa3afc5d86f1f040922cca2029d6a34301597a70/django/contrib/auth/password_validation.py#L159-L214
   */
  _validatePasswordNotTooSimilar(password, email) {
    password = password.toLowerCase()
    email = email.toLowerCase()
    const stringsToCheck = [email]
      .concat(email.split(/\W+/))
      .concat(email.split(/@/))
    for (const emailPart of stringsToCheck) {
      if (!_exceedsMaximumLengthRatio(password, MAX_SIMILARITY, emailPart)) {
        const similarity = DiffHelper.stringSimilarity(password, emailPart)
        if (similarity > MAX_SIMILARITY) {
          return new Error('password is too similar to email')
        }
      }
    }
  },
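
  // Editor's note: a worked example of the splitting above, for the
  // hypothetical address "jsmith@example.com". stringsToCheck would contain
  // the full address, the \W+ parts ("jsmith", "example", "com") and the
  // @-split parts ("jsmith", "example.com"). Each part that is not filtered
  // out by _exceedsMaximumLengthRatio is then compared to the password via
  // DiffHelper.stringSimilarity against the MAX_SIMILARITY threshold.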

  getMessageForInvalidPasswordError(error, req) {
    const errorCode = error?.info?.code
    const message = {
      type: 'error',
    }
    switch (errorCode) {
      case 'not_set':
        message.key = 'password-not-set'
        message.text = req.i18n.translate('invalid_password_not_set')
        break
      case 'invalid_character':
        message.key = 'password-invalid-character'
        message.text = req.i18n.translate('invalid_password_invalid_character')
        break
      case 'contains_email':
        message.key = 'password-contains-email'
        message.text = req.i18n.translate('invalid_password_contains_email')
        break
      case 'too_similar':
        message.key = 'password-too-similar'
        message.text = req.i18n.translate('invalid_password_too_similar')
        break
      case 'too_short':
        message.key = 'password-too-short'
        message.text = req.i18n.translate('invalid_password_too_short', {
          minLength: Settings.passwordStrengthOptions?.length?.min || 8,
        })
        break
      case 'too_long':
        message.key = 'password-too-long'
        message.text = req.i18n.translate('invalid_password_too_long', {
          maxLength: Settings.passwordStrengthOptions?.length?.max || 72,
        })
        break
      default:
        logger.error({ err: error }, 'Unknown password validation error code')
        message.text = req.i18n.translate('invalid_password')
        break
    }
    return message
  },
}

module.exports = {
  _validatePasswordNotTooSimilar:
    AuthenticationManager._validatePasswordNotTooSimilar, // Private function exported for tests
  validateEmail: AuthenticationManager.validateEmail,
  validatePassword: AuthenticationManager.validatePassword,
  getMessageForInvalidPasswordError:
    AuthenticationManager.getMessageForInvalidPasswordError,
  authenticate: callbackifyMultiResult(AuthenticationManager.authenticate, [
    'user',
    'isPasswordReused',
  ]),
  setUserPassword: callbackify(AuthenticationManager.setUserPassword),
  checkRounds: callbackify(AuthenticationManager.checkRounds),
  hashPassword: callbackify(AuthenticationManager.hashPassword),
  setUserPasswordInV2: callbackify(AuthenticationManager.setUserPasswordInV2),
  promises: AuthenticationManager,
}
127
services/web/app/src/Features/Authentication/HaveIBeenPwned.js
Normal file
@@ -0,0 +1,127 @@
/*
  This module is operating on raw user passwords. Be very defensive.
  Pay special attention when passing the password or even a hash/prefix around.
  We need to ensure that no parts of it get logged or returned on either the
  happy path or via an error (message or attributes).
*/

const { callbackify } = require('util')
const { fetchString } = require('@overleaf/fetch-utils')
const crypto = require('crypto')
const Settings = require('@overleaf/settings')
const Metrics = require('@overleaf/metrics')
const logger = require('@overleaf/logger')

const HEX_CHARS_UPPER = '1234567890ABCDEF'
const API_ERROR = new Error('cannot contact HaveIBeenPwned api')
const INVALID_PREFIX = new Error(
  'This is not a valid hex prefix. Rejecting to pass it to HaveIBeenPwned'
)
const INVALID_RESPONSE = new Error('cannot consume HaveIBeenPwned api response')
const INVALID_SCORE = new Error(
  'non integer score returned by HaveIBeenPwned api'
)
const CODED_ERROR_MESSAGES = [
  API_ERROR,
  INVALID_PREFIX,
  INVALID_RESPONSE,
  INVALID_SCORE,
].map(err => err.message)

async function getScoresForPrefix(prefix) {
  if (
    typeof prefix !== 'string' ||
    prefix.length !== 5 ||
    Array.from(prefix).some(c => !HEX_CHARS_UPPER.includes(c))
  ) {
    // Make sure we do not pass arbitrary objects to the api.
    throw INVALID_PREFIX
  }
  try {
    return await fetchString(
      `${Settings.apis.haveIBeenPwned.url}/range/${prefix}`,
      {
        headers: {
          'User-Agent': 'www.overleaf.com',
          // Docs: https://haveibeenpwned.com/API/v3#PwnedPasswordsPadding
          'Add-Padding': true,
        },
        signal: AbortSignal.timeout(Settings.apis.haveIBeenPwned.timeout),
      }
    )
  } catch (_errorWithPotentialReferenceToPrefix) {
    // NOTE: Do not leak request details by passing the original error up.
    throw API_ERROR
  }
}

async function isPasswordReused(password) {
  const sha1 = crypto
    .createHash('sha1')
    .update(password)
    .digest('hex')
    .toUpperCase()
  const prefix = sha1.slice(0, 5)
  const body = await getScoresForPrefix(prefix)

  let score = 0
  try {
    for (const line of body.split('\r\n')) {
      const [candidate, scoreRaw] = line.split(':')
      if (prefix + candidate === sha1) {
        score = parseInt(scoreRaw)
        break
      }
    }
  } catch (_errorWithPotentialReferenceToHash) {
    // NOTE: Do not leak password details by logging the original error.
    throw INVALID_RESPONSE
  }

  if (Number.isNaN(score)) {
    // NOTE: Do not leak password details by logging the score.
    throw INVALID_SCORE
  }
  return score > 0
}
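
// Editor's note: a worked example of the k-anonymity flow above. For the
// password "password", the SHA-1 digest is
// 5BAA61E4C9B93F3F0682250B6CF8331B7EE68FD8; only the 5-character prefix
// "5BAA6" is sent to the range API. The response is a list of
// "<suffix>:<count>" lines, which are matched locally against the full
// hash, so neither the password nor its complete hash ever leaves the
// server.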

async function checkPasswordForReuse(password) {
  if (!Settings.apis.haveIBeenPwned.enabled) {
    return
  }

  try {
    const isReused = await isPasswordReused(password)

    Metrics.inc('password_re_use', {
      status: isReused ? 're-used' : 'unique',
    })

    return isReused
  } catch (err) {
    let error = err
    // Make sure we do not leak any password details.
    if (!CODED_ERROR_MESSAGES.includes(err.message)) {
      error = new Error('hidden message')
    }
    error = new Error(error.message)

    Metrics.inc('password_re_use', { status: 'failure' })

    throw error
  }
}

function checkPasswordForReuseInBackground(password) {
  checkPasswordForReuse(password).catch(error => {
    logger.err({ error }, 'cannot check password for re-use')
  })
}

module.exports = {
  checkPasswordForReuse: callbackify(checkPasswordForReuse),
  checkPasswordForReuseInBackground,
  promises: {
    checkPasswordForReuse,
  },
}
@@ -0,0 +1,46 @@
const _ = require('lodash')

const SessionManager = {
  getSessionUser(session) {
    const sessionUser = _.get(session, ['user'])
    const sessionPassportUser = _.get(session, ['passport', 'user'])
    return sessionUser || sessionPassportUser || null
  },

  setInSessionUser(session, props) {
    const sessionUser = SessionManager.getSessionUser(session)
    if (!sessionUser) {
      return
    }
    for (const key in props) {
      const value = props[key]
      sessionUser[key] = value
    }
    return null
  },

  isUserLoggedIn(session) {
    const userId = SessionManager.getLoggedInUserId(session)
    return ![null, undefined, false].includes(userId)
  },

  getLoggedInUserId(session) {
    const user = SessionManager.getSessionUser(session)
    if (user) {
      return user._id
    } else {
      return null
    }
  },

  getLoggedInUserV1Id(session) {
    const user = SessionManager.getSessionUser(session)
    if (user != null && user.v1_id != null) {
      return user.v1_id
    } else {
      return null
    }
  },
}

module.exports = SessionManager
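
// Editor's note: a sketch of the two session shapes the getters above
// normalize, inferred from the lodash paths (field values illustrative):
// either `session.user = { _id, v1_id, ... }` or
// `session.passport.user = { _id, v1_id, ... }`; getSessionUser prefers
// `session.user` when both are present.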
@@ -0,0 +1,315 @@
const { callbackify } = require('util')
const { ObjectId } = require('mongodb-legacy')
const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter')
const CollaboratorsHandler = require('../Collaborators/CollaboratorsHandler')
const ProjectGetter = require('../Project/ProjectGetter')
const { User } = require('../../models/User')
const PrivilegeLevels = require('./PrivilegeLevels')
const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler')
const PublicAccessLevels = require('./PublicAccessLevels')
const Errors = require('../Errors/Errors')
const { hasAdminAccess } = require('../Helpers/AdminAuthorizationHelper')
const Settings = require('@overleaf/settings')
const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')

function isRestrictedUser(
  userId,
  privilegeLevel,
  isTokenMember,
  isInvitedMember
) {
  if (privilegeLevel === PrivilegeLevels.NONE) {
    return true
  }
  return (
    privilegeLevel === PrivilegeLevels.READ_ONLY &&
    (isTokenMember || !userId) &&
    !isInvitedMember
  )
}

async function isRestrictedUserForProject(userId, projectId, token) {
  const privilegeLevel = await getPrivilegeLevelForProject(
    userId,
    projectId,
    token
  )
  const isTokenMember = await CollaboratorsHandler.promises.userIsTokenMember(
    userId,
    projectId
  )
  const isInvitedMember =
    await CollaboratorsGetter.promises.isUserInvitedMemberOfProject(
      userId,
      projectId
    )
  return isRestrictedUser(
    userId,
    privilegeLevel,
    isTokenMember,
    isInvitedMember
  )
}

async function getPublicAccessLevel(projectId) {
  if (!ObjectId.isValid(projectId)) {
    throw new Error('invalid project id')
  }

  // Note, the Project property in the DB is `publicAccesLevel`, without the second `s`
  const project = await ProjectGetter.promises.getProject(projectId, {
    publicAccesLevel: 1,
  })
  if (!project) {
    throw new Errors.NotFoundError(`no project found with id ${projectId}`)
  }
  return project.publicAccesLevel
}

/**
 * Get the privilege level that the user has for the project.
 *
 * @param userId - The id of the user that wants to access the project.
 * @param projectId - The id of the project to be accessed.
 * @param {string} token
 * @param {Object} opts
 * @param {boolean} opts.ignoreSiteAdmin - Do not consider whether the user is
 *     a site admin.
 * @param {boolean} opts.ignorePublicAccess - Do not consider whether the
 *     project is publicly accessible.
 *
 * @returns {string|boolean} The privilege level. One of "owner",
 *     "readAndWrite", "readOnly" or false.
 */
async function getPrivilegeLevelForProject(
  userId,
  projectId,
  token,
  opts = {}
) {
  if (userId) {
    return getPrivilegeLevelForProjectWithUser(userId, projectId, opts)
  } else {
    return getPrivilegeLevelForProjectWithoutUser(projectId, token, opts)
  }
}
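
// Editor's note: a minimal sketch of resolving a privilege level and gating
// on it, assumed to run inside an async function; the ids are placeholders:
//
//   const AuthorizationManager = require('./AuthorizationManager')
//   const PrivilegeLevels = require('./PrivilegeLevels')
//
//   const level = await AuthorizationManager.promises.getPrivilegeLevelForProject(
//     userId, // null for an anonymous visitor
//     projectId,
//     token, // only consulted on the anonymous, token-based path
//     { ignorePublicAccess: true } // optional
//   )
//   if (level === PrivilegeLevels.NONE) {
//     // deny access
//   }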

// User is present, get their privilege level from database
async function getPrivilegeLevelForProjectWithUser(
  userId,
  projectId,
  opts = {}
) {
  if (!opts.ignoreSiteAdmin) {
    if (await isUserSiteAdmin(userId)) {
      return PrivilegeLevels.OWNER
    }
  }

  const privilegeLevel =
    await CollaboratorsGetter.promises.getMemberIdPrivilegeLevel(
      userId,
      projectId
    )
  if (privilegeLevel && privilegeLevel !== PrivilegeLevels.NONE) {
    // The user has direct access
    return privilegeLevel
  }

  if (!opts.ignorePublicAccess) {
    // Legacy public-access system
    // User is present (not anonymous), but does not have direct access
    const publicAccessLevel = await getPublicAccessLevel(projectId)
    if (publicAccessLevel === PublicAccessLevels.READ_ONLY) {
      return PrivilegeLevels.READ_ONLY
    }
    if (publicAccessLevel === PublicAccessLevels.READ_AND_WRITE) {
      return PrivilegeLevels.READ_AND_WRITE
    }
  }

  return PrivilegeLevels.NONE
}

// User is anonymous, try token-based access
async function getPrivilegeLevelForProjectWithoutUser(
  projectId,
  token,
  opts = {}
) {
  const publicAccessLevel = await getPublicAccessLevel(projectId)
  if (!opts.ignorePublicAccess) {
    if (publicAccessLevel === PublicAccessLevels.READ_ONLY) {
      // Legacy public read-only access for anonymous user
      return PrivilegeLevels.READ_ONLY
    }
    if (publicAccessLevel === PublicAccessLevels.READ_AND_WRITE) {
      // Legacy public read-write access for anonymous user
      return PrivilegeLevels.READ_AND_WRITE
    }
  }
  if (publicAccessLevel === PublicAccessLevels.TOKEN_BASED) {
    return getPrivilegeLevelForProjectWithToken(projectId, token)
  }

  // Deny anonymous user access
  return PrivilegeLevels.NONE
}

async function getPrivilegeLevelForProjectWithToken(projectId, token) {
  // Anonymous users can have read-only access to token-based projects,
  // while users must be logged in for read-write access,
  // unless the `enableAnonymousReadAndWriteSharing` setting is enabled
  const { isValidReadAndWrite, isValidReadOnly } =
    await TokenAccessHandler.promises.validateTokenForAnonymousAccess(
      projectId,
      token
    )
  if (isValidReadOnly) {
    // Grant anonymous user read-only access
    return PrivilegeLevels.READ_ONLY
  }
  if (isValidReadAndWrite) {
    // Grant anonymous user read-and-write access
    return PrivilegeLevels.READ_AND_WRITE
  }
  // Deny anonymous access
  return PrivilegeLevels.NONE
}

async function canUserReadProject(userId, projectId, token) {
  const privilegeLevel = await getPrivilegeLevelForProject(
    userId,
    projectId,
    token
  )
  return [
    PrivilegeLevels.OWNER,
    PrivilegeLevels.READ_AND_WRITE,
    PrivilegeLevels.READ_ONLY,
    PrivilegeLevels.REVIEW,
  ].includes(privilegeLevel)
}

async function canUserWriteProjectContent(userId, projectId, token) {
  const privilegeLevel = await getPrivilegeLevelForProject(
    userId,
    projectId,
    token
  )
  return [PrivilegeLevels.OWNER, PrivilegeLevels.READ_AND_WRITE].includes(
    privilegeLevel
  )
}

async function canUserWriteOrReviewProjectContent(userId, projectId, token) {
  const privilegeLevel = await getPrivilegeLevelForProject(
    userId,
    projectId,
    token
  )
  return (
    privilegeLevel === PrivilegeLevels.OWNER ||
    privilegeLevel === PrivilegeLevels.READ_AND_WRITE ||
    privilegeLevel === PrivilegeLevels.REVIEW
  )
}

async function canUserWriteProjectSettings(userId, projectId, token) {
  const privilegeLevel = await getPrivilegeLevelForProject(
    userId,
    projectId,
    token,
    { ignorePublicAccess: true }
  )
  return [PrivilegeLevels.OWNER, PrivilegeLevels.READ_AND_WRITE].includes(
    privilegeLevel
  )
}

async function canUserRenameProject(userId, projectId, token) {
  const privilegeLevel = await getPrivilegeLevelForProject(
    userId,
    projectId,
    token
  )
  return privilegeLevel === PrivilegeLevels.OWNER
}

async function canUserAdminProject(userId, projectId, token) {
  const privilegeLevel = await getPrivilegeLevelForProject(
    userId,
    projectId,
    token
  )
  return privilegeLevel === PrivilegeLevels.OWNER
}

async function isUserSiteAdmin(userId) {
  if (!userId) {
    return false
  }
  if (!Settings.adminPrivilegeAvailable) return false
  const user = await User.findOne({ _id: userId }, { isAdmin: 1 }).exec()
  return hasAdminAccess(user)
}

async function canUserDeleteOrResolveThread(
  userId,
  projectId,
  docId,
  threadId,
  token
) {
  const privilegeLevel = await getPrivilegeLevelForProject(
    userId,
    projectId,
    token,
    { ignorePublicAccess: true }
  )
  if (
    privilegeLevel === PrivilegeLevels.OWNER ||
    privilegeLevel === PrivilegeLevels.READ_AND_WRITE
  ) {
    return true
  }

  if (privilegeLevel !== PrivilegeLevels.REVIEW) {
    return false
  }

  const comment = await DocumentUpdaterHandler.promises.getComment(
    projectId,
    docId,
    threadId
  )
  return comment.metadata.user_id === userId
}

module.exports = {
  canUserReadProject: callbackify(canUserReadProject),
  canUserWriteProjectContent: callbackify(canUserWriteProjectContent),
  canUserWriteOrReviewProjectContent: callbackify(
    canUserWriteOrReviewProjectContent
  ),
  canUserDeleteOrResolveThread: callbackify(canUserDeleteOrResolveThread),
  canUserWriteProjectSettings: callbackify(canUserWriteProjectSettings),
  canUserRenameProject: callbackify(canUserRenameProject),
  canUserAdminProject: callbackify(canUserAdminProject),
  getPrivilegeLevelForProject: callbackify(getPrivilegeLevelForProject),
  isRestrictedUser,
  isRestrictedUserForProject: callbackify(isRestrictedUserForProject),
  isUserSiteAdmin: callbackify(isUserSiteAdmin),
  promises: {
    canUserReadProject,
    canUserWriteProjectContent,
    canUserWriteOrReviewProjectContent,
    canUserDeleteOrResolveThread,
    canUserWriteProjectSettings,
    canUserRenameProject,
    canUserAdminProject,
    getPrivilegeLevelForProject,
    isRestrictedUserForProject,
    isUserSiteAdmin,
  },
}
@@ -0,0 +1,292 @@
const AuthorizationManager = require('./AuthorizationManager')
const logger = require('@overleaf/logger')
const { ObjectId } = require('mongodb-legacy')
const Errors = require('../Errors/Errors')
const HttpErrorHandler = require('../Errors/HttpErrorHandler')
const AuthenticationController = require('../Authentication/AuthenticationController')
const SessionManager = require('../Authentication/SessionManager')
const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler')
const { expressify } = require('@overleaf/promise-utils')
const {
  canRedirectToAdminDomain,
} = require('../Helpers/AdminAuthorizationHelper')
const { getSafeAdminDomainRedirect } = require('../Helpers/UrlHelper')

function _handleAdminDomainRedirect(req, res) {
  if (canRedirectToAdminDomain(SessionManager.getSessionUser(req.session))) {
    logger.warn({ req }, 'redirecting admin user to admin domain')
    res.redirect(getSafeAdminDomainRedirect(req.originalUrl))
    return true
  }
  return false
}

async function ensureUserCanReadMultipleProjects(req, res, next) {
  const projectIds = (req.query.project_ids || '').split(',')
  const userId = _getUserId(req)
  for (const projectId of projectIds) {
    const token = TokenAccessHandler.getRequestToken(req, projectId)
    const canRead = await AuthorizationManager.promises.canUserReadProject(
      userId,
      projectId,
      token
    )
    if (!canRead) {
      return _redirectToRestricted(req, res, next)
    }
  }
  next()
}

async function blockRestrictedUserFromProject(req, res, next) {
  const projectId = _getProjectId(req)
  const userId = _getUserId(req)
  const token = TokenAccessHandler.getRequestToken(req, projectId)
  const isRestrictedUser =
    await AuthorizationManager.promises.isRestrictedUserForProject(
      userId,
      projectId,
      token
    )
  if (isRestrictedUser) {
    return HttpErrorHandler.forbidden(req, res)
  }
  next()
}

async function ensureUserCanReadProject(req, res, next) {
  const projectId = _getProjectId(req)
  const userId = _getUserId(req)
  const token = TokenAccessHandler.getRequestToken(req, projectId)
  const canRead = await AuthorizationManager.promises.canUserReadProject(
    userId,
    projectId,
    token
  )
  if (canRead) {
    logger.debug({ userId, projectId }, 'allowing user read access to project')
    return next()
  }
  logger.debug({ userId, projectId }, 'denying user read access to project')
  HttpErrorHandler.forbidden(req, res)
}

async function ensureUserCanWriteProjectSettings(req, res, next) {
  const projectId = _getProjectId(req)
  const userId = _getUserId(req)
  const token = TokenAccessHandler.getRequestToken(req, projectId)

  if (req.body.name != null) {
    const canRename = await AuthorizationManager.promises.canUserRenameProject(
      userId,
      projectId,
      token
    )
    if (!canRename) {
      return HttpErrorHandler.forbidden(req, res)
    }
  }

  const otherParams = Object.keys(req.body).filter(x => x !== 'name')
  if (otherParams.length > 0) {
    const canWrite =
      await AuthorizationManager.promises.canUserWriteProjectSettings(
        userId,
        projectId,
        token
      )
    if (!canWrite) {
      return HttpErrorHandler.forbidden(req, res)
    }
  }

  next()
}

async function ensureUserCanDeleteOrResolveThread(req, res, next) {
  const projectId = _getProjectId(req)
  const docId = _getDocId(req)
  const threadId = _getThreadId(req)
  const userId = _getUserId(req)
  const token = TokenAccessHandler.getRequestToken(req, projectId)
  const canDeleteThread =
    await AuthorizationManager.promises.canUserDeleteOrResolveThread(
      userId,
      projectId,
      docId,
      threadId,
      token
    )
  if (canDeleteThread) {
    logger.debug(
      { userId, projectId },
      'allowing user to delete or resolve a comment thread'
    )
    return next()
  }

  logger.debug(
    { userId, projectId, threadId },
    'denying user to delete or resolve a comment thread'
  )
  return HttpErrorHandler.forbidden(req, res)
}

async function ensureUserCanWriteProjectContent(req, res, next) {
  const projectId = _getProjectId(req)
  const userId = _getUserId(req)
  const token = TokenAccessHandler.getRequestToken(req, projectId)
  const canWrite =
    await AuthorizationManager.promises.canUserWriteProjectContent(
      userId,
      projectId,
      token
    )
  if (canWrite) {
    logger.debug(
      { userId, projectId },
      'allowing user write access to project content'
    )
    return next()
  }
  logger.debug(
    { userId, projectId },
    'denying user write access to project content'
  )
  HttpErrorHandler.forbidden(req, res)
}

async function ensureUserCanWriteOrReviewProjectContent(req, res, next) {
  const projectId = _getProjectId(req)
  const userId = _getUserId(req)
  const token = TokenAccessHandler.getRequestToken(req, projectId)

  const canWriteOrReviewProjectContent =
    await AuthorizationManager.promises.canUserWriteOrReviewProjectContent(
      userId,
      projectId,
      token
    )
  if (canWriteOrReviewProjectContent) {
    logger.debug(
      { userId, projectId },
      'allowing user write or review access to project content'
    )
    return next()
  }

  logger.debug(
    { userId, projectId },
    'denying user write or review access to project content'
  )
  return HttpErrorHandler.forbidden(req, res)
}

async function ensureUserCanAdminProject(req, res, next) {
  const projectId = _getProjectId(req)
  const userId = _getUserId(req)
  const token = TokenAccessHandler.getRequestToken(req, projectId)
  const canAdmin = await AuthorizationManager.promises.canUserAdminProject(
    userId,
    projectId,
    token
  )
  if (canAdmin) {
    logger.debug({ userId, projectId }, 'allowing user admin access to project')
    return next()
  }
  logger.debug({ userId, projectId }, 'denying user admin access to project')
  HttpErrorHandler.forbidden(req, res)
}

async function ensureUserIsSiteAdmin(req, res, next) {
  const userId = _getUserId(req)
  if (await AuthorizationManager.promises.isUserSiteAdmin(userId)) {
    logger.debug({ userId }, 'allowing user admin access to site')
    return next()
  }
  if (_handleAdminDomainRedirect(req, res)) return
  logger.debug({ userId }, 'denying user admin access to site')
  _redirectToRestricted(req, res, next)
}

function _getProjectId(req) {
  const projectId = req.params.project_id || req.params.Project_id
  if (!projectId) {
    throw new Error('Expected project_id in request parameters')
  }
  if (!ObjectId.isValid(projectId)) {
    throw new Errors.NotFoundError(`invalid projectId: ${projectId}`)
  }
  return projectId
}

function _getDocId(req) {
  const docId = req.params.doc_id
  if (!docId) {
    throw new Error('Expected doc_id in request parameters')
  }
  if (!ObjectId.isValid(docId)) {
    throw new Errors.NotFoundError(`invalid docId: ${docId}`)
  }
  return docId
}

function _getThreadId(req) {
  const threadId = req.params.thread_id
  if (!threadId) {
    throw new Error('Expected thread_id in request parameters')
  }
  if (!ObjectId.isValid(threadId)) {
    throw new Errors.NotFoundError(`invalid threadId: ${threadId}`)
  }
  return threadId
}

function _getUserId(req) {
  return (
    SessionManager.getLoggedInUserId(req.session) ||
    (req.oauth_user && req.oauth_user._id) ||
    null
  )
}

function _redirectToRestricted(req, res, next) {
  // TODO: move this to throwing ForbiddenError
  res.redirect(`/restricted?from=${encodeURIComponent(res.locals.currentUrl)}`)
}

function restricted(req, res, next) {
  if (SessionManager.isUserLoggedIn(req.session)) {
    return res.render('user/restricted', { title: 'restricted' })
  }
  const { from } = req.query
  logger.debug({ from }, 'redirecting to login')
  if (from) {
    AuthenticationController.setRedirectInSession(req, from)
  }
  res.redirect('/login')
}

module.exports = {
  ensureUserCanReadMultipleProjects: expressify(
    ensureUserCanReadMultipleProjects
  ),
  blockRestrictedUserFromProject: expressify(blockRestrictedUserFromProject),
  ensureUserCanReadProject: expressify(ensureUserCanReadProject),
  ensureUserCanWriteProjectSettings: expressify(
    ensureUserCanWriteProjectSettings
  ),
  ensureUserCanDeleteOrResolveThread: expressify(
    ensureUserCanDeleteOrResolveThread
  ),
  ensureUserCanWriteProjectContent: expressify(
    ensureUserCanWriteProjectContent
  ),
  ensureUserCanWriteOrReviewProjectContent: expressify(
    ensureUserCanWriteOrReviewProjectContent
  ),
  ensureUserCanAdminProject: expressify(ensureUserCanAdminProject),
  ensureUserIsSiteAdmin: expressify(ensureUserIsSiteAdmin),
  restricted,
}
@@ -0,0 +1,103 @@
const { ForbiddenError, UserNotFoundError } = require('../Errors/Errors')
const {
  getUserCapabilities,
  getUserRestrictions,
  combineGroupPolicies,
  combineAllowedProperties,
} = require('./PermissionsManager')
const { assertUserPermissions } = require('./PermissionsManager').promises
const Modules = require('../../infrastructure/Modules')
const { expressify } = require('@overleaf/promise-utils')
const Features = require('../../infrastructure/Features')

/**
 * Function that returns middleware to add an `assertPermission` function to the request object to check if the user has a specific capability.
 * @returns {Function} The middleware function that adds the `assertPermission` function to the request object.
 */
function useCapabilities() {
  const middleware = async function (req, res, next) {
    // attach the user's capabilities to the request object
    req.capabilitySet = new Set()
    // provide a function to assert that a capability is present
    req.assertPermission = capability => {
      if (!req.capabilitySet.has(capability)) {
        throw new ForbiddenError(
          `user does not have permission for ${capability}`
        )
      }
    }
    if (!req.user) {
      return next()
    }
    try {
      let results = await Modules.promises.hooks.fire(
        'getGroupPolicyForUser',
        req.user
      )
      // merge array of all results from all modules
      results = results.flat()

      if (results.length > 0) {
        // get the combined group policy applying to the user
        const groupPolicies = results.map(result => result.groupPolicy)
        const combinedGroupPolicy = combineGroupPolicies(groupPolicies)
        // attach the new capabilities to the request object
        for (const cap of getUserCapabilities(combinedGroupPolicy)) {
          req.capabilitySet.add(cap)
        }
        // also attach the user's restrictions (the capabilities they don't have)
        req.userRestrictions = getUserRestrictions(combinedGroupPolicy)

        // attach allowed properties to the request object
        const allowedProperties = combineAllowedProperties(results)
        for (const [prop, value] of Object.entries(allowedProperties)) {
          req[prop] = value
        }
      }
      next()
    } catch (error) {
      if (error instanceof UserNotFoundError) {
        // the user is logged in but doesn't exist in the database
        // this can happen if the user has just deleted their account
        return next()
      } else {
        next(error)
      }
    }
  }
  return expressify(middleware)
}

/**
 * Function that returns middleware to check if the user has permission to access a resource.
 * @param {...string} requiredCapabilities - the capabilities required to access the resource.
 * @returns {Function} The middleware function that checks if the user has the required capabilities.
 */
function requirePermission(...requiredCapabilities) {
  if (
    requiredCapabilities.length === 0 ||
    requiredCapabilities.some(capability => typeof capability !== 'string')
  ) {
    throw new Error('invalid required capabilities')
  }
  const doRequest = async function (req, res, next) {
    if (!Features.hasFeature('saas')) {
      return next()
    }
    if (!req.user) {
      return next(new Error('no user'))
    }
    try {
      await assertUserPermissions(req.user, requiredCapabilities)
      next()
    } catch (error) {
      next(error)
    }
  }
  return doRequest
}

module.exports = {
  requirePermission,
  useCapabilities,
}
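
// Editor's note: a hedged sketch of how these middleware factories could be
// wired into an Express router. The route path and the capability name
// 'add-secondary-email' are hypothetical, not taken from this commit:
//
//   const express = require('express')
//   const PermissionsController = require('./PermissionsController')
//
//   const router = express.Router()
//   // populate req.capabilitySet / req.assertPermission for all routes
//   router.use(PermissionsController.useCapabilities())
//   // pass a ForbiddenError to next() unless the user has the capability
//   router.post(
//     '/user/email',
//     PermissionsController.requirePermission('add-secondary-email'),
//     (req, res) => res.sendStatus(204)
//   )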
@@ -0,0 +1,480 @@
/**
 * This module exports functions for managing permissions and policies.
 *
 * It provides a way to:
 *
 * - Register capabilities and policies
 * - Associate policies with custom validators
 * - Apply collections of policies to a user
 * - Check whether a user has a given capability
 * - Check whether a user complies with a given policy
 *
 * Capabilities: boolean values that represent whether a user is allowed to
 * perform a certain action or not. The capabilities are represented as a Set.
 * For example, to delete their account a user would need the
 * `delete-own-account` capability. A user starts with a set of default
 * capabilities that let them do all the things they can currently do in
 * Overleaf.
 *
 * Policy: a rule which specifies which capabilities will be removed from a user
 * when the policy is applied.
 *
 * For example, a policy `userCannotDeleteOwnAccount` is represented as
 * `{'delete-own-account' : false}` meaning that the `delete-own-account`
 * capability will be removed. A policy can remove more than one capability, and
 * more than one policy could apply to a user.
 *
 * Validator: a function that takes an object with user and subscription properties
 * and returns a boolean indicating whether the user satisfies the policy or not.
 * For example, a validator for the `userCannotHaveSecondaryEmail` policy would
 * check whether the user has more than one email address.
 *
 * Group Policies: a collection of policies with a setting indicating whether
 * they are enforced or not. Used to place restrictions on managed users in a
 * group.
 *
 * For example, a group policy could be
 *
 *     {
 *       "userCannotDeleteOwnAccount": true, // enforced
 *       "userCannotHaveSecondaryEmail": false // not enforced
 *     }
 */
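
// Editor's note: a minimal usage sketch of the registration flow described
// above, as commented-out code. The capability and policy names are the ones
// used as examples in the doc comment; everything else follows the function
// signatures defined below in this file:
//
//   const PermissionsManager = require('./PermissionsManager')
//
//   // every capability must be registered with a boolean default
//   PermissionsManager.registerCapability('delete-own-account', {
//     default: true,
//   })
//
//   // a policy maps capabilities to false to remove them when enforced
//   PermissionsManager.registerPolicy('userCannotDeleteOwnAccount', {
//     'delete-own-account': false,
//   })
//
//   // with the policy enforced in a group policy, the capability is denied
//   PermissionsManager.hasPermission(
//     { userCannotDeleteOwnAccount: true },
//     'delete-own-account'
//   ) // => false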

const { callbackify } = require('util')
const { ForbiddenError } = require('../Errors/Errors')
const Modules = require('../../infrastructure/Modules')

const POLICY_TO_CAPABILITY_MAP = new Map()
const POLICY_TO_VALIDATOR_MAP = new Map()
const DEFAULT_PERMISSIONS = new Map()
const ALLOWED_PROPERTIES = new Set()

/**
 * Throws an error if the given capability is not registered.
 *
 * @private
 * @param {string} capability - The name of the capability to check.
 * @throws {Error} If the capability is not registered.
 */
function ensureCapabilityExists(capability) {
  if (!DEFAULT_PERMISSIONS.has(capability)) {
    throw new Error(`unknown capability: ${capability}`)
  }
}

/**
 * Validates a group policy object.
 *
 * @param {Object} policies - An object containing policy names and booleans
 *     as key-value entries.
 * @throws {Error} if the `policies` object contains a policy that is not
 *     registered, or the policy value is not a boolean
 */
function validatePolicies(policies) {
  for (const [policy, value] of Object.entries(policies)) {
    if (!POLICY_TO_CAPABILITY_MAP.has(policy)) {
      throw new Error(`unknown policy: ${policy}`)
    }
    if (typeof value !== 'boolean') {
      throw new Error(`policy value must be a boolean: ${policy} = ${value}`)
    }
  }
}

/**
 * Registers a new capability with the given name and options.
 *
 * @param {string} name - The name of the capability to register.
 * @param {Object} options - The options for the capability.
 * @param {boolean} options.default - The default value for the capability
 *     (required).
 * @throws {Error} If the default value is not a boolean or if the capability is
 *     already registered.
 */
function registerCapability(name, options) {
  // check that the default value is a boolean
  const defaultValue = options?.default
  if (typeof defaultValue !== 'boolean') {
    throw new Error('default value must be a boolean')
  }
  if (DEFAULT_PERMISSIONS.has(name)) {
    throw new Error(`capability already registered: ${name}`)
  }
  DEFAULT_PERMISSIONS.set(name, defaultValue)
}

/**
 * Registers a new policy with the given name, capabilities, and options.
 *
 * @param {string} name - The name of the policy to register.
 * @param {Object} capabilities - The capabilities for the policy.
 * @param {Object} [options] - The options for the policy.
 * @param {Function?} [options.validator] - The optional validator function for the
 *     policy.
 * @throws {Error} If the policy is already registered or if a capability is not
 *     a boolean or is unknown.
 */
function registerPolicy(name, capabilities, options = {}) {
  const { validator } = options
  // check that the only options provided are capabilities and validators
  // FIXME: maybe use a schema validator here?
  if (POLICY_TO_CAPABILITY_MAP.has(name)) {
    throw new Error(`policy already registered: ${name}`)
  }
  // check that all the entries in the capability set exist and are booleans
  for (const [capabilityName, capabilityValue] of Object.entries(
    capabilities
  )) {
    // check that the capability exists (look in the default permissions)
    if (!DEFAULT_PERMISSIONS.has(capabilityName)) {
      throw new Error(`unknown capability: ${capabilityName}`)
    }
    // check that the value is a boolean
    if (typeof capabilityValue !== 'boolean') {
      throw new Error(
        `capability value must be a boolean: ${capabilityName} = ${capabilityValue}`
      )
    }
  }
  // set the policy capabilities
  POLICY_TO_CAPABILITY_MAP.set(name, new Map(Object.entries(capabilities)))

  // set the policy validator (if present)
  if (validator) {
    POLICY_TO_VALIDATOR_MAP.set(name, validator)
  }
}

/**
 * Registers an allowed property that can be added to the request object.
 *
 * @param {string} name - The name of the property to register.
 * @returns {void}
 */
function registerAllowedProperty(name) {
  ALLOWED_PROPERTIES.add(name)
}

/**
 * Returns the set of allowed properties that have been registered.
 *
 * @returns {Set} ALLOWED_PROPERTIES
 */
function getAllowedProperties() {
  return ALLOWED_PROPERTIES
}

/**
 * Returns an array of policy names that are enforced based on the provided
 * group policy object.
 *
 * @private
 * @param {Object} groupPolicy - The group policy object to check.
 * @returns {Array} An array of policy names that are enforced.
 */
function getEnforcedPolicyNames(groupPolicy = {}) {
  if (!groupPolicy) {
    return []
  }
  return Object.keys(
    typeof groupPolicy.toObject === 'function'
      ? groupPolicy.toObject()
      : groupPolicy
  ).filter(
    policyName =>
      !['__v', '_id'].includes(policyName) && groupPolicy[policyName] !== false
  ) // filter out the policies that are not enforced
}

/**
 * Returns the value of the specified capability for the given policy.
 *
 * @private
 * @param {string} policyName - The name of the policy to retrieve the
 *     capability value from.
 * @param {string} capability - The name of the capability to retrieve the value
 *     for.
 * @returns {boolean | undefined} The value of the capability for the policy, or
 *     undefined if the policy or capability is not found.
 */
function getCapabilityValueFromPolicy(policyName, capability) {
  return POLICY_TO_CAPABILITY_MAP.get(policyName)?.get(capability)
}

/**
 * Returns the default value for the specified capability.
 *
 * @private
 * @param {string} capability - The name of the capability to retrieve the
 *     default value for.
 * @returns {boolean | undefined} The default value for the capability, or
 *     undefined if the capability is not found.
 */
function getDefaultPermission(capability) {
  return DEFAULT_PERMISSIONS.get(capability)
}

function getValidatorFromPolicy(policyName) {
  return POLICY_TO_VALIDATOR_MAP.get(policyName)
}

/**
 * Returns a set of default capabilities based on the DEFAULT_PERMISSIONS map.
 *
 * @private
 * @returns {Set} A set of default capabilities.
 */
function getDefaultCapabilities() {
  const defaultCapabilities = new Set()
  for (const [
    capabilityName,
    capabilityValue,
  ] of DEFAULT_PERMISSIONS.entries()) {
    if (capabilityValue === true) {
      defaultCapabilities.add(capabilityName)
    }
  }
  return defaultCapabilities
}

/**
 * Applies a given policy to a set of capabilities, to remove those capabilities
 * which are not allowed by the policy.
 *
 * @private
 * @param {Set} capabilitySet - The set of capabilities to apply the policy to.
 * @param {string} policyName - The name of the policy to apply.
 * @throws {Error} If the policy is unknown.
 */
function applyPolicy(capabilitySet, policyName) {
  const policyCapabilities = POLICY_TO_CAPABILITY_MAP.get(policyName)
  if (!policyCapabilities) {
    throw new Error(`unknown policy: ${policyName}`)
  }
  for (const [
    capabilityName,
    capabilityValue,
  ] of policyCapabilities.entries()) {
    if (capabilityValue !== true) {
      capabilitySet.delete(capabilityName)
    }
  }
}

/**
 * Returns a set of capabilities that a user has based on their group policy.
 *
 * @param {Object} groupPolicy - The group policy object to check.
 * @returns {Set} A set of capabilities that the user has, based on their group
 *     policy.
 * @throws {Error} If the policy is unknown.
 */
function getUserCapabilities(groupPolicy) {
  const userCapabilities = getDefaultCapabilities()
  const enforcedPolicyNames = getEnforcedPolicyNames(groupPolicy)
  for (const enforcedPolicyName of enforcedPolicyNames) {
    applyPolicy(userCapabilities, enforcedPolicyName)
  }
  return userCapabilities
}

/**
 * Combines an array of group policies into a single policy object.
 *
 * @param {Array} groupPolicies - An array of group policies.
 * @returns {Object} - The combined group policy object.
 */
function combineGroupPolicies(groupPolicies) {
  const combinedGroupPolicy = {}
  for (const groupPolicy of groupPolicies) {
    const enforcedPolicyNames = getEnforcedPolicyNames(groupPolicy)
    for (const enforcedPolicyName of enforcedPolicyNames) {
      combinedGroupPolicy[enforcedPolicyName] = true
    }
  }
  return combinedGroupPolicy
}
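
// Editor's note: a small worked example of the combination above, reusing
// the policy names from the module doc comment. Policies set to false are
// not enforced, so only the enforced ones survive into the combined object:
//
//   combineGroupPolicies([
//     { userCannotDeleteOwnAccount: true, userCannotHaveSecondaryEmail: false },
//     { userCannotHaveSecondaryEmail: true },
//   ])
//   // => { userCannotDeleteOwnAccount: true, userCannotHaveSecondaryEmail: true }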

/**
 * Combines the allowed properties from an array of property objects.
 *
 * @param {Array<Object>} propertyObjects - An array of property objects.
 * @returns {Object} - An object containing the combined allowed properties.
 */
function combineAllowedProperties(propertyObjects) {
  const userProperties = {}
  for (const properties of propertyObjects) {
    for (const [key, value] of Object.entries(properties)) {
      if (ALLOWED_PROPERTIES.has(key)) {
        userProperties[key] ??= value
      }
    }
  }
  return userProperties
}

/**
 * Returns a set of capabilities that a user does not have based on their group policy.
 *
 * @param {Object} groupPolicy - The group policy object to check.
 * @returns {Set} A set of capabilities that the user does not have, based on their group
 *     policy.
 * @throws {Error} If the policy is unknown.
 */
function getUserRestrictions(groupPolicy) {
  const userCapabilities = getUserCapabilities(groupPolicy)
  const userRestrictions = getDefaultCapabilities()
  for (const capability of userCapabilities) {
    userRestrictions.delete(capability)
  }
  return userRestrictions
}

/**
 * Checks if a user has permission for a given capability based on their group
 * policy.
 *
 * @param {Object} groupPolicy - The group policy object for the user.
 * @param {string} capability - The name of the capability to check permission
 *     for.
 * @returns {boolean} True if the user has permission for the capability, false
 *     otherwise.
 * @throws {Error} If the capability does not exist.
 */
function hasPermission(groupPolicy, capability) {
  ensureCapabilityExists(capability)
  // look through all the entries in the group policy and see if any of them apply to the capability
  const results = getEnforcedPolicyNames(groupPolicy).map(userPolicyName =>
    getCapabilityValueFromPolicy(userPolicyName, capability)
  )
  // if there are no results, or none of the policies apply, return the default permission
  if (results.length === 0 || results.every(result => result === undefined)) {
    return getDefaultPermission(capability)
  }
  // only allow the permission if all the results are true, otherwise deny it
  return results.every(result => result === true)
}
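
// Editor's note: permission resolution in hasPermission, traced on a small
// example with the names from the module doc comment:
//
//   - capability `delete-own-account` is registered with default `true`
//   - group policy `{ userCannotDeleteOwnAccount: true }` enforces a policy
//     that maps `delete-own-account` to `false`
//   - results = [false], so not every result is true => permission denied
//
// With an empty group policy `{}` there are no enforced policies, so the
// registered default (`true`) is returned and the permission is allowed.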
|
||||
/**
|
||||
* Asynchronously checks which policies a user complies with using the
|
||||
* applicable validators. Each validator is an async function that takes an object
|
||||
* with user, groupPolicy, and subscription properties and returns a boolean.
|
||||
*
|
||||
* @param {Object} options - The options object.
|
||||
* @param {Object} options.user - The user object to check.
|
||||
* @param {Object} options.groupPolicy - The group policy object to check.
|
||||
* @param {Object} options.subscription - The subscription object for the group policy.
|
||||
* @returns {Promise<Map>} A promise that resolves with a Map object containing
|
||||
* the validation status for each enforced policy. The keys of the Map are the
|
||||
* enforced policy names, and the values are booleans indicating whether the
|
||||
* user complies with the policy.
|
||||
*/
|
||||
async function getUserValidationStatus({ user, groupPolicy, subscription }) {
|
||||
// find all the enforced policies for the user
|
||||
const enforcedPolicyNames = getEnforcedPolicyNames(groupPolicy)
|
||||
// for each enforced policy, we have a list of capabilities with expected values
|
||||
// some of those capabilities have validators
|
||||
// we need to run the validators and the result to see if if the user is complies with the policy
|
||||
const userValidationStatus = new Map()
|
||||
for (const enforcedPolicyName of enforcedPolicyNames) {
|
||||
const validator = getValidatorFromPolicy(enforcedPolicyName)
|
||||
if (validator) {
|
||||
userValidationStatus.set(
|
||||
enforcedPolicyName,
|
||||
await validator({ user, subscription })
|
||||
)
|
||||
}
|
||||
}
|
||||
return userValidationStatus
|
||||
}
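
// Sketch of the resolved value's shape (policy names hypothetical): a Map such
// as 'somePolicyName' => true, 'anotherPolicyName' => false, where `true`
// means the user already complies with that enforced policy.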

/**
 * Asserts that a user has permission for a given set of capabilities,
 * as set out in both their current group subscription, and any institutions they are affiliated with,
 * throwing a ForbiddenError if they do not.
 *
 * @param {Object} user - The user object to retrieve the group policy for.
 * Only the user's _id is required
 * @param {Array} requiredCapabilities - The list of the capabilities to check permission for.
 * @returns {Promise<void>}
 * @throws {Error} If the user does not have permission
 */
async function assertUserPermissions(user, requiredCapabilities) {
  const hasAllPermissions = await checkUserPermissions(
    user,
    requiredCapabilities
  )
  if (!hasAllPermissions) {
    throw new ForbiddenError(
      `user does not have one or more permissions within ${requiredCapabilities}`
    )
  }
}

/**
 * Checks if a user has permission for a given set of capabilities
 * as set out in both their current group subscription, and any institutions they are affiliated with.
 *
 * @param {Object} user - The user object to retrieve the group policy for.
 * Only the user's _id is required
 * @param {Array} requiredCapabilities - The list of the capabilities to check permission for.
 * @returns {Promise<Boolean>} - true if the user has all permissions, false if not
 */
async function checkUserPermissions(user, requiredCapabilities) {
  let results = await Modules.promises.hooks.fire('getGroupPolicyForUser', user)
  results = results.flat()
  if (!results?.length) return true

  // get the combined group policy applying to the user
  const groupPolicies = results.map(result => result.groupPolicy)
  const combinedGroupPolicy = combineGroupPolicies(groupPolicies)
  for (const requiredCapability of requiredCapabilities) {
    // if the user is missing any required capability, the whole check fails
    if (!hasPermission(combinedGroupPolicy, requiredCapability)) {
      return false
    }
  }
  return true
}
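
// Hedged usage sketch (capability name illustrative only):
//
//   if (!(await checkUserPermissions(user, ['chat']))) { /* soft check */ }
//   await assertUserPermissions(user, ['chat']) // throws ForbiddenError on failure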

/**
 * Checks if all collaborators of a given project have the specified capability, including the owner.
 *
 * @async
 * @function checkUserListPermissions
 * @param {Object[]} userList - An array of all users to check permissions for
 * @param {Array} capabilities - The list of the capabilities to check permission for.
 * @returns {Promise<boolean>} - A promise that resolves to `true` if all collaborators have the specified capability, otherwise `false`.
 */
async function checkUserListPermissions(userList, capabilities) {
  for (const user of userList) {
    // mimic a user object with only id, since we need it to fetch permissions
    const allowed = await checkUserPermissions(user, capabilities)
    if (!allowed) {
      return false
    }
  }
  return true
}

module.exports = {
  validatePolicies,
  registerCapability,
  registerPolicy,
  registerAllowedProperty,
  combineGroupPolicies,
  combineAllowedProperties,
  getAllowedProperties,
  hasPermission,
  getUserCapabilities,
  getUserRestrictions,
  getUserValidationStatus: callbackify(getUserValidationStatus),
  checkCollaboratorsPermission: callbackify(checkUserListPermissions),
  checkUserPermissions: callbackify(checkUserPermissions),
  promises: {
    assertUserPermissions,
    getUserValidationStatus,
    checkUserListPermissions,
    checkUserPermissions,
  },
}
@@ -0,0 +1,9 @@
const PrivilegeLevels = {
  NONE: false,
  READ_ONLY: 'readOnly',
  READ_AND_WRITE: 'readAndWrite',
  REVIEW: 'review',
  OWNER: 'owner',
}

module.exports = PrivilegeLevels
@@ -0,0 +1,17 @@
/**
 * Note:
 * It used to be that `project.publicAccessLevel` could be set to `private`,
 * `readOnly` or `readAndWrite`, the latter of which made the project publicly
 * accessible.
 *
 * This system was replaced with "link sharing", after which the valid values are
 * `private` or `tokenBased`. While it is no longer possible to set
 * `publicAccessLevel` to the legacy values, there are projects in the system
 * that already have those values set.
 */
module.exports = {
  READ_ONLY: 'readOnly', // LEGACY
  READ_AND_WRITE: 'readAndWrite', // LEGACY
  PRIVATE: 'private',
  TOKEN_BASED: 'tokenBased',
}
services/web/app/src/Features/Authorization/Sources.js
@@ -0,0 +1,5 @@
module.exports = {
  INVITE: 'invite',
  TOKEN: 'token',
  OWNER: 'owner',
}
@@ -0,0 +1,57 @@
import BetaProgramHandler from './BetaProgramHandler.mjs'
import OError from '@overleaf/o-error'
import UserGetter from '../User/UserGetter.js'
import logger from '@overleaf/logger'
import SessionManager from '../Authentication/SessionManager.js'
import SplitTestSessionHandler from '../SplitTests/SplitTestSessionHandler.js'
import { expressify } from '@overleaf/promise-utils'

async function optIn(req, res) {
  const userId = SessionManager.getLoggedInUserId(req.session)
  await BetaProgramHandler.promises.optIn(userId)
  try {
    await SplitTestSessionHandler.promises.sessionMaintenance(req, null)
  } catch (error) {
    logger.error(
      { err: error },
      'Failed to perform session maintenance after beta program opt in'
    )
  }
  res.redirect('/beta/participate')
}

async function optOut(req, res) {
  const userId = SessionManager.getLoggedInUserId(req.session)
  await BetaProgramHandler.promises.optOut(userId)
  try {
    await SplitTestSessionHandler.promises.sessionMaintenance(req, null)
  } catch (error) {
    logger.error(
      { err: error },
      'Failed to perform session maintenance after beta program opt out'
    )
  }
  res.redirect('/beta/participate')
}

async function optInPage(req, res) {
  const userId = SessionManager.getLoggedInUserId(req.session)
  let user
  try {
    user = await UserGetter.promises.getUser(userId, { betaProgram: 1 })
  } catch (error) {
    throw OError.tag(error, 'error fetching user', {
      userId,
    })
  }
  res.render('beta_program/opt_in', {
    title: 'sharelatex_beta_program',
    user,
  })
}

export default {
  optIn: expressify(optIn),
  optOut: expressify(optOut),
  optInPage: expressify(optInPage),
}
@@ -0,0 +1,35 @@
import { callbackify } from 'node:util'
import metrics from '@overleaf/metrics'
import UserUpdater from '../User/UserUpdater.js'
import AnalyticsManager from '../Analytics/AnalyticsManager.js'

async function optIn(userId) {
  await UserUpdater.promises.updateUser(userId, { $set: { betaProgram: true } })
  metrics.inc('beta-program.opt-in')
  AnalyticsManager.setUserPropertyForUserInBackground(
    userId,
    'beta-program',
    true
  )
}

async function optOut(userId) {
  await UserUpdater.promises.updateUser(userId, {
    $set: { betaProgram: false },
  })
  metrics.inc('beta-program.opt-out')
  AnalyticsManager.setUserPropertyForUserInBackground(
    userId,
    'beta-program',
    false
  )
}

export default {
  optIn: callbackify(optIn),
  optOut: callbackify(optOut),
  promises: {
    optIn,
    optOut,
  },
}
@@ -0,0 +1,86 @@
const OError = require('@overleaf/o-error')
const { URL } = require('url')
const settings = require('@overleaf/settings')
const logger = require('@overleaf/logger')
const V1Api = require('../V1/V1Api')
const sanitizeHtml = require('sanitize-html')
const { promisify } = require('@overleaf/promise-utils')

module.exports = {
  getBrandVariationById,
  promises: {
    getBrandVariationById: promisify(getBrandVariationById),
  },
}

function getBrandVariationById(brandVariationId, callback) {
  if (brandVariationId == null || brandVariationId === '') {
    return callback(new Error('Branding variation id not provided'))
  }
  logger.debug({ brandVariationId }, 'fetching brand variation details from v1')
  V1Api.request(
    {
      uri: `/api/v2/brand_variations/${brandVariationId}`,
    },
    function (error, response, brandVariationDetails) {
      if (error != null) {
        OError.tag(error, 'error getting brand variation details', {
          brandVariationId,
        })
        return callback(error)
      }
      formatBrandVariationDetails(brandVariationDetails)
      sanitizeBrandVariationDetails(brandVariationDetails)
      callback(null, brandVariationDetails)
    }
  )
}

function formatBrandVariationDetails(details) {
  if (details.export_url != null) {
    details.export_url = setV1AsHostIfRelativeURL(details.export_url)
  }
  if (details.home_url != null) {
    details.home_url = setV1AsHostIfRelativeURL(details.home_url)
  }
  if (details.logo_url != null) {
    details.logo_url = setV1AsHostIfRelativeURL(details.logo_url)
  }
  if (details.journal_guidelines_url != null) {
    details.journal_guidelines_url = setV1AsHostIfRelativeURL(
      details.journal_guidelines_url
    )
  }
  if (details.journal_cover_url != null) {
    details.journal_cover_url = setV1AsHostIfRelativeURL(
      details.journal_cover_url
    )
  }
  if (details.submission_confirmation_page_logo_url != null) {
    details.submission_confirmation_page_logo_url = setV1AsHostIfRelativeURL(
      details.submission_confirmation_page_logo_url
    )
  }
  if (details.publish_menu_icon != null) {
    details.publish_menu_icon = setV1AsHostIfRelativeURL(
      details.publish_menu_icon
    )
  }
}

function sanitizeBrandVariationDetails(details) {
  if (details.submit_button_html) {
    details.submit_button_html = sanitizeHtml(
      details.submit_button_html,
      settings.modules.sanitize.options
    )
  }
}

function setV1AsHostIfRelativeURL(urlString) {
  // The second argument is the base URL to resolve against if the first argument is not absolute.
  // As it only applies if the first argument is not absolute, we can use it to transform relative URLs into
  // absolute ones using v1 as the host. If the URL is absolute (e.g. a filepicker one), then the base
  // argument is just ignored
  return new URL(urlString, settings.apis.v1.publicUrl).href
}
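
// Illustrative behaviour, assuming settings.apis.v1.publicUrl = 'https://v1.example.com':
//   setV1AsHostIfRelativeURL('/brand/logo.png')
//     // => 'https://v1.example.com/brand/logo.png'
//   setV1AsHostIfRelativeURL('https://cdn.example.com/logo.png')
//     // => unchanged, since absolute URLs ignore the base argument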
services/web/app/src/Features/Captcha/CaptchaMiddleware.js
@@ -0,0 +1,119 @@
const { fetchJson } = require('@overleaf/fetch-utils')
const logger = require('@overleaf/logger')
const Settings = require('@overleaf/settings')
const Metrics = require('@overleaf/metrics')
const OError = require('@overleaf/o-error')
const DeviceHistory = require('./DeviceHistory')
const AuthenticationController = require('../Authentication/AuthenticationController')
const { expressify } = require('@overleaf/promise-utils')
const EmailsHelper = require('../Helpers/EmailHelper')

function respondInvalidCaptcha(req, res) {
  res.status(400).json({
    errorReason: 'cannot_verify_user_not_robot',
    message: {
      text: req.i18n.translate('cannot_verify_user_not_robot'),
    },
  })
}

async function initializeDeviceHistory(req) {
  req.deviceHistory = new DeviceHistory()
  try {
    await req.deviceHistory.parse(req)
  } catch (err) {
    logger.err({ err }, 'cannot parse deviceHistory')
  }
}

async function canSkipCaptcha(req, res) {
  const trustedUser =
    req.body?.email && Settings.recaptcha.trustedUsers.includes(req.body.email)
  if (trustedUser) {
    return res.json(true)
  }
  await initializeDeviceHistory(req)
  const canSkip = req.deviceHistory.has(req.body?.email)
  Metrics.inc('captcha_pre_flight', 1, {
    status: canSkip ? 'skipped' : 'missing',
  })
  res.json(canSkip)
}

function validateCaptcha(action) {
  return expressify(async function (req, res, next) {
    const email = EmailsHelper.parseEmail(req.body?.email)
    const trustedUser =
      email &&
      (Settings.recaptcha.trustedUsers.includes(email) ||
        Settings.recaptcha.trustedUsersRegex?.test(email))
    if (!Settings.recaptcha?.siteKey || Settings.recaptcha.disabled[action]) {
      if (action === 'login') {
        AuthenticationController.setAuditInfo(req, { captcha: 'disabled' })
      }
      Metrics.inc('captcha', 1, { path: action, status: 'disabled' })
      return next()
    }
    if (trustedUser) {
      if (action === 'login') {
        AuthenticationController.setAuditInfo(req, { captcha: 'trusted' })
      }
      Metrics.inc('captcha', 1, { path: action, status: 'trusted' })
      return next()
    }
    const reCaptchaResponse = req.body['g-recaptcha-response']
    if (action === 'login') {
      await initializeDeviceHistory(req)
      const fromKnownDevice = req.deviceHistory.has(email)
      AuthenticationController.setAuditInfo(req, { fromKnownDevice })
      if (!reCaptchaResponse && fromKnownDevice) {
        // The user has previously logged in from this device, which required
        // solving a captcha or keeping the device history alive.
        // We can skip checking the (missing) captcha response.
        AuthenticationController.setAuditInfo(req, { captcha: 'skipped' })
        Metrics.inc('captcha', 1, { path: action, status: 'skipped' })
        return next()
      }
    }
    if (!reCaptchaResponse) {
      Metrics.inc('captcha', 1, { path: action, status: 'missing' })
      return respondInvalidCaptcha(req, res)
    }

    let body
    try {
      body = await fetchJson(Settings.recaptcha.endpoint, {
        method: 'POST',
        body: new URLSearchParams([
          ['secret', Settings.recaptcha.secretKey],
          ['response', reCaptchaResponse],
        ]),
      })
    } catch (err) {
      Metrics.inc('captcha', 1, { path: action, status: 'error' })
      throw OError.tag(err, 'failed recaptcha siteverify request', {
        body: err.body,
      })
    }

    if (!body.success) {
      logger.warn(
        { statusCode: 200, body },
        'failed recaptcha siteverify request'
      )
      Metrics.inc('captcha', 1, { path: action, status: 'failed' })
      return respondInvalidCaptcha(req, res)
    }
    Metrics.inc('captcha', 1, { path: action, status: 'solved' })
    if (action === 'login') {
      AuthenticationController.setAuditInfo(req, { captcha: 'solved' })
    }
    next()
  })
}
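
// Hedged usage sketch (route path and handler name illustrative only):
// validateCaptcha is a middleware factory, so it is typically mounted as
//
//   app.post('/login', validateCaptcha('login'), loginHandler)
//
// returning an express middleware bound to a single action name.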

module.exports = {
  respondInvalidCaptcha,
  validateCaptcha,
  canSkipCaptcha: expressify(canSkipCaptcha),
}

services/web/app/src/Features/Captcha/DeviceHistory.js
@@ -0,0 +1,103 @@
const crypto = require('crypto')
const jose = require('jose')
const Metrics = require('@overleaf/metrics')
const Settings = require('@overleaf/settings')

const COOKIE_NAME = Settings.deviceHistory.cookieName
const ENTRY_EXPIRY = Settings.deviceHistory.entryExpiry
const MAX_ENTRIES = Settings.deviceHistory.maxEntries

let SECRET
if (Settings.deviceHistory.secret) {
  SECRET = crypto.createSecretKey(
    Buffer.from(Settings.deviceHistory.secret, 'hex')
  )
}
const CONTENT_ENCRYPTION_ALGORITHM = 'A256GCM'
const KEY_MANAGEMENT_ALGORITHM = 'A256GCMKW'
const ENCRYPTION_HEADER = {
  alg: KEY_MANAGEMENT_ALGORITHM,
  enc: CONTENT_ENCRYPTION_ALGORITHM,
}
const DECRYPTION_OPTIONS = {
  contentEncryptionAlgorithms: [CONTENT_ENCRYPTION_ALGORITHM],
  keyManagementAlgorithms: [KEY_MANAGEMENT_ALGORITHM],
}

const ENCODER = new TextEncoder()
const DECODER = new TextDecoder()

class DeviceHistory {
  constructor() {
    this.entries = []
  }

  has(email) {
    return this.entries.some(entry => entry.e === email)
  }

  add(email) {
    // Entries are sorted by age, starting from oldest (idx 0) to newest.
    // When parsing/serializing we are looking at the last n=MAX_ENTRIES entries
    // from the list and discard any other stale entries.
    this.entries = this.entries.filter(entry => entry.e !== email)
    this.entries.push({ e: email, t: Date.now() })
  }

  async serialize(res) {
    let v = ''
    if (this.entries.length > 0 && SECRET) {
      v = await new jose.CompactEncrypt(
        ENCODER.encode(JSON.stringify(this.entries.slice(-MAX_ENTRIES)))
      )
        .setProtectedHeader(ENCRYPTION_HEADER)
        .encrypt(SECRET)
    }

    const options = {
      domain: Settings.cookieDomain,
      maxAge: ENTRY_EXPIRY,
      secure: Settings.secureCookie,
      sameSite: Settings.sameSiteCookie,
      httpOnly: true,
      path: '/login',
    }
    if (v) {
      res.cookie(COOKIE_NAME, v, options)
    } else {
      options.maxAge = -1
      res.clearCookie(COOKIE_NAME, options)
    }
  }

  async parse(req) {
    const blob = req.cookies[COOKIE_NAME]
    if (!blob || !SECRET) {
      Metrics.inc('device_history', 1, { status: 'missing' })
      return
    }
    try {
      const { plaintext } = await jose.compactDecrypt(
        blob,
        SECRET,
        DECRYPTION_OPTIONS
      )
      const minTimestamp = Date.now() - ENTRY_EXPIRY
      this.entries = JSON.parse(DECODER.decode(plaintext))
        .slice(-MAX_ENTRIES)
        .filter(entry => entry.t > minTimestamp)
    } catch (err) {
      Metrics.inc('device_history', 1, { status: 'failure' })
      throw err
    }
    if (this.entries.length === MAX_ENTRIES) {
      // Track hitting the limit, we might need to increase the limit.
      Metrics.inc('device_history_at_limit')
    }
    // Collect quantiles of the size
    Metrics.summary('device_history_size', this.entries.length)
    Metrics.inc('device_history', 1, { status: 'success' })
  }
}

module.exports = DeviceHistory
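
// Hedged round-trip sketch (variable names illustrative): on a successful
// login the caller would do roughly
//
//   const history = new DeviceHistory()
//   await history.parse(req)     // decrypts the existing cookie, if any
//   history.add(email)           // moves this email to the newest slot
//   await history.serialize(res) // re-encrypts and re-sets the cookie
//
// so the cookie always holds at most MAX_ENTRIES fresh entries.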
services/web/app/src/Features/Chat/ChatApiHandler.js
@@ -0,0 +1,166 @@
// @ts-check

const { fetchJson, fetchNothing } = require('@overleaf/fetch-utils')
const settings = require('@overleaf/settings')
const { callbackify } = require('util')

async function getThreads(projectId) {
  return await fetchJson(chatApiUrl(`/project/${projectId}/threads`))
}

async function destroyProject(projectId) {
  await fetchNothing(chatApiUrl(`/project/${projectId}`), { method: 'DELETE' })
}

async function sendGlobalMessage(projectId, userId, content) {
  const message = await fetchJson(
    chatApiUrl(`/project/${projectId}/messages`),
    {
      method: 'POST',
      json: { user_id: userId, content },
    }
  )
  return message
}

async function getGlobalMessages(projectId, limit, before) {
  const url = chatApiUrl(`/project/${projectId}/messages`)
  if (limit != null) {
    url.searchParams.set('limit', limit)
  }
  if (before != null) {
    url.searchParams.set('before', before)
  }

  return await fetchJson(url)
}

async function sendComment(projectId, threadId, userId, content) {
  const comment = await fetchJson(
    chatApiUrl(`/project/${projectId}/thread/${threadId}/messages`),
    {
      method: 'POST',
      json: { user_id: userId, content },
    }
  )
  return comment
}

async function resolveThread(projectId, threadId, userId) {
  await fetchNothing(
    chatApiUrl(`/project/${projectId}/thread/${threadId}/resolve`),
    {
      method: 'POST',
      json: { user_id: userId },
    }
  )
}

async function reopenThread(projectId, threadId) {
  await fetchNothing(
    chatApiUrl(`/project/${projectId}/thread/${threadId}/reopen`),
    { method: 'POST' }
  )
}

async function deleteThread(projectId, threadId) {
  await fetchNothing(chatApiUrl(`/project/${projectId}/thread/${threadId}`), {
    method: 'DELETE',
  })
}

async function editMessage(projectId, threadId, messageId, userId, content) {
  await fetchNothing(
    chatApiUrl(
      `/project/${projectId}/thread/${threadId}/messages/${messageId}/edit`
    ),
    {
      method: 'POST',
      json: { content, userId },
    }
  )
}

async function deleteMessage(projectId, threadId, messageId) {
  await fetchNothing(
    chatApiUrl(
      `/project/${projectId}/thread/${threadId}/messages/${messageId}`
    ),
    { method: 'DELETE' }
  )
}

async function deleteUserMessage(projectId, threadId, userId, messageId) {
  await fetchNothing(
    chatApiUrl(
      `/project/${projectId}/thread/${threadId}/user/${userId}/messages/${messageId}`
    ),
    { method: 'DELETE' }
  )
}

async function getResolvedThreadIds(projectId) {
  const body = await fetchJson(
    chatApiUrl(`/project/${projectId}/resolved-thread-ids`)
  )
  return body.resolvedThreadIds
}

async function duplicateCommentThreads(projectId, threads) {
  return await fetchJson(
    chatApiUrl(`/project/${projectId}/duplicate-comment-threads`),
    {
      method: 'POST',
      json: {
        threads,
      },
    }
  )
}

async function generateThreadData(projectId, threads) {
  return await fetchJson(
    chatApiUrl(`/project/${projectId}/generate-thread-data`),
    {
      method: 'POST',
      json: { threads },
    }
  )
}

function chatApiUrl(path) {
  return new URL(path, settings.apis.chat.internal_url)
}

module.exports = {
  getThreads: callbackify(getThreads),
  destroyProject: callbackify(destroyProject),
  sendGlobalMessage: callbackify(sendGlobalMessage),
  getGlobalMessages: callbackify(getGlobalMessages),
  sendComment: callbackify(sendComment),
  resolveThread: callbackify(resolveThread),
  reopenThread: callbackify(reopenThread),
  deleteThread: callbackify(deleteThread),
  editMessage: callbackify(editMessage),
  deleteMessage: callbackify(deleteMessage),
  deleteUserMessage: callbackify(deleteUserMessage),
  getResolvedThreadIds: callbackify(getResolvedThreadIds),
  duplicateCommentThreads: callbackify(duplicateCommentThreads),
  generateThreadData: callbackify(generateThreadData),
  promises: {
    getThreads,
    destroyProject,
    sendGlobalMessage,
    getGlobalMessages,
    sendComment,
    resolveThread,
    reopenThread,
    deleteThread,
    editMessage,
    deleteMessage,
    deleteUserMessage,
    getResolvedThreadIds,
    duplicateCommentThreads,
    generateThreadData,
  },
}
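
// Hedged usage sketch (IDs illustrative): callers can use either style,
//
//   const threads = await ChatApiHandler.promises.getThreads(projectId)
//   ChatApiHandler.getThreads(projectId, (err, threads) => { /* ... */ })
//
// since every promise-based function above is also exported via callbackify.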
services/web/app/src/Features/Chat/ChatController.js
@@ -0,0 +1,84 @@
/* eslint-disable
    n/handle-callback-err,
    max-len,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ChatController
const ChatApiHandler = require('./ChatApiHandler')
const EditorRealTimeController = require('../Editor/EditorRealTimeController')
const SessionManager = require('../Authentication/SessionManager')
const UserInfoManager = require('../User/UserInfoManager')
const UserInfoController = require('../User/UserInfoController')
const ChatManager = require('./ChatManager')
const logger = require('@overleaf/logger')

module.exports = ChatController = {
  sendMessage(req, res, next) {
    const { project_id: projectId } = req.params
    const { content, client_id: clientId } = req.body
    const userId = SessionManager.getLoggedInUserId(req.session)
    if (userId == null) {
      const err = new Error('no logged-in user')
      return next(err)
    }
    return ChatApiHandler.sendGlobalMessage(
      projectId,
      userId,
      content,
      function (err, message) {
        if (err != null) {
          return next(err)
        }
        return UserInfoManager.getPersonalInfo(
          message.user_id,
          function (err, user) {
            if (err != null) {
              return next(err)
            }
            message.user = UserInfoController.formatPersonalInfo(user)
            message.clientId = clientId
            EditorRealTimeController.emitToRoom(
              projectId,
              'new-chat-message',
              message
            )
            return res.sendStatus(204)
          }
        )
      }
    )
  },

  getMessages(req, res, next) {
    const { project_id: projectId } = req.params
    const { query } = req
    return ChatApiHandler.getGlobalMessages(
      projectId,
      query.limit,
      query.before,
      function (err, messages) {
        if (err != null) {
          return next(err)
        }
        return ChatManager.injectUserInfoIntoThreads(
          { global: { messages } },
          function (err) {
            if (err != null) {
              return next(err)
            }
            return res.json(messages)
          }
        )
      }
    )
  },
}
services/web/app/src/Features/Chat/ChatManager.js
@@ -0,0 +1,61 @@
const async = require('async')
const UserInfoManager = require('../User/UserInfoManager')
const UserInfoController = require('../User/UserInfoController')
const { promisify } = require('@overleaf/promise-utils')

function injectUserInfoIntoThreads(threads, callback) {
  // There will be a lot of repetition of user_ids, so first build a list
  // of unique ones to perform db look ups on, then use these to populate the
  // user fields
  let message, thread, threadId, userId
  if (callback == null) {
    callback = function () {}
  }
  const userIds = {}
  for (threadId in threads) {
    thread = threads[threadId]
    if (thread.resolved) {
      userIds[thread.resolved_by_user_id] = true
    }
    for (message of Array.from(thread.messages)) {
      userIds[message.user_id] = true
    }
  }

  const jobs = []
  const users = {}
  for (userId in userIds) {
    ;(userId =>
      jobs.push(cb =>
        UserInfoManager.getPersonalInfo(userId, function (error, user) {
          if (error != null) return cb(error)
          user = UserInfoController.formatPersonalInfo(user)
          users[userId] = user
          cb()
        })
      ))(userId)
  }

  return async.series(jobs, function (error) {
    if (error != null) {
      return callback(error)
    }
    for (threadId in threads) {
      thread = threads[threadId]
      if (thread.resolved) {
        thread.resolved_by_user = users[thread.resolved_by_user_id]
      }
      for (message of Array.from(thread.messages)) {
        message.user = users[message.user_id]
      }
    }
    return callback(null, threads)
  })
}

module.exports = {
  injectUserInfoIntoThreads,
  promises: {
    injectUserInfoIntoThreads: promisify(injectUserInfoIntoThreads),
  },
}
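
// Sketch of the input shape this module expects (field names taken from the
// code above, values illustrative):
//
//   {
//     someThreadId: {
//       resolved: true,
//       resolved_by_user_id: 'user-id-1',
//       messages: [{ user_id: 'user-id-2', content: '...' }],
//     },
//   }
//
// After the lookups, each message gains a populated `user` field and resolved
// threads gain `resolved_by_user`.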
@@ -0,0 +1,202 @@
import OError from '@overleaf/o-error'
import HttpErrorHandler from '../../Features/Errors/HttpErrorHandler.js'
import mongodb from 'mongodb-legacy'
import CollaboratorsHandler from './CollaboratorsHandler.js'
import CollaboratorsGetter from './CollaboratorsGetter.js'
import OwnershipTransferHandler from './OwnershipTransferHandler.js'
import SessionManager from '../Authentication/SessionManager.js'
import EditorRealTimeController from '../Editor/EditorRealTimeController.js'
import TagsHandler from '../Tags/TagsHandler.js'
import Errors from '../Errors/Errors.js'
import logger from '@overleaf/logger'
import { expressify } from '@overleaf/promise-utils'
import { hasAdminAccess } from '../Helpers/AdminAuthorizationHelper.js'
import TokenAccessHandler from '../TokenAccess/TokenAccessHandler.js'
import ProjectAuditLogHandler from '../Project/ProjectAuditLogHandler.js'
import LimitationsManager from '../Subscription/LimitationsManager.js'

const ObjectId = mongodb.ObjectId

export default {
  removeUserFromProject: expressify(removeUserFromProject),
  removeSelfFromProject: expressify(removeSelfFromProject),
  getAllMembers: expressify(getAllMembers),
  setCollaboratorInfo: expressify(setCollaboratorInfo),
  transferOwnership: expressify(transferOwnership),
  getShareTokens: expressify(getShareTokens),
}

async function removeUserFromProject(req, res, next) {
  const projectId = req.params.Project_id
  const userId = req.params.user_id
  const sessionUserId = SessionManager.getLoggedInUserId(req.session)
  await _removeUserIdFromProject(projectId, userId)
  EditorRealTimeController.emitToRoom(projectId, 'project:membership:changed', {
    members: true,
  })

  ProjectAuditLogHandler.addEntryInBackground(
    projectId,
    'remove-collaborator',
    sessionUserId,
    req.ip,
    { userId }
  )

  res.sendStatus(204)
}

async function removeSelfFromProject(req, res, next) {
  const projectId = req.params.Project_id
  const userId = SessionManager.getLoggedInUserId(req.session)
  await _removeUserIdFromProject(projectId, userId)

  ProjectAuditLogHandler.addEntryInBackground(
    projectId,
    'leave-project',
    userId,
    req.ip
  )

  res.sendStatus(204)
}

async function getAllMembers(req, res, next) {
  const projectId = req.params.Project_id
  logger.debug({ projectId }, 'getting all active members for project')
  let members
  try {
    members = await CollaboratorsGetter.promises.getAllInvitedMembers(projectId)
  } catch (err) {
    throw OError.tag(err, 'error getting members for project', { projectId })
  }
  res.json({ members })
}

async function setCollaboratorInfo(req, res, next) {
  try {
    const projectId = req.params.Project_id
    const userId = req.params.user_id
    const { privilegeLevel } = req.body

    const allowed =
      await LimitationsManager.promises.canChangeCollaboratorPrivilegeLevel(
        projectId,
        userId,
        privilegeLevel
      )
    if (!allowed) {
      return HttpErrorHandler.forbidden(
        req,
        res,
        'edit collaborator limit reached'
      )
    }

    await CollaboratorsHandler.promises.setCollaboratorPrivilegeLevel(
      projectId,
      userId,
      privilegeLevel
    )
    EditorRealTimeController.emitToRoom(
      projectId,
      'project:collaboratorAccessLevel:changed',
      { userId }
    )
    res.sendStatus(204)
  } catch (err) {
    if (err instanceof Errors.NotFoundError) {
      HttpErrorHandler.notFound(req, res)
    } else {
      next(err)
    }
  }
}

async function transferOwnership(req, res, next) {
  const sessionUser = SessionManager.getSessionUser(req.session)
  const projectId = req.params.Project_id
  const toUserId = req.body.user_id
  try {
    await OwnershipTransferHandler.promises.transferOwnership(
      projectId,
      toUserId,
      {
        allowTransferToNonCollaborators: hasAdminAccess(sessionUser),
        sessionUserId: new ObjectId(sessionUser._id),
        ipAddress: req.ip,
      }
    )
    res.sendStatus(204)
  } catch (err) {
    if (err instanceof Errors.ProjectNotFoundError) {
      HttpErrorHandler.notFound(req, res, `project not found: ${projectId}`)
    } else if (err instanceof Errors.UserNotFoundError) {
      HttpErrorHandler.notFound(req, res, `user not found: ${toUserId}`)
    } else if (err instanceof Errors.UserNotCollaboratorError) {
      HttpErrorHandler.forbidden(
        req,
        res,
        `user ${toUserId} should be a collaborator in project ${projectId} prior to ownership transfer`
      )
    } else {
      next(err)
    }
  }
}

async function _removeUserIdFromProject(projectId, userId) {
  await CollaboratorsHandler.promises.removeUserFromProject(projectId, userId)
  EditorRealTimeController.emitToRoom(
    projectId,
    'userRemovedFromProject',
    userId
  )
  await TagsHandler.promises.removeProjectFromAllTags(userId, projectId)
}

async function getShareTokens(req, res) {
  const projectId = req.params.Project_id
  const userId = SessionManager.getLoggedInUserId(req.session)

  let tokens
  if (userId) {
    tokens = await CollaboratorsGetter.promises.getPublicShareTokens(
      new ObjectId(userId),
      new ObjectId(projectId)
    )
  } else {
    // anonymous access, the token is already available in the session
    const readOnly = TokenAccessHandler.getRequestToken(req, projectId)
    tokens = { readOnly }
  }
  if (!tokens) {
    return res.sendStatus(403)
  }

  if (tokens.readOnly || tokens.readAndWrite) {
    logger.info(
      {
        projectId,
        userId: userId || 'anonymous',
        ip: req.ip,
        tokens: Object.keys(tokens),
      },
      'project tokens accessed'
    )
  }

  if (tokens.readOnly) {
    tokens.readOnlyHashPrefix = TokenAccessHandler.createTokenHashPrefix(
      tokens.readOnly
    )
  }

  if (tokens.readAndWrite) {
    tokens.readAndWriteHashPrefix = TokenAccessHandler.createTokenHashPrefix(
      tokens.readAndWrite
    )
  }

  res.json(tokens)
}
@@ -0,0 +1,37 @@
import { callbackify } from 'node:util'
import { Project } from '../../models/Project.js'
import EmailHandler from '../Email/EmailHandler.js'
import Settings from '@overleaf/settings'

const CollaboratorsEmailHandler = {
  _buildInviteUrl(project, invite) {
    return `${Settings.siteUrl}/project/${project._id}/invite/token/${invite.token}`
  },

  async notifyUserOfProjectInvite(projectId, email, invite, sendingUser) {
    // eslint-disable-next-line no-restricted-syntax
    const project = await Project.findOne({ _id: projectId })
      .select('name owner_ref')
      .populate('owner_ref')
      .exec()
    const emailOptions = {
      to: email,
      replyTo: project.owner_ref.email,
      project: {
        name: project.name,
      },
      inviteUrl: CollaboratorsEmailHandler._buildInviteUrl(project, invite),
      owner: project.owner_ref,
      sendingUser_id: sendingUser._id,
    }
    await EmailHandler.promises.sendEmail('projectInvite', emailOptions)
  },
}

export default {
  promises: CollaboratorsEmailHandler,
  notifyUserOfProjectInvite: callbackify(
    CollaboratorsEmailHandler.notifyUserOfProjectInvite
  ),
  _buildInviteUrl: CollaboratorsEmailHandler._buildInviteUrl,
}
@@ -0,0 +1,418 @@
const { callbackify } = require('util')
const pLimit = require('p-limit')
const { ObjectId } = require('mongodb-legacy')
const OError = require('@overleaf/o-error')
const { Project } = require('../../models/Project')
const UserGetter = require('../User/UserGetter')
const ProjectGetter = require('../Project/ProjectGetter')
const PublicAccessLevels = require('../Authorization/PublicAccessLevels')
const Errors = require('../Errors/Errors')
const ProjectEditorHandler = require('../Project/ProjectEditorHandler')
const Sources = require('../Authorization/Sources')
const PrivilegeLevels = require('../Authorization/PrivilegeLevels')

module.exports = {
  getMemberIdsWithPrivilegeLevels: callbackify(getMemberIdsWithPrivilegeLevels),
  getMemberIds: callbackify(getMemberIds),
  getInvitedMemberIds: callbackify(getInvitedMemberIds),
  getInvitedMembersWithPrivilegeLevels: callbackify(
    getInvitedMembersWithPrivilegeLevels
  ),
  getInvitedMembersWithPrivilegeLevelsFromFields: callbackify(
    getInvitedMembersWithPrivilegeLevelsFromFields
  ),
  getMemberIdPrivilegeLevel: callbackify(getMemberIdPrivilegeLevel),
  getProjectsUserIsMemberOf: callbackify(getProjectsUserIsMemberOf),
  dangerouslyGetAllProjectsUserIsMemberOf: callbackify(
    dangerouslyGetAllProjectsUserIsMemberOf
  ),
  isUserInvitedMemberOfProject: callbackify(isUserInvitedMemberOfProject),
  getPublicShareTokens: callbackify(getPublicShareTokens),
  userIsTokenMember: callbackify(userIsTokenMember),
  getAllInvitedMembers: callbackify(getAllInvitedMembers),
  promises: {
    getMemberIdsWithPrivilegeLevels,
    getMemberIds,
    getInvitedMemberIds,
    getInvitedMembersWithPrivilegeLevels,
    getInvitedMembersWithPrivilegeLevelsFromFields,
    getMemberIdPrivilegeLevel,
    getInvitedEditCollaboratorCount,
    getInvitedPendingEditorCount,
    getProjectsUserIsMemberOf,
    dangerouslyGetAllProjectsUserIsMemberOf,
    isUserInvitedMemberOfProject,
    isUserInvitedReadWriteMemberOfProject,
    getPublicShareTokens,
    userIsTokenMember,
    userIsReadWriteTokenMember,
    getAllInvitedMembers,
  },
}

async function getMemberIdsWithPrivilegeLevels(projectId) {
  const project = await ProjectGetter.promises.getProject(projectId, {
    owner_ref: 1,
    collaberator_refs: 1,
    readOnly_refs: 1,
    tokenAccessReadOnly_refs: 1,
    tokenAccessReadAndWrite_refs: 1,
    publicAccesLevel: 1,
    pendingEditor_refs: 1,
    reviewer_refs: 1,
    pendingReviewer_refs: 1,
  })
  if (!project) {
    throw new Errors.NotFoundError(`no project found with id ${projectId}`)
  }
  const memberIds = _getMemberIdsWithPrivilegeLevelsFromFields(
    project.owner_ref,
    project.collaberator_refs,
    project.readOnly_refs,
    project.tokenAccessReadAndWrite_refs,
    project.tokenAccessReadOnly_refs,
    project.publicAccesLevel,
    project.pendingEditor_refs,
    project.reviewer_refs,
    project.pendingReviewer_refs
  )
  return memberIds
}

async function getMemberIds(projectId) {
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  return members.map(m => m.id)
}

async function getInvitedMemberIds(projectId) {
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  return members.filter(m => m.source !== Sources.TOKEN).map(m => m.id)
}

async function getInvitedMembersWithPrivilegeLevels(projectId) {
  let members = await getMemberIdsWithPrivilegeLevels(projectId)
  members = members.filter(m => m.source !== Sources.TOKEN)
  return _loadMembers(members)
}

async function getInvitedMembersWithPrivilegeLevelsFromFields(
  ownerId,
  collaboratorIds,
  readOnlyIds,
  reviewerIds
) {
  const members = _getMemberIdsWithPrivilegeLevelsFromFields(
    ownerId,
    collaboratorIds,
    readOnlyIds,
    [],
    [],
    null,
    [],
    reviewerIds,
    []
  )
  return _loadMembers(members)
}

async function getMemberIdPrivilegeLevel(userId, projectId) {
  // In future if the schema changes and getting all member ids is more expensive (multiple documents)
  // then optimise this.
  if (userId == null) {
    return PrivilegeLevels.NONE
  }
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  for (const member of members) {
    if (member.id === userId.toString()) {
      return member.privilegeLevel
    }
  }
  return PrivilegeLevels.NONE
}

async function getInvitedEditCollaboratorCount(projectId) {
  // Counts invited members with editor or reviewer roles
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  return members.filter(
    m =>
      m.source === Sources.INVITE &&
      (m.privilegeLevel === PrivilegeLevels.READ_AND_WRITE ||
        m.privilegeLevel === PrivilegeLevels.REVIEW)
  ).length
}

async function getInvitedPendingEditorCount(projectId) {
  // Only counts invited members that are readonly pending editors or pending
  // reviewers
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  return members.filter(
    m =>
      m.source === Sources.INVITE &&
      m.privilegeLevel === PrivilegeLevels.READ_ONLY &&
      (m.pendingEditor || m.pendingReviewer)
  ).length
}

async function isUserInvitedMemberOfProject(userId, projectId) {
  if (!userId) {
    return false
  }
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  for (const member of members) {
    if (
      member.id.toString() === userId.toString() &&
      member.source !== Sources.TOKEN
    ) {
      return true
    }
  }
  return false
}

async function isUserInvitedReadWriteMemberOfProject(userId, projectId) {
  if (!userId) {
    return false
  }
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  for (const member of members) {
    if (
      member.id.toString() === userId.toString() &&
      member.source !== Sources.TOKEN &&
      member.privilegeLevel === PrivilegeLevels.READ_AND_WRITE
    ) {
      return true
    }
  }
  return false
}

async function getPublicShareTokens(userId, projectId) {
  const memberInfo = await Project.findOne(
    {
      _id: projectId,
    },
    {
      isOwner: { $eq: ['$owner_ref', userId] },
      hasTokenReadOnlyAccess: {
        $and: [
          { $in: [userId, '$tokenAccessReadOnly_refs'] },
          { $eq: ['$publicAccesLevel', PublicAccessLevels.TOKEN_BASED] },
        ],
      },
      tokens: 1,
    }
  )
    .lean()
    .exec()

  if (!memberInfo) {
    return null
  }

  if (memberInfo.isOwner) {
    return memberInfo.tokens
  } else if (memberInfo.hasTokenReadOnlyAccess) {
    return {
      readOnly: memberInfo.tokens.readOnly,
    }
  } else {
    return {}
  }
}
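
// Note on the query above: the findOne projection appears to rely on MongoDB
// accepting aggregation expressions ($eq, $and, $in) in projections, so that
// `isOwner` and `hasTokenReadOnlyAccess` are computed by the database rather
// than by fetching the member arrays and comparing them in application code.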

// This function returns all the projects that a user currently has access to,
// excluding projects where the user is listed in the token access fields when
// token access has been disabled.
async function getProjectsUserIsMemberOf(userId, fields) {
  const limit = pLimit(2)
  const [readAndWrite, review, readOnly, tokenReadAndWrite, tokenReadOnly] =
    await Promise.all([
      limit(() => Project.find({ collaberator_refs: userId }, fields).exec()),
      limit(() => Project.find({ reviewer_refs: userId }, fields).exec()),
      limit(() => Project.find({ readOnly_refs: userId }, fields).exec()),
      limit(() =>
        Project.find(
          {
            tokenAccessReadAndWrite_refs: userId,
            publicAccesLevel: PublicAccessLevels.TOKEN_BASED,
          },
          fields
        ).exec()
      ),
      limit(() =>
        Project.find(
          {
            tokenAccessReadOnly_refs: userId,
            publicAccesLevel: PublicAccessLevels.TOKEN_BASED,
          },
          fields
        ).exec()
      ),
    ])
  return { readAndWrite, review, readOnly, tokenReadAndWrite, tokenReadOnly }
}

// This function returns all the projects that a user is a member of, regardless of
// the current state of the project, so it includes those projects where token access
// has been disabled.
async function dangerouslyGetAllProjectsUserIsMemberOf(userId, fields) {
  const readAndWrite = await Project.find(
    { collaberator_refs: userId },
    fields
  ).exec()
  const readOnly = await Project.find({ readOnly_refs: userId }, fields).exec()
  const tokenReadAndWrite = await Project.find(
    { tokenAccessReadAndWrite_refs: userId },
    fields
  ).exec()
  const tokenReadOnly = await Project.find(
    { tokenAccessReadOnly_refs: userId },
    fields
  ).exec()
  return { readAndWrite, readOnly, tokenReadAndWrite, tokenReadOnly }
}

async function getAllInvitedMembers(projectId) {
  try {
    const rawMembers = await getInvitedMembersWithPrivilegeLevels(projectId)
    const { members } =
      ProjectEditorHandler.buildOwnerAndMembersViews(rawMembers)
    return members
  } catch (err) {
    throw OError.tag(err, 'error getting members for project', { projectId })
  }
}

async function userIsTokenMember(userId, projectId) {
  userId = new ObjectId(userId.toString())
  projectId = new ObjectId(projectId.toString())
  const project = await Project.findOne(
    {
      _id: projectId,
      $or: [
        { tokenAccessReadOnly_refs: userId },
        { tokenAccessReadAndWrite_refs: userId },
      ],
    },
    {
      _id: 1,
    }
  ).exec()
  return project != null
}

async function userIsReadWriteTokenMember(userId, projectId) {
  userId = new ObjectId(userId.toString())
  projectId = new ObjectId(projectId.toString())
  const project = await Project.findOne(
    {
      _id: projectId,
      tokenAccessReadAndWrite_refs: userId,
    },
    {
      _id: 1,
    }
  ).exec()
  return project != null
}

function _getMemberIdsWithPrivilegeLevelsFromFields(
  ownerId,
  collaboratorIds,
  readOnlyIds,
  tokenAccessIds,
  tokenAccessReadOnlyIds,
  publicAccessLevel,
  pendingEditorIds,
  reviewerIds,
  pendingReviewerIds
) {
  const members = []
  members.push({
    id: ownerId.toString(),
    privilegeLevel: PrivilegeLevels.OWNER,
    source: Sources.OWNER,
  })

  for (const memberId of collaboratorIds || []) {
    members.push({
      id: memberId.toString(),
      privilegeLevel: PrivilegeLevels.READ_AND_WRITE,
      source: Sources.INVITE,
    })
  }

  for (const memberId of readOnlyIds || []) {
    const record = {
      id: memberId.toString(),
      privilegeLevel: PrivilegeLevels.READ_ONLY,
      source: Sources.INVITE,
    }

    if (pendingEditorIds?.some(pe => memberId.equals(pe))) {
      record.pendingEditor = true
    } else if (pendingReviewerIds?.some(pr => memberId.equals(pr))) {
      record.pendingReviewer = true
    }
    members.push(record)
  }

  if (publicAccessLevel === PublicAccessLevels.TOKEN_BASED) {
    for (const memberId of tokenAccessIds || []) {
      members.push({
        id: memberId.toString(),
        privilegeLevel: PrivilegeLevels.READ_AND_WRITE,
        source: Sources.TOKEN,
      })
    }
    for (const memberId of tokenAccessReadOnlyIds || []) {
      members.push({
        id: memberId.toString(),
        privilegeLevel: PrivilegeLevels.READ_ONLY,
        source: Sources.TOKEN,
      })
    }
  }

  for (const memberId of reviewerIds || []) {
    members.push({
      id: memberId.toString(),
      privilegeLevel: PrivilegeLevels.REVIEW,
      source: Sources.INVITE,
    })
  }
  return members
}

async function _loadMembers(members) {
  const limit = pLimit(3)
  const results = await Promise.all(
    members.map(member =>
      limit(async () => {
        const user = await UserGetter.promises.getUser(member.id, {
          _id: 1,
          email: 1,
          features: 1,
          first_name: 1,
          last_name: 1,
          signUpDate: 1,
        })
        if (user != null) {
          const record = {
            user,
            privilegeLevel: member.privilegeLevel,
          }
          if (member.pendingEditor) {
            record.pendingEditor = true
          } else if (member.pendingReviewer) {
            record.pendingReviewer = true
          }
          return record
        } else {
          return null
        }
      })
    )
  )
  return results.filter(r => r != null)
}
@@ -0,0 +1,468 @@
const { callbackify } = require('util')
const OError = require('@overleaf/o-error')
const { Project } = require('../../models/Project')
const ProjectGetter = require('../Project/ProjectGetter')
const ProjectHelper = require('../Project/ProjectHelper')
const logger = require('@overleaf/logger')
const ContactManager = require('../Contacts/ContactManager')
const PrivilegeLevels = require('../Authorization/PrivilegeLevels')
const TpdsProjectFlusher = require('../ThirdPartyDataStore/TpdsProjectFlusher')
const CollaboratorsGetter = require('./CollaboratorsGetter')
const Errors = require('../Errors/Errors')
const TpdsUpdateSender = require('../ThirdPartyDataStore/TpdsUpdateSender')
const EditorRealTimeController = require('../Editor/EditorRealTimeController')

module.exports = {
  userIsTokenMember: callbackify(userIsTokenMember),
  removeUserFromProject: callbackify(removeUserFromProject),
  removeUserFromAllProjects: callbackify(removeUserFromAllProjects),
  addUserIdToProject: callbackify(addUserIdToProject),
  transferProjects: callbackify(transferProjects),
  promises: {
    userIsTokenMember,
    removeUserFromProject,
    removeUserFromAllProjects,
    addUserIdToProject,
    transferProjects,
    setCollaboratorPrivilegeLevel,
    convertTrackChangesToExplicitFormat,
  },
}
// Forces null pendingReviewer_refs, readOnly_refs, and reviewer_refs to
// be empty arrays to avoid errors during $pull ops
// See https://github.com/overleaf/internal/issues/24610
async function fixNullCollaboratorRefs(projectId) {
  // Temporary cleanup for the case where pendingReviewer_refs is null
  await Project.updateOne(
    { _id: projectId, pendingReviewer_refs: { $type: 'null' } },
    { $set: { pendingReviewer_refs: [] } }
  ).exec()

  // Temporary cleanup for the case where readOnly_refs is null
  await Project.updateOne(
    { _id: projectId, readOnly_refs: { $type: 'null' } },
    { $set: { readOnly_refs: [] } }
  ).exec()

  // Temporary cleanup for the case where reviewer_refs is null
  await Project.updateOne(
    { _id: projectId, reviewer_refs: { $type: 'null' } },
    { $set: { reviewer_refs: [] } }
  ).exec()
}
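
// Context for the cleanup above (explanatory note, not from the original
// file): if one of these fields holds null rather than an array, MongoDB
// rejects a $pull on it (e.g. "Cannot apply $pull to a non-array value"),
// so the removal code below coerces null fields to [] first.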
|
||||
|
||||
async function removeUserFromProject(projectId, userId) {
|
||||
try {
|
||||
const project = await Project.findOne({ _id: projectId }).exec()
|
||||
|
||||
await fixNullCollaboratorRefs(projectId)
|
||||
|
||||
// Deal with the old type of boolean value for archived
|
||||
// In order to clear it
|
||||
if (typeof project.archived === 'boolean') {
|
||||
let archived = ProjectHelper.calculateArchivedArray(
|
||||
project,
|
||||
userId,
|
||||
'ARCHIVE'
|
||||
)
|
||||
|
||||
archived = archived.filter(id => id.toString() !== userId.toString())
|
||||
|
||||
await Project.updateOne(
|
||||
{ _id: projectId },
|
||||
{
|
||||
$set: { archived },
|
||||
$pull: {
|
||||
collaberator_refs: userId,
|
||||
reviewer_refs: userId,
|
||||
readOnly_refs: userId,
|
||||
pendingEditor_refs: userId,
|
||||
pendingReviewer_refs: userId,
|
||||
tokenAccessReadOnly_refs: userId,
|
||||
tokenAccessReadAndWrite_refs: userId,
|
||||
trashed: userId,
|
||||
},
|
||||
}
|
||||
)
|
||||
} else {
|
||||
await Project.updateOne(
|
||||
{ _id: projectId },
|
||||
{
|
||||
$pull: {
|
||||
collaberator_refs: userId,
|
||||
readOnly_refs: userId,
|
||||
reviewer_refs: userId,
|
||||
pendingEditor_refs: userId,
|
||||
pendingReviewer_refs: userId,
|
||||
tokenAccessReadOnly_refs: userId,
|
||||
tokenAccessReadAndWrite_refs: userId,
|
||||
archived: userId,
|
||||
trashed: userId,
|
||||
},
|
||||
}
|
||||
)
|
||||
}
|
||||
} catch (err) {
|
||||
throw OError.tag(err, 'problem removing user from project collaborators', {
|
||||
projectId,
|
||||
userId,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async function removeUserFromAllProjects(userId) {
|
||||
const { readAndWrite, readOnly, tokenReadAndWrite, tokenReadOnly } =
|
||||
await CollaboratorsGetter.promises.dangerouslyGetAllProjectsUserIsMemberOf(
|
||||
userId,
|
||||
{
|
||||
_id: 1,
|
||||
}
|
||||
)
|
||||
const allProjects = readAndWrite
|
||||
.concat(readOnly)
|
||||
.concat(tokenReadAndWrite)
|
||||
.concat(tokenReadOnly)
|
||||
for (const project of allProjects) {
|
||||
await removeUserFromProject(project._id, userId)
|
||||
}
|
||||
}
|
||||
|
||||
async function addUserIdToProject(
  projectId,
  addingUserId,
  userId,
  privilegeLevel,
  { pendingEditor, pendingReviewer } = {}
) {
  const project = await ProjectGetter.promises.getProject(projectId, {
    owner_ref: 1,
    name: 1,
    collaberator_refs: 1,
    readOnly_refs: 1,
    reviewer_refs: 1,
    track_changes: 1,
  })
  let level
  let existingUsers = project.collaberator_refs || []
  existingUsers = existingUsers.concat(project.readOnly_refs || [])
  existingUsers = existingUsers.map(u => u.toString())
  if (existingUsers.includes(userId.toString())) {
    return // User already in Project
  }
  if (privilegeLevel === PrivilegeLevels.READ_AND_WRITE) {
    level = { collaberator_refs: userId }
    logger.debug(
      { privileges: 'readAndWrite', userId, projectId },
      'adding user'
    )
  } else if (privilegeLevel === PrivilegeLevels.READ_ONLY) {
    level = { readOnly_refs: userId }
    if (pendingEditor) {
      level.pendingEditor_refs = userId
    } else if (pendingReviewer) {
      level.pendingReviewer_refs = userId
    }
    logger.debug(
      {
        privileges: 'readOnly',
        userId,
        projectId,
        pendingEditor,
        pendingReviewer,
      },
      'adding user'
    )
  } else if (privilegeLevel === PrivilegeLevels.REVIEW) {
    level = { reviewer_refs: userId }
    logger.debug({ privileges: 'reviewer', userId, projectId }, 'adding user')
  } else {
    throw new Error(`unknown privilegeLevel: ${privilegeLevel}`)
  }

  if (addingUserId) {
    ContactManager.addContact(addingUserId, userId, () => {})
  }

  if (privilegeLevel === PrivilegeLevels.REVIEW) {
    const trackChanges = await convertTrackChangesToExplicitFormat(
      projectId,
      project.track_changes
    )
    trackChanges[userId] = true

    await Project.updateOne(
      { _id: projectId },
      { track_changes: trackChanges, $addToSet: level }
    ).exec()

    EditorRealTimeController.emitToRoom(
      projectId,
      'toggle-track-changes',
      trackChanges
    )
  } else {
    await Project.updateOne({ _id: projectId }, { $addToSet: level }).exec()
  }

  // Ensure there is a dedicated folder for this "new" project.
  await TpdsUpdateSender.promises.createProject({
    projectId,
    projectName: project.name,
    ownerId: project.owner_ref,
    userId,
  })

  // Flush to TPDS in background to add files to collaborator's Dropbox
  TpdsProjectFlusher.promises.flushProjectToTpds(projectId).catch(err => {
    logger.error(
      { err, projectId, userId },
      'error flushing to TPDS after adding collaborator'
    )
  })
}

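// Note: each membership field below is migrated with two separate updateMany
// calls because MongoDB rejects a single update that applies both $addToSet
// and $pull to the same field path (it reports a conflict at that path).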
async function transferProjects(fromUserId, toUserId) {
  // Find all the projects this user is part of so we can flush them to TPDS
  const projects = await Project.find(
    {
      $or: [
        { owner_ref: fromUserId },
        { collaberator_refs: fromUserId },
        { readOnly_refs: fromUserId },
      ],
    },
    { _id: 1 }
  ).exec()
  const projectIds = projects.map(p => p._id)
  logger.debug({ projectIds, fromUserId, toUserId }, 'transferring projects')

  await Project.updateMany(
    { owner_ref: fromUserId },
    { $set: { owner_ref: toUserId } }
  ).exec()

  await Project.updateMany(
    { collaberator_refs: fromUserId },
    {
      $addToSet: { collaberator_refs: toUserId },
    }
  ).exec()
  await Project.updateMany(
    { collaberator_refs: fromUserId },
    {
      $pull: { collaberator_refs: fromUserId },
    }
  ).exec()

  await Project.updateMany(
    { readOnly_refs: fromUserId },
    {
      $addToSet: { readOnly_refs: toUserId },
    }
  ).exec()
  await Project.updateMany(
    { readOnly_refs: fromUserId },
    {
      $pull: { readOnly_refs: fromUserId },
    }
  ).exec()

  await Project.updateMany(
    { pendingEditor_refs: fromUserId },
    {
      $addToSet: { pendingEditor_refs: toUserId },
    }
  ).exec()
  await Project.updateMany(
    { pendingEditor_refs: fromUserId },
    {
      $pull: { pendingEditor_refs: fromUserId },
    }
  ).exec()

  await Project.updateMany(
    { pendingReviewer_refs: fromUserId },
    {
      $addToSet: { pendingReviewer_refs: toUserId },
    }
  ).exec()
  await Project.updateMany(
    { pendingReviewer_refs: fromUserId },
    {
      $pull: { pendingReviewer_refs: fromUserId },
    }
  ).exec()

  // Flush in background, no need to block on this
  _flushProjects(projectIds).catch(err => {
    logger.err(
      { err, projectIds, fromUserId, toUserId },
      'error flushing transferred projects to TPDS'
    )
  })
}

async function setCollaboratorPrivilegeLevel(
  projectId,
  userId,
  privilegeLevel,
  { pendingEditor, pendingReviewer } = {}
) {
  // Make sure we're only updating the project if the user is already a
  // collaborator
  const query = {
    _id: projectId,
    $or: [
      { collaberator_refs: userId },
      { readOnly_refs: userId },
      { reviewer_refs: userId },
    ],
  }
  let update

  await fixNullCollaboratorRefs(projectId)

  switch (privilegeLevel) {
    case PrivilegeLevels.READ_AND_WRITE: {
      update = {
        $pull: {
          readOnly_refs: userId,
          pendingEditor_refs: userId,
          reviewer_refs: userId,
          pendingReviewer_refs: userId,
        },
        $addToSet: { collaberator_refs: userId },
      }
      break
    }
    case PrivilegeLevels.REVIEW: {
      update = {
        $pull: {
          readOnly_refs: userId,
          pendingEditor_refs: userId,
          collaberator_refs: userId,
          pendingReviewer_refs: userId,
        },
        $addToSet: { reviewer_refs: userId },
      }

      const project = await ProjectGetter.promises.getProject(projectId, {
        track_changes: true,
      })
      const newTrackChangesState = await convertTrackChangesToExplicitFormat(
        projectId,
        project.track_changes
      )
      if (newTrackChangesState[userId] !== true) {
        newTrackChangesState[userId] = true
      }
      if (typeof project.track_changes === 'object') {
        update.$set = { [`track_changes.${userId}`]: true }
      } else {
        update.$set = { track_changes: newTrackChangesState }
      }
      break
    }
    case PrivilegeLevels.READ_ONLY: {
      update = {
        $pull: { collaberator_refs: userId, reviewer_refs: userId },
        $addToSet: { readOnly_refs: userId },
      }

      if (pendingEditor) {
        update.$addToSet.pendingEditor_refs = userId
      } else {
        update.$pull.pendingEditor_refs = userId
      }

      if (pendingReviewer) {
        update.$addToSet.pendingReviewer_refs = userId
      } else {
        update.$pull.pendingReviewer_refs = userId
      }

      break
    }
    default: {
      throw new OError(`unknown privilege level: ${privilegeLevel}`)
    }
  }
  const mongoResponse = await Project.updateOne(query, update).exec()
  if (mongoResponse.matchedCount === 0) {
    throw new Errors.NotFoundError('project or collaborator not found')
  }

  if (update.$set?.track_changes) {
    EditorRealTimeController.emitToRoom(
      projectId,
      'toggle-track-changes',
      update.$set.track_changes
    )
  }
}

async function userIsTokenMember(userId, projectId) {
  if (!userId) {
    return false
  }
  try {
    const project = await Project.findOne(
      {
        _id: projectId,
        $or: [
          { tokenAccessReadOnly_refs: userId },
          { tokenAccessReadAndWrite_refs: userId },
        ],
      },
      {
        _id: 1,
      }
    )
    return project != null
  } catch (err) {
    throw OError.tag(err, 'problem while checking if user is token member', {
      userId,
      projectId,
    })
  }
}

async function _flushProjects(projectIds) {
  for (const projectId of projectIds) {
    await TpdsProjectFlusher.promises.flushProjectToTpds(projectId)
  }
}

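// `track_changes` is stored either as a boolean (enabled for everyone) or as
// a map of userId -> true. This helper normalises to the explicit map form,
// e.g. `true` becomes `{ <memberId>: true, ... }` for every member who can
// write or review.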
async function convertTrackChangesToExplicitFormat(
  projectId,
  trackChangesState
) {
  if (typeof trackChangesState === 'object') {
    return { ...trackChangesState }
  }

  if (trackChangesState === true) {
    // track changes are enabled for all
    const members =
      await CollaboratorsGetter.promises.getMemberIdsWithPrivilegeLevels(
        projectId
      )

    const newTrackChangesState = {}
    for (const { id, privilegeLevel } of members) {
      if (
        [
          PrivilegeLevels.OWNER,
          PrivilegeLevels.READ_AND_WRITE,
          PrivilegeLevels.REVIEW,
        ].includes(privilegeLevel)
      ) {
        newTrackChangesState[id] = true
      }
    }

    return newTrackChangesState
  }

  return {}
}
@@ -0,0 +1,399 @@
import ProjectGetter from '../Project/ProjectGetter.js'
import LimitationsManager from '../Subscription/LimitationsManager.js'
import UserGetter from '../User/UserGetter.js'
import CollaboratorsGetter from './CollaboratorsGetter.js'
import CollaboratorsInviteHandler from './CollaboratorsInviteHandler.mjs'
import CollaboratorsInviteGetter from './CollaboratorsInviteGetter.js'
import logger from '@overleaf/logger'
import Settings from '@overleaf/settings'
import EmailHelper from '../Helpers/EmailHelper.js'
import EditorRealTimeController from '../Editor/EditorRealTimeController.js'
import AnalyticsManager from '../Analytics/AnalyticsManager.js'
import SessionManager from '../Authentication/SessionManager.js'
import { RateLimiter } from '../../infrastructure/RateLimiter.js'
import { expressify } from '@overleaf/promise-utils'
import ProjectAuditLogHandler from '../Project/ProjectAuditLogHandler.js'
import Errors from '../Errors/Errors.js'
import AuthenticationController from '../Authentication/AuthenticationController.js'
import PrivilegeLevels from '../Authorization/PrivilegeLevels.js'

// This rate limiter allows a different number of requests depending on the
// number of collaborators a user is allowed. This is implemented by providing
// a number of points (P) and consuming c = floor(P / maxRequests) on each
// request. We'd like (maxRequests + 1) requests to trigger the rate limit, so
// one constraint that we have is that c * (maxRequests + 1) > P. This is
// achieved if P = M^2 where M is the largest value possible for maxRequests.
//
// In the present case, we allow 10 requests per collaborator per 30 minutes,
// with a maximum of 200 requests, so P = 200^2 = 40000.
const RATE_LIMIT_POINTS = 40000
const rateLimiter = new RateLimiter('invite-to-project-by-user-id', {
  points: RATE_LIMIT_POINTS,
  duration: 60 * 30,
})

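// Worked example (illustrative only): a user whose plan allows 20
// collaborators gets maxRequests = 10 * 20 = 200, so each request consumes
// c = floor(40000 / 200) = 200 points. After 200 requests in the window all
// 40000 points are gone, so the 201st request trips the limiter as intended.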
async function getAllInvites(req, res) {
  const projectId = req.params.Project_id
  logger.debug({ projectId }, 'getting all active invites for project')
  const invites =
    await CollaboratorsInviteGetter.promises.getAllInvites(projectId)
  res.json({ invites })
}

async function _checkShouldInviteEmail(email) {
  if (Settings.restrictInvitesToExistingAccounts === true) {
    logger.debug({ email }, 'checking if user exists with this email')
    const user = await UserGetter.promises.getUserByAnyEmail(email, {
      _id: 1,
    })
    const userExists = user?._id != null
    return userExists
  } else {
    return true
  }
}

async function _checkRateLimit(userId) {
  let collabLimit =
    await LimitationsManager.promises.allowedNumberOfCollaboratorsForUser(
      userId
    )

  if (collabLimit == null || collabLimit === 0) {
    collabLimit = 1
  } else if (collabLimit < 0 || collabLimit > 20) {
    collabLimit = 20
  }

  // Consume enough points to hit the rate limit at 10 * collabLimit
  const maxRequests = 10 * collabLimit
  const points = Math.floor(RATE_LIMIT_POINTS / maxRequests)
  try {
    await rateLimiter.consume(userId, points, { method: 'userId' })
  } catch (err) {
    if (err instanceof Error) {
      throw err
    } else {
      return false
    }
  }
  return true
}

async function inviteToProject(req, res) {
  const projectId = req.params.Project_id
  let { email, privileges } = req.body
  const sendingUser = SessionManager.getSessionUser(req.session)
  const sendingUserId = sendingUser._id
  req.logger.addFields({ email, sendingUserId })

  if (email === sendingUser.email) {
    logger.debug(
      { projectId, email, sendingUserId },
      'cannot invite yourself to project'
    )
    return res.json({ invite: null, error: 'cannot_invite_self' })
  }

  logger.debug({ projectId, email, sendingUserId }, 'inviting to project')

  let allowed = false
  // can always invite read-only collaborators
  if (privileges === PrivilegeLevels.READ_ONLY) {
    allowed = true
  } else {
    allowed = await LimitationsManager.promises.canAddXEditCollaborators(
      projectId,
      1
    )
  }

  if (!allowed) {
    logger.debug(
      { projectId, email, sendingUserId },
      'not allowed to invite more users to project'
    )
    return res.json({ invite: null })
  }

  email = EmailHelper.parseEmail(email, true)
  if (email == null || email === '') {
    logger.debug({ projectId, email, sendingUserId }, 'invalid email address')
    return res.status(400).json({ errorReason: 'invalid_email' })
  }

  const underRateLimit =
    await CollaboratorsInviteController._checkRateLimit(sendingUserId)
  if (!underRateLimit) {
    return res.sendStatus(429)
  }

  const shouldAllowInvite =
    await CollaboratorsInviteController._checkShouldInviteEmail(email)
  if (!shouldAllowInvite) {
    logger.debug(
      { email, projectId, sendingUserId },
      'not allowed to send an invite to this email address'
    )
    return res.json({
      invite: null,
      error: 'cannot_invite_non_user',
    })
  }

  const invite = await CollaboratorsInviteHandler.promises.inviteToProject(
    projectId,
    sendingUser,
    email,
    privileges
  )

  ProjectAuditLogHandler.addEntryInBackground(
    projectId,
    'send-invite',
    sendingUserId,
    req.ip,
    {
      inviteId: invite._id,
      privileges,
    }
  )

  logger.debug({ projectId, email, sendingUserId }, 'invite created')

  EditorRealTimeController.emitToRoom(projectId, 'project:membership:changed', {
    invites: true,
  })
  res.json({ invite })
}

async function revokeInvite(req, res) {
  const projectId = req.params.Project_id
  const inviteId = req.params.invite_id
  const user = SessionManager.getSessionUser(req.session)

  logger.debug({ projectId, inviteId }, 'revoking invite')

  const invite = await CollaboratorsInviteHandler.promises.revokeInvite(
    projectId,
    inviteId
  )

  if (invite != null) {
    ProjectAuditLogHandler.addEntryInBackground(
      projectId,
      'revoke-invite',
      user._id,
      req.ip,
      {
        inviteId: invite._id,
        privileges: invite.privileges,
      }
    )
    EditorRealTimeController.emitToRoom(
      projectId,
      'project:membership:changed',
      { invites: true }
    )
  }

  res.sendStatus(204)
}

async function generateNewInvite(req, res) {
  const projectId = req.params.Project_id
  const inviteId = req.params.invite_id
  const sendingUser = SessionManager.getSessionUser(req.session)

  logger.debug({ projectId, inviteId }, 'resending invite')
  const underRateLimit = await CollaboratorsInviteController._checkRateLimit(
    sendingUser._id
  )
  if (!underRateLimit) {
    return res.sendStatus(429)
  }

  const invite = await CollaboratorsInviteHandler.promises.generateNewInvite(
    projectId,
    sendingUser,
    inviteId
  )

  EditorRealTimeController.emitToRoom(projectId, 'project:membership:changed', {
    invites: true,
  })

  if (invite != null) {
    ProjectAuditLogHandler.addEntryInBackground(
      projectId,
      'resend-invite',
      sendingUser._id,
      req.ip,
      {
        inviteId: invite._id,
        privileges: invite.privileges,
      }
    )

    res.sendStatus(201)
  } else {
    res.sendStatus(404)
  }
}

async function viewInvite(req, res) {
  const projectId = req.params.Project_id
  const { token } = req.params
  const _renderInvalidPage = function () {
    res.status(404)
    logger.debug({ projectId }, 'invite not valid, rendering not-valid page')
    res.render('project/invite/not-valid', { title: 'Invalid Invite' })
  }

  // check if the user is already a member of the project
  const currentUser = SessionManager.getSessionUser(req.session)
  if (currentUser) {
    const isMember =
      await CollaboratorsGetter.promises.isUserInvitedMemberOfProject(
        currentUser._id,
        projectId
      )
    if (isMember) {
      logger.debug(
        { projectId, userId: currentUser._id },
        'user is already a member of this project, redirecting'
      )
      return res.redirect(`/project/${projectId}`)
    }
  }

  // get the invite
  const invite = await CollaboratorsInviteGetter.promises.getInviteByToken(
    projectId,
    token
  )

  // check if invite is gone, or otherwise non-existent
  if (invite == null) {
    logger.debug({ projectId }, 'no invite found for this token')
    return _renderInvalidPage()
  }

  // check the user who sent the invite exists
  const owner = await UserGetter.promises.getUser(
    { _id: invite.sendingUserId },
    { email: 1, first_name: 1, last_name: 1 }
  )
  if (owner == null) {
    logger.debug({ projectId }, 'no project owner found')
    return _renderInvalidPage()
  }

  // fetch the project name
  const project = await ProjectGetter.promises.getProject(projectId, {
    name: 1,
  })
  if (project == null) {
    logger.debug({ projectId }, 'no project found')
    return _renderInvalidPage()
  }

  if (!currentUser) {
    req.session.sharedProjectData = {
      project_name: project.name,
      user_first_name: owner.first_name,
    }
    AuthenticationController.setRedirectInSession(req)
    return res.redirect('/register')
  }

  // cleanup if set for register page
  delete req.session.sharedProjectData

  // finally render the invite
  res.render('project/invite/show', {
    invite,
    token,
    project,
    owner,
    title: 'Project Invite',
  })
}

async function acceptInvite(req, res) {
  const { Project_id: projectId, token } = req.params
  const currentUser = SessionManager.getSessionUser(req.session)
  logger.debug(
    { projectId, userId: currentUser._id },
    'got request to accept invite'
  )

  const invite = await CollaboratorsInviteGetter.promises.getInviteByToken(
    projectId,
    token
  )

  if (invite == null) {
    throw new Errors.NotFoundError('no matching invite found')
  }

  await ProjectAuditLogHandler.promises.addEntry(
    projectId,
    'accept-invite',
    currentUser._id,
    req.ip,
    {
      inviteId: invite._id,
      privileges: invite.privileges,
    }
  )

  await CollaboratorsInviteHandler.promises.acceptInvite(
    invite,
    projectId,
    currentUser
  )

  await EditorRealTimeController.emitToRoom(
    projectId,
    'project:membership:changed',
    { invites: true, members: true }
  )

  let editMode = 'edit'
  if (invite.privileges === PrivilegeLevels.REVIEW) {
    editMode = 'review'
  } else if (invite.privileges === PrivilegeLevels.READ_ONLY) {
    editMode = 'view'
  }
  AnalyticsManager.recordEventForUserInBackground(
    currentUser._id,
    'project-joined',
    {
      projectId,
      ownerId: invite.sendingUserId, // only owner can invite others
      mode: editMode,
      role: invite.privileges,
      source: 'email-invite',
    }
  )

  if (req.xhr) {
    res.sendStatus(204) // Done async via project page notification
  } else {
    res.redirect(`/project/${projectId}`)
  }
}

const CollaboratorsInviteController = {
  getAllInvites: expressify(getAllInvites),
  inviteToProject: expressify(inviteToProject),
  revokeInvite: expressify(revokeInvite),
  generateNewInvite: expressify(generateNewInvite),
  viewInvite: expressify(viewInvite),
  acceptInvite: expressify(acceptInvite),
  _checkShouldInviteEmail,
  _checkRateLimit,
}

export default CollaboratorsInviteController
@@ -0,0 +1,48 @@
const logger = require('@overleaf/logger')
const { ProjectInvite } = require('../../models/ProjectInvite')
const PrivilegeLevels = require('../Authorization/PrivilegeLevels')
const CollaboratorsInviteHelper = require('./CollaboratorsInviteHelper')

async function getAllInvites(projectId) {
  logger.debug({ projectId }, 'fetching invites for project')
  const invites = await ProjectInvite.find({ projectId })
    .select('_id email privileges')
    .exec()
  logger.debug(
    { projectId, count: invites.length },
    'found invites for project'
  )
  return invites
}

async function getEditInviteCount(projectId) {
  logger.debug({ projectId }, 'counting edit invites for project')
  const count = await ProjectInvite.countDocuments({
    projectId,
    privileges: { $ne: PrivilegeLevels.READ_ONLY },
  }).exec()
  return count
}

async function getInviteByToken(projectId, tokenString) {
  logger.debug({ projectId }, 'fetching invite by token')
  const invite = await ProjectInvite.findOne({
    projectId,
    tokenHmac: CollaboratorsInviteHelper.hashInviteToken(tokenString),
  }).exec()

  if (invite == null) {
    logger.err({ projectId }, 'no invite found')
    return null
  }

  return invite
}

module.exports = {
  promises: {
    getAllInvites,
    getEditInviteCount,
    getInviteByToken,
  },
}
@@ -0,0 +1,234 @@
import { callbackify } from 'node:util'
import { ProjectInvite } from '../../models/ProjectInvite.js'
import logger from '@overleaf/logger'
import CollaboratorsEmailHandler from './CollaboratorsEmailHandler.mjs'
import CollaboratorsHandler from './CollaboratorsHandler.js'
import CollaboratorsInviteGetter from './CollaboratorsInviteGetter.js'
import CollaboratorsInviteHelper from './CollaboratorsInviteHelper.js'
import UserGetter from '../User/UserGetter.js'
import ProjectGetter from '../Project/ProjectGetter.js'
import NotificationsBuilder from '../Notifications/NotificationsBuilder.js'
import PrivilegeLevels from '../Authorization/PrivilegeLevels.js'
import LimitationsManager from '../Subscription/LimitationsManager.js'
import ProjectAuditLogHandler from '../Project/ProjectAuditLogHandler.js'
import _ from 'lodash'

const CollaboratorsInviteHandler = {
  async _trySendInviteNotification(projectId, sendingUser, invite) {
    const { email } = invite
    const existingUser = await UserGetter.promises.getUserByAnyEmail(email, {
      _id: 1,
    })
    if (existingUser == null) {
      logger.debug({ projectId, email }, 'no existing user found, returning')
      return null
    }
    const project = await ProjectGetter.promises.getProject(projectId, {
      _id: 1,
      name: 1,
    })
    if (project == null) {
      logger.debug(
        { projectId },
        'no project found while sending notification, returning'
      )
      return null
    }
    await NotificationsBuilder.promises
      .projectInvite(invite, project, sendingUser, existingUser)
      .create()
  },

  async _tryCancelInviteNotification(inviteId) {
    return await NotificationsBuilder.promises
      .projectInvite({ _id: inviteId }, null, null, null)
      .read()
  },

  async _sendMessages(projectId, sendingUser, invite) {
    const { email } = invite
    logger.debug(
      { projectId, email, inviteId: invite._id },
      'sending notification and email for invite'
    )
    const notificationJob =
      CollaboratorsInviteHandler._trySendInviteNotification(
        projectId,
        sendingUser,
        invite
      ).catch(err => {
        logger.err(
          { err, projectId, email },
          'error sending notification for invite'
        )
      })
    CollaboratorsEmailHandler.promises
      .notifyUserOfProjectInvite(projectId, invite.email, invite, sendingUser)
      .catch(err => {
        logger.err({ err, projectId, email }, 'error sending email for invite')
      })
    await notificationJob
  },

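  // Design note: the email send above is deliberately fire-and-forget (its
  // failure is only logged), while the in-app notification job is awaited so
  // that callers can rely on it having been attempted before the invite
  // resolves.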
  async inviteToProject(projectId, sendingUser, email, privileges) {
    logger.debug(
      { projectId, sendingUserId: sendingUser._id, email, privileges },
      'adding invite'
    )
    const token = CollaboratorsInviteHelper.generateToken()
    const tokenHmac = CollaboratorsInviteHelper.hashInviteToken(token)
    let invite = new ProjectInvite({
      email,
      tokenHmac,
      sendingUserId: sendingUser._id,
      projectId,
      privileges,
    })
    invite = await invite.save()
    invite = invite.toObject()

    // Send notification and email
    await CollaboratorsInviteHandler._sendMessages(projectId, sendingUser, {
      ...invite,
      token,
    })

    return _.pick(invite, ['_id', 'email', 'privileges'])
  },

  async revokeInviteForUser(projectId, targetEmails) {
    logger.debug({ projectId }, 'getting all active invites for project')
    const invites =
      await CollaboratorsInviteGetter.promises.getAllInvites(projectId)
    const matchingInvite = invites.find(invite =>
      targetEmails.some(emailData => emailData.email === invite.email)
    )
    if (matchingInvite) {
      await CollaboratorsInviteHandler.revokeInvite(
        projectId,
        matchingInvite._id
      )
    }
  },

  async revokeInvite(projectId, inviteId) {
    logger.debug({ projectId, inviteId }, 'removing invite')
    const invite = await ProjectInvite.findOneAndDelete({
      projectId,
      _id: inviteId,
    }).exec()
    CollaboratorsInviteHandler._tryCancelInviteNotification(inviteId).catch(
      err => {
        logger.err(
          { err, projectId, inviteId },
          'failed to cancel invite notification'
        )
      }
    )
    return invite
  },

  async generateNewInvite(projectId, sendingUser, inviteId) {
    logger.debug({ projectId, inviteId }, 'generating new invite email')
    const invite = await this.revokeInvite(projectId, inviteId)

    if (invite == null) {
      logger.warn(
        { projectId, inviteId },
        'no invite found, nothing to generate'
      )
      return null
    }

    return await this.inviteToProject(
      projectId,
      sendingUser,
      invite.email,
      invite.privileges
    )
  },

  async acceptInvite(invite, projectId, user) {
    const project = await ProjectGetter.promises.getProject(projectId, {
      owner_ref: 1,
    })

    let privilegeLevel = invite.privileges
    const opts = {}
    if (
      [PrivilegeLevels.READ_AND_WRITE, PrivilegeLevels.REVIEW].includes(
        invite.privileges
      )
    ) {
      const allowed =
        await LimitationsManager.promises.canAcceptEditCollaboratorInvite(
          project._id
        )
      if (!allowed) {
        privilegeLevel = PrivilegeLevels.READ_ONLY
        if (invite.privileges === PrivilegeLevels.READ_AND_WRITE) {
          opts.pendingEditor = true
        } else if (invite.privileges === PrivilegeLevels.REVIEW) {
          opts.pendingReviewer = true
        }

        logger.debug(
          { projectId, userId: user._id, privileges: invite.privileges },
          'no collaborator slots available, user added as read only (pending editor/reviewer)'
        )
        await ProjectAuditLogHandler.promises.addEntry(
          projectId,
          'editor-moved-to-pending', // controller already logged accept-invite
          null,
          null,
          {
            userId: user._id.toString(),
            role:
              invite.privileges === PrivilegeLevels.REVIEW
                ? 'reviewer'
                : 'editor',
          }
        )
      }
    }

    await CollaboratorsHandler.promises.addUserIdToProject(
      projectId,
      invite.sendingUserId,
      user._id,
      privilegeLevel,
      opts
    )

    // Remove invite
    const inviteId = invite._id
    logger.debug({ projectId, inviteId }, 'removing invite')
    await ProjectInvite.deleteOne({ _id: inviteId }).exec()
    CollaboratorsInviteHandler._tryCancelInviteNotification(inviteId).catch(
      err => {
        logger.error(
          { err, projectId, inviteId },
          'failed to cancel invite notification'
        )
      }
    )
  },
}

export default {
  promises: CollaboratorsInviteHandler,
  inviteToProject: callbackify(CollaboratorsInviteHandler.inviteToProject),
  revokeInviteForUser: callbackify(
    CollaboratorsInviteHandler.revokeInviteForUser
  ),
  revokeInvite: callbackify(CollaboratorsInviteHandler.revokeInvite),
  generateNewInvite: callbackify(CollaboratorsInviteHandler.generateNewInvite),
  acceptInvite: callbackify(CollaboratorsInviteHandler.acceptInvite),
  _trySendInviteNotification: callbackify(
    CollaboratorsInviteHandler._trySendInviteNotification
  ),
  _tryCancelInviteNotification: callbackify(
    CollaboratorsInviteHandler._tryCancelInviteNotification
  ),
  _sendMessages: callbackify(CollaboratorsInviteHandler._sendMessages),
}
@@ -0,0 +1,17 @@
const Crypto = require('crypto')

function generateToken() {
  const buffer = Crypto.randomBytes(24)
  return buffer.toString('hex')
}

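// Only the HMAC of the token is persisted (as `tokenHmac` on ProjectInvite),
// so a leaked database copy does not contain usable invite links; lookups
// recompute the HMAC from the token in the URL. The HMAC key below is a fixed
// application-level constant, not a per-invite secret.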
function hashInviteToken(token) {
  return Crypto.createHmac('sha256', 'overleaf-token-invite')
    .update(token)
    .digest('hex')
}

module.exports = {
  generateToken,
  hashInviteToken,
}
@@ -0,0 +1,175 @@
import CollaboratorsController from './CollaboratorsController.mjs'
import AuthenticationController from '../Authentication/AuthenticationController.js'
import AuthorizationMiddleware from '../Authorization/AuthorizationMiddleware.js'
import PrivilegeLevels from '../Authorization/PrivilegeLevels.js'
import CollaboratorsInviteController from './CollaboratorsInviteController.mjs'
import { RateLimiter } from '../../infrastructure/RateLimiter.js'
import RateLimiterMiddleware from '../Security/RateLimiterMiddleware.js'
import CaptchaMiddleware from '../Captcha/CaptchaMiddleware.js'
import AnalyticsRegistrationSourceMiddleware from '../Analytics/AnalyticsRegistrationSourceMiddleware.js'
import { Joi, validate } from '../../infrastructure/Validation.js'

const rateLimiters = {
  inviteToProjectByProjectId: new RateLimiter(
    'invite-to-project-by-project-id',
    { points: 100, duration: 60 * 10 }
  ),
  inviteToProjectByIp: new RateLimiter('invite-to-project-by-ip', {
    points: 100,
    duration: 60 * 10,
  }),
  resendInvite: new RateLimiter('resend-invite', {
    points: 200,
    duration: 60 * 10,
  }),
  getProjectTokens: new RateLimiter('get-project-tokens', {
    points: 200,
    duration: 60 * 10,
  }),
  viewProjectInvite: new RateLimiter('view-project-invite', {
    points: 20,
    duration: 60,
  }),
}

export default {
  apply(webRouter) {
    webRouter.post(
      '/project/:Project_id/leave',
      AuthenticationController.requireLogin(),
      CollaboratorsController.removeSelfFromProject
    )

    webRouter.put(
      '/project/:Project_id/users/:user_id',
      AuthenticationController.requireLogin(),
      validate({
        params: Joi.object({
          Project_id: Joi.objectId(),
          user_id: Joi.objectId(),
        }),
        body: Joi.object({
          privilegeLevel: Joi.string()
            .valid(
              PrivilegeLevels.READ_ONLY,
              PrivilegeLevels.READ_AND_WRITE,
              PrivilegeLevels.REVIEW
            )
            .required(),
        }),
      }),
      AuthorizationMiddleware.ensureUserCanAdminProject,
      CollaboratorsController.setCollaboratorInfo
    )

    webRouter.delete(
      '/project/:Project_id/users/:user_id',
      AuthenticationController.requireLogin(),
      AuthorizationMiddleware.ensureUserCanAdminProject,
      CollaboratorsController.removeUserFromProject
    )

    webRouter.get(
      '/project/:Project_id/members',
      AuthenticationController.requireLogin(),
      AuthorizationMiddleware.blockRestrictedUserFromProject,
      AuthorizationMiddleware.ensureUserCanReadProject,
      CollaboratorsController.getAllMembers
    )

    webRouter.post(
      '/project/:Project_id/transfer-ownership',
      AuthenticationController.requireLogin(),
      validate({
        params: Joi.object({
          Project_id: Joi.objectId(),
        }),
        body: Joi.object({
          user_id: Joi.objectId(),
        }),
      }),
      AuthorizationMiddleware.ensureUserCanAdminProject,
      CollaboratorsController.transferOwnership
    )

    // invites
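    // Middleware order matters on the invite route below: the per-project and
    // per-IP rate limits run before the captcha and login checks, so abusive
    // traffic is throttled before any heavier work happens.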
    webRouter.post(
      '/project/:Project_id/invite',
      RateLimiterMiddleware.rateLimit(rateLimiters.inviteToProjectByProjectId, {
        params: ['Project_id'],
      }),
      RateLimiterMiddleware.rateLimit(rateLimiters.inviteToProjectByIp, {
        ipOnly: true,
      }),
      CaptchaMiddleware.validateCaptcha('invite'),
      AuthenticationController.requireLogin(),
      validate({
        body: Joi.object({
          email: Joi.string().required(),
          privileges: Joi.string()
            .valid(
              PrivilegeLevels.READ_ONLY,
              PrivilegeLevels.READ_AND_WRITE,
              PrivilegeLevels.REVIEW
            )
            .required(),
        }),
      }),
      AuthorizationMiddleware.ensureUserCanAdminProject,
      CollaboratorsInviteController.inviteToProject
    )

    webRouter.get(
      '/project/:Project_id/invites',
      AuthenticationController.requireLogin(),
      AuthorizationMiddleware.ensureUserCanAdminProject,
      CollaboratorsInviteController.getAllInvites
    )

    webRouter.delete(
      '/project/:Project_id/invite/:invite_id',
      AuthenticationController.requireLogin(),
      AuthorizationMiddleware.ensureUserCanAdminProject,
      CollaboratorsInviteController.revokeInvite
    )

    webRouter.post(
      '/project/:Project_id/invite/:invite_id/resend',
      RateLimiterMiddleware.rateLimit(rateLimiters.resendInvite, {
        params: ['Project_id'],
      }),
      AuthenticationController.requireLogin(),
      AuthorizationMiddleware.ensureUserCanAdminProject,
      CollaboratorsInviteController.generateNewInvite
    )

    webRouter.get(
      '/project/:Project_id/invite/token/:token',
      AnalyticsRegistrationSourceMiddleware.setSource(
        'collaboration',
        'project-invite'
      ),
      RateLimiterMiddleware.rateLimit(rateLimiters.viewProjectInvite),
      CollaboratorsInviteController.viewInvite,
      AnalyticsRegistrationSourceMiddleware.clearSource()
    )

    webRouter.post(
      '/project/:Project_id/invite/token/:token/accept',
      AnalyticsRegistrationSourceMiddleware.setSource(
        'collaboration',
        'project-invite'
      ),
      AuthenticationController.requireLogin(),
      CollaboratorsInviteController.acceptInvite,
      AnalyticsRegistrationSourceMiddleware.clearSource()
    )

    webRouter.get(
      '/project/:Project_id/tokens',
      RateLimiterMiddleware.rateLimit(rateLimiters.getProjectTokens),
      AuthorizationMiddleware.ensureUserCanReadProject,
      CollaboratorsController.getShareTokens
    )
  },
}
@@ -0,0 +1,168 @@
const logger = require('@overleaf/logger')
const { Project } = require('../../models/Project')
const ProjectGetter = require('../Project/ProjectGetter')
const UserGetter = require('../User/UserGetter')
const CollaboratorsHandler = require('./CollaboratorsHandler')
const EmailHandler = require('../Email/EmailHandler')
const Errors = require('../Errors/Errors')
const PrivilegeLevels = require('../Authorization/PrivilegeLevels')
const TpdsProjectFlusher = require('../ThirdPartyDataStore/TpdsProjectFlusher')
const ProjectAuditLogHandler = require('../Project/ProjectAuditLogHandler')
const AnalyticsManager = require('../Analytics/AnalyticsManager')

module.exports = {
  promises: { transferOwnership },
}

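/**
 * Transfer ownership of a project to another user.
 *
 * @param {string} projectId
 * @param {string} newOwnerId
 * @param {Object} [options]
 * @param {boolean} [options.allowTransferToNonCollaborators] - skip the check
 *   that the new owner is already a collaborator
 * @param {string} [options.sessionUserId] - user performing the transfer,
 *   recorded in the audit log
 * @param {boolean} [options.skipEmails] - suppress confirmation emails
 * @param {string} [options.ipAddress] - recorded in the audit log
 */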
async function transferOwnership(projectId, newOwnerId, options = {}) {
  const {
    allowTransferToNonCollaborators,
    sessionUserId,
    skipEmails,
    ipAddress,
  } = options

  // Fetch project and user
  const [project, newOwner] = await Promise.all([
    _getProject(projectId),
    _getUser(newOwnerId),
  ])

  // Exit early if the transferee is already the project owner
  const previousOwnerId = project.owner_ref
  if (previousOwnerId.equals(newOwnerId)) {
    return
  }

  // Check that the new owner is already a collaborator
  if (
    !allowTransferToNonCollaborators &&
    !_userIsCollaborator(newOwner, project)
  ) {
    throw new Errors.UserNotCollaboratorError({ info: { userId: newOwnerId } })
  }

  // Track the change of ownership in BigQuery.
  AnalyticsManager.recordEventForUserInBackground(
    previousOwnerId,
    'project-ownership-transfer',
    { projectId, newOwnerId }
  )

  // Transfer ownership
  await ProjectAuditLogHandler.promises.addEntry(
    projectId,
    'transfer-ownership',
    sessionUserId,
    ipAddress,
    { previousOwnerId, newOwnerId }
  )

  // Determine which permissions to give the old owner based on the
  // new owner's existing permissions
  const newPermissions =
    _getUserPermissions(newOwner, project) || PrivilegeLevels.READ_ONLY

  await _transferOwnership(
    projectId,
    previousOwnerId,
    newOwnerId,
    newPermissions
  )

  // Flush project to TPDS
  await TpdsProjectFlusher.promises.flushProjectToTpds(projectId)

  // Send confirmation emails
  const previousOwner = await UserGetter.promises.getUser(previousOwnerId)
  if (!skipEmails) {
    await _sendEmails(project, previousOwner, newOwner)
  }
}

async function _getProject(projectId) {
  const project = await ProjectGetter.promises.getProject(projectId, {
    owner_ref: 1,
    collaberator_refs: 1,
    readOnly_refs: 1,
    name: 1,
  })
  if (project == null) {
    throw new Errors.ProjectNotFoundError({ info: { projectId } })
  }
  return project
}

async function _getUser(userId) {
  const user = await UserGetter.promises.getUser(userId)
  if (user == null) {
    throw new Errors.UserNotFoundError({ info: { userId } })
  }
  return user
}

function _getUserPermissions(user, project) {
  const collaboratorIds = project.collaberator_refs || []
  const readOnlyIds = project.readOnly_refs || []
  if (collaboratorIds.some(collaboratorId => collaboratorId.equals(user._id))) {
    return PrivilegeLevels.READ_AND_WRITE
  } else if (
    readOnlyIds.some(collaboratorId => collaboratorId.equals(user._id))
  ) {
    return PrivilegeLevels.READ_ONLY
  }
}

function _userIsCollaborator(user, project) {
  return Boolean(_getUserPermissions(user, project))
}

async function _transferOwnership(
  projectId,
  previousOwnerId,
  newOwnerId,
  newPermissions
) {
  await CollaboratorsHandler.promises.removeUserFromProject(
    projectId,
    newOwnerId
  )
  await Project.updateOne(
    { _id: projectId },
    { $set: { owner_ref: newOwnerId } }
  ).exec()
  await CollaboratorsHandler.promises.addUserIdToProject(
    projectId,
    newOwnerId,
    previousOwnerId,
    newPermissions
  )
}

async function _sendEmails(project, previousOwner, newOwner) {
  if (previousOwner == null) {
    // The previous owner didn't exist. This is not supposed to happen, but
    // since we're changing the owner anyway, we'll just warn
    logger.warn(
      { projectId: project._id },
      'Project owner did not exist before ownership transfer'
    )
  } else {
    // Send confirmation emails
    await Promise.all([
      EmailHandler.promises.sendEmail(
        'ownershipTransferConfirmationPreviousOwner',
        {
          to: previousOwner.email,
          project,
          newOwner,
        }
      ),
      EmailHandler.promises.sendEmail('ownershipTransferConfirmationNewOwner', {
        to: newOwner.email,
        project,
        previousOwner,
      }),
    ])
  }
}
services/web/app/src/Features/Compile/ClsiCacheController.js
@@ -0,0 +1,193 @@
const { NotFoundError } = require('../Errors/Errors')
const {
  fetchStreamWithResponse,
  RequestFailedError,
  fetchJson,
} = require('@overleaf/fetch-utils')
const Path = require('path')
const { pipeline } = require('stream/promises')
const logger = require('@overleaf/logger')
const ClsiCacheManager = require('./ClsiCacheManager')
const CompileController = require('./CompileController')
const { expressify } = require('@overleaf/promise-utils')
const ClsiCacheHandler = require('./ClsiCacheHandler')
const ProjectGetter = require('../Project/ProjectGetter')

/**
 * Download a file from a specific build on the clsi-cache.
 *
 * @param req
 * @param res
 * @return {Promise<*>}
 */
async function downloadFromCache(req, res) {
  const { Project_id: projectId, buildId, filename } = req.params
  const userId = CompileController._getUserIdForCompile(req)
  const signal = AbortSignal.timeout(60 * 1000)
  let location, projectName
  try {
    ;[{ location }, { name: projectName }] = await Promise.all([
      ClsiCacheHandler.getOutputFile(
        projectId,
        userId,
        buildId,
        filename,
        signal
      ),
      ProjectGetter.promises.getProject(projectId, { name: 1 }),
    ])
  } catch (err) {
    if (err instanceof NotFoundError) {
      // res.sendStatus() sends a description of the status as body.
      // Using res.status().end() avoids sending that fake body.
      return res.status(404).end()
    } else {
      throw err
    }
  }

  const { stream, response } = await fetchStreamWithResponse(location, {
    signal,
  })
  if (req.destroyed) {
    // The client has disconnected already, avoid trying to write into the broken connection.
    return
  }

  for (const key of ['Content-Length', 'Content-Type']) {
    if (response.headers.has(key)) res.setHeader(key, response.headers.get(key))
  }
  const ext = Path.extname(filename)
  res.attachment(
    ext === '.pdf'
      ? `${CompileController._getSafeProjectName({ name: projectName })}.pdf`
      : filename
  )
  try {
    res.writeHead(response.status)
    await pipeline(stream, res)
  } catch (err) {
    const reqAborted = Boolean(req.destroyed)
    const streamingStarted = Boolean(res.headersSent)
    if (!streamingStarted) {
      if (err instanceof RequestFailedError) {
        res.sendStatus(err.response.status)
      } else {
        res.sendStatus(500)
      }
    }
    if (
      streamingStarted &&
      reqAborted &&
      err.code === 'ERR_STREAM_PREMATURE_CLOSE'
    ) {
      // Ignore noisy spurious error
      return
    }
    logger.warn(
      {
        err,
        projectId,
        location,
        filename,
        reqAborted,
        streamingStarted,
      },
      'CLSI-cache proxy error'
    )
  }
}

/**
 * Prepare a compile response from the clsi-cache.
 *
 * @param req
 * @param res
 * @return {Promise<void>}
 */
async function getLatestBuildFromCache(req, res) {
  const { Project_id: projectId } = req.params
  const userId = CompileController._getUserIdForCompile(req)
  try {
    const {
      internal: { location: metaLocation, zone },
      external: { isUpToDate, allFiles },
    } = await ClsiCacheManager.getLatestBuildFromCache(
      projectId,
      userId,
      'output.overleaf.json'
    )

    if (!isUpToDate) return res.sendStatus(410)

    const meta = await fetchJson(metaLocation, {
      signal: AbortSignal.timeout(5 * 1000),
    })

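    // The meta location is assumed to look like
    // .../build/<editorId>-<buildId>/..., where <editorId> is a UUID-style
    // hex string and <buildId> is two hex words joined by a dash; the regex
    // below recovers both parts.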
    const [, editorId, buildId] = metaLocation.match(
      /\/build\/([a-f0-9-]+?)-([a-f0-9]+-[a-f0-9]+)\//
    )

    let baseURL = `/project/${projectId}`
    if (userId) {
      baseURL += `/user/${userId}`
    }

    const { ranges, contentId, clsiServerId, compileGroup, size, options } =
      meta

    const outputFiles = allFiles
      .filter(
        path => path !== 'output.overleaf.json' && path !== 'output.tar.gz'
      )
      .map(path => {
        const f = {
          url: `${baseURL}/build/${editorId}-${buildId}/output/${path}`,
          downloadURL: `/download/project/${projectId}/build/${editorId}-${buildId}/output/cached/${path}`,
          build: buildId,
          path,
          type: path.split('.').pop(),
        }
        if (path === 'output.pdf') {
          Object.assign(f, {
            size,
            editorId,
          })
          if (clsiServerId !== 'cache') {
            // Enable PDF caching and attempt to download from VM first.
            // (clsi VMs do not have the editorId in the path on disk, omit it).
            Object.assign(f, {
              url: `${baseURL}/build/${buildId}/output/output.pdf`,
              ranges,
              contentId,
            })
          }
        }
        return f
      })
    let { pdfCachingMinChunkSize, pdfDownloadDomain } =
      await CompileController._getSplitTestOptions(req, res)
    pdfDownloadDomain += `/zone/${zone}`
    res.json({
      fromCache: true,
      status: 'success',
      outputFiles,
      compileGroup,
      clsiServerId,
      pdfDownloadDomain,
      pdfCachingMinChunkSize,
      options,
    })
  } catch (err) {
    if (err instanceof NotFoundError) {
      res.sendStatus(404)
    } else {
      throw err
    }
  }
}

module.exports = {
  downloadFromCache: expressify(downloadFromCache),
  getLatestBuildFromCache: expressify(getLatestBuildFromCache),
}
services/web/app/src/Features/Compile/ClsiCacheHandler.js
@@ -0,0 +1,217 @@
const _ = require('lodash')
const {
  fetchNothing,
  fetchRedirectWithResponse,
  RequestFailedError,
} = require('@overleaf/fetch-utils')
const logger = require('@overleaf/logger')
const Settings = require('@overleaf/settings')
const OError = require('@overleaf/o-error')
const { NotFoundError, InvalidNameError } = require('../Errors/Errors')

// Accept the fixed set of output files, plus any '.blg' file.
function validateFilename(filename) {
  if (
    ![
      'output.blg',
      'output.log',
      'output.pdf',
      'output.synctex.gz',
      'output.overleaf.json',
      'output.tar.gz',
    ].includes(filename) &&
    !filename.endsWith('.blg')
  ) {
    throw new InvalidNameError('bad filename')
  }
}

/**
 * Clear the cache on all clsi-cache instances.
 *
 * @param projectId
 * @param userId
 * @return {Promise<void>}
 */
async function clearCache(projectId, userId) {
  let path = `/project/${projectId}`
  if (userId) {
    path += `/user/${userId}`
  }
  path += '/output'

  await Promise.all(
    Settings.apis.clsiCache.instances.map(async ({ url, zone }) => {
      const u = new URL(url)
      u.pathname = path
      try {
        await fetchNothing(u, {
          method: 'DELETE',
          signal: AbortSignal.timeout(15_000),
        })
      } catch (err) {
        throw OError.tag(err, 'clear clsi-cache', { url, zone })
      }
    })
  )
}

/**
 * Get an output file from a specific build.
 *
 * @param projectId
 * @param userId
 * @param buildId
 * @param filename
 * @param signal
 * @return {Promise<{size: number, zone: string, location: string, lastModified: Date, allFiles: string[]}>}
 */
async function getOutputFile(
  projectId,
  userId,
  buildId,
  filename,
  signal = AbortSignal.timeout(15_000)
) {
  validateFilename(filename)
  if (!/^[a-f0-9-]+$/.test(buildId)) {
    throw new InvalidNameError('bad buildId')
  }

  let path = `/project/${projectId}`
  if (userId) {
    path += `/user/${userId}`
  }
  path += `/build/${buildId}/search/output/${filename}`
  return getRedirectWithFallback(projectId, userId, path, signal)
}

/**
 * Get an output file from the most recent build.
 *
 * @param projectId
 * @param userId
 * @param filename
 * @param signal
 * @return {Promise<{size: number, zone: string, location: string, lastModified: Date, allFiles: string[]}>}
 */
async function getLatestOutputFile(
  projectId,
  userId,
  filename,
  signal = AbortSignal.timeout(15_000)
) {
  validateFilename(filename)

  let path = `/project/${projectId}`
  if (userId) {
    path += `/user/${userId}`
  }
  path += `/latest/output/${filename}`
  return getRedirectWithFallback(projectId, userId, path, signal)
}

/**
 * Request the given path from any of the clsi-cache instances.
 *
 * Some of them might be down temporarily. Try the next one until we receive a redirect or 404.
 *
 * This function is similar to the Coordinator in the clsi-cache, notable differences:
 * - all the logic for sorting builds is in clsi-cache (re-used by clsi and web)
 * - fan-out (1 client performs lookup on many clsi-cache instances) is "central" in clsi-cache, resulting in better connection re-use
 * - we only cross the k8s cluster boundary via an internal GCLB once ($$$)
 *
 * @param projectId
 * @param userId
 * @param path
 * @param signal
 * @return {Promise<{size: number, zone: string, location: string, lastModified: Date, allFiles: string[]}>}
 */
async function getRedirectWithFallback(
  projectId,
  userId,
  path,
  signal = AbortSignal.timeout(15_000)
) {
  // Avoid hitting the same instance first all the time.
  const instances = _.shuffle(Settings.apis.clsiCache.instances)
  for (const { url, zone } of instances) {
    const u = new URL(url)
    u.pathname = path
    try {
      const {
        location,
        response: { headers },
      } = await fetchRedirectWithResponse(u, {
        signal,
      })
      // Success, return the cache entry.
      return {
        location,
        zone: headers.get('X-Zone'),
        lastModified: new Date(headers.get('X-Last-Modified')),
        size: parseInt(headers.get('X-Content-Length'), 10),
        allFiles: JSON.parse(headers.get('X-All-Files')),
      }
    } catch (err) {
      if (err instanceof RequestFailedError && err.response.status === 404) {
        break // No clsi-cache instance has cached something for this project/user.
      }
      logger.warn(
        { err, projectId, userId, url, zone },
        'getLatestOutputFile from clsi-cache failed'
      )
      // This clsi-cache instance is down, try the next backend.
    }
  }
  throw new NotFoundError('nothing cached yet')
}

/**
 * Populate the clsi-cache for the given project/user with the provided source.
 *
 * This is either another project, or a template (id+version).
 *
 * @param projectId
 * @param userId
 * @param sourceProjectId
 * @param templateId
 * @param templateVersionId
 * @param lastUpdated
 * @param zone
 * @param signal
 * @return {Promise<void>}
 */
async function prepareCacheSource(
  projectId,
  userId,
  { sourceProjectId, templateId, templateVersionId, lastUpdated, zone, signal }
) {
  const url = new URL(
    `/project/${projectId}/user/${userId}/import-from`,
    Settings.apis.clsiCache.instances.find(i => i.zone === zone).url
  )
  try {
    await fetchNothing(url, {
      method: 'POST',
      json: {
        sourceProjectId,
        lastUpdated,
        templateId,
        templateVersionId,
      },
      signal,
    })
  } catch (err) {
    if (err instanceof RequestFailedError && err.response.status === 404) {
      throw new NotFoundError()
    }
    throw err
  }
}

module.exports = {
  clearCache,
  getOutputFile,
  getLatestOutputFile,
  prepareCacheSource,
}
services/web/app/src/Features/Compile/ClsiCacheManager.js
@@ -0,0 +1,106 @@
const { NotFoundError } = require('../Errors/Errors')
const ClsiCacheHandler = require('./ClsiCacheHandler')
const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')
const ProjectGetter = require('../Project/ProjectGetter')
const SplitTestHandler = require('../SplitTests/SplitTestHandler')

/**
 * Get the most recent build and metadata
 *
 * Internal: internal metadata; External: fine to send to user as-is.
 *
 * @param projectId
 * @param userId
 * @param filename
 * @param signal
 * @return {Promise<{internal: {zone: string, location: string}, external: {isUpToDate: boolean, lastUpdated: Date, size: number, allFiles: string[]}}>}
 */
async function getLatestBuildFromCache(projectId, userId, filename, signal) {
  const [
    { location, lastModified: lastCompiled, zone, size, allFiles },
    lastUpdatedInRedis,
    { lastUpdated: lastUpdatedInMongo },
  ] = await Promise.all([
    ClsiCacheHandler.getLatestOutputFile(projectId, userId, filename, signal),
    DocumentUpdaterHandler.promises.getProjectLastUpdatedAt(projectId),
    ProjectGetter.promises.getProject(projectId, { lastUpdated: 1 }),
  ])

const lastUpdated =
|
||||
lastUpdatedInRedis > lastUpdatedInMongo
|
||||
? lastUpdatedInRedis
|
||||
: lastUpdatedInMongo
|
||||
const isUpToDate = lastCompiled >= lastUpdated
|
||||
|
||||
return {
|
||||
internal: {
|
||||
location,
|
||||
zone,
|
||||
},
|
||||
external: {
|
||||
isUpToDate,
|
||||
lastUpdated,
|
||||
size,
|
||||
allFiles,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Collect metadata and prepare the clsi-cache for the given project.
|
||||
*
|
||||
* @param projectId
|
||||
* @param userId
|
||||
* @param sourceProjectId
|
||||
* @param templateId
|
||||
* @param templateVersionId
|
||||
* @return {Promise<void>}
|
||||
*/
|
||||
async function prepareClsiCache(
|
||||
projectId,
|
||||
userId,
|
||||
{ sourceProjectId, templateId, templateVersionId }
|
||||
) {
|
||||
const { variant } = await SplitTestHandler.promises.getAssignmentForUser(
|
||||
userId,
|
||||
'copy-clsi-cache'
|
||||
)
|
||||
if (variant !== 'enabled') return
|
||||
const signal = AbortSignal.timeout(5_000)
|
||||
let lastUpdated
|
||||
let zone = 'b' // populate template data on zone b
|
||||
if (sourceProjectId) {
|
||||
try {
|
||||
;({
|
||||
internal: { zone },
|
||||
external: { lastUpdated },
|
||||
} = await getLatestBuildFromCache(
|
||||
sourceProjectId,
|
||||
userId,
|
||||
'output.tar.gz',
|
||||
signal
|
||||
))
|
||||
} catch (err) {
|
||||
if (err instanceof NotFoundError) return // nothing cached yet
|
||||
throw err
|
||||
}
|
||||
}
|
||||
try {
|
||||
await ClsiCacheHandler.prepareCacheSource(projectId, userId, {
|
||||
sourceProjectId,
|
||||
templateId,
|
||||
templateVersionId,
|
||||
zone,
|
||||
lastUpdated,
|
||||
signal,
|
||||
})
|
||||
} catch (err) {
|
||||
if (err instanceof NotFoundError) return // nothing cached yet/expired.
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getLatestBuildFromCache,
|
||||
prepareClsiCache,
|
||||
}
|
||||
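A usage sketch for the manager above (hypothetical caller, not part of this commit): after duplicating a project, seed the clsi-cache from the source project so the first compile of the copy can reuse the cached build. Cache priming is best-effort here.

const ClsiCacheManager = require('./ClsiCacheManager')

async function afterProjectCopied(newProjectId, userId, sourceProjectId) {
  try {
    await ClsiCacheManager.prepareClsiCache(newProjectId, userId, {
      sourceProjectId,
    })
  } catch (err) {
    // A cache-priming failure should not block the project copy itself.
  }
}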
250
services/web/app/src/Features/Compile/ClsiCookieManager.js
Normal file
@@ -0,0 +1,250 @@
const { URL, URLSearchParams } = require('url')
const OError = require('@overleaf/o-error')
const Settings = require('@overleaf/settings')
const request = require('request').defaults({ timeout: 30 * 1000 })
const RedisWrapper = require('../../infrastructure/RedisWrapper')
const Cookie = require('cookie')
const logger = require('@overleaf/logger')
const Metrics = require('@overleaf/metrics')
const { promisifyAll } = require('@overleaf/promise-utils')

const clsiCookiesEnabled = (Settings.clsiCookie?.key ?? '') !== ''

const rclient = RedisWrapper.client('clsi_cookie')
let rclientSecondary
if (Settings.redis.clsi_cookie_secondary != null) {
  rclientSecondary = RedisWrapper.client('clsi_cookie_secondary')
}

module.exports = function (backendGroup) {
  const cookieManager = {
    buildKey(projectId, userId) {
      if (backendGroup != null) {
        return `clsiserver:${backendGroup}:${projectId}:${userId}`
      } else {
        return `clsiserver:${projectId}:${userId}`
      }
    },

    getServerId(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      callback
    ) {
      if (!clsiCookiesEnabled) {
        return callback()
      }
      rclient.get(this.buildKey(projectId, userId), (err, serverId) => {
        if (err) {
          return callback(err)
        }
        if (serverId == null || serverId === '') {
          this._populateServerIdViaRequest(
            projectId,
            userId,
            compileGroup,
            compileBackendClass,
            callback
          )
        } else {
          callback(null, serverId)
        }
      })
    },

    _populateServerIdViaRequest(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      callback
    ) {
      const u = new URL(`${Settings.apis.clsi.url}/project/${projectId}/status`)
      u.search = new URLSearchParams({
        compileGroup,
        compileBackendClass,
      }).toString()
      request.post(u.href, (err, res, body) => {
        if (err) {
          OError.tag(err, 'error getting initial server id for project', {
            project_id: projectId,
          })
          return callback(err)
        }
        if (!clsiCookiesEnabled) {
          return callback()
        }
        const serverId = this._parseServerIdFromResponse(res)
        this.setServerId(
          projectId,
          userId,
          compileGroup,
          compileBackendClass,
          serverId,
          null,
          function (err) {
            if (err) {
              logger.warn(
                { err, projectId },
                'error setting server id via populate request'
              )
            }
            callback(err, serverId)
          }
        )
      })
    },

    _parseServerIdFromResponse(response) {
      const cookies = Cookie.parse(response.headers['set-cookie']?.[0] || '')
      return cookies?.[Settings.clsiCookie.key]
    },

    checkIsLoadSheddingEvent(clsiserverid, compileGroup, compileBackendClass) {
      request.get(
        {
          url: `${Settings.apis.clsi.url}/instance-state`,
          qs: { clsiserverid, compileGroup, compileBackendClass },
        },
        (err, res, body) => {
          if (err) {
            Metrics.inc('clsi-lb-switch-backend', 1, {
              status: 'error',
            })
            logger.warn({ err, clsiserverid }, 'cannot probe clsi VM')
            return
          }
          const isStillRunning =
            res.statusCode === 200 && body === `${clsiserverid},UP\n`
          Metrics.inc('clsi-lb-switch-backend', 1, {
            status: isStillRunning ? 'load-shedding' : 'cycle',
          })
        }
      )
    },

    _getTTLInSeconds(clsiServerId) {
      return (clsiServerId || '').includes('-reg-')
        ? Settings.clsiCookie.ttlInSecondsRegular
        : Settings.clsiCookie.ttlInSeconds
    },

    setServerId(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      serverId,
      previous,
      callback
    ) {
      if (!clsiCookiesEnabled) {
        return callback()
      }
      if (serverId == null) {
        // We don't get a cookie back if it hasn't changed
        return rclient.expire(
          this.buildKey(projectId, userId),
          this._getTTLInSeconds(previous),
          err => callback(err)
        )
      }
      if (!previous) {
        // Initial assignment of a user+project or after clearing cache.
        Metrics.inc('clsi-lb-assign-initial-backend')
      } else {
        this.checkIsLoadSheddingEvent(
          previous,
          compileGroup,
          compileBackendClass
        )
      }
      if (rclientSecondary != null) {
        this._setServerIdInRedis(
          rclientSecondary,
          projectId,
          userId,
          serverId,
          () => {}
        )
      }
      this._setServerIdInRedis(rclient, projectId, userId, serverId, err =>
        callback(err)
      )
    },

    _setServerIdInRedis(rclient, projectId, userId, serverId, callback) {
      rclient.setex(
        this.buildKey(projectId, userId),
        this._getTTLInSeconds(serverId),
        serverId,
        callback
      )
    },

    clearServerId(projectId, userId, callback) {
      if (!clsiCookiesEnabled) {
        return callback()
      }
      rclient.del(this.buildKey(projectId, userId), err => {
        if (err) {
          // redis errors need wrapping as the instance may be shared
          return callback(
            new OError(
              'Failed to clear clsi persistence',
              { projectId, userId },
              err
            )
          )
        } else {
          return callback()
        }
      })
    },

    getCookieJar(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      callback
    ) {
      if (!clsiCookiesEnabled) {
        return callback(null, request.jar(), undefined)
      }
      this.getServerId(
        projectId,
        userId,
        compileGroup,
        compileBackendClass,
        (err, serverId) => {
          if (err != null) {
            OError.tag(err, 'error getting server id', {
              project_id: projectId,
            })
            return callback(err)
          }
          const serverCookie = request.cookie(
            `${Settings.clsiCookie.key}=${serverId}`
          )
          const jar = request.jar()
          jar.setCookie(serverCookie, Settings.apis.clsi.url)
          callback(null, jar, serverId)
        }
      )
    },
  }
  cookieManager.promises = promisifyAll(cookieManager, {
    without: [
      '_parseServerIdFromResponse',
      'checkIsLoadSheddingEvent',
      '_getTTLInSeconds',
    ],
    multiResult: {
      getCookieJar: ['jar', 'clsiServerId'],
    },
  })
  return cookieManager
}
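A hedged sketch of a caller of the module above: obtain a cookie jar that pins follow-up requests to the CLSI node previously assigned to this project/user. `backendGroup` and the option values are illustrative placeholders, not values from this commit.

const clsiCookieManager = require('./ClsiCookieManager')(backendGroup)

async function getPinnedJar(projectId, userId) {
  // promisifyAll above maps getCookieJar's multi-result callback to an
  // object with { jar, clsiServerId }.
  const { jar, clsiServerId } = await clsiCookieManager.promises.getCookieJar(
    projectId,
    userId,
    'standard', // compileGroup (illustrative)
    'n2d' // compileBackendClass (illustrative)
  )
  return { jar, clsiServerId }
}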
89
services/web/app/src/Features/Compile/ClsiFormatChecker.js
Normal file
@@ -0,0 +1,89 @@
/* eslint-disable
    max-len,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ClsiFormatChecker
const _ = require('lodash')
const async = require('async')
const settings = require('@overleaf/settings')
const { promisifyAll } = require('@overleaf/promise-utils')

module.exports = ClsiFormatChecker = {
  checkResourcesForProblems(resources, callback) {
    const jobs = {
      conflictedPaths(cb) {
        return ClsiFormatChecker._checkForConflictingPaths(resources, cb)
      },

      sizeCheck(cb) {
        return ClsiFormatChecker._checkDocsAreUnderSizeLimit(resources, cb)
      },
    }

    return async.series(jobs, function (err, problems) {
      if (err != null) {
        return callback(err)
      }

      problems = _.omitBy(problems, _.isEmpty)

      if (_.isEmpty(problems)) {
        return callback()
      } else {
        return callback(null, problems)
      }
    })
  },

  _checkForConflictingPaths(resources, callback) {
    const paths = resources.map(resource => resource.path)

    const conflicts = _.filter(paths, function (path) {
      const matchingPaths = _.filter(
        paths,
        checkPath => checkPath.indexOf(path + '/') !== -1
      )

      return matchingPaths.length > 0
    })

    const conflictObjects = conflicts.map(conflict => ({ path: conflict }))

    return callback(null, conflictObjects)
  },

  _checkDocsAreUnderSizeLimit(resources, callback) {
    const sizeLimit = 1000 * 1000 * settings.compileBodySizeLimitMb

    let totalSize = 0

    let sizedResources = resources.map(function (resource) {
      const result = { path: resource.path }
      if (resource.content != null) {
        result.size = resource.content.replace(/\n/g, '').length
        result.kbSize = Math.ceil(result.size / 1000)
      } else {
        result.size = 0
      }
      totalSize += result.size
      return result
    })

    const tooLarge = totalSize > sizeLimit
    if (!tooLarge) {
      return callback()
    } else {
      sizedResources = _.sortBy(sizedResources, 'size').reverse().slice(0, 10)
      return callback(null, { resources: sizedResources, totalSize })
    }
  },
}

module.exports.promises = promisifyAll(module.exports)
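An illustration of the conflict rule implemented above: a path conflicts when another resource is nested beneath it, i.e. the same name is used as both a file and a directory.

ClsiFormatChecker._checkForConflictingPaths(
  [{ path: 'chapters' }, { path: 'chapters/intro.tex' }],
  (err, conflicts) => {
    // conflicts === [{ path: 'chapters' }], because 'chapters/intro.tex'
    // nests under 'chapters', so 'chapters' cannot also be a file.
  }
)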
873
services/web/app/src/Features/Compile/ClsiManager.js
Normal file
@@ -0,0 +1,873 @@
const { callbackify } = require('util')
const { callbackifyMultiResult } = require('@overleaf/promise-utils')
const {
  fetchString,
  fetchStringWithResponse,
  fetchStream,
  RequestFailedError,
} = require('@overleaf/fetch-utils')
const Settings = require('@overleaf/settings')
const ProjectGetter = require('../Project/ProjectGetter')
const ProjectEntityHandler = require('../Project/ProjectEntityHandler')
const logger = require('@overleaf/logger')
const OError = require('@overleaf/o-error')
const { Cookie } = require('tough-cookie')
const ClsiCookieManager = require('./ClsiCookieManager')(
  Settings.apis.clsi?.backendGroupName
)
const Features = require('../../infrastructure/Features')
const NewBackendCloudClsiCookieManager = require('./ClsiCookieManager')(
  Settings.apis.clsi_new?.backendGroupName
)
const ClsiStateManager = require('./ClsiStateManager')
const _ = require('lodash')
const ClsiFormatChecker = require('./ClsiFormatChecker')
const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')
const Metrics = require('@overleaf/metrics')
const Errors = require('../Errors/Errors')
const ClsiCacheHandler = require('./ClsiCacheHandler')
const { getBlobLocation } = require('../History/HistoryManager')

const VALID_COMPILERS = ['pdflatex', 'latex', 'xelatex', 'lualatex']
const OUTPUT_FILE_TIMEOUT_MS = 60000
const CLSI_COOKIES_ENABLED = (Settings.clsiCookie?.key ?? '') !== ''

// The timeout in services/clsi/app.js is 10 minutes, so we'll be on the safe side with 12 minutes
const COMPILE_REQUEST_TIMEOUT_MS = 12 * 60 * 1000

function collectMetricsOnBlgFiles(outputFiles) {
  let topLevel = 0
  let nested = 0
  for (const outputFile of outputFiles) {
    if (outputFile.type === 'blg') {
      if (outputFile.path.includes('/')) {
        nested++
      } else {
        topLevel++
      }
    }
  }
  Metrics.count('blg_output_file', topLevel, 1, { path: 'top-level' })
  Metrics.count('blg_output_file', nested, 1, { path: 'nested' })
}

async function sendRequest(projectId, userId, options) {
  if (options == null) {
    options = {}
  }
  let result = await sendRequestOnce(projectId, userId, options)
  if (result.status === 'conflict') {
    // Try again, with a full compile
    result = await sendRequestOnce(projectId, userId, {
      ...options,
      syncType: 'full',
    })
  } else if (result.status === 'unavailable') {
    result = await sendRequestOnce(projectId, userId, {
      ...options,
      syncType: 'full',
      forceNewClsiServer: true,
    })
  }
  return result
}

async function sendRequestOnce(projectId, userId, options) {
  let req
  try {
    req = await _buildRequest(projectId, options)
  } catch (err) {
    if (err.message === 'no main file specified') {
      return {
        status: 'validation-problems',
        validationProblems: { mainFile: err.message },
      }
    } else {
      throw OError.tag(err, 'Could not build request to CLSI', {
        projectId,
        options,
      })
    }
  }
  return await _sendBuiltRequest(projectId, userId, req, options)
}

// for public API requests where there is no project id
async function sendExternalRequest(submissionId, clsiRequest, options) {
  if (options == null) {
    options = {}
  }
  return await _sendBuiltRequest(submissionId, null, clsiRequest, options)
}

async function stopCompile(projectId, userId, options) {
  if (options == null) {
    options = {}
  }
  const { compileBackendClass, compileGroup } = options
  const url = _getCompilerUrl(
    compileBackendClass,
    compileGroup,
    projectId,
    userId,
    'compile/stop'
  )
  const opts = { method: 'POST' }
  await _makeRequest(
    projectId,
    userId,
    compileGroup,
    compileBackendClass,
    url,
    opts
  )
}

async function deleteAuxFiles(projectId, userId, options, clsiserverid) {
  if (options == null) {
    options = {}
  }
  const { compileBackendClass, compileGroup } = options
  const url = _getCompilerUrl(
    compileBackendClass,
    compileGroup,
    projectId,
    userId
  )
  const opts = {
    method: 'DELETE',
  }

  try {
    await _makeRequestWithClsiServerId(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      url,
      opts,
      clsiserverid
    )
  } finally {
    // always clear the clsi-cache
    try {
      await ClsiCacheHandler.clearCache(projectId, userId)
    } catch (err) {
      logger.warn({ err, projectId, userId }, 'purge clsi-cache failed')
    }

    // always clear the project state from the docupdater, even if there
    // was a problem with the request to the clsi
    try {
      await DocumentUpdaterHandler.promises.clearProjectState(projectId)
    } finally {
      await ClsiCookieManager.promises.clearServerId(projectId, userId)
    }
  }
}
async function _sendBuiltRequest(projectId, userId, req, options) {
  if (options.forceNewClsiServer) {
    await ClsiCookieManager.promises.clearServerId(projectId, userId)
  }
  const validationProblems =
    await ClsiFormatChecker.promises.checkResourcesForProblems(
      req.compile?.resources
    )
  if (validationProblems != null) {
    logger.debug(
      { projectId, validationProblems },
      "problems with user's latex before compile was attempted"
    )
    return {
      status: 'validation-problems',
      validationProblems,
    }
  }

  const { response, clsiServerId } = await _postToClsi(
    projectId,
    userId,
    req,
    options.compileBackendClass,
    options.compileGroup
  )

  const outputFiles = _parseOutputFiles(
    projectId,
    response && response.compile && response.compile.outputFiles
  )
  collectMetricsOnBlgFiles(outputFiles)
  const compile = response?.compile || {}
  return {
    status: compile.status,
    outputFiles,
    clsiServerId,
    buildId: compile.buildId,
    stats: compile.stats,
    timings: compile.timings,
    outputUrlPrefix: compile.outputUrlPrefix,
  }
}

async function _makeRequestWithClsiServerId(
  projectId,
  userId,
  compileGroup,
  compileBackendClass,
  url,
  opts,
  clsiserverid
) {
  if (clsiserverid) {
    // ignore cookies and newBackend, go straight to the clsi node
    url.searchParams.set('compileGroup', compileGroup)
    url.searchParams.set('compileBackendClass', compileBackendClass)
    url.searchParams.set('clsiserverid', clsiserverid)

    let body
    try {
      body = await fetchString(url, opts)
    } catch (err) {
      throw OError.tag(err, 'error making request to CLSI', {
        userId,
        projectId,
      })
    }

    let json
    try {
      json = JSON.parse(body)
    } catch (err) {
      // some responses are empty. Ignore JSON parsing errors.
    }

    return { body: json }
  } else {
    return await _makeRequest(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      url,
      opts
    )
  }
}

async function _makeRequest(
  projectId,
  userId,
  compileGroup,
  compileBackendClass,
  url,
  opts
) {
  const currentBackendStartTime = new Date()
  const clsiServerId = await ClsiCookieManager.promises.getServerId(
    projectId,
    userId,
    compileGroup,
    compileBackendClass
  )
  opts.headers = {
    Accept: 'application/json',
    'Content-Type': 'application/json',
  }

  if (CLSI_COOKIES_ENABLED) {
    const cookie = new Cookie({
      key: Settings.clsiCookie.key,
      value: clsiServerId,
    })
    opts.headers.Cookie = cookie.cookieString()
  }

  const timer = new Metrics.Timer('compile.currentBackend')

  let response, body
  try {
    ;({ body, response } = await fetchStringWithResponse(url, opts))
  } catch (err) {
    throw OError.tag(err, 'error making request to CLSI', {
      projectId,
      userId,
    })
  }

  Metrics.inc(`compile.currentBackend.response.${response.status}`)

  let json
  try {
    json = JSON.parse(body)
  } catch (err) {
    // some responses are empty. Ignore JSON parsing errors
  }

  timer.done()
  let newClsiServerId
  if (CLSI_COOKIES_ENABLED) {
    newClsiServerId = _getClsiServerIdFromResponse(response)
    await ClsiCookieManager.promises.setServerId(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      newClsiServerId,
      clsiServerId
    )
  }
  const currentCompileTime = new Date() - currentBackendStartTime

  // Start new backend request in the background
  const newBackendStartTime = new Date()
  _makeNewBackendRequest(
    projectId,
    userId,
    compileGroup,
    compileBackendClass,
    url,
    opts
  )
    .then(result => {
      if (result == null) {
        return
      }
      const { response: newBackendResponse } = result
      Metrics.inc(`compile.newBackend.response.${newBackendResponse.status}`)
      const newBackendCompileTime = new Date() - newBackendStartTime
      const currentStatusCode = response.status
      const newStatusCode = newBackendResponse.status
      const statusCodeSame = newStatusCode === currentStatusCode
      const timeDifference = newBackendCompileTime - currentCompileTime
      logger.debug(
        {
          statusCodeSame,
          timeDifference,
          currentCompileTime,
          newBackendCompileTime,
          projectId,
        },
        'both clsi requests returned'
      )
    })
    .catch(err => {
      logger.warn({ err }, 'Error making request to new CLSI backend')
    })

  return {
    body: json,
    clsiServerId: newClsiServerId || clsiServerId,
  }
}
async function _makeNewBackendRequest(
  projectId,
  userId,
  compileGroup,
  compileBackendClass,
  url,
  opts
) {
  if (Settings.apis.clsi_new?.url == null) {
    return null
  }
  url = url
    .toString()
    .replace(Settings.apis.clsi.url, Settings.apis.clsi_new.url)

  const clsiServerId =
    await NewBackendCloudClsiCookieManager.promises.getServerId(
      projectId,
      userId,
      compileGroup,
      compileBackendClass
    )
  opts.headers = {
    Accept: 'application/json',
    'Content-Type': 'application/json',
  }

  if (CLSI_COOKIES_ENABLED) {
    const cookie = new Cookie({
      key: Settings.clsiCookie.key,
      value: clsiServerId,
    })
    opts.headers.Cookie = cookie.cookieString()
  }

  const timer = new Metrics.Timer('compile.newBackend')

  let response, body
  try {
    ;({ body, response } = await fetchStringWithResponse(url, opts))
  } catch (err) {
    throw OError.tag(err, 'error making request to new CLSI', {
      userId,
      projectId,
    })
  }

  let json
  try {
    json = JSON.parse(body)
  } catch (err) {
    // Some responses are empty. Ignore JSON parsing errors
  }
  timer.done()
  if (CLSI_COOKIES_ENABLED) {
    const newClsiServerId = _getClsiServerIdFromResponse(response)
    await NewBackendCloudClsiCookieManager.promises.setServerId(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      newClsiServerId,
      clsiServerId
    )
  }
  return { response, body: json }
}

function _getCompilerUrl(
  compileBackendClass,
  compileGroup,
  projectId,
  userId,
  action
) {
  const u = new URL(`/project/${projectId}`, Settings.apis.clsi.url)
  if (userId != null) {
    u.pathname += `/user/${userId}`
  }
  if (action != null) {
    u.pathname += `/${action}`
  }
  u.searchParams.set('compileBackendClass', compileBackendClass)
  u.searchParams.set('compileGroup', compileGroup)
  return u
}

async function _postToClsi(
  projectId,
  userId,
  req,
  compileBackendClass,
  compileGroup
) {
  const url = _getCompilerUrl(
    compileBackendClass,
    compileGroup,
    projectId,
    userId,
    'compile'
  )
  const opts = {
    json: req,
    method: 'POST',
    signal: AbortSignal.timeout(COMPILE_REQUEST_TIMEOUT_MS),
  }
  try {
    const { body, clsiServerId } = await _makeRequest(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      url,
      opts
    )
    return { response: body, clsiServerId }
  } catch (err) {
    if (err instanceof RequestFailedError) {
      if (err.response.status === 413) {
        return { response: { compile: { status: 'project-too-large' } } }
      } else if (err.response.status === 409) {
        return { response: { compile: { status: 'conflict' } } }
      } else if (err.response.status === 423) {
        return { response: { compile: { status: 'compile-in-progress' } } }
      } else if (err.response.status === 503) {
        return { response: { compile: { status: 'unavailable' } } }
      } else {
        throw new OError(
          `CLSI returned non-success code: ${err.response.status}`,
          {
            projectId,
            userId,
            compileOptions: req.compile.options,
            rootResourcePath: req.compile.rootResourcePath,
            clsiResponse: err.body,
            statusCode: err.response.status,
          }
        )
      }
    } else {
      throw new OError(
        'failed to make request to CLSI',
        {
          projectId,
          userId,
          compileOptions: req.compile.options,
          rootResourcePath: req.compile.rootResourcePath,
        },
        err
      )
    }
  }
}

function _parseOutputFiles(projectId, rawOutputFiles = []) {
  const outputFiles = []
  for (const file of rawOutputFiles) {
    const f = {
      path: file.path, // the clsi is now sending this to web
      url: new URL(file.url).pathname, // the location of the file on the clsi, excluding the host part
      type: file.type,
      build: file.build,
    }
    if (file.path === 'output.pdf') {
      f.contentId = file.contentId
      f.ranges = file.ranges || []
      f.size = file.size
      f.startXRefTable = file.startXRefTable
      f.createdAt = new Date()
    }
    outputFiles.push(f)
  }
  return outputFiles
}
async function _buildRequest(projectId, options) {
  const project = await ProjectGetter.promises.getProject(projectId, {
    compiler: 1,
    rootDoc_id: 1,
    imageName: 1,
    rootFolder: 1,
    'overleaf.history.id': 1,
  })
  if (project == null) {
    throw new Errors.NotFoundError(`project does not exist: ${projectId}`)
  }
  if (!VALID_COMPILERS.includes(project.compiler)) {
    project.compiler = 'pdflatex'
  }

  if (options.incrementalCompilesEnabled || options.syncType != null) {
    // new way, either incremental or full
    const timer = new Metrics.Timer('editor.compile-getdocs-redis')
    let projectStateHash, docUpdaterDocs
    try {
      ;({ projectStateHash, docs: docUpdaterDocs } =
        await getContentFromDocUpdaterIfMatch(projectId, project, options))
    } catch (err) {
      logger.error({ err, projectId }, 'error checking project state')
      // note: we don't bail out when there's an error getting
      // incremental files from the docupdater, we just fall back
      // to a normal compile below
    }
    timer.done()
    // see if we can send an incremental update to the CLSI
    if (docUpdaterDocs != null && options.syncType !== 'full') {
      Metrics.inc('compile-from-redis')
      return _buildRequestFromDocupdater(
        projectId,
        options,
        project,
        projectStateHash,
        docUpdaterDocs
      )
    } else {
      Metrics.inc('compile-from-mongo')
      return await _buildRequestFromMongo(
        projectId,
        options,
        project,
        projectStateHash
      )
    }
  } else {
    // old way, always from mongo
    const timer = new Metrics.Timer('editor.compile-getdocs-mongo')
    const { docs, files } = await _getContentFromMongo(projectId)
    timer.done()
    return _finaliseRequest(projectId, options, project, docs, files)
  }
}

async function getContentFromDocUpdaterIfMatch(projectId, project, options) {
  const projectStateHash = ClsiStateManager.computeHash(project, options)
  const docs = await DocumentUpdaterHandler.promises.getProjectDocsIfMatch(
    projectId,
    projectStateHash
  )
  return { projectStateHash, docs }
}

async function getOutputFileStream(
  projectId,
  userId,
  options,
  clsiServerId,
  buildId,
  outputFilePath
) {
  const { compileBackendClass, compileGroup } = options
  const url = new URL(
    `${Settings.apis.clsi.url}/project/${projectId}/user/${userId}/build/${buildId}/output/${outputFilePath}`
  )
  url.searchParams.set('compileBackendClass', compileBackendClass)
  url.searchParams.set('compileGroup', compileGroup)
  url.searchParams.set('clsiserverid', clsiServerId)
  try {
    const stream = await fetchStream(url, {
      signal: AbortSignal.timeout(OUTPUT_FILE_TIMEOUT_MS),
    })
    return stream
  } catch (err) {
    throw new Errors.OutputFileFetchFailedError(
      'failed to fetch output file from CLSI',
      {
        projectId,
        userId,
        url,
        status: err.response?.status,
      }
    )
  }
}

function _buildRequestFromDocupdater(
  projectId,
  options,
  project,
  projectStateHash,
  docUpdaterDocs
) {
  const docPath = ProjectEntityHandler.getAllDocPathsFromProject(project)
  const docs = {}
  for (const doc of docUpdaterDocs || []) {
    const path = docPath[doc._id]
    docs[path] = doc
  }
  // send new docs but not files as those are already on the clsi
  options = _.clone(options)
  options.syncType = 'incremental'
  options.syncState = projectStateHash
  // create stub doc entries for any possible root docs, if not
  // present in the docupdater. This allows finaliseRequest to
  // identify the root doc.
  const possibleRootDocIds = [options.rootDoc_id, project.rootDoc_id]
  for (const rootDocId of possibleRootDocIds) {
    if (rootDocId != null && rootDocId in docPath) {
      const path = docPath[rootDocId]
      if (docs[path] == null) {
        docs[path] = { _id: rootDocId, path }
      }
    }
  }
  return _finaliseRequest(projectId, options, project, docs, [])
}

async function _buildRequestFromMongo(
  projectId,
  options,
  project,
  projectStateHash
) {
  const { docs, files } = await _getContentFromMongo(projectId)
  options = {
    ...options,
    syncType: 'full',
    syncState: projectStateHash,
  }
  return _finaliseRequest(projectId, options, project, docs, files)
}

async function _getContentFromMongo(projectId) {
  await DocumentUpdaterHandler.promises.flushProjectToMongo(projectId)
  const docs = await ProjectEntityHandler.promises.getAllDocs(projectId)
  const files = await ProjectEntityHandler.promises.getAllFiles(projectId)
  return { docs, files }
}

function _finaliseRequest(projectId, options, project, docs, files) {
  const resources = []
  let flags
  let rootResourcePath = null
  let rootResourcePathOverride = null
  let hasMainFile = false
  let numberOfDocsInProject = 0

  for (let path in docs) {
    const doc = docs[path]
    path = path.replace(/^\//, '') // Remove leading /
    numberOfDocsInProject++
    if (doc.lines != null) {
      // add doc to resources unless it is just a stub entry
      resources.push({
        path,
        content: doc.lines.join('\n'),
      })
    }
    if (
      project.rootDoc_id != null &&
      doc._id.toString() === project.rootDoc_id.toString()
    ) {
      rootResourcePath = path
    }
    if (
      options.rootDoc_id != null &&
      doc._id.toString() === options.rootDoc_id.toString()
    ) {
      rootResourcePathOverride = path
    }
    if (path === 'main.tex') {
      hasMainFile = true
    }
  }

  if (rootResourcePathOverride != null) {
    rootResourcePath = rootResourcePathOverride
  }
  if (rootResourcePath == null) {
    if (hasMainFile) {
      rootResourcePath = 'main.tex'
    } else if (numberOfDocsInProject === 1) {
      // only one file, must be the main document
      for (const path in docs) {
        // Remove leading /
        rootResourcePath = path.replace(/^\//, '')
      }
    } else {
      throw new OError('no main file specified', { projectId })
    }
  }

  const historyId = project.overleaf.history.id
  if (!historyId) {
    throw new OError('project does not have a history id', { projectId })
  }
  for (let path in files) {
    const file = files[path]
    path = path.replace(/^\//, '') // Remove leading /

    const filestoreURL = `${Settings.apis.filestore.url}/project/${project._id}/file/${file._id}`
    let url = filestoreURL
    let fallbackURL
    if (file.hash && Features.hasFeature('project-history-blobs')) {
      const { bucket, key } = getBlobLocation(historyId, file.hash)
      url = `${Settings.apis.filestore.url}/bucket/${bucket}/key/${key}`
      fallbackURL = filestoreURL
    }
    resources.push({
      path,
      url,
      fallbackURL,
      modified: file.created?.getTime(),
    })
  }

  if (options.fileLineErrors) {
    flags = ['-file-line-error']
  }

  return {
    compile: {
      options: {
        buildId: options.buildId,
        editorId: options.editorId,
        compiler: project.compiler,
        timeout: options.timeout,
        imageName: project.imageName,
        draft: Boolean(options.draft),
        stopOnFirstError: Boolean(options.stopOnFirstError),
        check: options.check,
        syncType: options.syncType,
        syncState: options.syncState,
        compileGroup: options.compileGroup,
        compileFromClsiCache: options.compileFromClsiCache,
        populateClsiCache: options.populateClsiCache,
        enablePdfCaching:
          (Settings.enablePdfCaching && options.enablePdfCaching) || false,
        pdfCachingMinChunkSize: options.pdfCachingMinChunkSize,
        flags,
        metricsMethod: options.compileGroup,
      },
      rootResourcePath,
      resources,
    },
  }
}

async function wordCount(projectId, userId, file, options, clsiserverid) {
  const { compileBackendClass, compileGroup } = options
  const req = await _buildRequest(projectId, options)
  const filename = file || req.compile.rootResourcePath
  const url = _getCompilerUrl(
    compileBackendClass,
    compileGroup,
    projectId,
    userId,
    'wordcount'
  )
  url.searchParams.set('file', filename)
  url.searchParams.set('image', req.compile.options.imageName)

  const opts = {
    method: 'GET',
  }
  const { body } = await _makeRequestWithClsiServerId(
    projectId,
    userId,
    compileGroup,
    compileBackendClass,
    url,
    opts,
    clsiserverid
  )
  return body
}

function _getClsiServerIdFromResponse(response) {
  const setCookieHeaders = response.headers.raw()['set-cookie'] ?? []
  for (const header of setCookieHeaders) {
    const cookie = Cookie.parse(header)
    if (cookie.key === Settings.clsiCookie.key) {
      return cookie.value
    }
  }
  return null
}

module.exports = {
  sendRequest: callbackifyMultiResult(sendRequest, [
    'status',
    'outputFiles',
    'clsiServerId',
    'validationProblems',
    'stats',
    'timings',
    'outputUrlPrefix',
    'buildId',
  ]),
  sendExternalRequest: callbackifyMultiResult(sendExternalRequest, [
    'status',
    'outputFiles',
    'clsiServerId',
    'validationProblems',
    'stats',
    'timings',
    'outputUrlPrefix',
  ]),
  stopCompile: callbackify(stopCompile),
  deleteAuxFiles: callbackify(deleteAuxFiles),
  getOutputFileStream: callbackify(getOutputFileStream),
  wordCount: callbackify(wordCount),
  promises: {
    sendRequest,
    sendExternalRequest,
    stopCompile,
    deleteAuxFiles,
    getOutputFileStream,
    wordCount,
  },
}
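A hedged usage sketch of the promise API exported above; the option values shown are illustrative, not defaults from this commit.

async function compileProject(projectId, userId) {
  // sendRequest retries once with a full sync on 'conflict', and once more
  // against a fresh CLSI server on 'unavailable', as implemented above.
  const { status, outputFiles, clsiServerId } =
    await ClsiManager.promises.sendRequest(projectId, userId, {
      compileGroup: 'standard', // illustrative
      compileBackendClass: 'n2d', // illustrative
      timeout: 60,
      draft: false,
    })
  return { status, outputFiles, clsiServerId }
}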
72
services/web/app/src/Features/Compile/ClsiStateManager.js
Normal file
@@ -0,0 +1,72 @@
/* eslint-disable
    n/handle-callback-err,
    max-len,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS205: Consider reworking code to avoid use of IIFEs
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ClsiStateManager
const Settings = require('@overleaf/settings')
const logger = require('@overleaf/logger')
const crypto = require('crypto')
const ProjectEntityHandler = require('../Project/ProjectEntityHandler')

// The "state" of a project is a hash of the relevant attributes in the
// project object; in this case we only need the rootFolder.
//
// The idea is that it will change if any doc or file is
// created/renamed/deleted, and also if the content of any file (not
// doc) changes.
//
// When the hash changes the full set of files on the CLSI will need to
// be updated. If it doesn't change then we can overwrite changed docs
// in place on the clsi, getting them from the docupdater.
//
// The docupdater is responsible for setting the key in redis, and
// unsetting it if it removes any documents from the doc updater.

const buildState = s =>
  crypto.createHash('sha1').update(s, 'utf8').digest('hex')

module.exports = ClsiStateManager = {
  computeHash(project, options) {
    const { docs, files } =
      ProjectEntityHandler.getAllEntitiesFromProject(project)
    const fileList = Array.from(files || []).map(
      f => `${f.file._id}:${f.file.rev}:${f.file.created}:${f.path}`
    )
    const docList = Array.from(docs || []).map(d => `${d.doc._id}:${d.path}`)
    const sortedEntityList = [
      ...Array.from(docList),
      ...Array.from(fileList),
    ].sort()
    // ignore the isAutoCompile option as it doesn't affect the
    // output, but include all other options e.g. draft
    const optionsList = (() => {
      const result = []
      const object = options || {}
      for (const key in object) {
        const value = object[key]
        if (!['isAutoCompile'].includes(key)) {
          result.push(`option ${key}:${value}`)
        }
      }
      return result
    })()
    const sortedOptionsList = optionsList.sort()
    const hash = buildState(
      [...Array.from(sortedEntityList), ...Array.from(sortedOptionsList)].join(
        '\n'
      )
    )
    return hash
  },
}
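A small illustration of the hashing contract described in the comment above: isAutoCompile is excluded from the hash input, so toggling it does not invalidate the file set on the CLSI.

const hashA = ClsiStateManager.computeHash(project, {
  draft: true,
  isAutoCompile: true,
})
const hashB = ClsiStateManager.computeHash(project, {
  draft: true,
  isAutoCompile: false,
})
// hashA === hashB; only a change to docs, files, or the remaining options
// (e.g. draft) produces a new state hash.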
802
services/web/app/src/Features/Compile/CompileController.js
Normal file
@@ -0,0 +1,802 @@
let CompileController
const { URL, URLSearchParams } = require('url')
const { pipeline } = require('stream/promises')
const { Cookie } = require('tough-cookie')
const OError = require('@overleaf/o-error')
const Metrics = require('@overleaf/metrics')
const ProjectGetter = require('../Project/ProjectGetter')
const CompileManager = require('./CompileManager')
const ClsiManager = require('./ClsiManager')
const logger = require('@overleaf/logger')
const Settings = require('@overleaf/settings')
const SessionManager = require('../Authentication/SessionManager')
const { RateLimiter } = require('../../infrastructure/RateLimiter')
const ClsiCookieManager = require('./ClsiCookieManager')(
  Settings.apis.clsi?.backendGroupName
)
const Path = require('path')
const AnalyticsManager = require('../Analytics/AnalyticsManager')
const SplitTestHandler = require('../SplitTests/SplitTestHandler')
const { callbackify } = require('@overleaf/promise-utils')
const {
  fetchStreamWithResponse,
  RequestFailedError,
} = require('@overleaf/fetch-utils')

const COMPILE_TIMEOUT_MS = 10 * 60 * 1000

const pdfDownloadRateLimiter = new RateLimiter('full-pdf-download', {
  points: 1000,
  duration: 60 * 60,
})

function getOutputFilesArchiveSpecification(projectId, userId, buildId) {
  const fileName = 'output.zip'
  return {
    path: fileName,
    url: CompileController._getFileUrl(projectId, userId, buildId, fileName),
    type: 'zip',
  }
}

function getImageNameForProject(projectId, callback) {
  ProjectGetter.getProject(projectId, { imageName: 1 }, (err, project) => {
    if (err) return callback(err)
    if (!project) return callback(new Error('project not found'))
    callback(null, project.imageName)
  })
}

async function getPdfCachingMinChunkSize(req, res) {
  const { variant } = await SplitTestHandler.promises.getAssignment(
    req,
    res,
    'pdf-caching-min-chunk-size'
  )
  if (variant === 'default') return 1_000_000
  return parseInt(variant, 10)
}

async function _getSplitTestOptions(req, res) {
  // Use the query flags from the editor request for overriding the split test.
  let query = {}
  try {
    const u = new URL(req.headers.referer || req.url, Settings.siteUrl)
    query = Object.fromEntries(u.searchParams.entries())
  } catch (e) {}
  const editorReq = { ...req, query }

  // Lookup the clsi-cache flag in the backend.
  // We may need to turn off the feature on a short notice, without requiring
  // all users to reload their editor page to disable the feature.
  const { variant: compileFromClsiCacheVariant } =
    await SplitTestHandler.promises.getAssignment(
      editorReq,
      res,
      'compile-from-clsi-cache'
    )
  const compileFromClsiCache = compileFromClsiCacheVariant === 'enabled'
  const { variant: populateClsiCacheVariant } =
    await SplitTestHandler.promises.getAssignment(
      editorReq,
      res,
      'populate-clsi-cache'
    )
  const populateClsiCache = populateClsiCacheVariant === 'enabled'

  const pdfDownloadDomain = Settings.pdfDownloadDomain

  if (!req.query.enable_pdf_caching) {
    // The frontend does not want to do pdf caching.
    return {
      compileFromClsiCache,
      populateClsiCache,
      pdfDownloadDomain,
      enablePdfCaching: false,
    }
  }

  // Double check with the latest split test assignment.
  // We may need to turn off the feature on a short notice, without requiring
  // all users to reload their editor page to disable the feature.
  const { variant } = await SplitTestHandler.promises.getAssignment(
    editorReq,
    res,
    'pdf-caching-mode'
  )
  const enablePdfCaching = variant === 'enabled'
  if (!enablePdfCaching) {
    // Skip the lookup of the chunk size when caching is not enabled.
    return {
      compileFromClsiCache,
      populateClsiCache,
      pdfDownloadDomain,
      enablePdfCaching: false,
    }
  }
  const pdfCachingMinChunkSize = await getPdfCachingMinChunkSize(editorReq, res)
  return {
    compileFromClsiCache,
    populateClsiCache,
    pdfDownloadDomain,
    enablePdfCaching,
    pdfCachingMinChunkSize,
  }
}
const getSplitTestOptionsCb = callbackify(_getSplitTestOptions)

module.exports = CompileController = {
  compile(req, res, next) {
    res.setTimeout(COMPILE_TIMEOUT_MS)
    const projectId = req.params.Project_id
    const isAutoCompile = !!req.query.auto_compile
    const fileLineErrors = !!req.query.file_line_errors
    const stopOnFirstError = !!req.body.stopOnFirstError
    const userId = SessionManager.getLoggedInUserId(req.session)
    const options = {
      isAutoCompile,
      fileLineErrors,
      stopOnFirstError,
      editorId: req.body.editorId,
    }

    if (req.body.rootDoc_id) {
      options.rootDoc_id = req.body.rootDoc_id
    } else if (
      req.body.settingsOverride &&
      req.body.settingsOverride.rootDoc_id
    ) {
      // Can be removed after deploy
      options.rootDoc_id = req.body.settingsOverride.rootDoc_id
    }
    if (req.body.compiler) {
      options.compiler = req.body.compiler
    }
    if (req.body.draft) {
      options.draft = req.body.draft
    }
    if (['validate', 'error', 'silent'].includes(req.body.check)) {
      options.check = req.body.check
    }
    if (req.body.incrementalCompilesEnabled) {
      options.incrementalCompilesEnabled = true
    }

    getSplitTestOptionsCb(req, res, (err, splitTestOptions) => {
      if (err) return next(err)
      let {
        compileFromClsiCache,
        populateClsiCache,
        enablePdfCaching,
        pdfCachingMinChunkSize,
        pdfDownloadDomain,
      } = splitTestOptions
      options.compileFromClsiCache = compileFromClsiCache
      options.populateClsiCache = populateClsiCache
      options.enablePdfCaching = enablePdfCaching
      if (enablePdfCaching) {
        options.pdfCachingMinChunkSize = pdfCachingMinChunkSize
      }

      CompileManager.compile(
        projectId,
        userId,
        options,
        (
          error,
          status,
          outputFiles,
          clsiServerId,
          limits,
          validationProblems,
          stats,
          timings,
          outputUrlPrefix,
          buildId
        ) => {
          if (error) {
            Metrics.inc('compile-error')
            return next(error)
          }
          Metrics.inc('compile-status', 1, { status })
          if (pdfDownloadDomain && outputUrlPrefix) {
            pdfDownloadDomain += outputUrlPrefix
          }

          if (
            limits &&
            SplitTestHandler.getPercentile(
              AnalyticsManager.getIdsFromSession(req.session).analyticsId,
              'compile-result-backend',
              'release'
            ) === 1
          ) {
            // For a compile request to be sent to clsi we need limits.
            // If we get here without having the limits object populated, it is
            // a reasonable assumption to make that nothing was compiled.
            // We need to know the limits in order to make use of the events.
            AnalyticsManager.recordEventForSession(
              req.session,
              'compile-result-backend',
              {
                projectId,
                ownerAnalyticsId: limits.ownerAnalyticsId,
                status,
                compileTime: timings?.compileE2E,
                timeout: limits.timeout === 60 ? 'short' : 'long',
                server: clsiServerId?.includes('-c2d-') ? 'faster' : 'normal',
                isAutoCompile,
                isInitialCompile: stats?.isInitialCompile === 1,
                restoredClsiCache: stats?.restoredClsiCache === 1,
                stopOnFirstError,
              }
            )
          }

          const outputFilesArchive = buildId
            ? getOutputFilesArchiveSpecification(projectId, userId, buildId)
            : null

          res.json({
            status,
            outputFiles,
            outputFilesArchive,
            compileGroup: limits?.compileGroup,
            clsiServerId,
            validationProblems,
            stats,
            timings,
            outputUrlPrefix,
            pdfDownloadDomain,
            pdfCachingMinChunkSize,
          })
        }
      )
    })
  },
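A hedged sketch of what a client call into the handler above might look like. The route path is an assumption inferred from `req.params.Project_id`; the body fields mirror the options parsed in `compile()`, and `projectId` / `rootDocId` are hypothetical variables.

const response = await fetch(
  `/project/${projectId}/compile?auto_compile=true`, // assumed route shape
  {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      rootDoc_id: rootDocId, // hypothetical value
      draft: false,
      incrementalCompilesEnabled: true,
      stopOnFirstError: false,
    }),
  }
)
const { status, outputFiles } = await response.json()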
  stopCompile(req, res, next) {
    const projectId = req.params.Project_id
    const userId = SessionManager.getLoggedInUserId(req.session)
    CompileManager.stopCompile(projectId, userId, function (error) {
      if (error) {
        return next(error)
      }
      res.sendStatus(200)
    })
  },

  // Used for submissions through the public API
  compileSubmission(req, res, next) {
    res.setTimeout(COMPILE_TIMEOUT_MS)
    const submissionId = req.params.submission_id
    const options = {}
    if (req.body?.rootResourcePath != null) {
      options.rootResourcePath = req.body.rootResourcePath
    }
    if (req.body?.compiler) {
      options.compiler = req.body.compiler
    }
    if (req.body?.draft) {
      options.draft = req.body.draft
    }
    if (['validate', 'error', 'silent'].includes(req.body?.check)) {
      options.check = req.body.check
    }
    options.compileGroup =
      req.body?.compileGroup || Settings.defaultFeatures.compileGroup
    options.compileBackendClass = Settings.apis.clsi.submissionBackendClass
    options.timeout =
      req.body?.timeout || Settings.defaultFeatures.compileTimeout
    ClsiManager.sendExternalRequest(
      submissionId,
      req.body,
      options,
      function (error, status, outputFiles, clsiServerId, validationProblems) {
        if (error) {
          return next(error)
        }
        res.json({
          status,
          outputFiles,
          clsiServerId,
          validationProblems,
        })
      }
    )
  },

  _getSplitTestOptions,

  _getUserIdForCompile(req) {
    if (!Settings.disablePerUserCompiles) {
      return SessionManager.getLoggedInUserId(req.session)
    }
    return null
  },
  _compileAsUser(req, callback) {
    callback(null, CompileController._getUserIdForCompile(req))
  },
  _downloadAsUser(req, callback) {
    callback(null, CompileController._getUserIdForCompile(req))
  },

  downloadPdf(req, res, next) {
    Metrics.inc('pdf-downloads')
    const projectId = req.params.Project_id
    const rateLimit = function (callback) {
      pdfDownloadRateLimiter
        .consume(req.ip, 1, { method: 'ip' })
        .then(() => {
          callback(null, true)
        })
        .catch(err => {
          if (err instanceof Error) {
            callback(err)
          } else {
            callback(null, false)
          }
        })
    }

    ProjectGetter.getProject(projectId, { name: 1 }, function (err, project) {
      if (err) {
        return next(err)
      }
      res.contentType('application/pdf')
      const filename = `${CompileController._getSafeProjectName(project)}.pdf`

      if (req.query.popupDownload) {
        res.setContentDisposition('attachment', { filename })
      } else {
        res.setContentDisposition('inline', { filename })
      }

      rateLimit(function (err, canContinue) {
        if (err) {
          logger.err({ err }, 'error checking rate limit for pdf download')
          res.sendStatus(500)
        } else if (!canContinue) {
          logger.debug(
            { projectId, ip: req.ip },
            'rate limit hit downloading pdf'
          )
          res.sendStatus(500)
        } else {
          CompileController._downloadAsUser(req, function (error, userId) {
            if (error) {
              return next(error)
            }
            const url = CompileController._getFileUrl(
              projectId,
              userId,
              req.params.build_id,
              'output.pdf'
            )
            CompileController.proxyToClsi(
              projectId,
              'output-file',
              url,
              {},
              req,
              res,
              next
            )
          })
        }
      })
    })
  },

  _getSafeProjectName(project) {
    return project.name.replace(/[^\p{L}\p{Nd}]/gu, '_')
  },

  deleteAuxFiles(req, res, next) {
    const projectId = req.params.Project_id
    const { clsiserverid } = req.query
    CompileController._compileAsUser(req, function (error, userId) {
      if (error) {
        return next(error)
      }
      CompileManager.deleteAuxFiles(
        projectId,
        userId,
        clsiserverid,
        function (error) {
          if (error) {
            return next(error)
          }
          res.sendStatus(200)
        }
      )
    })
  },

  // this is only used by templates, so is not called with a userId
  compileAndDownloadPdf(req, res, next) {
    const projectId = req.params.project_id
    // pass userId as null, since templates are an "anonymous" compile
    CompileManager.compile(projectId, null, {}, (err, _status, outputFiles) => {
      if (err) {
        logger.err(
          { err, projectId },
          'something went wrong compiling and downloading pdf'
        )
        res.sendStatus(500)
        return
      }
      const pdf = outputFiles.find(f => f.path === 'output.pdf')
      if (!pdf) {
        logger.warn(
          { projectId },
          'something went wrong compiling and downloading pdf: no pdf'
        )
        res.sendStatus(500)
        return
      }
      CompileController.proxyToClsi(
        projectId,
        'output-file',
        pdf.url,
        {},
        req,
        res,
        next
      )
    })
  },

  getFileFromClsi(req, res, next) {
    const projectId = req.params.Project_id
    CompileController._downloadAsUser(req, function (error, userId) {
      if (error) {
        return next(error)
      }

      const qs = {}

      const url = CompileController._getFileUrl(
        projectId,
        userId,
        req.params.build_id,
        req.params.file
      )
      CompileController.proxyToClsi(
        projectId,
        'output-file',
        url,
        qs,
        req,
        res,
        next
      )
    })
  },

  getFileFromClsiWithoutUser(req, res, next) {
    const submissionId = req.params.submission_id
    const url = CompileController._getFileUrl(
      submissionId,
      null,
      req.params.build_id,
      req.params.file
    )
    const limits = {
      compileGroup:
        req.body?.compileGroup ||
        req.query?.compileGroup ||
        Settings.defaultFeatures.compileGroup,
      compileBackendClass: Settings.apis.clsi.submissionBackendClass,
    }
    CompileController.proxyToClsiWithLimits(
      submissionId,
      'output-file',
      url,
      {},
      limits,
      req,
      res,
      next
    )
  },

  // compute a GET file url for a given project, user (optional), build (optional) and file
  _getFileUrl(projectId, userId, buildId, file) {
    let url
    if (userId != null && buildId != null) {
      url = `/project/${projectId}/user/${userId}/build/${buildId}/output/${file}`
    } else if (userId != null) {
      url = `/project/${projectId}/user/${userId}/output/${file}`
    } else if (buildId != null) {
      url = `/project/${projectId}/build/${buildId}/output/${file}`
    } else {
      url = `/project/${projectId}/output/${file}`
    }
    return url
  },

  // compute a POST url for a project, user (optional) and action
  _getUrl(projectId, userId, action) {
    let path = `/project/${projectId}`
    if (userId != null) {
      path += `/user/${userId}`
    }
    return `${path}/${action}`
  },

  proxySyncPdf(req, res, next) {
    const projectId = req.params.Project_id
    const { page, h, v, editorId, buildId } = req.query
    if (!page?.match(/^\d+$/)) {
      return next(new Error('invalid page parameter'))
    }
    if (!h?.match(/^-?\d+\.\d+$/)) {
      return next(new Error('invalid h parameter'))
    }
    if (!v?.match(/^-?\d+\.\d+$/)) {
      return next(new Error('invalid v parameter'))
    }
    // whether this request is going to a per-user container
    CompileController._compileAsUser(req, function (error, userId) {
      if (error) {
        return next(error)
      }
      getImageNameForProject(projectId, (error, imageName) => {
        if (error) return next(error)

        getSplitTestOptionsCb(req, res, (error, splitTestOptions) => {
          if (error) return next(error)
          const { compileFromClsiCache } = splitTestOptions

          const url = CompileController._getUrl(projectId, userId, 'sync/pdf')

          CompileController.proxyToClsi(
            projectId,
            'sync-to-pdf',
            url,
            { page, h, v, imageName, editorId, buildId, compileFromClsiCache },
            req,
            res,
            next
          )
        })
      })
    })
  },

  proxySyncCode(req, res, next) {
    const projectId = req.params.Project_id
    const { file, line, column, editorId, buildId } = req.query
    if (file == null) {
      return next(new Error('missing file parameter'))
    }
    // Check that we are dealing with a simple file path (this is not
    // strictly needed because synctex uses this parameter as a label
    // to look up in the synctex output, and does not open the file
    // itself). Since we have valid synctex paths like foo/./bar we
    // allow those by replacing /./ with /
    const testPath = file.replace('/./', '/')
    if (Path.resolve('/', testPath) !== `/${testPath}`) {
      return next(new Error('invalid file parameter'))
    }
    if (!line?.match(/^\d+$/)) {
      return next(new Error('invalid line parameter'))
    }
    if (!column?.match(/^\d+$/)) {
      return next(new Error('invalid column parameter'))
    }
    CompileController._compileAsUser(req, function (error, userId) {
      if (error) {
        return next(error)
      }
      getImageNameForProject(projectId, (error, imageName) => {
        if (error) return next(error)

        getSplitTestOptionsCb(req, res, (error, splitTestOptions) => {
          if (error) return next(error)
          const { compileFromClsiCache } = splitTestOptions

          const url = CompileController._getUrl(projectId, userId, 'sync/code')
          CompileController.proxyToClsi(
            projectId,
            'sync-to-code',
            url,
            {
              file,
              line,
              column,
              imageName,
              editorId,
              buildId,
              compileFromClsiCache,
            },
            req,
            res,
|
||||
next
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
},
|
||||
|
||||
proxyToClsi(projectId, action, url, qs, req, res, next) {
|
||||
CompileManager.getProjectCompileLimits(projectId, function (error, limits) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
CompileController.proxyToClsiWithLimits(
|
||||
projectId,
|
||||
action,
|
||||
url,
|
||||
qs,
|
||||
limits,
|
||||
req,
|
||||
res,
|
||||
next
|
||||
)
|
||||
})
|
||||
},
|
||||
|
||||
proxyToClsiWithLimits(projectId, action, url, qs, limits, req, res, next) {
|
||||
_getPersistenceOptions(
|
||||
req,
|
||||
projectId,
|
||||
limits.compileGroup,
|
||||
limits.compileBackendClass,
|
||||
(err, persistenceOptions) => {
|
||||
if (err) {
|
||||
OError.tag(err, 'error getting cookie jar for clsi request')
|
||||
return next(err)
|
||||
}
|
||||
url = new URL(`${Settings.apis.clsi.url}${url}`)
|
||||
url.search = new URLSearchParams({
|
||||
...persistenceOptions.qs,
|
||||
...qs,
|
||||
}).toString()
|
||||
const timer = new Metrics.Timer(
|
||||
'proxy_to_clsi',
|
||||
1,
|
||||
{ path: action },
|
||||
[0, 100, 1000, 2000, 5000, 10000, 15000, 20000, 30000, 45000, 60000]
|
||||
)
|
||||
Metrics.inc('proxy_to_clsi', 1, { path: action, status: 'start' })
|
||||
fetchStreamWithResponse(url.href, {
|
||||
method: req.method,
|
||||
signal: AbortSignal.timeout(60 * 1000),
|
||||
headers: persistenceOptions.headers,
|
||||
})
|
||||
.then(({ stream, response }) => {
|
||||
if (req.destroyed) {
|
||||
// The client has disconnected already, avoid trying to write into the broken connection.
|
||||
Metrics.inc('proxy_to_clsi', 1, {
|
||||
path: action,
|
||||
status: 'req-aborted',
|
||||
})
|
||||
return
|
||||
}
|
||||
Metrics.inc('proxy_to_clsi', 1, {
|
||||
path: action,
|
||||
status: response.status,
|
||||
})
|
||||
|
||||
for (const key of ['Content-Length', 'Content-Type']) {
|
||||
if (response.headers.has(key)) {
|
||||
res.setHeader(key, response.headers.get(key))
|
||||
}
|
||||
}
|
||||
res.writeHead(response.status)
|
||||
return pipeline(stream, res)
|
||||
})
|
||||
.then(() => {
|
||||
timer.labels.status = 'success'
|
||||
timer.done()
|
||||
})
|
||||
.catch(err => {
|
||||
const reqAborted = Boolean(req.destroyed)
|
||||
const status = reqAborted ? 'req-aborted-late' : 'error'
|
||||
timer.labels.status = status
|
||||
const duration = timer.done()
|
||||
Metrics.inc('proxy_to_clsi', 1, { path: action, status })
|
||||
const streamingStarted = Boolean(res.headersSent)
|
||||
if (!streamingStarted) {
|
||||
if (err instanceof RequestFailedError) {
|
||||
res.sendStatus(err.response.status)
|
||||
} else {
|
||||
res.sendStatus(500)
|
||||
}
|
||||
}
|
||||
if (
|
||||
streamingStarted &&
|
||||
reqAborted &&
|
||||
err.code === 'ERR_STREAM_PREMATURE_CLOSE'
|
||||
) {
|
||||
// Ignore noisy spurious error
|
||||
return
|
||||
}
|
||||
if (
|
||||
err instanceof RequestFailedError &&
|
||||
['sync-to-code', 'sync-to-pdf', 'output-file'].includes(action)
|
||||
) {
|
||||
// Ignore noisy error
|
||||
// https://github.com/overleaf/internal/issues/15201
|
||||
return
|
||||
}
|
||||
logger.warn(
|
||||
{
|
||||
err,
|
||||
projectId,
|
||||
url,
|
||||
action,
|
||||
reqAborted,
|
||||
streamingStarted,
|
||||
duration,
|
||||
},
|
||||
'CLSI proxy error'
|
||||
)
|
||||
})
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
wordCount(req, res, next) {
|
||||
const projectId = req.params.Project_id
|
||||
const file = req.query.file || false
|
||||
const { clsiserverid } = req.query
|
||||
CompileController._compileAsUser(req, function (error, userId) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
CompileManager.wordCount(
|
||||
projectId,
|
||||
userId,
|
||||
file,
|
||||
clsiserverid,
|
||||
function (error, body) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
res.json(body)
|
||||
}
|
||||
)
|
||||
})
|
||||
},
|
||||
}
|
||||
|
||||
function _getPersistenceOptions(
|
||||
req,
|
||||
projectId,
|
||||
compileGroup,
|
||||
compileBackendClass,
|
||||
callback
|
||||
) {
|
||||
const { clsiserverid } = req.query
|
||||
const userId = SessionManager.getLoggedInUserId(req)
|
||||
if (clsiserverid && typeof clsiserverid === 'string') {
|
||||
callback(null, {
|
||||
qs: { clsiserverid, compileGroup, compileBackendClass },
|
||||
headers: {},
|
||||
})
|
||||
} else {
|
||||
ClsiCookieManager.getServerId(
|
||||
projectId,
|
||||
userId,
|
||||
compileGroup,
|
||||
compileBackendClass,
|
||||
(err, clsiServerId) => {
|
||||
if (err) return callback(err)
|
||||
callback(null, {
|
||||
qs: { compileGroup, compileBackendClass },
|
||||
headers: clsiServerId
|
||||
? {
|
||||
Cookie: new Cookie({
|
||||
key: Settings.clsiCookie.key,
|
||||
value: clsiServerId,
|
||||
}).cookieString(),
|
||||
}
|
||||
: {},
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
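A quick illustration of `_getFileUrl` above, since it is pure string templating; the ids below (`proj1`, `user1`, `build1`) are hypothetical, not values from the commit:

// Hypothetical ids, for illustration only
CompileController._getFileUrl('proj1', 'user1', 'build1', 'output.pdf')
// => '/project/proj1/user/user1/build/build1/output/output.pdf'
CompileController._getFileUrl('proj1', null, null, 'output.log')
// => '/project/proj1/output/output.log'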
249 services/web/app/src/Features/Compile/CompileManager.js Normal file
@@ -0,0 +1,249 @@
let CompileManager
const Crypto = require('crypto')
const Settings = require('@overleaf/settings')
const RedisWrapper = require('../../infrastructure/RedisWrapper')
const rclient = RedisWrapper.client('clsi_recently_compiled')
const ProjectGetter = require('../Project/ProjectGetter')
const ProjectRootDocManager = require('../Project/ProjectRootDocManager')
const UserGetter = require('../User/UserGetter')
const ClsiManager = require('./ClsiManager')
const Metrics = require('@overleaf/metrics')
const { RateLimiter } = require('../../infrastructure/RateLimiter')
const UserAnalyticsIdCache = require('../Analytics/UserAnalyticsIdCache')
const {
  callbackify,
  callbackifyMultiResult,
} = require('@overleaf/promise-utils')

function instrumentWithTimer(fn, key) {
  return async (...args) => {
    const timer = new Metrics.Timer(key)
    try {
      return await fn(...args)
    } finally {
      timer.done()
    }
  }
}

function generateBuildId() {
  return `${Date.now().toString(16)}-${Crypto.randomBytes(8).toString('hex')}`
}

async function compile(projectId, userId, options = {}) {
  const recentlyCompiled = await CompileManager._checkIfRecentlyCompiled(
    projectId,
    userId
  )
  if (recentlyCompiled) {
    return { status: 'too-recently-compiled', outputFiles: [] }
  }

  try {
    const canCompile = await CompileManager._checkIfAutoCompileLimitHasBeenHit(
      options.isAutoCompile,
      'everyone'
    )
    if (!canCompile) {
      return { status: 'autocompile-backoff', outputFiles: [] }
    }
  } catch (error) {
    return { status: 'autocompile-backoff', outputFiles: [] }
  }

  await ProjectRootDocManager.promises.ensureRootDocumentIsSet(projectId)

  const limits =
    await CompileManager.promises.getProjectCompileLimits(projectId)
  for (const key in limits) {
    const value = limits[key]
    options[key] = value
  }

  try {
    const canCompile = await CompileManager._checkCompileGroupAutoCompileLimit(
      options.isAutoCompile,
      limits.compileGroup
    )
    if (!canCompile) {
      return { status: 'autocompile-backoff', outputFiles: [] }
    }
  } catch (error) {
    return { status: 'autocompile-backoff', outputFiles: [] }
  }

  // Generate the buildId ahead of fetching the project content from redis/mongo so that the buildId's timestamp is before any lastUpdated date.
  options.buildId = generateBuildId()

  // only pass userId down to clsi if this is a per-user compile
  const compileAsUser = Settings.disablePerUserCompiles ? undefined : userId
  const {
    status,
    outputFiles,
    clsiServerId,
    validationProblems,
    stats,
    timings,
    outputUrlPrefix,
    buildId,
  } = await ClsiManager.promises.sendRequest(projectId, compileAsUser, options)

  return {
    status,
    outputFiles,
    clsiServerId,
    limits,
    validationProblems,
    stats,
    timings,
    outputUrlPrefix,
    buildId,
  }
}

const instrumentedCompile = instrumentWithTimer(compile, 'editor.compile')

async function getProjectCompileLimits(projectId) {
  const project = await ProjectGetter.promises.getProject(projectId, {
    owner_ref: 1,
  })

  const owner = await UserGetter.promises.getUser(project.owner_ref, {
    _id: 1,
    alphaProgram: 1,
    analyticsId: 1,
    betaProgram: 1,
    features: 1,
  })

  const ownerFeatures = (owner && owner.features) || {}
  // put alpha users into their own compile group
  if (owner && owner.alphaProgram) {
    ownerFeatures.compileGroup = 'alpha'
  }
  const analyticsId = await UserAnalyticsIdCache.get(owner._id)

  const compileGroup =
    ownerFeatures.compileGroup || Settings.defaultFeatures.compileGroup
  const limits = {
    timeout:
      ownerFeatures.compileTimeout || Settings.defaultFeatures.compileTimeout,
    compileGroup,
    compileBackendClass: compileGroup === 'standard' ? 'n2d' : 'c2d',
    ownerAnalyticsId: analyticsId,
  }
  return limits
}

async function wordCount(projectId, userId, file, clsiserverid) {
  const limits =
    await CompileManager.promises.getProjectCompileLimits(projectId)
  return await ClsiManager.promises.wordCount(
    projectId,
    userId,
    file,
    limits,
    clsiserverid
  )
}

async function stopCompile(projectId, userId) {
  const limits =
    await CompileManager.promises.getProjectCompileLimits(projectId)

  return await ClsiManager.promises.stopCompile(projectId, userId, limits)
}

async function deleteAuxFiles(projectId, userId, clsiserverid) {
  const limits =
    await CompileManager.promises.getProjectCompileLimits(projectId)

  return await ClsiManager.promises.deleteAuxFiles(
    projectId,
    userId,
    limits,
    clsiserverid
  )
}

module.exports = CompileManager = {
  promises: {
    compile: instrumentedCompile,
    deleteAuxFiles,
    getProjectCompileLimits,
    stopCompile,
    wordCount,
  },
  compile: callbackifyMultiResult(instrumentedCompile, [
    'status',
    'outputFiles',
    'clsiServerId',
    'limits',
    'validationProblems',
    'stats',
    'timings',
    'outputUrlPrefix',
    'buildId',
  ]),

  stopCompile: callbackify(stopCompile),

  deleteAuxFiles: callbackify(deleteAuxFiles),

  getProjectCompileLimits: callbackify(getProjectCompileLimits),

  COMPILE_DELAY: 1, // seconds
  async _checkIfRecentlyCompiled(projectId, userId) {
    const key = `compile:${projectId}:${userId}`
    const ok = await rclient.set(key, true, 'EX', this.COMPILE_DELAY, 'NX')
    return ok !== 'OK'
  },

  async _checkCompileGroupAutoCompileLimit(isAutoCompile, compileGroup) {
    if (!isAutoCompile) {
      return true
    }
    if (compileGroup === 'standard') {
      // apply extra limits to the standard compile group
      return await CompileManager._checkIfAutoCompileLimitHasBeenHit(
        isAutoCompile,
        compileGroup
      )
    } else {
      Metrics.inc(`auto-compile-${compileGroup}`)
      return true
    }
  }, // always allow priority group users to compile

  async _checkIfAutoCompileLimitHasBeenHit(isAutoCompile, compileGroup) {
    if (!isAutoCompile) {
      return true
    }
    Metrics.inc(`auto-compile-${compileGroup}`)
    const rateLimiter = getAutoCompileRateLimiter(compileGroup)
    try {
      await rateLimiter.consume('global', 1, { method: 'global' })
      return true
    } catch (e) {
      // Don't differentiate between errors and rate limits. Silently trigger
      // the rate limit if there's an error consuming the points.
      Metrics.inc(`auto-compile-${compileGroup}-limited`)
      return false
    }
  },

  wordCount: callbackify(wordCount),
}

const autoCompileRateLimiters = new Map()
function getAutoCompileRateLimiter(compileGroup) {
  let rateLimiter = autoCompileRateLimiters.get(compileGroup)
  if (rateLimiter == null) {
    rateLimiter = new RateLimiter(`auto-compile:${compileGroup}`, {
      points: Settings.rateLimit.autoCompile[compileGroup] || 25,
      duration: 20,
    })
    autoCompileRateLimiters.set(compileGroup, rateLimiter)
  }
  return rateLimiter
}
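One note on `_checkIfRecentlyCompiled` above: it debounces compiles with a single atomic Redis command. `SET key value EX <ttl> NX` returns 'OK' only when the key did not already exist, so the sketch below (with hypothetical project/user ids 'p1' and 'u1') shows how a second compile within `COMPILE_DELAY` seconds is detected:

// Minimal sketch of the debounce, assuming hypothetical ids
const key = 'compile:p1:u1'
const first = await rclient.set(key, true, 'EX', 1, 'NX') // 'OK': no recent compile
const second = await rclient.set(key, true, 'EX', 1, 'NX') // null: key still alive
// _checkIfRecentlyCompiled returns (result !== 'OK'), i.e. true on the second call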
60 services/web/app/src/Features/Contacts/ContactController.mjs Normal file
@@ -0,0 +1,60 @@
import SessionManager from '../Authentication/SessionManager.js'
import ContactManager from './ContactManager.js'
import UserGetter from '../User/UserGetter.js'
import Modules from '../../infrastructure/Modules.js'
import { expressify } from '@overleaf/promise-utils'

function _formatContact(contact) {
  return {
    id: contact._id?.toString(),
    email: contact.email || '',
    first_name: contact.first_name || '',
    last_name: contact.last_name || '',
    type: 'user',
  }
}

async function getContacts(req, res) {
  const userId = SessionManager.getLoggedInUserId(req.session)

  const contactIds = await ContactManager.promises.getContactIds(userId, {
    limit: 50,
  })

  let contacts = await UserGetter.promises.getUsers(contactIds, {
    email: 1,
    first_name: 1,
    last_name: 1,
    holdingAccount: 1,
  })

  // UserGetter.getUsers may not preserve order so put them back in order
  const positions = {}
  for (let i = 0; i < contactIds.length; i++) {
    const contactId = contactIds[i]
    positions[contactId] = i
  }
  contacts.sort(
    (a, b) => positions[a._id?.toString()] - positions[b._id?.toString()]
  )

  // Don't count holding accounts to discourage users from repeating mistakes (mistyped or wrong emails, etc)
  contacts = contacts.filter(c => !c.holdingAccount)

  contacts = contacts.map(_formatContact)

  const additionalContacts = await Modules.promises.hooks.fire(
    'getContacts',
    userId,
    contacts
  )

  contacts = contacts.concat(...(additionalContacts || []))
  return res.json({
    contacts,
  })
}

export default {
  getContacts: expressify(getContacts),
}
51 services/web/app/src/Features/Contacts/ContactManager.js Normal file
@@ -0,0 +1,51 @@
const { callbackify } = require('util')
const OError = require('@overleaf/o-error')
const { fetchJson } = require('@overleaf/fetch-utils')
const settings = require('@overleaf/settings')

async function getContactIds(userId, options) {
  options = options ?? { limit: 50 }

  const url = new URL(`${settings.apis.contacts.url}/user/${userId}/contacts`)

  for (const [key, val] of Object.entries(options)) {
    url.searchParams.set(key, val)
  }

  let body
  try {
    body = await fetchJson(url)
  } catch (err) {
    throw OError.tag(err, 'failed request to contacts API', { userId })
  }

  return body?.contact_ids || []
}

async function addContact(userId, contactId) {
  const url = new URL(`${settings.apis.contacts.url}/user/${userId}/contacts`)

  let body
  try {
    body = await fetchJson(url, {
      method: 'POST',
      json: { contact_id: contactId },
    })
  } catch (err) {
    throw OError.tag(err, 'failed request to contacts API', {
      userId,
      contactId,
    })
  }

  return body?.contact_ids || []
}

module.exports = {
  getContactIds: callbackify(getContactIds),
  addContact: callbackify(addContact),
  promises: {
    getContactIds,
    addContact,
  },
}
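For reference, a minimal usage sketch of the promise API exported above; `userId` and `collaboratorUserId` are hypothetical values, not from the commit:

// Record a collaboration, then fetch the user's most recent contacts
await ContactManager.promises.addContact(userId, collaboratorUserId)
const contactIds = await ContactManager.promises.getContactIds(userId, {
  limit: 50,
})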
28 services/web/app/src/Features/Contacts/ContactRouter.mjs Normal file
@@ -0,0 +1,28 @@
import AuthenticationController from '../Authentication/AuthenticationController.js'
import SessionManager from '../Authentication/SessionManager.js'
import ContactController from './ContactController.mjs'
import Settings from '@overleaf/settings'

function contactsAuthenticationMiddleware() {
  if (!Settings.allowAnonymousReadAndWriteSharing) {
    return AuthenticationController.requireLogin()
  } else {
    return (req, res, next) => {
      if (SessionManager.isUserLoggedIn(req.session)) {
        next()
      } else {
        res.json({ contacts: [] })
      }
    }
  }
}

export default {
  apply(webRouter) {
    webRouter.get(
      '/user/contacts',
      contactsAuthenticationMiddleware(),
      ContactController.getContacts
    )
  },
}
61 services/web/app/src/Features/Cooldown/CooldownManager.js Normal file
@@ -0,0 +1,61 @@
/* eslint-disable
    n/handle-callback-err,
    max-len,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let CooldownManager
const RedisWrapper = require('../../infrastructure/RedisWrapper')
const rclient = RedisWrapper.client('cooldown')
const logger = require('@overleaf/logger')
const { promisifyAll } = require('@overleaf/promise-utils')

const COOLDOWN_IN_SECONDS = 60 * 10

module.exports = CooldownManager = {
  _buildKey(projectId) {
    return `Cooldown:{${projectId}}`
  },

  putProjectOnCooldown(projectId, callback) {
    if (callback == null) {
      callback = function () {}
    }
    logger.debug(
      { projectId },
      `[Cooldown] putting project on cooldown for ${COOLDOWN_IN_SECONDS} seconds`
    )
    return rclient.set(
      CooldownManager._buildKey(projectId),
      '1',
      'EX',
      COOLDOWN_IN_SECONDS,
      callback
    )
  },

  isProjectOnCooldown(projectId, callback) {
    if (callback == null) {
      callback = function () {}
    }
    return rclient.get(
      CooldownManager._buildKey(projectId),
      function (err, result) {
        if (err != null) {
          return callback(err)
        }
        return callback(null, result === '1')
      }
    )
  },
}

module.exports.promises = promisifyAll(module.exports, {
  without: ['_buildKey'],
})
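Because of the `promisifyAll` wrapper above, callers can use either style; a brief sketch with a hypothetical `projectId`:

// Callback style
CooldownManager.isProjectOnCooldown(projectId, (err, onCooldown) => {
  // handle err / onCooldown here
})
// Promise style (everything except _buildKey is wrapped)
const onCooldown = await CooldownManager.promises.isProjectOnCooldown(projectId)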
@@ -0,0 +1,41 @@
/* eslint-disable
    max-len,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
import CooldownManager from './CooldownManager.js'
import logger from '@overleaf/logger'

let CooldownMiddleware

export default CooldownMiddleware = {
  freezeProject(req, res, next) {
    const projectId = req.params.Project_id
    if (projectId == null) {
      return next(new Error('[Cooldown] No projectId parameter on route'))
    }
    return CooldownManager.isProjectOnCooldown(
      projectId,
      function (err, projectIsOnCooldown) {
        if (err != null) {
          return next(err)
        }
        if (projectIsOnCooldown) {
          logger.debug(
            { projectId },
            '[Cooldown] project is on cooldown, denying request'
          )
          return res.sendStatus(429)
        }
        return next()
      }
    )
  },
}
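A sketch of how this middleware would typically be mounted; the route and handler names below are hypothetical, not taken from the commit:

webRouter.post(
  '/project/:Project_id/doc/:doc_id', // any route exposing a Project_id param
  CooldownMiddleware.freezeProject, // replies 429 while the project cools down
  someProjectWriteHandler
)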
314 services/web/app/src/Features/Docstore/DocstoreManager.js Normal file
@@ -0,0 +1,314 @@
const { promisify } = require('util')
const { promisifyMultiResult } = require('@overleaf/promise-utils')
const request = require('request').defaults({ jar: false })
const OError = require('@overleaf/o-error')
const logger = require('@overleaf/logger')
const settings = require('@overleaf/settings')
const Errors = require('../Errors/Errors')

const TIMEOUT = 30 * 1000 // request timeout

function deleteDoc(projectId, docId, name, deletedAt, callback) {
  const url = `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}`
  const docMetaData = { deleted: true, deletedAt, name }
  const options = { url, json: docMetaData, timeout: TIMEOUT }
  request.patch(options, (error, res) => {
    if (error) {
      return callback(error)
    }
    if (res.statusCode >= 200 && res.statusCode < 300) {
      callback(null)
    } else if (res.statusCode === 404) {
      error = new Errors.NotFoundError({
        message: 'tried to delete doc not in docstore',
        info: {
          projectId,
          docId,
        },
      })
      callback(error) // maybe suppress the error when delete doc which is not present?
    } else {
      error = new OError(
        `docstore api responded with non-success code: ${res.statusCode}`,
        {
          projectId,
          docId,
        }
      )
      callback(error)
    }
  })
}

/**
 * @param {string} projectId
 */
function getAllDocs(projectId, callback) {
  const url = `${settings.apis.docstore.url}/project/${projectId}/doc`
  request.get(
    {
      url,
      timeout: TIMEOUT,
      json: true,
    },
    (error, res, docs) => {
      if (error) {
        return callback(error)
      }
      if (res.statusCode >= 200 && res.statusCode < 300) {
        callback(null, docs)
      } else {
        error = new OError(
          `docstore api responded with non-success code: ${res.statusCode}`,
          { projectId }
        )
        callback(error)
      }
    }
  )
}

function getAllDeletedDocs(projectId, callback) {
  const url = `${settings.apis.docstore.url}/project/${projectId}/doc-deleted`
  request.get({ url, timeout: TIMEOUT, json: true }, (error, res, docs) => {
    if (error) {
      callback(OError.tag(error, 'could not get deleted docs from docstore'))
    } else if (res.statusCode === 200) {
      callback(null, docs)
    } else {
      callback(
        new OError(
          `docstore api responded with non-success code: ${res.statusCode}`,
          { projectId }
        )
      )
    }
  })
}

/**
 * @param {string} projectId
 * @param {Callback} callback
 */
function getAllRanges(projectId, callback) {
  const url = `${settings.apis.docstore.url}/project/${projectId}/ranges`
  request.get(
    {
      url,
      timeout: TIMEOUT,
      json: true,
    },
    (error, res, docs) => {
      if (error) {
        return callback(error)
      }
      if (res.statusCode >= 200 && res.statusCode < 300) {
        callback(null, docs)
      } else {
        error = new OError(
          `docstore api responded with non-success code: ${res.statusCode}`,
          { projectId }
        )
        callback(error)
      }
    }
  )
}

function getDoc(projectId, docId, options, callback) {
  if (options == null) {
    options = {}
  }
  if (typeof options === 'function') {
    callback = options
    options = {}
  }
  const requestParams = { timeout: TIMEOUT, json: true }
  if (options.peek) {
    requestParams.url = `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}/peek`
  } else {
    requestParams.url = `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}`
  }
  if (options.include_deleted) {
    requestParams.qs = { include_deleted: 'true' }
  }
  request.get(requestParams, (error, res, doc) => {
    if (error) {
      return callback(error)
    }
    if (res.statusCode >= 200 && res.statusCode < 300) {
      logger.debug(
        { docId, projectId, version: doc.version, rev: doc.rev },
        'got doc from docstore api'
      )
      callback(null, doc.lines, doc.rev, doc.version, doc.ranges)
    } else if (res.statusCode === 404) {
      error = new Errors.NotFoundError({
        message: 'doc not found in docstore',
        info: {
          projectId,
          docId,
        },
      })
      callback(error)
    } else {
      error = new OError(
        `docstore api responded with non-success code: ${res.statusCode}`,
        {
          projectId,
          docId,
        }
      )
      callback(error)
    }
  })
}

function isDocDeleted(projectId, docId, callback) {
  const url = `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}/deleted`
  request.get({ url, timeout: TIMEOUT, json: true }, (err, res, body) => {
    if (err) {
      callback(err)
    } else if (res.statusCode === 200) {
      callback(null, body.deleted)
    } else if (res.statusCode === 404) {
      callback(
        new Errors.NotFoundError({
          message: 'doc does not exist in project',
          info: { projectId, docId },
        })
      )
    } else {
      callback(
        new OError(
          `docstore api responded with non-success code: ${res.statusCode}`,
          { projectId, docId }
        )
      )
    }
  })
}

function updateDoc(projectId, docId, lines, version, ranges, callback) {
  const url = `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}`
  request.post(
    {
      url,
      timeout: TIMEOUT,
      json: {
        lines,
        version,
        ranges,
      },
    },
    (error, res, result) => {
      if (error) {
        return callback(error)
      }
      if (res.statusCode >= 200 && res.statusCode < 300) {
        logger.debug(
          { projectId, docId },
          'update doc in docstore url finished'
        )
        callback(null, result.modified, result.rev)
      } else {
        error = new OError(
          `docstore api responded with non-success code: ${res.statusCode}`,
          { projectId, docId }
        )
        callback(error)
      }
    }
  )
}

/**
 * Asks docstore whether any doc in the project has ranges
 *
 * @param {string} projectId
 * @param {Callback} callback
 */
function projectHasRanges(projectId, callback) {
  const url = `${settings.apis.docstore.url}/project/${projectId}/has-ranges`
  request.get({ url, timeout: TIMEOUT, json: true }, (err, res, body) => {
    if (err) {
      return callback(err)
    }
    if (res.statusCode >= 200 && res.statusCode < 300) {
      callback(null, body.projectHasRanges)
    } else {
      callback(
        new OError(
          `docstore api responded with non-success code: ${res.statusCode}`,
          { projectId }
        )
      )
    }
  })
}

function archiveProject(projectId, callback) {
  _operateOnProject(projectId, 'archive', callback)
}

function unarchiveProject(projectId, callback) {
  _operateOnProject(projectId, 'unarchive', callback)
}

function destroyProject(projectId, callback) {
  _operateOnProject(projectId, 'destroy', callback)
}

function _operateOnProject(projectId, method, callback) {
  const url = `${settings.apis.docstore.url}/project/${projectId}/${method}`
  logger.debug({ projectId }, `calling ${method} for project in docstore`)
  // use default timeout for archiving/unarchiving/destroying
  request.post(url, (err, res, docs) => {
    if (err) {
      OError.tag(err, `error calling ${method} project in docstore`, {
        projectId,
      })
      return callback(err)
    }

    if (res.statusCode >= 200 && res.statusCode < 300) {
      callback()
    } else {
      const error = new Error(
        `docstore api responded with non-success code: ${res.statusCode}`
      )
      logger.warn(
        { err: error, projectId },
        `error calling ${method} project in docstore`
      )
      callback(error)
    }
  })
}

module.exports = {
  deleteDoc,
  getAllDocs,
  getAllDeletedDocs,
  getAllRanges,
  getDoc,
  isDocDeleted,
  updateDoc,
  projectHasRanges,
  archiveProject,
  unarchiveProject,
  destroyProject,
  promises: {
    deleteDoc: promisify(deleteDoc),
    getAllDocs: promisify(getAllDocs),
    getAllDeletedDocs: promisify(getAllDeletedDocs),
    getAllRanges: promisify(getAllRanges),
    getDoc: promisifyMultiResult(getDoc, ['lines', 'rev', 'version', 'ranges']),
    isDocDeleted: promisify(isDocDeleted),
    updateDoc: promisifyMultiResult(updateDoc, ['modified', 'rev']),
    projectHasRanges: promisify(projectHasRanges),
    archiveProject: promisify(archiveProject),
    unarchiveProject: promisify(unarchiveProject),
    destroyProject: promisify(destroyProject),
  },
}
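Since `getDoc` reports several values through one callback, the promise wrapper above uses `promisifyMultiResult` to name them; a minimal usage sketch, with `projectId` and `docId` as hypothetical values:

const { lines, rev, version, ranges } = await DocstoreManager.promises.getDoc(
  projectId,
  docId,
  { peek: true } // optional; routes the read to the /peek endpoint
)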
@@ -0,0 +1,47 @@
import logger from '@overleaf/logger'
import DocumentUpdaterHandler from './DocumentUpdaterHandler.js'
import ProjectLocator from '../Project/ProjectLocator.js'
import { plainTextResponse } from '../../infrastructure/Response.js'
import { expressify } from '@overleaf/promise-utils'

async function getDoc(req, res) {
  const projectId = req.params.Project_id
  const docId = req.params.Doc_id

  try {
    const { element: doc } = await ProjectLocator.promises.findElement({
      project_id: projectId,
      element_id: docId,
      type: 'doc',
    })

    const { lines } = await DocumentUpdaterHandler.promises.getDocument(
      projectId,
      docId,
      -1 // latest version only
    )

    res.setContentDisposition('attachment', { filename: doc.name })
    plainTextResponse(res, lines.join('\n'))
  } catch (err) {
    if (err.name === 'NotFoundError') {
      logger.warn(
        { err, projectId, docId },
        'entity not found when downloading doc'
      )

      return res.sendStatus(404)
    }

    logger.err(
      { err, projectId, docId },
      'error getting document for downloading'
    )

    return res.sendStatus(500)
  }
}

export default {
  getDoc: expressify(getDoc),
}
@@ -0,0 +1,656 @@
const request = require('request').defaults({ timeout: 30 * 1000 })
const OError = require('@overleaf/o-error')
const settings = require('@overleaf/settings')
const _ = require('lodash')
const async = require('async')
const logger = require('@overleaf/logger')
const metrics = require('@overleaf/metrics')
const { promisify } = require('util')
const { promisifyMultiResult } = require('@overleaf/promise-utils')
const ProjectGetter = require('../Project/ProjectGetter')
const FileStoreHandler = require('../FileStore/FileStoreHandler')
const Features = require('../../infrastructure/Features')

function getProjectLastUpdatedAt(projectId, callback) {
  _makeRequest(
    {
      path: `/project/${projectId}/last_updated_at`,
      method: 'GET',
      json: true,
    },
    projectId,
    'project.redis.last_updated_at',
    (err, body) => {
      if (err || !body?.lastUpdatedAt) return callback(err, null)
      callback(null, new Date(body.lastUpdatedAt))
    }
  )
}

/**
 * @param {string} projectId
 */
function flushProjectToMongo(projectId, callback) {
  _makeRequest(
    {
      path: `/project/${projectId}/flush`,
      method: 'POST',
    },
    projectId,
    'flushing.mongo.project',
    callback
  )
}

function flushMultipleProjectsToMongo(projectIds, callback) {
  const jobs = projectIds.map(projectId => callback => {
    flushProjectToMongo(projectId, callback)
  })
  async.series(jobs, callback)
}

/**
 * @param {string} projectId
 */
function flushProjectToMongoAndDelete(projectId, callback) {
  _makeRequest(
    {
      path: `/project/${projectId}`,
      method: 'DELETE',
    },
    projectId,
    'flushing.mongo.project',
    callback
  )
}

function flushDocToMongo(projectId, docId, callback) {
  _makeRequest(
    {
      path: `/project/${projectId}/doc/${docId}/flush`,
      method: 'POST',
    },
    projectId,
    'flushing.mongo.doc',
    callback
  )
}

function deleteDoc(projectId, docId, ignoreFlushErrors, callback) {
  if (typeof ignoreFlushErrors === 'function') {
    callback = ignoreFlushErrors
    ignoreFlushErrors = false
  }
  let path = `/project/${projectId}/doc/${docId}`
  if (ignoreFlushErrors) {
    path += '?ignore_flush_errors=true'
  }
  const method = 'DELETE'
  _makeRequest(
    {
      path,
      method,
    },
    projectId,
    'delete.mongo.doc',
    callback
  )
}

function getComment(projectId, docId, commentId, callback) {
  _makeRequest(
    {
      path: `/project/${projectId}/doc/${docId}/comment/${commentId}`,
      json: true,
    },
    projectId,
    'get-comment',
    function (error, comment) {
      if (error) {
        return callback(error)
      }
      callback(null, comment)
    }
  )
}

function getDocument(projectId, docId, fromVersion, callback) {
  _makeRequest(
    {
      path: `/project/${projectId}/doc/${docId}?fromVersion=${fromVersion}`,
      json: true,
    },
    projectId,
    'get-document',
    function (error, doc) {
      if (error) {
        return callback(error)
      }
      callback(null, doc.lines, doc.version, doc.ranges, doc.ops)
    }
  )
}

function setDocument(projectId, docId, userId, docLines, source, callback) {
  _makeRequest(
    {
      path: `/project/${projectId}/doc/${docId}`,
      method: 'POST',
      json: {
        lines: docLines,
        source,
        user_id: userId,
      },
    },
    projectId,
    'set-document',
    callback
  )
}

function appendToDocument(projectId, docId, userId, lines, source, callback) {
  _makeRequest(
    {
      path: `/project/${projectId}/doc/${docId}/append`,
      method: 'POST',
      json: {
        lines,
        source,
        user_id: userId,
      },
    },
    projectId,
    'append-to-document',
    callback
  )
}

function getProjectDocsIfMatch(projectId, projectStateHash, callback) {
  // If the project state hasn't changed, we can get all the latest
  // docs from redis via the docupdater. Otherwise we will need to
  // fall back to getting them from mongo.
  const timer = new metrics.Timer('get-project-docs')
  const url = `${settings.apis.documentupdater.url}/project/${projectId}/get_and_flush_if_old?state=${projectStateHash}`
  request.post(url, function (error, res, body) {
    timer.done()
    if (error) {
      OError.tag(error, 'error getting project docs from doc updater', {
        url,
        projectId,
      })
      return callback(error)
    }
    if (res.statusCode === 409) {
      // HTTP response code "409 Conflict"
      // Docupdater has checked the projectStateHash and found that
      // it has changed. This means that the docs currently in redis
      // aren't the only change to the project and the full set of
      // docs/files should be retrieved from docstore/filestore
      // instead.
      callback()
    } else if (res.statusCode >= 200 && res.statusCode < 300) {
      let docs
      try {
        docs = JSON.parse(body)
      } catch (error1) {
        return callback(OError.tag(error1))
      }
      callback(null, docs)
    } else {
      callback(
        new OError(
          `doc updater returned a non-success status code: ${res.statusCode}`,
          {
            projectId,
            url,
          }
        )
      )
    }
  })
}

function clearProjectState(projectId, callback) {
  _makeRequest(
    {
      path: `/project/${projectId}/clearState`,
      method: 'POST',
    },
    projectId,
    'clear-project-state',
    callback
  )
}

function acceptChanges(projectId, docId, changeIds, callback) {
  _makeRequest(
    {
      path: `/project/${projectId}/doc/${docId}/change/accept`,
      json: { change_ids: changeIds },
      method: 'POST',
    },
    projectId,
    'accept-changes',
    callback
  )
}

function resolveThread(projectId, docId, threadId, userId, callback) {
  _makeRequest(
    {
      path: `/project/${projectId}/doc/${docId}/comment/${threadId}/resolve`,
      method: 'POST',
      json: {
        user_id: userId,
      },
    },
    projectId,
    'resolve-thread',
    callback
  )
}

function reopenThread(projectId, docId, threadId, userId, callback) {
  _makeRequest(
    {
      path: `/project/${projectId}/doc/${docId}/comment/${threadId}/reopen`,
      method: 'POST',
      json: {
        user_id: userId,
      },
    },
    projectId,
    'reopen-thread',
    callback
  )
}

function deleteThread(projectId, docId, threadId, userId, callback) {
  _makeRequest(
    {
      path: `/project/${projectId}/doc/${docId}/comment/${threadId}`,
      method: 'DELETE',
      json: {
        user_id: userId,
      },
    },
    projectId,
    'delete-thread',
    callback
  )
}

function resyncProjectHistory(
  projectId,
  projectHistoryId,
  docs,
  files,
  opts,
  callback
) {
  docs = docs.map(doc => ({
    doc: doc.doc._id,
    path: doc.path,
  }))
  const hasFilestore = Features.hasFeature('filestore')
  if (!hasFilestore) {
    // Files without a hash likely do not have a blob. Abort.
    for (const { file } of files) {
      if (!file.hash) {
        return callback(
          new OError('found file with missing hash', { projectId, file })
        )
      }
    }
  }
  files = files.map(file => ({
    file: file.file._id,
    path: file.path,
    url: hasFilestore
      ? FileStoreHandler._buildUrl(projectId, file.file._id)
      : undefined,
    _hash: file.file.hash,
    createdBlob: !hasFilestore,
    metadata: buildFileMetadataForHistory(file.file),
  }))

  const body = { docs, files, projectHistoryId }
  if (opts.historyRangesMigration) {
    body.historyRangesMigration = opts.historyRangesMigration
  }
  if (opts.resyncProjectStructureOnly) {
    body.resyncProjectStructureOnly = opts.resyncProjectStructureOnly
  }
  _makeRequest(
    {
      path: `/project/${projectId}/history/resync`,
      json: body,
      method: 'POST',
      timeout: 6 * 60 * 1000, // allow 6 minutes for resync
    },
    projectId,
    'resync-project-history',
    callback
  )
}

/**
 * Block a project from being loaded in docupdater
 *
 * @param {string} projectId
 * @param {Callback} callback
 */
function blockProject(projectId, callback) {
  _makeRequest(
    { path: `/project/${projectId}/block`, method: 'POST', json: true },
    projectId,
    'block-project',
    (err, body) => {
      if (err) {
        return callback(err)
      }
      callback(null, body.blocked)
    }
  )
}

/**
 * Unblock a previously blocked project
 *
 * @param {string} projectId
 * @param {Callback} callback
 */
function unblockProject(projectId, callback) {
  _makeRequest(
    { path: `/project/${projectId}/unblock`, method: 'POST', json: true },
    projectId,
    'unblock-project',
    (err, body) => {
      if (err) {
        return callback(err)
      }
      callback(null, body.wasBlocked)
    }
  )
}

function updateProjectStructure(
  projectId,
  projectHistoryId,
  userId,
  changes,
  source,
  callback
) {
  if (
    settings.apis.project_history == null ||
    !settings.apis.project_history.sendProjectStructureOps
  ) {
    return callback()
  }

  ProjectGetter.getProjectWithoutLock(
    projectId,
    { overleaf: true },
    (err, project) => {
      if (err) {
        return callback(err)
      }
      const historyRangesSupport = _.get(
        project,
        'overleaf.history.rangesSupportEnabled',
        false
      )
      const {
        deletes: docDeletes,
        adds: docAdds,
        renames: docRenames,
      } = _getUpdates(
        'doc',
        changes.oldDocs,
        changes.newDocs,
        historyRangesSupport
      )
      const hasFilestore = Features.hasFeature('filestore')
      if (!hasFilestore) {
        for (const newEntity of changes.newFiles || []) {
          if (!newEntity.file.hash) {
            // Files without a hash likely do not have a blob. Abort.
            return callback(
              new OError('found file with missing hash', { newEntity })
            )
          }
        }
      }
      const {
        deletes: fileDeletes,
        adds: fileAdds,
        renames: fileRenames,
      } = _getUpdates(
        'file',
        changes.oldFiles,
        changes.newFiles,
        historyRangesSupport
      )
      const updates = [].concat(
        docDeletes,
        fileDeletes,
        docAdds,
        fileAdds,
        docRenames,
        fileRenames
      )
      const projectVersion =
        changes && changes.newProject && changes.newProject.version

      if (updates.length < 1) {
        return callback()
      }

      if (projectVersion == null) {
        logger.warn(
          { projectId, changes, projectVersion },
          'did not receive project version in changes'
        )
        return callback(new Error('did not receive project version in changes'))
      }

      _makeRequest(
        {
          path: `/project/${projectId}`,
          json: {
            updates,
            userId,
            version: projectVersion,
            projectHistoryId,
            source,
          },
          method: 'POST',
        },
        projectId,
        'update-project-structure',
        callback
      )
    }
  )
}

function _makeRequest(options, projectId, metricsKey, callback) {
  const timer = new metrics.Timer(metricsKey)
  request(
    {
      url: `${settings.apis.documentupdater.url}${options.path}`,
      json: options.json,
      method: options.method || 'GET',
      timeout: options.timeout || 30 * 1000,
    },
    function (error, res, body) {
      timer.done()
      if (error) {
        logger.warn(
          { error, projectId },
          'error making request to document updater'
        )
        callback(error)
      } else if (res.statusCode >= 200 && res.statusCode < 300) {
        callback(null, body)
      } else {
        error = new Error(
          `document updater returned a failure status code: ${res.statusCode}`
        )
        logger.warn(
          { error, projectId },
          `document updater returned failure status code: ${res.statusCode}`
        )
        callback(error)
      }
    }
  )
}

function _getUpdates(
  entityType,
  oldEntities,
  newEntities,
  historyRangesSupport
) {
  if (!oldEntities) {
    oldEntities = []
  }
  if (!newEntities) {
    newEntities = []
  }
  const deletes = []
  const adds = []
  const renames = []

  const oldEntitiesHash = _.keyBy(oldEntities, entity =>
    entity[entityType]._id.toString()
  )
  const newEntitiesHash = _.keyBy(newEntities, entity =>
    entity[entityType]._id.toString()
  )

  // Send deletes before adds (and renames) to keep a 1:1 mapping between
  // paths and ids
  //
  // When a file is replaced, we first delete the old file and then add the
  // new file. If the 'add' operation is sent to project history before the
  // 'delete' then we would have two files with the same path at that point
  // in time.
  for (const id in oldEntitiesHash) {
    const oldEntity = oldEntitiesHash[id]
    const newEntity = newEntitiesHash[id]

    if (newEntity == null) {
      // entity deleted
      deletes.push({
        type: `rename-${entityType}`,
        id,
        pathname: oldEntity.path,
        newPathname: '',
      })
    }
  }
  const hasFilestore = Features.hasFeature('filestore')

  for (const id in newEntitiesHash) {
    const newEntity = newEntitiesHash[id]
    const oldEntity = oldEntitiesHash[id]

    if (oldEntity == null) {
      // entity added
      adds.push({
        type: `add-${entityType}`,
        id,
        pathname: newEntity.path,
        docLines: newEntity.docLines,
        ranges: newEntity.ranges,
        historyRangesSupport,
        url: newEntity.file != null && hasFilestore ? newEntity.url : undefined,
        hash: newEntity.file != null ? newEntity.file.hash : undefined,
        metadata: buildFileMetadataForHistory(newEntity.file),
        createdBlob: (newEntity.createdBlob || !hasFilestore) ?? false,
      })
    } else if (newEntity.path !== oldEntity.path) {
      // entity renamed
      renames.push({
        type: `rename-${entityType}`,
        id,
        pathname: oldEntity.path,
        newPathname: newEntity.path,
      })
    }
  }

  return { deletes, adds, renames }
}

function buildFileMetadataForHistory(file) {
  if (!file?.linkedFileData) return undefined

  const metadata = {
    // Files do not have a created at timestamp in the history.
    // For cloned projects, the importedAt timestamp needs to remain untouched.
    // Record the timestamp in the metadata blob to keep everything self-contained.
    importedAt: file.created,
    ...file.linkedFileData,
  }
  if (metadata.provider === 'project_output_file') {
    // The build-id and clsi-server-id are only used for downloading file.
    // Omit them from history as they are not useful in the future.
    delete metadata.build_id
    delete metadata.clsiServerId
  }
  return metadata
}

module.exports = {
  flushProjectToMongo,
  flushMultipleProjectsToMongo,
  flushProjectToMongoAndDelete,
  flushDocToMongo,
  deleteDoc,
  getComment,
  getDocument,
  getProjectLastUpdatedAt,
  setDocument,
  appendToDocument,
  getProjectDocsIfMatch,
  clearProjectState,
  acceptChanges,
  resolveThread,
  reopenThread,
  deleteThread,
  resyncProjectHistory,
  blockProject,
  unblockProject,
  updateProjectStructure,
  promises: {
    flushProjectToMongo: promisify(flushProjectToMongo),
    flushMultipleProjectsToMongo: promisify(flushMultipleProjectsToMongo),
    flushProjectToMongoAndDelete: promisify(flushProjectToMongoAndDelete),
    flushDocToMongo: promisify(flushDocToMongo),
    deleteDoc: promisify(deleteDoc),
    getComment: promisify(getComment),
    getDocument: promisifyMultiResult(getDocument, [
      'lines',
      'version',
      'ranges',
      'ops',
    ]),
    setDocument: promisify(setDocument),
    getProjectDocsIfMatch: promisify(getProjectDocsIfMatch),
    getProjectLastUpdatedAt: promisify(getProjectLastUpdatedAt),
    clearProjectState: promisify(clearProjectState),
    acceptChanges: promisify(acceptChanges),
    resolveThread: promisify(resolveThread),
    reopenThread: promisify(reopenThread),
    deleteThread: promisify(deleteThread),
    resyncProjectHistory: promisify(resyncProjectHistory),
    blockProject: promisify(blockProject),
    unblockProject: promisify(unblockProject),
    updateProjectStructure: promisify(updateProjectStructure),
    appendToDocument: promisify(appendToDocument),
  },
}
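To make the delete-before-add ordering above concrete, here is a small sketch of the internal `_getUpdates` helper when a file is replaced at the same path; the ids, path, and hash are hypothetical:

const { deletes, adds } = _getUpdates(
  'file',
  [{ file: { _id: 'old-id' }, path: 'figure.png' }], // old state
  [{ file: { _id: 'new-id', hash: 'abc123' }, path: 'figure.png' }], // new state
  false
)
// deletes: [{ type: 'rename-file', id: 'old-id', pathname: 'figure.png', newPathname: '' }]
// adds:    [{ type: 'add-file', id: 'new-id', pathname: 'figure.png', hash: 'abc123', ... }]
// updateProjectStructure concatenates deletes before adds, so the path is freed first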
@@ -0,0 +1,95 @@
import ChatApiHandler from '../Chat/ChatApiHandler.js'
import ProjectGetter from '../Project/ProjectGetter.js'
import ProjectLocator from '../Project/ProjectLocator.js'
import ProjectEntityHandler from '../Project/ProjectEntityHandler.js'
import ProjectEntityUpdateHandler from '../Project/ProjectEntityUpdateHandler.js'
import logger from '@overleaf/logger'
import _ from 'lodash'
import { plainTextResponse } from '../../infrastructure/Response.js'
import { expressify } from '@overleaf/promise-utils'

async function getDocument(req, res) {
  const { Project_id: projectId, doc_id: docId } = req.params
  const plain = req.query.plain === 'true'
  const peek = req.query.peek === 'true'
  const project = await ProjectGetter.promises.getProject(projectId, {
    rootFolder: true,
    overleaf: true,
  })
  if (!project) {
    return res.sendStatus(404)
  }

  const { path } = await ProjectLocator.promises.findElement({
    project,
    element_id: docId,
    type: 'doc',
  })

  const { lines, version, ranges } = await ProjectEntityHandler.promises.getDoc(
    projectId,
    docId,
    { peek }
  )

  const resolvedCommentIdsInProject =
    await ChatApiHandler.promises.getResolvedThreadIds(projectId)

  const commentIdsInDoc = new Set(
    ranges?.comments?.map(comment => comment.id) ?? []
  )

  const resolvedCommentIds = resolvedCommentIdsInProject.filter(commentId =>
    commentIdsInDoc.has(commentId)
  )

  if (plain) {
    plainTextResponse(res, lines.join('\n'))
  } else {
    const projectHistoryId = _.get(project, 'overleaf.history.id')
    const historyRangesSupport = _.get(
      project,
      'overleaf.history.rangesSupportEnabled',
      false
    )

    // all projects are now migrated to Full Project History, keeping the field
    // for API compatibility
    const projectHistoryType = 'project-history'

    res.json({
      lines,
      version,
      ranges,
      pathname: path.fileSystem,
      projectHistoryId,
      projectHistoryType,
      historyRangesSupport,
      resolvedCommentIds,
    })
  }
}

async function setDocument(req, res) {
  const { Project_id: projectId, doc_id: docId } = req.params
  const { lines, version, ranges, lastUpdatedAt, lastUpdatedBy } = req.body
  const result = await ProjectEntityUpdateHandler.promises.updateDocLines(
    projectId,
    docId,
    lines,
    version,
    ranges,
    lastUpdatedAt,
    lastUpdatedBy
  )
  logger.debug(
    { docId, projectId },
    'finished receiving set document request from api (docupdater)'
  )
  res.json(result)
}

export default {
  getDocument: expressify(getDocument),
  setDocument: expressify(setDocument),
}
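For reference, a successful non-plain getDocument call responds with JSON shaped like the object below. The field names come straight from the res.json() call above; all of the values here are illustrative placeholders, not real data.

// Illustrative response from the JSON branch of getDocument above.
const exampleResponse = {
  lines: ['\\documentclass{article}', '\\begin{document}', 'Hi', '\\end{document}'],
  version: 42, // document version from the docupdater
  ranges: { comments: [{ id: 'comment-id-1' }] },
  pathname: '/main.tex', // path.fileSystem from ProjectLocator
  projectHistoryId: 1234,
  projectHistoryType: 'project-history', // hard-coded, kept for API compatibility
  historyRangesSupport: false,
  resolvedCommentIds: ['comment-id-1'], // resolved threads present in this doc
}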
services/web/app/src/Features/Documents/DocumentHelper.js (78 lines, Normal file)
@@ -0,0 +1,78 @@
/* eslint-disable
    max-len,
    no-cond-assign,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let DocumentHelper
module.exports = DocumentHelper = {
  getTitleFromTexContent(content, maxContentToScan) {
    if (maxContentToScan == null) {
      maxContentToScan = 30000
    }
    const TITLE_WITH_CURLY_BRACES = /\\[tT]itle\*?\s*{([^}]+)}/
    const TITLE_WITH_SQUARE_BRACES = /\\[tT]itle\s*\[([^\]]+)\]/
    for (const line of Array.from(
      DocumentHelper._getLinesFromContent(content, maxContentToScan)
    )) {
      let match
      if (
        (match =
          line.match(TITLE_WITH_CURLY_BRACES) ||
          line.match(TITLE_WITH_SQUARE_BRACES))
      ) {
        return DocumentHelper.detex(match[1])
      }
    }

    return null
  },

  contentHasDocumentclass(content, maxContentToScan) {
    if (maxContentToScan == null) {
      maxContentToScan = 30000
    }
    for (const line of Array.from(
      DocumentHelper._getLinesFromContent(content, maxContentToScan)
    )) {
      // We've had problems with this regex locking up CPU.
      // Previously /.*\\documentclass/ would totally lock up on lines of 500kb (data text files :()
      // This regex will only look from the start of the line, including whitespace so will return quickly
      // regardless of line length.
      if (line.match(/^\s*\\documentclass/)) {
        return true
      }
    }

    return false
  },

  detex(string) {
    return string
      .replace(/\\LaTeX/g, 'LaTeX')
      .replace(/\\TeX/g, 'TeX')
      .replace(/\\TikZ/g, 'TikZ')
      .replace(/\\BibTeX/g, 'BibTeX')
      .replace(/\\\[[A-Za-z0-9. ]*\]/g, ' ') // line spacing
      .replace(/\\(?:[a-zA-Z]+|.|)/g, '')
      .replace(/{}|~/g, ' ')
      .replace(/[${}]/g, '')
      .replace(/ +/g, ' ')
      .trim()
  },

  _getLinesFromContent(content, maxContentToScan) {
    if (typeof content === 'string') {
      return content.substring(0, maxContentToScan).split('\n')
    } else {
      return content
    }
  },
}
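A quick sketch of how these helpers behave on typical input; the require path assumes the file location in the header above and is only illustrative.

const DocumentHelper = require('./services/web/app/src/Features/Documents/DocumentHelper')

// Title extraction accepts a string or an array of lines, and strips
// TeX markup from the captured title via detex().
DocumentHelper.getTitleFromTexContent('\\title{On \\LaTeX}')
// => 'On LaTeX'

// Returns null when no \title{...} or \title[...] appears within the
// first maxContentToScan characters (30000 by default).
DocumentHelper.getTitleFromTexContent('\\section{Intro}')
// => null

// The anchored regex keeps this fast even on very long lines.
DocumentHelper.contentHasDocumentclass('  \\documentclass{article}')
// => true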
@@ -0,0 +1,79 @@
/* eslint-disable
    max-len,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
import Metrics from '@overleaf/metrics'
import ProjectGetter from '../Project/ProjectGetter.js'
import ProjectZipStreamManager from './ProjectZipStreamManager.mjs'
import DocumentUpdaterHandler from '../DocumentUpdater/DocumentUpdaterHandler.js'
import { prepareZipAttachment } from '../../infrastructure/Response.js'

let ProjectDownloadsController

export default ProjectDownloadsController = {
  downloadProject(req, res, next) {
    const projectId = req.params.Project_id
    Metrics.inc('zip-downloads')
    return DocumentUpdaterHandler.flushProjectToMongo(
      projectId,
      function (error) {
        if (error != null) {
          return next(error)
        }
        return ProjectGetter.getProject(
          projectId,
          { name: true },
          function (error, project) {
            if (error != null) {
              return next(error)
            }
            return ProjectZipStreamManager.createZipStreamForProject(
              projectId,
              function (error, stream) {
                if (error != null) {
                  return next(error)
                }
                prepareZipAttachment(res, `${project.name}.zip`)
                return stream.pipe(res)
              }
            )
          }
        )
      }
    )
  },

  downloadMultipleProjects(req, res, next) {
    const projectIds = req.query.project_ids.split(',')
    Metrics.inc('zip-downloads-multiple')
    return DocumentUpdaterHandler.flushMultipleProjectsToMongo(
      projectIds,
      function (error) {
        if (error != null) {
          return next(error)
        }
        return ProjectZipStreamManager.createZipStreamForMultipleProjects(
          projectIds,
          function (error, stream) {
            if (error != null) {
              return next(error)
            }
            prepareZipAttachment(
              res,
              `Overleaf Projects (${projectIds.length} items).zip`
            )
            return stream.pipe(res)
          }
        )
      }
    )
  },
}
@@ -0,0 +1,160 @@
import archiver from 'archiver'
import async from 'async'
import logger from '@overleaf/logger'
import ProjectEntityHandler from '../Project/ProjectEntityHandler.js'
import ProjectGetter from '../Project/ProjectGetter.js'
import HistoryManager from '../History/HistoryManager.js'
import FileStoreHandler from '../FileStore/FileStoreHandler.js'
import Features from '../../infrastructure/Features.js'
let ProjectZipStreamManager

export default ProjectZipStreamManager = {
  createZipStreamForMultipleProjects(projectIds, callback) {
    // We'll build up a zip file that contains multiple zip files
    const archive = archiver('zip')
    archive.on('error', err =>
      logger.err(
        { err, projectIds },
        'something went wrong building archive of project'
      )
    )
    callback(null, archive)

    const jobs = projectIds.map(projectId => cb => {
      ProjectGetter.getProject(projectId, { name: true }, (error, project) => {
        if (error) {
          return cb(error)
        }
        if (!project) {
          logger.debug(
            { projectId },
            'cannot append project to zip stream: project not found'
          )
          return cb()
        }
        logger.debug(
          { projectId, name: project.name },
          'appending project to zip stream'
        )
        ProjectZipStreamManager.createZipStreamForProject(
          projectId,
          (error, stream) => {
            if (error) {
              return cb(error)
            }
            archive.append(stream, { name: `${project.name}.zip` })
            stream.on('end', () => {
              logger.debug(
                { projectId, name: project.name },
                'zip stream ended'
              )
              cb()
            })
          }
        )
      })
    })

    async.series(jobs, () => {
      logger.debug(
        { projectIds },
        'finished creating zip stream of multiple projects'
      )
      archive.finalize()
    })
  },

  createZipStreamForProject(projectId, callback) {
    const archive = archiver('zip')
    // return stream immediately before we start adding things to it
    archive.on('error', err =>
      logger.err(
        { err, projectId },
        'something went wrong building archive of project'
      )
    )
    callback(null, archive)
    this.addAllDocsToArchive(projectId, archive, error => {
      if (error) {
        logger.error(
          { err: error, projectId },
          'error adding docs to zip stream'
        )
      }
      this.addAllFilesToArchive(projectId, archive, error => {
        if (error) {
          logger.error(
            { err: error, projectId },
            'error adding files to zip stream'
          )
        }
        archive.finalize()
      })
    })
  },

  addAllDocsToArchive(projectId, archive, callback) {
    ProjectEntityHandler.getAllDocs(projectId, (error, docs) => {
      if (error) {
        return callback(error)
      }
      const jobs = Object.entries(docs).map(([path, doc]) => cb => {
        if (path[0] === '/') {
          path = path.slice(1)
        }
        logger.debug({ projectId }, 'Adding doc')
        archive.append(doc.lines.join('\n'), { name: path })
        setImmediate(cb)
      })
      async.series(jobs, callback)
    })
  },

  getFileStream: (projectId, file, callback) => {
    if (Features.hasFeature('project-history-blobs')) {
      HistoryManager.requestBlobWithFallback(
        projectId,
        file.hash,
        file._id,
        (error, result) => {
          if (error) {
            return callback(error)
          }
          const { stream } = result
          callback(null, stream)
        }
      )
    } else {
      FileStoreHandler.getFileStream(projectId, file._id, {}, callback)
    }
  },

  addAllFilesToArchive(projectId, archive, callback) {
    ProjectEntityHandler.getAllFiles(projectId, (error, files) => {
      if (error) {
        return callback(error)
      }
      const jobs = Object.entries(files).map(([path, file]) => cb => {
        ProjectZipStreamManager.getFileStream(
          projectId,
          file,
          (error, stream) => {
            if (error) {
              logger.warn(
                { err: error, projectId, fileId: file._id },
                'something went wrong adding file to zip archive'
              )
              return cb(error)
            }
            if (path[0] === '/') {
              path = path.slice(1)
            }
            archive.append(stream, { name: path })
            stream.on('end', () => cb())
          }
        )
      })
      async.parallelLimit(jobs, 5, callback)
    })
  },
}
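Because both createZipStream functions hand the archive back via callback(null, archive) before any entries have been appended, the caller must start consuming the stream immediately; the downloads controller above pipes it straight into the HTTP response. A minimal standalone sketch of the same pattern (the project id and output path are made-up placeholders):

import fs from 'node:fs'
import ProjectZipStreamManager from './ProjectZipStreamManager.mjs'

// The callback fires before the archive is populated; docs and files are
// appended asynchronously and the zip is only complete once the write
// stream closes after archive.finalize() has run.
ProjectZipStreamManager.createZipStreamForProject(
  'some-project-id', // placeholder
  (error, stream) => {
    if (error) throw error
    const out = fs.createWriteStream('/tmp/project.zip') // hypothetical path
    stream.pipe(out)
    out.on('close', () => console.log('zip fully written'))
  }
)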
services/web/app/src/Features/Editor/EditorController.js (695 lines, Normal file)
@@ -0,0 +1,695 @@
const logger = require('@overleaf/logger')
const OError = require('@overleaf/o-error')
const Metrics = require('@overleaf/metrics')
const ProjectEntityUpdateHandler = require('../Project/ProjectEntityUpdateHandler')
const ProjectOptionsHandler = require('../Project/ProjectOptionsHandler')
const ProjectDetailsHandler = require('../Project/ProjectDetailsHandler')
const ProjectDeleter = require('../Project/ProjectDeleter')
const EditorRealTimeController = require('./EditorRealTimeController')
const async = require('async')
const PublicAccessLevels = require('../Authorization/PublicAccessLevels')
const { promisifyAll } = require('@overleaf/promise-utils')

const EditorController = {
  addDoc(projectId, folderId, docName, docLines, source, userId, callback) {
    EditorController.addDocWithRanges(
      projectId,
      folderId,
      docName,
      docLines,
      {},
      source,
      userId,
      callback
    )
  },

  addDocWithRanges(
    projectId,
    folderId,
    docName,
    docLines,
    docRanges,
    source,
    userId,
    callback
  ) {
    docName = docName.trim()
    Metrics.inc('editor.add-doc')
    ProjectEntityUpdateHandler.addDocWithRanges(
      projectId,
      folderId,
      docName,
      docLines,
      docRanges,
      userId,
      source,
      (err, doc, folderId) => {
        if (err) {
          OError.tag(err, 'error adding doc without lock', {
            projectId,
            docName,
          })
          return callback(err)
        }
        EditorRealTimeController.emitToRoom(
          projectId,
          'reciveNewDoc',
          folderId,
          doc,
          source,
          userId
        )
        callback(err, doc)
      }
    )
  },

  addFile(
    projectId,
    folderId,
    fileName,
    fsPath,
    linkedFileData,
    source,
    userId,
    callback
  ) {
    fileName = fileName.trim()
    Metrics.inc('editor.add-file')
    ProjectEntityUpdateHandler.addFile(
      projectId,
      folderId,
      fileName,
      fsPath,
      linkedFileData,
      userId,
      source,
      (err, fileRef, folderId) => {
        if (err) {
          OError.tag(err, 'error adding file without lock', {
            projectId,
            folderId,
            fileName,
          })
          return callback(err)
        }
        EditorRealTimeController.emitToRoom(
          projectId,
          'reciveNewFile',
          folderId,
          fileRef,
          source,
          linkedFileData,
          userId
        )
        callback(err, fileRef)
      }
    )
  },

  appendToDoc(projectId, docId, docLines, source, userId, callback) {
    ProjectEntityUpdateHandler.appendToDoc(
      projectId,
      docId,
      docLines,
      source,
      userId,
      function (err, doc) {
        if (err) {
          OError.tag(err, 'error appending to doc', {
            projectId,
            docId,
          })
          return callback(err)
        }
        callback(err, doc)
      }
    )
  },

  upsertDoc(projectId, folderId, docName, docLines, source, userId, callback) {
    ProjectEntityUpdateHandler.upsertDoc(
      projectId,
      folderId,
      docName,
      docLines,
      source,
      userId,
      function (err, doc, didAddNewDoc) {
        if (didAddNewDoc) {
          EditorRealTimeController.emitToRoom(
            projectId,
            'reciveNewDoc',
            folderId,
            doc,
            source,
            userId
          )
        }
        callback(err, doc)
      }
    )
  },

  upsertFile(
    projectId,
    folderId,
    fileName,
    fsPath,
    linkedFileData,
    source,
    userId,
    callback
  ) {
    ProjectEntityUpdateHandler.upsertFile(
      projectId,
      folderId,
      fileName,
      fsPath,
      linkedFileData,
      userId,
      source,
      function (err, newFile, didAddFile, existingFile) {
        if (err) {
          return callback(err)
        }
        if (!didAddFile) {
          // replacement, so remove the existing file from the client
          EditorRealTimeController.emitToRoom(
            projectId,
            'removeEntity',
            existingFile._id,
            source
          )
        }
        // now add the new file on the client
        EditorRealTimeController.emitToRoom(
          projectId,
          'reciveNewFile',
          folderId,
          newFile,
          source,
          linkedFileData,
          userId
        )
        callback(null, newFile)
      }
    )
  },

  upsertDocWithPath(
    projectId,
    elementPath,
    docLines,
    source,
    userId,
    callback
  ) {
    ProjectEntityUpdateHandler.upsertDocWithPath(
      projectId,
      elementPath,
      docLines,
      source,
      userId,
      function (err, doc, didAddNewDoc, newFolders, lastFolder) {
        if (err) {
          return callback(err)
        }
        EditorController._notifyProjectUsersOfNewFolders(
          projectId,
          newFolders,
          function (err) {
            if (err) {
              return callback(err)
            }
            if (didAddNewDoc) {
              EditorRealTimeController.emitToRoom(
                projectId,
                'reciveNewDoc',
                lastFolder._id,
                doc,
                source,
                userId
              )
            }
            callback(null, { doc, folder: lastFolder })
          }
        )
      }
    )
  },

  upsertFileWithPath(
    projectId,
    elementPath,
    fsPath,
    linkedFileData,
    source,
    userId,
    callback
  ) {
    ProjectEntityUpdateHandler.upsertFileWithPath(
      projectId,
      elementPath,
      fsPath,
      linkedFileData,
      userId,
      source,
      function (
        err,
        newFile,
        didAddFile,
        existingFile,
        newFolders,
        lastFolder
      ) {
        if (err) {
          return callback(err)
        }
        EditorController._notifyProjectUsersOfNewFolders(
          projectId,
          newFolders,
          function (err) {
            if (err) {
              return callback(err)
            }
            if (!didAddFile) {
              // replacement, so remove the existing file from the client
              EditorRealTimeController.emitToRoom(
                projectId,
                'removeEntity',
                existingFile._id,
                source
              )
            }
            // now add the new file on the client
            EditorRealTimeController.emitToRoom(
              projectId,
              'reciveNewFile',
              lastFolder._id,
              newFile,
              source,
              linkedFileData,
              userId
            )
            callback(null, { file: newFile, folder: lastFolder })
          }
        )
      }
    )
  },

  addFolder(projectId, folderId, folderName, source, userId, callback) {
    folderName = folderName.trim()
    Metrics.inc('editor.add-folder')
    ProjectEntityUpdateHandler.addFolder(
      projectId,
      folderId,
      folderName,
      userId,
      (err, folder, folderId) => {
        if (err) {
          OError.tag(err, 'could not add folder', {
            projectId,
            folderId,
            folderName,
            source,
          })
          return callback(err)
        }
        EditorController._notifyProjectUsersOfNewFolder(
          projectId,
          folderId,
          folder,
          userId,
          function (err) {
            if (err) {
              return callback(err)
            }
            callback(null, folder)
          }
        )
      }
    )
  },

  mkdirp(projectId, path, userId, callback) {
    logger.debug({ projectId, path }, "making directories if they don't exist")
    ProjectEntityUpdateHandler.mkdirp(
      projectId,
      path,
      userId,
      (err, newFolders, lastFolder) => {
        if (err) {
          OError.tag(err, 'could not mkdirp', {
            projectId,
            path,
          })
          return callback(err)
        }

        EditorController._notifyProjectUsersOfNewFolders(
          projectId,
          newFolders,
          function (err) {
            if (err) {
              return callback(err)
            }
            callback(null, newFolders, lastFolder)
          }
        )
      }
    )
  },

  deleteEntity(projectId, entityId, entityType, source, userId, callback) {
    Metrics.inc('editor.delete-entity')
    ProjectEntityUpdateHandler.deleteEntity(
      projectId,
      entityId,
      entityType,
      userId,
      source,
      function (err) {
        if (err) {
          OError.tag(err, 'could not delete entity', {
            projectId,
            entityId,
            entityType,
          })
          return callback(err)
        }
        logger.debug(
          { projectId, entityId, entityType },
          'telling users entity has been deleted'
        )
        EditorRealTimeController.emitToRoom(
          projectId,
          'removeEntity',
          entityId,
          source
        )
        callback()
      }
    )
  },

  deleteEntityWithPath(projectId, path, source, userId, callback) {
    ProjectEntityUpdateHandler.deleteEntityWithPath(
      projectId,
      path,
      userId,
      source,
      function (err, entityId) {
        if (err) {
          return callback(err)
        }
        EditorRealTimeController.emitToRoom(
          projectId,
          'removeEntity',
          entityId,
          source
        )
        callback(null, entityId)
      }
    )
  },

  updateProjectDescription(projectId, description, callback) {
    logger.debug({ projectId, description }, 'updating project description')
    ProjectDetailsHandler.setProjectDescription(
      projectId,
      description,
      function (err) {
        if (err) {
          OError.tag(
            err,
            'something went wrong setting the project description',
            {
              projectId,
              description,
            }
          )
          return callback(err)
        }
        EditorRealTimeController.emitToRoom(
          projectId,
          'projectDescriptionUpdated',
          description
        )
        callback()
      }
    )
  },

  deleteProject(projectId, callback) {
    Metrics.inc('editor.delete-project')
    ProjectDeleter.deleteProject(projectId, callback)
  },

  renameEntity(
    projectId,
    entityId,
    entityType,
    newName,
    userId,
    source,
    callback
  ) {
    Metrics.inc('editor.rename-entity')
    ProjectEntityUpdateHandler.renameEntity(
      projectId,
      entityId,
      entityType,
      newName,
      userId,
      source,
      function (err) {
        if (err) {
          OError.tag(err, 'error renaming entity', {
            projectId,
            entityId,
            entityType,
            newName,
          })
          return callback(err)
        }
        if (newName.length > 0) {
          EditorRealTimeController.emitToRoom(
            projectId,
            'reciveEntityRename',
            entityId,
            newName
          )
        }
        callback()
      }
    )
  },

  moveEntity(
    projectId,
    entityId,
    folderId,
    entityType,
    userId,
    source,
    callback
  ) {
    Metrics.inc('editor.move-entity')
    ProjectEntityUpdateHandler.moveEntity(
      projectId,
      entityId,
      folderId,
      entityType,
      userId,
      source,
      function (err) {
        if (err) {
          OError.tag(err, 'error moving entity', {
            projectId,
            entityId,
            folderId,
          })
          return callback(err)
        }
        EditorRealTimeController.emitToRoom(
          projectId,
          'reciveEntityMove',
          entityId,
          folderId
        )
        callback()
      }
    )
  },

  renameProject(projectId, newName, callback) {
    ProjectDetailsHandler.renameProject(projectId, newName, function (err) {
      if (err) {
        OError.tag(err, 'error renaming project', {
          projectId,
          newName,
        })
        return callback(err)
      }
      EditorRealTimeController.emitToRoom(
        projectId,
        'projectNameUpdated',
        newName
      )
      callback()
    })
  },

  setCompiler(projectId, compiler, callback) {
    ProjectOptionsHandler.setCompiler(projectId, compiler, function (err) {
      if (err) {
        return callback(err)
      }
      EditorRealTimeController.emitToRoom(
        projectId,
        'compilerUpdated',
        compiler
      )
      callback()
    })
  },

  setImageName(projectId, imageName, callback) {
    ProjectOptionsHandler.setImageName(projectId, imageName, function (err) {
      if (err) {
        return callback(err)
      }
      EditorRealTimeController.emitToRoom(
        projectId,
        'imageNameUpdated',
        imageName
      )
      callback()
    })
  },

  setSpellCheckLanguage(projectId, languageCode, callback) {
    ProjectOptionsHandler.setSpellCheckLanguage(
      projectId,
      languageCode,
      function (err) {
        if (err) {
          return callback(err)
        }
        EditorRealTimeController.emitToRoom(
          projectId,
          'spellCheckLanguageUpdated',
          languageCode
        )
        callback()
      }
    )
  },

  setPublicAccessLevel(projectId, newAccessLevel, callback) {
    async.series(
      [
        cb => {
          if (newAccessLevel === PublicAccessLevels.TOKEN_BASED) {
            ProjectDetailsHandler.ensureTokensArePresent(projectId, cb)
          } else {
            cb()
          }
        },
        cb =>
          ProjectDetailsHandler.setPublicAccessLevel(
            projectId,
            newAccessLevel,
            cb
          ),
        cb => {
          EditorRealTimeController.emitToRoom(
            projectId,
            'project:publicAccessLevel:changed',
            { newAccessLevel }
          )
          cb()
        },
      ],
      callback
    )
  },

  setRootDoc(projectId, newRootDocID, callback) {
    ProjectEntityUpdateHandler.setRootDoc(
      projectId,
      newRootDocID,
      function (err) {
        if (err) {
          return callback(err)
        }
        EditorRealTimeController.emitToRoom(
          projectId,
          'rootDocUpdated',
          newRootDocID
        )
        callback()
      }
    )
  },

  setMainBibliographyDoc(projectId, newBibliographyDocId, callback) {
    ProjectEntityUpdateHandler.setMainBibliographyDoc(
      projectId,
      newBibliographyDocId,
      function (err) {
        if (err) {
          return callback(err)
        }
        EditorRealTimeController.emitToRoom(
          projectId,
          'mainBibliographyDocUpdated',
          newBibliographyDocId
        )
        callback()
      }
    )
  },

  _notifyProjectUsersOfNewFolders(projectId, folders, callback) {
    async.eachSeries(
      folders,
      (folder, cb) =>
        EditorController._notifyProjectUsersOfNewFolder(
          projectId,
          folder.parentFolder_id,
          folder,
          null,
          cb
        ),
      callback
    )
  },

  _notifyProjectUsersOfNewFolder(
    projectId,
    folderId,
    folder,
    userId,
    callback
  ) {
    EditorRealTimeController.emitToRoom(
      projectId,
      'reciveNewFolder',
      folderId,
      folder,
      userId
    )
    callback()
  },
}

EditorController.promises = promisifyAll(EditorController, {
  multiResult: {
    mkdirp: ['newFolders', 'lastFolder'],
  },
})
module.exports = EditorController
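The promisifyAll wrapper at the bottom exposes every method under EditorController.promises; the multiResult option tells it that mkdirp's callback yields two values, which the promise version surfaces as named fields on one object. A sketch of what that buys a caller (the IDs and path are placeholders):

const EditorController = require('./services/web/app/src/Features/Editor/EditorController')

async function ensureFolders() {
  // Callback style: mkdirp(projectId, path, userId, cb(err, newFolders, lastFolder))
  // Promise style: the multiResult mapping packs both results into one object.
  const { newFolders, lastFolder } = await EditorController.promises.mkdirp(
    'project-id', // placeholder
    'chapters/figures',
    'user-id' // placeholder
  )
  console.log(newFolders.length, 'folders created; deepest is', lastFolder._id)
}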
services/web/app/src/Features/Editor/EditorHttpController.js (294 lines, Normal file)
@@ -0,0 +1,294 @@
const ProjectDeleter = require('../Project/ProjectDeleter')
const EditorController = require('./EditorController')
const ProjectGetter = require('../Project/ProjectGetter')
const AuthorizationManager = require('../Authorization/AuthorizationManager')
const ProjectEditorHandler = require('../Project/ProjectEditorHandler')
const Metrics = require('@overleaf/metrics')
const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter')
const CollaboratorsInviteGetter = require('../Collaborators/CollaboratorsInviteGetter')
const CollaboratorsHandler = require('../Collaborators/CollaboratorsHandler')
const PrivilegeLevels = require('../Authorization/PrivilegeLevels')
const SessionManager = require('../Authentication/SessionManager')
const Errors = require('../Errors/Errors')
const DocstoreManager = require('../Docstore/DocstoreManager')
const logger = require('@overleaf/logger')
const { expressify } = require('@overleaf/promise-utils')
const Settings = require('@overleaf/settings')

module.exports = {
  joinProject: expressify(joinProject),
  addDoc: expressify(addDoc),
  addFolder: expressify(addFolder),
  renameEntity: expressify(renameEntity),
  moveEntity: expressify(moveEntity),
  deleteDoc: expressify(deleteDoc),
  deleteFile: expressify(deleteFile),
  deleteFolder: expressify(deleteFolder),
  deleteEntity: expressify(deleteEntity),
  _nameIsAcceptableLength,
}

async function joinProject(req, res, next) {
  const projectId = req.params.Project_id
  let userId = req.body.userId // keep schema in sync with router
  if (userId === 'anonymous-user') {
    userId = null
  }
  Metrics.inc('editor.join-project')
  const {
    project,
    privilegeLevel,
    isRestrictedUser,
    isTokenMember,
    isInvitedMember,
  } = await _buildJoinProjectView(req, projectId, userId)
  if (!project) {
    return res.sendStatus(403)
  }
  // Hide sensitive data if the user is restricted
  if (isRestrictedUser) {
    project.owner = { _id: project.owner._id }
    project.members = []
    project.invites = []
  }
  // Only show the 'renamed or deleted' message once
  if (project.deletedByExternalDataSource) {
    await ProjectDeleter.promises.unmarkAsDeletedByExternalSource(projectId)
  }

  if (project.spellCheckLanguage) {
    project.spellCheckLanguage = await chooseSpellCheckLanguage(
      project.spellCheckLanguage
    )
  }

  res.json({
    project,
    privilegeLevel,
    isRestrictedUser,
    isTokenMember,
    isInvitedMember,
  })
}

async function _buildJoinProjectView(req, projectId, userId) {
  const project =
    await ProjectGetter.promises.getProjectWithoutDocLines(projectId)
  if (project == null) {
    throw new Errors.NotFoundError('project not found')
  }
  let deletedDocsFromDocstore = []
  try {
    deletedDocsFromDocstore =
      await DocstoreManager.promises.getAllDeletedDocs(projectId)
  } catch (err) {
    // The query in docstore is not optimized at this time and fails for
    // projects with many very large, deleted documents.
    // Not serving the user with deletedDocs from docstore may cause a minor
    // UI issue with deleted files that are no longer available for restore.
    logger.warn(
      { err, projectId },
      'soft-failure when fetching deletedDocs from docstore'
    )
  }
  const members =
    await CollaboratorsGetter.promises.getInvitedMembersWithPrivilegeLevels(
      projectId
    )
  const token = req.body.anonymousAccessToken
  const privilegeLevel =
    await AuthorizationManager.promises.getPrivilegeLevelForProject(
      userId,
      projectId,
      token
    )
  if (privilegeLevel == null || privilegeLevel === PrivilegeLevels.NONE) {
    return { project: null, privilegeLevel: null, isRestrictedUser: false }
  }
  const invites =
    await CollaboratorsInviteGetter.promises.getAllInvites(projectId)
  const isTokenMember = await CollaboratorsHandler.promises.userIsTokenMember(
    userId,
    projectId
  )
  const isInvitedMember =
    await CollaboratorsGetter.promises.isUserInvitedMemberOfProject(
      userId,
      projectId
    )
  const isRestrictedUser = AuthorizationManager.isRestrictedUser(
    userId,
    privilegeLevel,
    isTokenMember,
    isInvitedMember
  )
  return {
    project: ProjectEditorHandler.buildProjectModelView(
      project,
      members,
      invites,
      deletedDocsFromDocstore
    ),
    privilegeLevel,
    isTokenMember,
    isInvitedMember,
    isRestrictedUser,
  }
}

function _nameIsAcceptableLength(name) {
  return name != null && name.length < 150 && name.length !== 0
}

async function addDoc(req, res, next) {
  const projectId = req.params.Project_id
  const { name } = req.body
  const parentFolderId = req.body.parent_folder_id
  const userId = SessionManager.getLoggedInUserId(req.session)

  if (!_nameIsAcceptableLength(name)) {
    return res.sendStatus(400)
  }
  try {
    const doc = await EditorController.promises.addDoc(
      projectId,
      parentFolderId,
      name,
      [],
      'editor',
      userId
    )
    res.json(doc)
  } catch (err) {
    if (err.message === 'project_has_too_many_files') {
      res.status(400).json(req.i18n.translate('project_has_too_many_files'))
    } else {
      next(err)
    }
  }
}

async function addFolder(req, res, next) {
  const projectId = req.params.Project_id
  const { name } = req.body
  const parentFolderId = req.body.parent_folder_id
  const userId = SessionManager.getLoggedInUserId(req.session)
  if (!_nameIsAcceptableLength(name)) {
    return res.sendStatus(400)
  }
  try {
    const doc = await EditorController.promises.addFolder(
      projectId,
      parentFolderId,
      name,
      'editor',
      userId
    )
    res.json(doc)
  } catch (err) {
    if (err.message === 'project_has_too_many_files') {
      res.status(400).json(req.i18n.translate('project_has_too_many_files'))
    } else if (err.message === 'invalid element name') {
      res.status(400).json(req.i18n.translate('invalid_file_name'))
    } else {
      next(err)
    }
  }
}

async function renameEntity(req, res, next) {
  const projectId = req.params.Project_id
  const entityId = req.params.entity_id
  const entityType = req.params.entity_type
  const { name, source = 'editor' } = req.body
  if (!_nameIsAcceptableLength(name)) {
    return res.sendStatus(400)
  }
  const userId = SessionManager.getLoggedInUserId(req.session)
  await EditorController.promises.renameEntity(
    projectId,
    entityId,
    entityType,
    name,
    userId,
    source
  )
  res.sendStatus(204)
}

async function moveEntity(req, res, next) {
  const projectId = req.params.Project_id
  const entityId = req.params.entity_id
  const entityType = req.params.entity_type
  const folderId = req.body.folder_id
  const source = req.body.source ?? 'editor'
  const userId = SessionManager.getLoggedInUserId(req.session)
  await EditorController.promises.moveEntity(
    projectId,
    entityId,
    folderId,
    entityType,
    userId,
    source
  )
  res.sendStatus(204)
}

async function deleteDoc(req, res, next) {
  req.params.entity_type = 'doc'
  await deleteEntity(req, res, next)
}

async function deleteFile(req, res, next) {
  req.params.entity_type = 'file'
  await deleteEntity(req, res, next)
}

async function deleteFolder(req, res, next) {
  req.params.entity_type = 'folder'
  await deleteEntity(req, res, next)
}

async function deleteEntity(req, res, next) {
  const projectId = req.params.Project_id
  const entityId = req.params.entity_id
  const entityType = req.params.entity_type
  const userId = SessionManager.getLoggedInUserId(req.session)
  await EditorController.promises.deleteEntity(
    projectId,
    entityId,
    entityType,
    'editor',
    userId
  )
  res.sendStatus(204)
}

const supportedSpellCheckLanguages = new Set(
  Settings.languages
    // only include spell-check languages that are available in the client
    .filter(language => language.dic !== undefined)
    .map(language => language.code)
)

async function chooseSpellCheckLanguage(spellCheckLanguage) {
  if (supportedSpellCheckLanguages.has(spellCheckLanguage)) {
    return spellCheckLanguage
  }

  // Preserve the value in the database so they can use it again once we add back support.
  // Map some server-only languages to a specific variant, or disable spell checking for currently unsupported spell check languages.
  switch (spellCheckLanguage) {
    case 'en':
      // map "English" to "English (American)"
      return 'en_US'

    case 'no':
      // map "Norwegian" to "Norwegian (Bokmål)"
      return 'nb_NO'

    default:
      // map anything else to "off"
      return ''
  }
}
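The fallback in chooseSpellCheckLanguage above is easiest to see with concrete inputs. The standalone re-implementation below mirrors that switch for illustration only; the real function is module-internal (not exported), and the supported set here is a made-up stand-in for the one derived from Settings.languages.

// Standalone sketch of the spell-check fallback, with a hypothetical supported set.
const supported = new Set(['en_US', 'en_GB', 'de', 'fr'])

function fallbackSpellCheckLanguage(code) {
  if (supported.has(code)) return code // supported codes pass through unchanged
  switch (code) {
    case 'en':
      return 'en_US' // "English" -> "English (American)"
    case 'no':
      return 'nb_NO' // "Norwegian" -> "Norwegian (Bokmål)"
    default:
      return '' // anything else turns spell checking off in the client
  }
}

console.log(fallbackSpellCheckLanguage('en_GB')) // 'en_GB'
console.log(fallbackSpellCheckLanguage('en')) // 'en_US'
console.log(fallbackSpellCheckLanguage('xx')) // ''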
@@ -0,0 +1,50 @@
/* eslint-disable
    max-len,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let EditorRealTimeController
const Settings = require('@overleaf/settings')
const Metrics = require('@overleaf/metrics')
const RedisWrapper = require('../../infrastructure/RedisWrapper')
const rclient = RedisWrapper.client('pubsub')
const os = require('os')
const crypto = require('crypto')

const HOST = os.hostname()
const RND = crypto.randomBytes(4).toString('hex') // generate a random key for this process
let COUNT = 0

module.exports = EditorRealTimeController = {
  emitToRoom(roomId, message, ...payload) {
    // create a unique message id using a counter
    const messageId = `web:${HOST}:${RND}-${COUNT++}`
    let channel
    if (roomId === 'all' || !Settings.publishOnIndividualChannels) {
      channel = 'editor-events'
    } else {
      channel = `editor-events:${roomId}`
    }
    const blob = JSON.stringify({
      room_id: roomId,
      message,
      payload,
      _id: messageId,
    })
    Metrics.summary('redis.publish.editor-events', blob.length, {
      status: message,
    })
    return rclient.publish(channel, blob)
  },

  emitToAll(message, ...payload) {
    return this.emitToRoom('all', message, ...Array.from(payload))
  },
}
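A sketch of what a single emitToRoom call publishes, assuming Settings.publishOnIndividualChannels is enabled; the host name, random key, and project id below are placeholders.

const EditorRealTimeController = require('./services/web/app/src/Features/Editor/EditorRealTimeController')

// Publishes to the per-project channel 'editor-events:project-id' a JSON blob like:
// {
//   "room_id": "project-id",
//   "message": "projectNameUpdated",
//   "payload": ["New Project Name"],
//   "_id": "web:myhost:1a2b3c4d-0"
// }
EditorRealTimeController.emitToRoom('project-id', 'projectNameUpdated', 'New Project Name')

// emitToRoom('all', ...) and emitToAll(...) fall back to the shared
// 'editor-events' channel instead of a per-room channel.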
services/web/app/src/Features/Editor/EditorRouter.mjs (86 lines, Normal file)
@@ -0,0 +1,86 @@
import EditorHttpController from './EditorHttpController.js'
import AuthenticationController from '../Authentication/AuthenticationController.js'
import AuthorizationMiddleware from '../Authorization/AuthorizationMiddleware.js'
import { RateLimiter } from '../../infrastructure/RateLimiter.js'
import RateLimiterMiddleware from '../Security/RateLimiterMiddleware.js'
import { validate, Joi } from '../../infrastructure/Validation.js'

const rateLimiters = {
  addDocToProject: new RateLimiter('add-doc-to-project', {
    points: 30,
    duration: 60,
  }),
  addFolderToProject: new RateLimiter('add-folder-to-project', {
    points: 60,
    duration: 60,
  }),
  joinProject: new RateLimiter('join-project', { points: 45, duration: 60 }),
}

export default {
  apply(webRouter, privateApiRouter) {
    webRouter.post(
      '/project/:Project_id/doc',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      RateLimiterMiddleware.rateLimit(rateLimiters.addDocToProject, {
        params: ['Project_id'],
      }),
      EditorHttpController.addDoc
    )
    webRouter.post(
      '/project/:Project_id/folder',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      RateLimiterMiddleware.rateLimit(rateLimiters.addFolderToProject, {
        params: ['Project_id'],
      }),
      EditorHttpController.addFolder
    )

    webRouter.post(
      '/project/:Project_id/:entity_type/:entity_id/rename',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      EditorHttpController.renameEntity
    )
    webRouter.post(
      '/project/:Project_id/:entity_type/:entity_id/move',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      EditorHttpController.moveEntity
    )

    webRouter.delete(
      '/project/:Project_id/file/:entity_id',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      EditorHttpController.deleteFile
    )
    webRouter.delete(
      '/project/:Project_id/doc/:entity_id',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      EditorHttpController.deleteDoc
    )
    webRouter.delete(
      '/project/:Project_id/folder/:entity_id',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      EditorHttpController.deleteFolder
    )

    // Called by the real-time API to load up the current project state.
    // This is a post request because it's more than just a getting of data. We take actions
    // whenever a user joins a project, like updating the deleted status.
    privateApiRouter.post(
      '/project/:Project_id/join',
      AuthenticationController.requirePrivateApiAuth(),
      RateLimiterMiddleware.rateLimit(rateLimiters.joinProject, {
        params: ['Project_id'],
        // keep schema in sync with controller
        getUserId: req => req.body.userId,
      }),
      validate({
        body: Joi.object({
          userId: Joi.string().required(),
          anonymousAccessToken: Joi.string().optional(),
        }),
      }),
      EditorHttpController.joinProject
    )
  },
}
services/web/app/src/Features/Email/Bodies/NoCTAEmailBody.js (46 lines, Normal file)
@@ -0,0 +1,46 @@
const _ = require('lodash')

module.exports = _.template(`\
<table class="row" style="border-collapse: collapse; border-spacing: 0; display: table; padding: 0; position: relative; text-align: left; vertical-align: top; width: 100%;">
  <tbody>
    <tr style="padding: 0; vertical-align: top;">
      <th class="small-12 columns" style="line-height: 1.3; margin: 0 auto; padding: 0; padding-bottom: 16px; padding-left: 16px; padding-right: 16px; text-align: left; width: 564px;">
        <table style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3;">
          <tr style="padding: 0; text-align: left; vertical-align: top;">
            <th style="margin: 0; padding: 0; text-align: left;">
              <% if (title) { %>
                <h3 class="force-overleaf-style" style="margin: 0; color: #5D6879; font-family: Georgia, serif; font-size: 24px; font-weight: normal; line-height: 1.3; padding: 0; text-align: left; word-wrap: normal;">
                  <%= title %>
                </h3>
              <% } %>
            </th>
            <tr>
              <td>
                <p style="height: 20px; margin: 0; padding: 0;"> </p>

                <% if (greeting) { %>
                  <p style="margin: 0 0 10px 0; padding: 0;">
                    <%= greeting %>
                  </p>
                <% } %>

                <% (message).forEach(function(paragraph) { %>
                  <p class="force-overleaf-style" style="margin: 0 0 10px 0; padding: 0;">
                    <%= paragraph %>
                  </p>
                <% }) %>
                <% if (highlightedText) { %>
                  <div style="text-align: center; color: #1B222C; font-size: 20px; margin: 16px 0; padding: 16px 8px; border-radius: 8px; background: #F4F5F6;">
                    <b><%= highlightedText %></b>
                  </div>
                <% } %>
              </td>
            </tr>
          </tr>
        </table>
      </th>
    </tr>
  </tbody>
</table>
\
`)
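Since the export is just a compiled lodash template, it can be rendered directly. A sketch with placeholder copy (all of the strings below are made up); note that every variable the template references must be present in the data object, even when falsy, or lodash's with-scoped render throws a ReferenceError.

const NoCTAEmailBody = require('./services/web/app/src/Features/Email/Bodies/NoCTAEmailBody')

const html = NoCTAEmailBody({
  title: 'Confirm your email address',
  greeting: '', // the template skips the greeting <p> when falsy
  message: ['Use this 6-digit code to confirm your email address.'],
  highlightedText: '123456', // rendered inside the highlighted box
})
console.log(html.includes('123456')) // true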
services/web/app/src/Features/Email/Bodies/cta-email.js (96 lines, Normal file)
@@ -0,0 +1,96 @@
const _ = require('lodash')

module.exports = _.template(`\
<table class="row" style="border-collapse: collapse; border-spacing: 0; display: table; padding: 0; position: relative; text-align: left; vertical-align: top; width: 100%;">
  <tbody>
    <tr style="padding: 0; vertical-align: top;">
      <th class="small-12 columns" style="line-height: 1.3; margin: 0 auto; padding: 0; padding-bottom: 16px; padding-left: 16px; padding-right: 16px; text-align: left;">
        <table class="cta-table" style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3;">
          <tr style="padding: 0; text-align: left; vertical-align: top;">
            <th style="margin: 0; padding: 0; text-align: left;">
              <% if (title) { %>
                <h3 class="force-overleaf-style" style="margin: 0; color: #5D6879; font-family: Georgia, serif; font-size: 24px; font-weight: normal; line-height: 1.3; padding: 0; text-align: left; word-wrap: normal;">
                  <%= title %>
                </h3>
              <% } %>
            </th>
            <tr>
              <td>
                <p style="height: 20px; margin: 0; padding: 0;"> </p>

                <% if (greeting) { %>
                  <p style="margin: 0 0 10px 0; padding: 0;">
                    <%= greeting %>
                  </p>
                <% } %>

                <% (message).forEach(function(paragraph) { %>
                  <p class="force-overleaf-style" style="margin: 0 0 10px 0; padding: 0;">
                    <%= paragraph %>
                  </p>
                <% }) %>

                <p style="margin: 0; padding: 0;"> </p>

                <table style="border-collapse: collapse; border-spacing: 0; float: none; margin: 0 auto; padding: 0; text-align: center; vertical-align: top; width: auto;">
                  <tr style="padding: 0; text-align: left; vertical-align: top;">
                    <td style="-moz-hyphens: auto; -webkit-hyphens: auto; border-collapse: collapse !important; border-radius: 9999px; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">
                      <table style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;">
                        <tr style="padding: 0; text-align: left; vertical-align: top;">
                          <td style="-moz-hyphens: auto; -webkit-hyphens: auto; background: #4F9C45; border: none; border-collapse: collapse !important; border-radius: 9999px; color: #fefefe; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">
                            <a href="<%= ctaURL %>" style="border: 0 solid #4F9C45; border-radius: 9999px; color: #fefefe; display: inline-block; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: bold; line-height: 1.3; margin: 0; padding: 8px 16px 8px 16px; text-align: left; text-decoration: none;">
                              <%= ctaText %>
                            </a>
                          </td>
                        </tr>
                      </table>
                    </td>
                  </tr>
                </table>

                <% if (secondaryMessage && secondaryMessage.length > 0) { %>
                  <p style="margin: 0; padding: 0;"> </p>

                  <% (secondaryMessage).forEach(function(paragraph) { %>
                    <p class="force-overleaf-style">
                      <%= paragraph %>
                    </p>
                  <% }) %>
                <% } %>

                <p style="margin: 0; padding: 0;"> </p>

                <p class="force-overleaf-style" style="font-size: 12px;">
                  If the button above does not appear, please copy and paste this link into your browser's address bar:
                </p>

                <p class="force-overleaf-style" style="font-size: 12px;">
                  <%= ctaURL %>
                </p>
              </td>
            </tr>
          </tr>
        </table>
      </th>
    </tr>
  </tbody>
</table>
<% if (gmailGoToAction) { %>
  <script type="application/ld+json">
    <%=
      StringHelper.stringifyJsonForScript({
        "@context": "http://schema.org",
        "@type": "EmailMessage",
        "potentialAction": {
          "@type": "ViewAction",
          "target": gmailGoToAction.target,
          "url": gmailGoToAction.target,
          "name": gmailGoToAction.name
        },
        "description": gmailGoToAction.description
      })
    %>
  </script>
<% } %>
\
`)
services/web/app/src/Features/Email/EmailBuilder.js (971 lines, Normal file)
@@ -0,0 +1,971 @@
const _ = require('lodash')
const settings = require('@overleaf/settings')
const moment = require('moment')
const EmailMessageHelper = require('./EmailMessageHelper')
const StringHelper = require('../Helpers/StringHelper')
const BaseWithHeaderEmailLayout = require('./Layouts/BaseWithHeaderEmailLayout')
const SpamSafe = require('./SpamSafe')
const ctaEmailBody = require('./Bodies/cta-email')
const NoCTAEmailBody = require('./Bodies/NoCTAEmailBody')

function _emailBodyPlainText(content, opts, ctaEmail) {
  let emailBody = `${content.greeting(opts, true)}`
  emailBody += `\r\n\r\n`
  emailBody += `${content.message(opts, true).join('\r\n\r\n')}`

  if (ctaEmail) {
    emailBody += `\r\n\r\n`
    emailBody += `${content.ctaText(opts, true)}: ${content.ctaURL(opts, true)}`
  }

  if (
    content.secondaryMessage(opts, true) &&
    content.secondaryMessage(opts, true).length > 0
  ) {
    emailBody += `\r\n\r\n`
    emailBody += `${content.secondaryMessage(opts, true).join('\r\n\r\n')}`
  }

  emailBody += `\r\n\r\n`
  emailBody += `Regards,\r\nThe ${settings.appName} Team - ${settings.siteUrl}`

  if (
    settings.email &&
    settings.email.template &&
    settings.email.template.customFooter
  ) {
    emailBody += `\r\n\r\n`
    emailBody += settings.email.template.customFooter
  }

  return emailBody
}

function ctaTemplate(content) {
  if (
    !content.ctaURL ||
    !content.ctaText ||
    !content.message ||
    !content.subject
  ) {
    throw new Error('missing required CTA email content')
  }
  if (!content.title) {
    content.title = () => {}
  }
  if (!content.greeting) {
    content.greeting = () => 'Hi,'
  }
  if (!content.secondaryMessage) {
    content.secondaryMessage = () => []
  }
  if (!content.gmailGoToAction) {
    content.gmailGoToAction = () => {}
  }
  return {
    subject(opts) {
      return content.subject(opts)
    },
    layout: BaseWithHeaderEmailLayout,
    plainTextTemplate(opts) {
      return _emailBodyPlainText(content, opts, true)
    },
    compiledTemplate(opts) {
      return ctaEmailBody({
        title: content.title(opts),
        greeting: content.greeting(opts),
        message: content.message(opts),
        secondaryMessage: content.secondaryMessage(opts),
        ctaText: content.ctaText(opts),
        ctaURL: content.ctaURL(opts),
        gmailGoToAction: content.gmailGoToAction(opts),
        StringHelper,
      })
    },
  }
}

function NoCTAEmailTemplate(content) {
  if (content.greeting == null) {
    content.greeting = () => 'Hi,'
  }
  if (!content.message) {
    throw new Error('missing message')
  }
  return {
    subject(opts) {
      return content.subject(opts)
    },
    layout: BaseWithHeaderEmailLayout,
    plainTextTemplate(opts) {
      return `\
${content.greeting(opts)}

${content.message(opts, true).join('\r\n\r\n')}

Regards,
The ${settings.appName} Team - ${settings.siteUrl}\
`
    },
    compiledTemplate(opts) {
      return NoCTAEmailBody({
        title:
          typeof content.title === 'function' ? content.title(opts) : undefined,
        greeting: content.greeting(opts),
        highlightedText:
          typeof content.highlightedText === 'function'
            ? content.highlightedText(opts)
            : undefined,
        message: content.message(opts),
        StringHelper,
      })
    },
  }
}

function buildEmail(templateName, opts) {
  const template = templates[templateName]
  opts.siteUrl = settings.siteUrl
  opts.body = template.compiledTemplate(opts)
  return {
    subject: template.subject(opts),
    html: template.layout(opts),
    text: template.plainTextTemplate && template.plainTextTemplate(opts),
  }
}

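buildEmail ties a named template to the shared layout and both render paths. A sketch using the confirmCode template defined below, assuming the module exports buildEmail (the export list falls outside this excerpt); the recipient and code are placeholders.

const EmailBuilder = require('./services/web/app/src/Features/Email/EmailBuilder')

// Returns { subject, html, text }: subject from the template, html from the
// layout wrapping the compiled body, text from plainTextTemplate when defined.
const email = EmailBuilder.buildEmail('confirmCode', {
  to: 'user@example.com', // placeholder
  confirmCode: '123456', // placeholder
  welcomeUser: true,
})
console.log(email.subject) // 'Confirm your email address on Overleaf (123456)'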
const templates = {}

templates.registered = ctaTemplate({
  subject() {
    return `Activate your ${settings.appName} Account`
  },
  message(opts) {
    return [
      `Congratulations, you've just had an account created for you on ${
        settings.appName
      } with the email address '${_.escape(opts.to)}'.`,
      'Click here to set your password and log in:',
    ]
  },
  secondaryMessage() {
    return [
      `If you have any questions or problems, please contact ${settings.adminEmail}`,
    ]
  },
  ctaText() {
    return 'Set password'
  },
  ctaURL(opts) {
    return opts.setNewPasswordUrl
  },
})

templates.canceledSubscription = ctaTemplate({
  subject() {
    return `${settings.appName} thoughts`
  },
  message() {
    return [
      `We are sorry to see you cancelled your ${settings.appName} premium subscription. Would you mind giving us some feedback on what the site is lacking at the moment via this quick survey?`,
    ]
  },
  secondaryMessage() {
    return ['Thank you in advance!']
  },
  ctaText() {
    return 'Leave Feedback'
  },
  ctaURL(opts) {
    return 'https://docs.google.com/forms/d/e/1FAIpQLSfa7z_s-cucRRXm70N4jEcSbFsZeb0yuKThHGQL8ySEaQzF0Q/viewform?usp=sf_link'
  },
})

templates.reactivatedSubscription = ctaTemplate({
  subject() {
    return `Subscription Reactivated - ${settings.appName}`
  },
  message(opts) {
    return ['Your subscription was reactivated successfully.']
  },
  ctaText() {
    return 'View Subscription Dashboard'
  },
  ctaURL(opts) {
    return `${settings.siteUrl}/user/subscription`
  },
})

templates.passwordResetRequested = ctaTemplate({
  subject() {
    return `Password Reset - ${settings.appName}`
  },
  title() {
    return 'Password Reset'
  },
  message() {
    return [`We got a request to reset your ${settings.appName} password.`]
  },
  secondaryMessage() {
    return [
      "If you ignore this message, your password won't be changed.",
      "If you didn't request a password reset, let us know.",
    ]
  },
  ctaText() {
    return 'Reset password'
  },
  ctaURL(opts) {
    return opts.setNewPasswordUrl
  },
})

templates.confirmEmail = ctaTemplate({
  subject() {
    return `Confirm Email - ${settings.appName}`
  },
  title() {
    return 'Confirm Email'
  },
  message(opts) {
    return [
      `Please confirm that you have added a new email, ${opts.to}, to your ${settings.appName} account.`,
    ]
  },
  secondaryMessage() {
    return [
      `If you did not request this, please let us know at <a href="mailto:${settings.adminEmail}">${settings.adminEmail}</a>.`,
      `If you have any questions or trouble confirming your email address, please get in touch with our support team at ${settings.adminEmail}.`,
    ]
  },
  ctaText() {
    return 'Confirm Email'
  },
  ctaURL(opts) {
    return opts.confirmEmailUrl
  },
})

templates.confirmCode = NoCTAEmailTemplate({
  greeting(opts) {
    return ''
  },
  subject(opts) {
    return `Confirm your email address on Overleaf (${opts.confirmCode})`
  },
  title(opts) {
    return 'Confirm your email address'
  },
  message(opts, isPlainText) {
    const msg = opts.welcomeUser
      ? [
          `Welcome to Overleaf! We're so glad you joined us.`,
          'Use this 6-digit confirmation code to finish your setup.',
        ]
      : ['Use this 6-digit code to confirm your email address.']

    if (isPlainText && opts.confirmCode) {
      msg.push(opts.confirmCode)
    }
    return msg
  },
  highlightedText(opts) {
    return opts.confirmCode
  },
})

templates.projectInvite = ctaTemplate({
  subject(opts) {
    const safeName = SpamSafe.isSafeProjectName(opts.project.name)
    const safeEmail = SpamSafe.isSafeEmail(opts.owner.email)

    if (safeName && safeEmail) {
      return `"${_.escape(opts.project.name)}" — shared by ${_.escape(
        opts.owner.email
      )}`
    }
    if (safeName) {
      return `${settings.appName} project shared with you — "${_.escape(
        opts.project.name
      )}"`
    }
    if (safeEmail) {
      return `${_.escape(opts.owner.email)} shared an ${
        settings.appName
      } project with you`
    }

    return `An ${settings.appName} project has been shared with you`
  },
  title(opts) {
    return 'Project Invite'
  },
  greeting(opts) {
    return ''
  },
  message(opts, isPlainText) {
    // build message depending on spam-safe variables
    const message = [`You have been invited to an ${settings.appName} project.`]

    if (SpamSafe.isSafeProjectName(opts.project.name)) {
      message.push('<br/> Project:')
      message.push(`<b>${_.escape(opts.project.name)}</b>`)
    }

    if (SpamSafe.isSafeEmail(opts.owner.email)) {
      message.push(`<br/> Shared by:`)
      message.push(`<b>${_.escape(opts.owner.email)}</b>`)
    }

    if (message.length === 1) {
      message.push('<br/> Please view the project to find out more.')
    }

    return message.map(m => {
      return EmailMessageHelper.cleanHTML(m, isPlainText)
    })
  },
  ctaText() {
    return 'View project'
  },
  ctaURL(opts) {
    return opts.inviteUrl
  },
  gmailGoToAction(opts) {
    return {
      target: opts.inviteUrl,
      name: 'View project',
      description: `Join ${_.escape(
        SpamSafe.safeProjectName(opts.project.name, 'project')
      )} at ${settings.appName}`,
    }
  },
})

templates.reconfirmEmail = ctaTemplate({
  subject() {
    return `Reconfirm Email - ${settings.appName}`
  },
  title() {
    return 'Reconfirm Email'
  },
  message(opts) {
    return [
      `Please reconfirm your email address, ${opts.to}, on your ${settings.appName} account.`,
    ]
  },
  secondaryMessage() {
    return [
      'If you did not request this, you can simply ignore this message.',
      `If you have any questions or trouble confirming your email address, please get in touch with our support team at ${settings.adminEmail}.`,
    ]
  },
  ctaText() {
    return 'Reconfirm Email'
  },
  ctaURL(opts) {
    return opts.confirmEmailUrl
  },
})

templates.verifyEmailToJoinTeam = ctaTemplate({
  subject(opts) {
    return `${opts.reminder ? 'Reminder: ' : ''}${_.escape(
      _formatUserNameAndEmail(opts.inviter, 'A collaborator')
    )} has invited you to join a group subscription on ${settings.appName}`
  },
  title(opts) {
    return `${opts.reminder ? 'Reminder: ' : ''}${_.escape(
      _formatUserNameAndEmail(opts.inviter, 'A collaborator')
    )} has invited you to join a group subscription on ${settings.appName}`
  },
  message(opts) {
    return [
      `Please click the button below to join the group subscription and enjoy the benefits of an upgraded ${settings.appName} account.`,
    ]
  },
  ctaText(opts) {
    return 'Join now'
  },
  ctaURL(opts) {
    return opts.acceptInviteUrl
  },
})

templates.verifyEmailToJoinManagedUsers = ctaTemplate({
  subject(opts) {
    return `${
      opts.reminder ? 'Reminder: ' : ''
    }You’ve been invited by ${_.escape(
      _formatUserNameAndEmail(opts.inviter, 'a collaborator')
    )} to join an ${settings.appName} group subscription.`
  },
  title(opts) {
    return `${
opts.reminder ? 'Reminder: ' : ''
|
||||
}You’ve been invited by ${_.escape(
|
||||
_formatUserNameAndEmail(opts.inviter, 'a collaborator')
|
||||
)} to join an ${settings.appName} group subscription.`
|
||||
},
|
||||
message(opts) {
|
||||
return [
|
||||
`By joining this group, you'll have access to ${settings.appName} premium features such as additional collaborators, greater maximum compile time, and real-time track changes.`,
|
||||
]
|
||||
},
|
||||
secondaryMessage(opts, isPlainText) {
|
||||
const changeProjectOwnerLink = EmailMessageHelper.displayLink(
|
||||
'change project owner',
|
||||
`${settings.siteUrl}/learn/how-to/How_to_Transfer_Project_Ownership`,
|
||||
isPlainText
|
||||
)
|
||||
|
||||
return [
|
||||
`<b>User accounts in this group are managed by ${_.escape(
|
||||
_formatUserNameAndEmail(opts.admin, 'an admin')
|
||||
)}</b>`,
|
||||
`If you accept, you’ll transfer the management of your ${settings.appName} account to the owner of the group subscription, who will then have admin rights over your account and control over your stuff.`,
|
||||
`If you have personal projects in your ${settings.appName} account that you want to keep separate, that’s not a problem. You can set up another account under a personal email address and change the ownership of your personal projects to the new account. Find out how to ${changeProjectOwnerLink}.`,
|
||||
]
|
||||
},
|
||||
ctaURL(opts) {
|
||||
return opts.acceptInviteUrl
|
||||
},
|
||||
ctaText(opts) {
|
||||
return 'Accept invitation'
|
||||
},
|
||||
greeting() {
|
||||
return ''
|
||||
},
|
||||
})
|
||||
|
||||
templates.inviteNewUserToJoinManagedUsers = ctaTemplate({
|
||||
subject(opts) {
|
||||
return `${
|
||||
opts.reminder ? 'Reminder: ' : ''
|
||||
}You’ve been invited by ${_.escape(
|
||||
_formatUserNameAndEmail(opts.inviter, 'a collaborator')
|
||||
)} to join an ${settings.appName} group subscription.`
|
||||
},
|
||||
title(opts) {
|
||||
return `${
|
||||
opts.reminder ? 'Reminder: ' : ''
|
||||
}You’ve been invited by ${_.escape(
|
||||
_formatUserNameAndEmail(opts.inviter, 'a collaborator')
|
||||
)} to join an ${settings.appName} group subscription.`
|
||||
},
|
||||
message(opts) {
|
||||
return ['']
|
||||
},
|
||||
secondaryMessage(opts) {
|
||||
return [
|
||||
`<b>User accounts in this group are managed by ${_.escape(
|
||||
_formatUserNameAndEmail(opts.admin, 'an admin')
|
||||
)}.</b>`,
|
||||
`If you accept, the owner of the group subscription will have admin rights over your account and control over your stuff.`,
|
||||
`<b>What is ${settings.appName}?</b>`,
|
||||
`${settings.appName} is the collaborative online LaTeX editor loved by researchers and technical writers. With thousands of ready-to-use templates and an array of LaTeX learning resources you’ll be up and running in no time.`,
|
||||
]
|
||||
},
|
||||
ctaURL(opts) {
|
||||
return opts.acceptInviteUrl
|
||||
},
|
||||
ctaText(opts) {
|
||||
return 'Accept invitation'
|
||||
},
|
||||
greeting() {
|
||||
return ''
|
||||
},
|
||||
})
|
||||
|
||||
templates.groupSSOLinkingInvite = ctaTemplate({
|
||||
subject(opts) {
|
||||
const subjectPrefix = opts.reminder ? 'Reminder: ' : 'Action required: '
|
||||
return `${subjectPrefix}Authenticate your Overleaf account`
|
||||
},
|
||||
title(opts) {
|
||||
const titlePrefix = opts.reminder ? 'Reminder: ' : ''
|
||||
return `${titlePrefix}Single sign-on enabled`
|
||||
},
|
||||
message(opts) {
|
||||
return [
|
||||
`Hi,
|
||||
<div>
|
||||
Your group administrator has enabled single sign-on for your group.
|
||||
</div>
|
||||
</br>
|
||||
<div>
|
||||
<strong>What does this mean for you?</strong>
|
||||
</div>
|
||||
</br>
|
||||
<div>
|
||||
You won't need to remember a separate email address and password to sign in to Overleaf.
|
||||
All you need to do is authenticate your existing Overleaf account with your SSO provider.
|
||||
</div>
|
||||
`,
|
||||
]
|
||||
},
|
||||
secondaryMessage(opts) {
|
||||
return [``]
|
||||
},
|
||||
ctaURL(opts) {
|
||||
return opts.authenticateWithSSO
|
||||
},
|
||||
ctaText(opts) {
|
||||
return 'Authenticate with SSO'
|
||||
},
|
||||
greeting() {
|
||||
return ''
|
||||
},
|
||||
})
|
||||
|
||||
templates.groupSSOReauthenticate = ctaTemplate({
|
||||
subject(opts) {
|
||||
return 'Action required: Reauthenticate your Overleaf account'
|
||||
},
|
||||
title(opts) {
|
||||
return 'Action required: Reauthenticate SSO'
|
||||
},
|
||||
message(opts) {
|
||||
return [
|
||||
`Hi,
|
||||
<div>
|
||||
Single sign-on for your Overleaf group has been updated.
|
||||
This means you need to reauthenticate your Overleaf account with your group’s SSO provider.
|
||||
</div>
|
||||
`,
|
||||
]
|
||||
},
|
||||
secondaryMessage(opts) {
|
||||
if (!opts.isManagedUser) {
|
||||
return ['']
|
||||
} else {
|
||||
const passwordResetUrl = `${settings.siteUrl}/user/password/reset`
|
||||
return [
|
||||
`If you’re not currently logged in to Overleaf, you'll need to <a href="${passwordResetUrl}">set a new password</a> to reauthenticate.`,
|
||||
]
|
||||
}
|
||||
},
|
||||
ctaURL(opts) {
|
||||
return opts.authenticateWithSSO
|
||||
},
|
||||
ctaText(opts) {
|
||||
return 'Reauthenticate now'
|
||||
},
|
||||
greeting() {
|
||||
return ''
|
||||
},
|
||||
})
|
||||
|
||||
templates.groupSSODisabled = ctaTemplate({
|
||||
subject(opts) {
|
||||
if (opts.userIsManaged) {
|
||||
return `Action required: Set your Overleaf password`
|
||||
} else {
|
||||
return 'A change to your Overleaf login options'
|
||||
}
|
||||
},
|
||||
title(opts) {
|
||||
return `Single sign-on disabled`
|
||||
},
|
||||
message(opts, isPlainText) {
|
||||
const loginUrl = `${settings.siteUrl}/login`
|
||||
let whatDoesThisMeanExplanation = [
|
||||
`You can still log in to Overleaf using one of our other <a href="${loginUrl}" style="color: #0F7A06; text-decoration: none;">login options</a> or with your email address and password.`,
|
||||
`If you don't have a password, you can set one now.`,
|
||||
]
|
||||
if (opts.userIsManaged) {
|
||||
whatDoesThisMeanExplanation = [
|
||||
'You now need an email address and password to sign in to your Overleaf account.',
|
||||
]
|
||||
}
|
||||
|
||||
const message = [
|
||||
'Your group administrator has disabled single sign-on for your group.',
|
||||
'<br/>',
|
||||
'<b>What does this mean for you?</b>',
|
||||
...whatDoesThisMeanExplanation,
|
||||
]
|
||||
|
||||
return message.map(m => {
|
||||
return EmailMessageHelper.cleanHTML(m, isPlainText)
|
||||
})
|
||||
},
|
||||
secondaryMessage(opts) {
|
||||
return [``]
|
||||
},
|
||||
ctaURL(opts) {
|
||||
return opts.setNewPasswordUrl
|
||||
},
|
||||
ctaText(opts) {
|
||||
return 'Set your new password'
|
||||
},
|
||||
})
|
||||
|
||||
templates.surrenderAccountForManagedUsers = ctaTemplate({
|
||||
subject(opts) {
|
||||
const admin = _.escape(_formatUserNameAndEmail(opts.admin, 'an admin'))
|
||||
|
||||
const toGroupName = opts.groupName ? ` to ${opts.groupName}` : ''
|
||||
|
||||
return `${
|
||||
opts.reminder ? 'Reminder: ' : ''
|
||||
}You’ve been invited by ${admin} to transfer management of your ${
|
||||
settings.appName
|
||||
} account${toGroupName}`
|
||||
},
|
||||
title(opts) {
|
||||
const admin = _.escape(_formatUserNameAndEmail(opts.admin, 'an admin'))
|
||||
|
||||
const toGroupName = opts.groupName ? ` to ${opts.groupName}` : ''
|
||||
|
||||
return `${
|
||||
opts.reminder ? 'Reminder: ' : ''
|
||||
}You’ve been invited by ${admin} to transfer management of your ${
|
||||
settings.appName
|
||||
} account${toGroupName}`
|
||||
},
|
||||
message(opts, isPlainText) {
|
||||
const admin = _.escape(_formatUserNameAndEmail(opts.admin, 'an admin'))
|
||||
|
||||
const managedUsersLink = EmailMessageHelper.displayLink(
|
||||
'user account management',
|
||||
`${settings.siteUrl}/learn/how-to/Understanding_Managed_Overleaf_Accounts`,
|
||||
isPlainText
|
||||
)
|
||||
|
||||
return [
|
||||
`Your ${settings.appName} account ${_.escape(
|
||||
opts.to
|
||||
)} is part of ${admin}'s group. They’ve now enabled ${managedUsersLink} for the group. This will ensure that projects aren’t lost when someone leaves the group.`,
|
||||
]
|
||||
},
|
||||
secondaryMessage(opts, isPlainText) {
|
||||
const transferProjectOwnershipLink = EmailMessageHelper.displayLink(
|
||||
'change project owner',
|
||||
`${settings.siteUrl}/learn/how-to/How_to_Transfer_Project_Ownership`,
|
||||
isPlainText
|
||||
)
|
||||
|
||||
return [
|
||||
`<b>What does this mean for you?</b>`,
|
||||
`If you accept, you’ll transfer the management of your ${settings.appName} account to the owner of the group subscription, who will then have admin rights over your account and control over your stuff.`,
|
||||
`If you have personal projects in your ${settings.appName} account that you want to keep separate, that’s not a problem. You can set up another account under a personal email address and change the ownership of your personal projects to the new account. Find out how to ${transferProjectOwnershipLink}.`,
|
||||
`If you think this invitation has been sent in error please contact your group administrator.`,
|
||||
]
|
||||
},
|
||||
ctaURL(opts) {
|
||||
return opts.acceptInviteUrl
|
||||
},
|
||||
ctaText(opts) {
|
||||
return 'Accept invitation'
|
||||
},
|
||||
greeting() {
|
||||
return ''
|
||||
},
|
||||
})
|
||||
|
||||
templates.testEmail = ctaTemplate({
|
||||
subject() {
|
||||
return `A Test Email from ${settings.appName}`
|
||||
},
|
||||
title() {
|
||||
return `A Test Email from ${settings.appName}`
|
||||
},
|
||||
greeting() {
|
||||
return 'Hi,'
|
||||
},
|
||||
message() {
|
||||
return [`This is a test Email from ${settings.appName}`]
|
||||
},
|
||||
ctaText() {
|
||||
return `Open ${settings.appName}`
|
||||
},
|
||||
ctaURL() {
|
||||
return settings.siteUrl
|
||||
},
|
||||
})
|
||||
|
||||
templates.ownershipTransferConfirmationPreviousOwner = NoCTAEmailTemplate({
|
||||
subject(opts) {
|
||||
return `Project ownership transfer - ${settings.appName}`
|
||||
},
|
||||
title(opts) {
|
||||
const projectName = _.escape(
|
||||
SpamSafe.safeProjectName(opts.project.name, 'Your project')
|
||||
)
|
||||
return `${projectName} - Owner change`
|
||||
},
|
||||
message(opts, isPlainText) {
|
||||
const nameAndEmail = _.escape(
|
||||
_formatUserNameAndEmail(opts.newOwner, 'a collaborator')
|
||||
)
|
||||
const projectName = _.escape(
|
||||
SpamSafe.safeProjectName(opts.project.name, 'your project')
|
||||
)
|
||||
const projectNameDisplay = isPlainText
|
||||
? projectName
|
||||
: `<b>${projectName}</b>`
|
||||
return [
|
||||
`As per your request, we have made ${nameAndEmail} the owner of ${projectNameDisplay}.`,
|
||||
`If you haven't asked to change the owner of ${projectNameDisplay}, please get in touch with us via ${settings.adminEmail}.`,
|
||||
]
|
||||
},
|
||||
})
|
||||
|
||||
templates.ownershipTransferConfirmationNewOwner = ctaTemplate({
|
||||
subject(opts) {
|
||||
return `Project ownership transfer - ${settings.appName}`
|
||||
},
|
||||
title(opts) {
|
||||
const projectName = _.escape(
|
||||
SpamSafe.safeProjectName(opts.project.name, 'Your project')
|
||||
)
|
||||
return `${projectName} - Owner change`
|
||||
},
|
||||
message(opts, isPlainText) {
|
||||
const nameAndEmail = _.escape(
|
||||
_formatUserNameAndEmail(opts.previousOwner, 'A collaborator')
|
||||
)
|
||||
const projectName = _.escape(
|
||||
SpamSafe.safeProjectName(opts.project.name, 'a project')
|
||||
)
|
||||
const projectNameEmphasized = isPlainText
|
||||
? projectName
|
||||
: `<b>${projectName}</b>`
|
||||
return [
|
||||
`${nameAndEmail} has made you the owner of ${projectNameEmphasized}. You can now manage ${projectName} sharing settings.`,
|
||||
]
|
||||
},
|
||||
ctaText(opts) {
|
||||
return 'View project'
|
||||
},
|
||||
ctaURL(opts) {
|
||||
const projectUrl = `${
|
||||
settings.siteUrl
|
||||
}/project/${opts.project._id.toString()}`
|
||||
return projectUrl
|
||||
},
|
||||
})
|
||||
|
||||
templates.userOnboardingEmail = NoCTAEmailTemplate({
|
||||
subject(opts) {
|
||||
return `Getting more out of ${settings.appName}`
|
||||
},
|
||||
greeting(opts) {
|
||||
return ''
|
||||
},
|
||||
title(opts) {
|
||||
return `Getting more out of ${settings.appName}`
|
||||
},
|
||||
message(opts, isPlainText) {
|
||||
const learnLatexLink = EmailMessageHelper.displayLink(
|
||||
'Learn LaTeX in 30 minutes',
|
||||
`${settings.siteUrl}/learn/latex/Learn_LaTeX_in_30_minutes?utm_source=overleaf&utm_medium=email&utm_campaign=onboarding`,
|
||||
isPlainText
|
||||
)
|
||||
const templatesLinks = EmailMessageHelper.displayLink(
|
||||
'Find a beautiful template',
|
||||
`${settings.siteUrl}/latex/templates?utm_source=overleaf&utm_medium=email&utm_campaign=onboarding`,
|
||||
isPlainText
|
||||
)
|
||||
const collaboratorsLink = EmailMessageHelper.displayLink(
|
||||
'Work with your collaborators',
|
||||
`${settings.siteUrl}/learn/how-to/Sharing_a_project?utm_source=overleaf&utm_medium=email&utm_campaign=onboarding`,
|
||||
isPlainText
|
||||
)
|
||||
const siteLink = EmailMessageHelper.displayLink(
|
||||
'www.overleaf.com',
|
||||
settings.siteUrl,
|
||||
isPlainText
|
||||
)
|
||||
const userSettingsLink = EmailMessageHelper.displayLink(
|
||||
'here',
|
||||
`${settings.siteUrl}/user/email-preferences`,
|
||||
isPlainText
|
||||
)
|
||||
const onboardingSurveyLink = EmailMessageHelper.displayLink(
|
||||
'Join our user feedback program',
|
||||
'https://forms.gle/DB7pdk2B1VFQqVVB9',
|
||||
isPlainText
|
||||
)
|
||||
return [
|
||||
`Thanks for signing up for ${settings.appName} recently. We hope you've been finding it useful! Here are some key features to help you get the most out of the service:`,
|
||||
`${learnLatexLink}: In this tutorial we provide a quick and easy first introduction to LaTeX with no prior knowledge required. By the time you are finished, you will have written your first LaTeX document!`,
|
||||
`${templatesLinks}: If you're looking for a template or example to get started, we've a large selection available in our template gallery, including CVs, project reports, journal articles and more.`,
|
||||
`${collaboratorsLink}: One of the key features of Overleaf is the ability to share projects and collaborate on them with other users. Find out how to share your projects with your colleagues in this quick how-to guide.`,
|
||||
`${onboardingSurveyLink} to help us make Overleaf even better!`,
|
||||
'Thanks again for using Overleaf :)',
|
||||
`Lee`,
|
||||
`Lee Shalit<br />CEO<br />${siteLink}<hr>`,
|
||||
`You're receiving this email because you've recently signed up for an Overleaf account. If you've previously subscribed to emails about product offers and company news and events, you can unsubscribe ${userSettingsLink}.`,
|
||||
]
|
||||
},
|
||||
})
|
||||
|
||||
templates.securityAlert = NoCTAEmailTemplate({
|
||||
subject(opts) {
|
||||
return `Overleaf security note: ${opts.action}`
|
||||
},
|
||||
title(opts) {
|
||||
return opts.action.charAt(0).toUpperCase() + opts.action.slice(1)
|
||||
},
|
||||
message(opts, isPlainText) {
|
||||
const dateFormatted = moment().format('dddd D MMMM YYYY')
|
||||
const timeFormatted = moment().format('HH:mm')
|
||||
const helpLink = EmailMessageHelper.displayLink(
|
||||
'quick guide',
|
||||
`${settings.siteUrl}/learn/how-to/Keeping_your_account_secure`,
|
||||
isPlainText
|
||||
)
|
||||
|
||||
const actionDescribed = EmailMessageHelper.cleanHTML(
|
||||
opts.actionDescribed,
|
||||
isPlainText
|
||||
)
|
||||
|
||||
if (!opts.message) {
|
||||
opts.message = []
|
||||
}
|
||||
const message = opts.message.map(m => {
|
||||
return EmailMessageHelper.cleanHTML(m, isPlainText)
|
||||
})
|
||||
|
||||
return [
|
||||
`We are writing to let you know that ${actionDescribed} on ${dateFormatted} at ${timeFormatted} GMT.`,
|
||||
...message,
|
||||
`If this was you, you can ignore this email.`,
|
||||
`If this was not you, we recommend getting in touch with our support team at ${settings.adminEmail} to report this as potentially suspicious activity on your account.`,
|
||||
`We also encourage you to read our ${helpLink} to keeping your ${settings.appName} account safe.`,
|
||||
]
|
||||
},
|
||||
})
|
||||
|
||||
templates.SAMLDataCleared = ctaTemplate({
|
||||
subject(opts) {
|
||||
return `Institutional Login No Longer Linked - ${settings.appName}`
|
||||
},
|
||||
title(opts) {
|
||||
return 'Institutional Login No Longer Linked'
|
||||
},
|
||||
message(opts, isPlainText) {
|
||||
return [
|
||||
`We're writing to let you know that due to a bug on our end, we've had to temporarily disable logging into your ${settings.appName} through your institution.`,
|
||||
`To get it going again, you'll need to relink your institutional email address to your ${settings.appName} account via your settings.`,
|
||||
]
|
||||
},
|
||||
secondaryMessage() {
|
||||
return [
|
||||
`If you ordinarily log in to your ${settings.appName} account through your institution, you may need to set or reset your password to regain access to your account first.`,
|
||||
'This bug did not affect the security of any accounts, but it may have affected license entitlements for a small number of users. We are sorry for any inconvenience that this may cause for you.',
|
||||
`If you have any questions, please get in touch with our support team at ${settings.adminEmail} or by replying to this email.`,
|
||||
]
|
||||
},
|
||||
ctaText(opts) {
|
||||
return 'Update my Emails and Affiliations'
|
||||
},
|
||||
ctaURL(opts) {
|
||||
return `${settings.siteUrl}/user/settings`
|
||||
},
|
||||
})
|
||||
|
||||
templates.welcome = ctaTemplate({
|
||||
subject() {
|
||||
return `Welcome to ${settings.appName}`
|
||||
},
|
||||
title() {
|
||||
return `Welcome to ${settings.appName}`
|
||||
},
|
||||
greeting() {
|
||||
return 'Hi,'
|
||||
},
|
||||
message(opts, isPlainText) {
|
||||
const logInAgainDisplay = EmailMessageHelper.displayLink(
|
||||
'log in again',
|
||||
`${settings.siteUrl}/login`,
|
||||
isPlainText
|
||||
)
|
||||
const helpGuidesDisplay = EmailMessageHelper.displayLink(
|
||||
'Help Guides',
|
||||
`${settings.siteUrl}/learn`,
|
||||
isPlainText
|
||||
)
|
||||
const templatesDisplay = EmailMessageHelper.displayLink(
|
||||
'Templates',
|
||||
`${settings.siteUrl}/templates`,
|
||||
isPlainText
|
||||
)
|
||||
|
||||
return [
|
||||
`Thanks for signing up to ${settings.appName}! If you ever get lost, you can ${logInAgainDisplay} with the email address '${opts.to}'.`,
|
||||
`If you're new to LaTeX, take a look at our ${helpGuidesDisplay} and ${templatesDisplay}.`,
|
||||
`Please also take a moment to confirm your email address for ${settings.appName}:`,
|
||||
]
|
||||
},
|
||||
secondaryMessage() {
|
||||
return [
|
||||
`PS. We love talking to our users about ${settings.appName}. Reply to this email to get in touch with us directly, whatever the reason. Questions, comments, problems, suggestions, all welcome!`,
|
||||
]
|
||||
},
|
||||
ctaText() {
|
||||
return 'Confirm Email'
|
||||
},
|
||||
ctaURL(opts) {
|
||||
return opts.confirmEmailUrl
|
||||
},
|
||||
})
|
||||
|
||||
templates.welcomeWithoutCTA = NoCTAEmailTemplate({
|
||||
subject() {
|
||||
return `Welcome to ${settings.appName}`
|
||||
},
|
||||
title() {
|
||||
return `Welcome to ${settings.appName}`
|
||||
},
|
||||
greeting() {
|
||||
return 'Hi,'
|
||||
},
|
||||
message(opts, isPlainText) {
|
||||
const logInAgainDisplay = EmailMessageHelper.displayLink(
|
||||
'log in again',
|
||||
`${settings.siteUrl}/login`,
|
||||
isPlainText
|
||||
)
|
||||
const helpGuidesDisplay = EmailMessageHelper.displayLink(
|
||||
'Help Guides',
|
||||
`${settings.siteUrl}/learn`,
|
||||
isPlainText
|
||||
)
|
||||
const templatesDisplay = EmailMessageHelper.displayLink(
|
||||
'Templates',
|
||||
`${settings.siteUrl}/templates`,
|
||||
isPlainText
|
||||
)
|
||||
|
||||
return [
|
||||
`Thanks for signing up to ${settings.appName}! If you ever get lost, you can ${logInAgainDisplay} with the email address '${opts.to}'.`,
|
||||
`If you're new to LaTeX, take a look at our ${helpGuidesDisplay} and ${templatesDisplay}.`,
|
||||
`PS. We love talking to our users about ${settings.appName}. Reply to this email to get in touch with us directly, whatever the reason. Questions, comments, problems, suggestions, all welcome!`,
|
||||
]
|
||||
},
|
||||
})
|
||||
|
||||
function _formatUserNameAndEmail(user, placeholder) {
|
||||
if (user.first_name && user.last_name) {
|
||||
const fullName = `${user.first_name} ${user.last_name}`
|
||||
if (SpamSafe.isSafeUserName(fullName)) {
|
||||
if (SpamSafe.isSafeEmail(user.email)) {
|
||||
return `${fullName} (${user.email})`
|
||||
} else {
|
||||
return fullName
|
||||
}
|
||||
}
|
||||
}
|
||||
return SpamSafe.safeEmail(user.email, placeholder)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
templates,
|
||||
ctaTemplate,
|
||||
NoCTAEmailTemplate,
|
||||
buildEmail,
|
||||
}
|
||||
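Each template above is keyed by an email type and consumed through buildEmail (exported at the end of this file). A minimal editor's sketch of building one email, with hypothetical recipient values; the returned fields mirror how EmailHandler (the next file) uses them:

// Editor's sketch, not part of the commit; all values are hypothetical.
const EmailBuilder = require('./EmailBuilder')

const email = EmailBuilder.buildEmail('projectInvite', {
  to: 'user@example.com',
  project: { name: 'My Paper' },
  owner: { email: 'owner@example.com' },
  inviteUrl: 'https://www.overleaf.com/invite/token',
})
// EmailHandler reads email.subject, email.html and email.text, and skips
// sending when email.type === 'lifecycle' and lifecycle emails are disabled.
console.log(email.subject)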
41 services/web/app/src/Features/Email/EmailHandler.js Normal file
@@ -0,0 +1,41 @@
const { callbackify } = require('util')
const Settings = require('@overleaf/settings')
const logger = require('@overleaf/logger')
const EmailBuilder = require('./EmailBuilder')
const EmailSender = require('./EmailSender')
const Queues = require('../../infrastructure/Queues')

const EMAIL_SETTINGS = Settings.email || {}

/**
 * @param {string} emailType
 * @param {Object} opts
 */
async function sendEmail(emailType, opts) {
  const email = EmailBuilder.buildEmail(emailType, opts)
  if (email.type === 'lifecycle' && !EMAIL_SETTINGS.lifecycle) {
    return
  }
  opts.html = email.html
  opts.text = email.text
  opts.subject = email.subject
  await EmailSender.promises.sendEmail(opts, emailType)
}

function sendDeferredEmail(emailType, opts, delay) {
  Queues.createScheduledJob(
    'deferred-emails',
    { data: { emailType, opts } },
    delay
  ).catch(err => {
    logger.warn({ err, emailType, opts }, 'failed to queue deferred email')
  })
}

module.exports = {
  sendEmail: callbackify(sendEmail),
  sendDeferredEmail,
  promises: {
    sendEmail,
  },
}
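A minimal usage sketch of this handler, assuming a caller in the same directory; the opts fields mirror what the confirmEmail template reads (opts.to and opts.confirmEmailUrl):

// Editor's sketch, not part of the commit.
const EmailHandler = require('./EmailHandler')

async function sendConfirmation(to, confirmEmailUrl) {
  // Promise flavour; there is also a callback-style sendEmail export.
  await EmailHandler.promises.sendEmail('confirmEmail', { to, confirmEmailUrl })
}

// Or schedule the same email roughly a day later via the deferred queue:
// EmailHandler.sendDeferredEmail('confirmEmail', opts, 24 * 60 * 60 * 1000)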
28 services/web/app/src/Features/Email/EmailMessageHelper.js Normal file
@@ -0,0 +1,28 @@
const sanitizeHtml = require('sanitize-html')
const sanitizeOptions = {
  html: {
    allowedTags: ['a', 'span', 'b', 'br', 'i'],
    allowedAttributes: {
      a: ['href', 'style'],
      span: ['style', 'class'],
    },
  },
  plainText: {
    allowedTags: [],
    allowedAttributes: {},
  },
}

function cleanHTML(text, isPlainText) {
  if (!isPlainText) return sanitizeHtml(text, sanitizeOptions.html)
  return sanitizeHtml(text, sanitizeOptions.plainText)
}

function displayLink(text, url, isPlainText) {
  return isPlainText ? `${text} (${url})` : `<a href="${url}">${text}</a>`
}

module.exports = {
  cleanHTML,
  displayLink,
}
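A brief behaviour sketch of the two helpers; the expected outputs shown in comments follow from the sanitize-html options above:

// Editor's sketch, not part of the commit.
const { cleanHTML, displayLink } = require('./EmailMessageHelper')

displayLink('View project', 'https://example.com/p/1', false)
// => '<a href="https://example.com/p/1">View project</a>'
displayLink('View project', 'https://example.com/p/1', true)
// => 'View project (https://example.com/p/1)'

// cleanHTML keeps only a/span/b/br/i in HTML mode and strips all tags
// in plain-text mode:
cleanHTML('<b>Hi</b><script>x()</script>', false) // => '<b>Hi</b>'
cleanHTML('<b>Hi</b>', true) // => 'Hi'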
29 services/web/app/src/Features/Email/EmailOptionsHelper.js Normal file
@@ -0,0 +1,29 @@
function _getIndefiniteArticle(providerName) {
  const vowels = ['a', 'e', 'i', 'o', 'u']

  return vowels.includes(providerName.charAt(0).toLowerCase()) ? 'an' : 'a'
}

function _actionBuilder(providerName, action, accountLinked) {
  if (providerName.toLowerCase() !== 'google') {
    return `${providerName} account ${action}`
  }

  return accountLinked ? `New account ${action}` : `Account ${action}`
}

function linkOrUnlink(accountLinked, providerName, email) {
  const action = accountLinked ? 'linked' : 'no longer linked'
  const actionDescribed = accountLinked ? 'was linked to' : 'was unlinked from'
  const indefiniteArticle = _getIndefiniteArticle(providerName)

  return {
    to: email,
    action: _actionBuilder(providerName, action, accountLinked),
    actionDescribed: `${indefiniteArticle} ${providerName} account ${actionDescribed} your account ${email}`,
  }
}

module.exports = {
  linkOrUnlink,
}
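An output sketch for linkOrUnlink, using a hypothetical provider and address; the shape matches what the securityAlert template consumes (to, action, actionDescribed):

// Editor's sketch, not part of the commit.
const { linkOrUnlink } = require('./EmailOptionsHelper')

linkOrUnlink(true, 'Orcid', 'user@example.com')
// => {
//   to: 'user@example.com',
//   action: 'Orcid account linked',
//   actionDescribed: 'an Orcid account was linked to your account user@example.com',
// }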
128 services/web/app/src/Features/Email/EmailSender.js Normal file
@@ -0,0 +1,128 @@
const { callbackify } = require('util')
const logger = require('@overleaf/logger')
const metrics = require('@overleaf/metrics')
const Settings = require('@overleaf/settings')
const nodemailer = require('nodemailer')
const sesTransport = require('nodemailer-ses-transport')
const OError = require('@overleaf/o-error')
const { RateLimiter } = require('../../infrastructure/RateLimiter')
const _ = require('lodash')

const EMAIL_SETTINGS = Settings.email || {}

module.exports = {
  sendEmail: callbackify(sendEmail),
  promises: {
    sendEmail,
  },
}

const client = getClient()

const rateLimiter = new RateLimiter('send_email', {
  points: 100,
  duration: 3 * 60 * 60,
})

function getClient() {
  let client
  if (EMAIL_SETTINGS.parameters) {
    const emailParameters = EMAIL_SETTINGS.parameters
    if (emailParameters.AWSAccessKeyID || EMAIL_SETTINGS.driver === 'ses') {
      logger.debug('using aws ses for email')
      client = nodemailer.createTransport(sesTransport(emailParameters))
    } else if (emailParameters.sendgridApiKey) {
      throw new OError(
        'sendgridApiKey configuration option is deprecated, use SMTP instead'
      )
    } else if (emailParameters.MandrillApiKey) {
      throw new OError(
        'MandrillApiKey configuration option is deprecated, use SMTP instead'
      )
    } else {
      logger.debug('using smtp for email')
      const smtp = _.pick(
        emailParameters,
        'host',
        'port',
        'secure',
        'auth',
        'ignoreTLS',
        'logger',
        'name'
      )
      client = nodemailer.createTransport(smtp)
    }
  } else {
    logger.warn(
      'Email transport and/or parameters not defined. No emails will be sent.'
    )
    client = {
      async sendMail(options) {
        logger.info({ options }, 'Would send email if enabled.')
      },
    }
  }
  return client
}

async function sendEmail(options, emailType) {
  try {
    const canContinue = await checkCanSendEmail(options)
    metrics.inc('email_status', {
      status: canContinue ? 'sent' : 'rate_limited',
      path: emailType,
    })
    if (!canContinue) {
      logger.debug(
        {
          sendingUserId: options.sendingUser_id,
          to: options.to,
          subject: options.subject,
          canContinue,
        },
        'rate limit hit for sending email, not sending'
      )
      throw new OError('rate limit hit sending email')
    }
    metrics.inc('email')
    const sendMailOptions = {
      to: options.to,
      from: EMAIL_SETTINGS.fromAddress || '',
      subject: options.subject,
      html: options.html,
      text: options.text,
      replyTo: options.replyTo || EMAIL_SETTINGS.replyToAddress,
      socketTimeout: 30 * 1000,
    }
    if (EMAIL_SETTINGS.textEncoding != null) {
      sendMailOptions.textEncoding = EMAIL_SETTINGS.textEncoding
    }
    if (options.category) {
      // category support for sendgrid
      sendMailOptions.headers = {
        'X-SMTPAPI': JSON.stringify({ category: options.category }),
      }
    }
    await client.sendMail(sendMailOptions)
  } catch (err) {
    throw new OError('error sending message').withCause(err)
  }
}

async function checkCanSendEmail(options) {
  if (options.sendingUser_id == null) {
    // email not sent from user, not rate limited
    return true
  }
  try {
    await rateLimiter.consume(options.sendingUser_id, 1, { method: 'userId' })
  } catch (err) {
    if (err instanceof Error) {
      throw err
    } else {
      return false
    }
  }
  return true
}
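The rate limiter above allows 100 points per sendingUser_id over a 3-hour window, so user-triggered sends are capped at 100 per user per rolling 3 hours, while system emails without a sendingUser_id bypass the check entirely. A calling sketch, with hypothetical values:

// Editor's sketch, not part of the commit.
const EmailSender = require('./EmailSender')

async function sendTestEmail(userId) {
  await EmailSender.promises.sendEmail(
    {
      to: 'user@example.com', // hypothetical
      subject: 'Hello',
      html: '<p>Hello</p>',
      text: 'Hello',
      sendingUser_id: userId, // presence of this field opts into rate limiting
    },
    'testEmail' // emailType label recorded in the email_status metric
  )
}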
@@ -0,0 +1,394 @@
const _ = require('lodash')
const settings = require('@overleaf/settings')

module.exports = _.template(`\\
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">

<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en" style="Margin: 0; background: #E4E8EE !important; margin: 0; min-height: 100%; padding: 0;">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta name="viewport" content="width=device-width">
<style>.button td {
  border-radius: 9999px; }

.force-overleaf-style a,
.force-overleaf-style a[href] {
  color: #138A07 !important;
  text-decoration: none !important;
  -moz-hyphens: none;
  -ms-hyphens: none;
  -webkit-hyphens: none;
  hyphens: none; }
.force-overleaf-style a:visited,
.force-overleaf-style a[href]:visited {
  color: #138A07; }
.force-overleaf-style a:hover,
.force-overleaf-style a[href]:hover {
  color: #3d7935; }
.force-overleaf-style a:active,
.force-overleaf-style a[href]:active {
  color: #3d7935; }
</style>
<style>@media only screen {
  html {
    min-height: 100%;
    background: #f6f6f6;
  }
}

@media only screen and (max-width: 596px) {
  .small-float-center {
    margin: 0 auto !important;
    float: none !important;
    text-align: center !important;
  }

  .small-text-center {
    text-align: center !important;
  }

  .small-text-left {
    text-align: left !important;
  }

  .small-text-right {
    text-align: right !important;
  }

  .cta-table {
    table-layout: fixed;
  }
}

@media only screen and (max-width: 596px) {
  .hide-for-large {
    display: block !important;
    width: auto !important;
    overflow: visible !important;
    max-height: none !important;
    font-size: inherit !important;
    line-height: inherit !important;
  }
}

@media only screen and (max-width: 596px) {
  table.body table.container .hide-for-large,
  table.body table.container .row.hide-for-large {
    display: table !important;
    width: 100% !important;
  }
}

@media only screen and (max-width: 596px) {
  table.body table.container .callout-inner.hide-for-large {
    display: table-cell !important;
    width: 100% !important;
  }
}

@media only screen and (max-width: 596px) {
  table.body table.container .show-for-large {
    display: none !important;
    width: 0;
    mso-hide: all;
    overflow: hidden;
  }
}

@media only screen and (max-width: 596px) {
  table.body img {
    width: auto;
    height: auto;
  }

  table.body center {
    min-width: 0 !important;
  }

  table.body .container {
    width: 95% !important;
  }

  table.body .columns,
  table.body .column {
    height: auto !important;
    -moz-box-sizing: border-box;
    -webkit-box-sizing: border-box;
    box-sizing: border-box;
    padding-left: 16px !important;
    padding-right: 16px !important;
  }

  table.body .columns .column,
  table.body .columns .columns,
  table.body .column .column,
  table.body .column .columns {
    padding-left: 0 !important;
    padding-right: 0 !important;
  }

  table.body .collapse .columns,
  table.body .collapse .column {
    padding-left: 0 !important;
    padding-right: 0 !important;
  }

  td.small-1,
  th.small-1 {
    display: inline-block !important;
    width: 8.33333% !important;
  }

  td.small-2,
  th.small-2 {
    display: inline-block !important;
    width: 16.66667% !important;
  }

  td.small-3,
  th.small-3 {
    display: inline-block !important;
    width: 25% !important;
  }

  td.small-4,
  th.small-4 {
    display: inline-block !important;
    width: 33.33333% !important;
  }

  td.small-5,
  th.small-5 {
    display: inline-block !important;
    width: 41.66667% !important;
  }

  td.small-6,
  th.small-6 {
    display: inline-block !important;
    width: 50% !important;
  }

  td.small-7,
  th.small-7 {
    display: inline-block !important;
    width: 58.33333% !important;
  }

  td.small-8,
  th.small-8 {
    display: inline-block !important;
    width: 66.66667% !important;
  }

  td.small-9,
  th.small-9 {
    display: inline-block !important;
    width: 75% !important;
  }

  td.small-10,
  th.small-10 {
    display: inline-block !important;
    width: 83.33333% !important;
  }

  td.small-11,
  th.small-11 {
    display: inline-block !important;
    width: 91.66667% !important;
  }

  td.small-12,
  th.small-12 {
    display: inline-block !important;
    width: 100% !important;
  }

  .columns td.small-12,
  .column td.small-12,
  .columns th.small-12,
  .column th.small-12 {
    display: block !important;
    width: 100% !important;
  }

  table.body td.small-offset-1,
  table.body th.small-offset-1 {
    margin-left: 8.33333% !important;
    Margin-left: 8.33333% !important;
  }

  table.body td.small-offset-2,
  table.body th.small-offset-2 {
    margin-left: 16.66667% !important;
    Margin-left: 16.66667% !important;
  }

  table.body td.small-offset-3,
  table.body th.small-offset-3 {
    margin-left: 25% !important;
    Margin-left: 25% !important;
  }

  table.body td.small-offset-4,
  table.body th.small-offset-4 {
    margin-left: 33.33333% !important;
    Margin-left: 33.33333% !important;
  }

  table.body td.small-offset-5,
  table.body th.small-offset-5 {
    margin-left: 41.66667% !important;
    Margin-left: 41.66667% !important;
  }

  table.body td.small-offset-6,
  table.body th.small-offset-6 {
    margin-left: 50% !important;
    Margin-left: 50% !important;
  }

  table.body td.small-offset-7,
  table.body th.small-offset-7 {
    margin-left: 58.33333% !important;
    Margin-left: 58.33333% !important;
  }

  table.body td.small-offset-8,
  table.body th.small-offset-8 {
    margin-left: 66.66667% !important;
    Margin-left: 66.66667% !important;
  }

  table.body td.small-offset-9,
  table.body th.small-offset-9 {
    margin-left: 75% !important;
    Margin-left: 75% !important;
  }

  table.body td.small-offset-10,
  table.body th.small-offset-10 {
    margin-left: 83.33333% !important;
    Margin-left: 83.33333% !important;
  }

  table.body td.small-offset-11,
  table.body th.small-offset-11 {
    margin-left: 91.66667% !important;
    Margin-left: 91.66667% !important;
  }

  table.body table.columns td.expander,
  table.body table.columns th.expander {
    display: none !important;
  }

  table.body .right-text-pad,
  table.body .text-pad-right {
    padding-left: 10px !important;
  }

  table.body .left-text-pad,
  table.body .text-pad-left {
    padding-right: 10px !important;
  }

  table.menu {
    width: 100% !important;
  }

  table.menu td,
  table.menu th {
    width: auto !important;
    display: inline-block !important;
  }

  table.menu.vertical td,
  table.menu.vertical th,
  table.menu.small-vertical td,
  table.menu.small-vertical th {
    display: block !important;
  }

  table.menu[align="center"] {
    width: auto !important;
  }

  table.button.small-expand,
  table.button.small-expanded {
    width: 100% !important;
  }

  table.button.small-expand table,
  table.button.small-expanded table {
    width: 100%;
  }

  table.button.small-expand table a,
  table.button.small-expanded table a {
    text-align: center !important;
    width: 100% !important;
    padding-left: 0 !important;
    padding-right: 0 !important;
  }

  table.button.small-expand center,
  table.button.small-expanded center {
    min-width: 0;
  }
}</style>
</head>
<body leftmargin="0" topmargin="0" marginwidth="0" marginheight="0" bgcolor="#F6F6F6" style="-moz-box-sizing: border-box; -ms-text-size-adjust: 100%; -webkit-box-sizing: border-box; -webkit-text-size-adjust: 100%; Margin: 0; box-sizing: border-box; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0; min-width: 100%; padding: 0; text-align: left; width: 100% !important;">
<!-- <span class="preheader"></span> -->
<table class="body" border="0" cellspacing="0" cellpadding="0" width="100%" height="100%" style="Margin: 0; background: #E4E8EE; border-collapse: collapse; border-spacing: 0; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; height: 100%; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;">
<tr style="padding: 0; text-align: left; vertical-align: top;">
<td class="body-cell" align="center" valign="top" bgcolor="#F6F6F6" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; background: #E4E8EE !important; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; padding-bottom: 20px; text-align: left; vertical-align: top; word-wrap: break-word;">
<center data-parsed="" style="min-width: 580px; width: 100%;">

<table align="center" class="wrapper header float-center" style="Margin: 0 auto; background: #1E2530; border-bottom: none; border-collapse: collapse; border-spacing: 0; float: none; margin: 0 auto; padding: 0; text-align: center; vertical-align: top; width: 100%;"><tr style="padding: 0; text-align: left; vertical-align: top;"><td class="wrapper-inner" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 20px; text-align: left; vertical-align: top; word-wrap: break-word;">
<table align="center" class="container" style="Margin: 0 auto; background: transparent; border-collapse: collapse; border-spacing: 0; margin: 0 auto; padding: 0; text-align: inherit; vertical-align: top; width: 580px;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">
<table class="row collapse" style="border-collapse: collapse; border-spacing: 0; display: table; padding: 0; position: relative; text-align: left; vertical-align: top; width: 100%;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;">
<th class="small-12 large-12 columns first last" style="Margin: 0 auto; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0 auto; padding: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; text-align: left; width: 588px;"><table style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"><tr style="padding: 0; text-align: left; vertical-align: top;"><th style="Margin: 0; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0; padding: 0; text-align: left;">
<h1 style="Margin: 0; Margin-bottom: px; color: #FFFFFF; font-family: Georgia, serif; font-size: 30px; font-weight: normal; line-height: 1.3; margin: 0; margin-bottom: px; padding: 0; text-align: left; word-wrap: normal;">
${settings.appName}
</h1>
</th>
<th class="expander" style="Margin: 0; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0; padding: 0 !important; text-align: left; visibility: hidden; width: 0;"></th></tr></table></th>
</tr></tbody></table>
</td></tr></tbody></table>
</td></tr></table>
<table class="spacer float-center" style="Margin: 0 auto; border-collapse: collapse; border-spacing: 0; float: none; margin: 0 auto; padding: 0; text-align: center; vertical-align: top; width: 100%;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td height="20px" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 20px; font-weight: normal; hyphens: auto; line-height: 20px; margin: 0; mso-line-height-rule: exactly; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;"> </td></tr></tbody></table>
<table align="center" class="container main float-center" style="Margin: 0 auto; Margin-top: 10px; background: #FFFFFF; border-collapse: collapse; border-spacing: 0; float: none; margin: 0 auto; margin-top: 10px; padding: 0; text-align: center; vertical-align: top; width: 580px;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">
<table class="spacer" style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td height="20px" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 20px; font-weight: normal; hyphens: auto; line-height: 20px; margin: 0; mso-line-height-rule: exactly; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;"> </td></tr></tbody></table>

<%= body %>

<table class="wrapper secondary" align="center" style="background: #E4E8EE; border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"><tr style="padding: 0; text-align: left; vertical-align: top;"><td class="wrapper-inner" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">
<table class="spacer" style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td height="10px" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 10px; font-weight: normal; hyphens: auto; line-height: 10px; margin: 0; mso-line-height-rule: exactly; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;"> </td></tr></tbody></table>
<p style="Margin: 0; Margin-bottom: 10px; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0; margin-bottom: 10px; padding: 0; text-align: left;"><small style="color: #5D6879; font-size: 80%;">
${
  settings.email &&
  settings.email.template &&
  settings.email.template.customFooter
    ? `${settings.email.template.customFooter}<br>`
    : ''
}${settings.appName} • <a href="${
  settings.siteUrl
}" style="Margin: 0; color: #0F7A06; font-family: Helvetica, Arial, sans-serif; font-weight: normal; line-height: 1.3; margin: 0; padding: 0; text-align: left; text-decoration: none;">${
  settings.siteUrl
}</a>
</small></p>
</td></tr></table>
</td></tr></tbody></table>

</center>
</td>
</tr>
</table>
<!-- prevent Gmail on iOS font size manipulation -->
<div style="display:none; white-space:nowrap; font:15px courier; line-height:0;"> </div>
</body>
</html>\\
`)
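This layout is a compiled lodash template whose only interpolation point is the `<%= body %>` marker; the header and footer are baked in at require time from settings. A usage sketch, with the require path assumed since the commit view does not show this file's name:

// Editor's sketch, not part of the commit; the require path is a guess.
const layout = require('./Layouts/BaseWithHeaderEmailLayout')

const html = layout({ body: '<p>Rendered template body goes here</p>' })
// html is the complete responsive email document, with the body spliced
// in between the header table and the footer wrapper.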
56 services/web/app/src/Features/Email/SpamSafe.js Normal file
@@ -0,0 +1,56 @@
const XRegExp = require('xregexp')

// A note about SAFE_REGEX:
// We have to escape the escape characters because XRegExp compiles it first.
// So it's equivalent to `^[\p{L}\p{N}\s\-_!'&\(\)]+$`
// \p{L} = any letter in any language
// \p{N} = any kind of numeric character
// https://www.regular-expressions.info/unicode.html#prop is a good resource for
// more obscure regex features. standard RegExp does not support these

const HAN_REGEX = XRegExp('\\p{Han}')
const SAFE_REGEX = XRegExp("^[\\p{L}\\p{N}\\s\\-_!'&\\(\\)]+$")
const EMAIL_REGEX = XRegExp('^[\\p{L}\\p{N}.+_-]+@[\\w.-]+$')

const SpamSafe = {
  isSafeUserName(name) {
    return SAFE_REGEX.test(name) && name.length <= 30
  },

  isSafeProjectName(name) {
    if (HAN_REGEX.test(name)) {
      return SAFE_REGEX.test(name) && name.length <= 10
    }
    return SAFE_REGEX.test(name) && name.length <= 100
  },

  isSafeEmail(email) {
    return EMAIL_REGEX.test(email) && email.length <= 40
  },

  safeUserName(name, alternative, project) {
    if (project == null) {
      project = false
    }
    if (SpamSafe.isSafeUserName(name)) {
      return name
    }
    return alternative
  },

  safeProjectName(name, alternative) {
    if (SpamSafe.isSafeProjectName(name)) {
      return name
    }
    return alternative
  },

  safeEmail(email, alternative) {
    if (SpamSafe.isSafeEmail(email)) {
      return email
    }
    return alternative
  },
}

module.exports = SpamSafe
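SpamSafe gates user-supplied strings out of email subjects and bodies; note the tighter 10-character cap when a project name contains Han characters, since CJK names pack more content per character. A behaviour sketch:

// Editor's sketch, not part of the commit.
const SpamSafe = require('./SpamSafe')

SpamSafe.isSafeProjectName('My thesis (v2)') // => true, within the 100-char cap
SpamSafe.isSafeProjectName('a'.repeat(101)) // => false, too long
SpamSafe.safeProjectName('CHEAP PILLS!! http://spam', 'project')
// => 'project' (the alternative), because ':' and '/' fail SAFE_REGEX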
133 services/web/app/src/Features/Errors/ErrorController.js Normal file
@@ -0,0 +1,133 @@
const Errors = require('./Errors')
const SessionManager = require('../Authentication/SessionManager')
const SamlLogHandler = require('../SamlLog/SamlLogHandler')
const HttpErrorHandler = require('./HttpErrorHandler')
const { plainTextResponse } = require('../../infrastructure/Response')
const { expressifyErrorHandler } = require('@overleaf/promise-utils')

function notFound(req, res) {
  res.status(404)
  res.render('general/404', { title: 'page_not_found' })
}

function forbidden(req, res) {
  res.status(403)
  res.render('user/restricted')
}

function serverError(req, res) {
  res.status(500)
  res.render('general/500', { title: 'Server Error' })
}

async function handleError(error, req, res, next) {
  const shouldSendErrorResponse = !res.headersSent
  const user = SessionManager.getSessionUser(req.session)
  req.logger.addFields({ err: error })
  // log errors related to SAML flow
  if (req.session && req.session.saml) {
    req.logger.setLevel('error')
    await SamlLogHandler.promises.log(req, { error })
  }
  if (error.code === 'EBADCSRFTOKEN') {
    req.logger.addFields({ user })
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      res.sendStatus(403)
    }
  } else if (error instanceof Errors.NotFoundError) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      notFound(req, res)
    }
  } else if (
    error instanceof URIError &&
    error.message.match(/^Failed to decode param/)
  ) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      res.status(400)
      res.render('general/500', { title: 'Invalid Error' })
    }
  } else if (error instanceof Errors.ForbiddenError) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      forbidden(req, res)
    }
  } else if (error instanceof Errors.TooManyRequestsError) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      res.sendStatus(429)
    }
  } else if (error instanceof Errors.InvalidError) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      res.status(400)
      plainTextResponse(res, error.message)
    }
  } else if (error instanceof Errors.DuplicateNameError) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      res.status(400)
      plainTextResponse(res, error.message)
    }
  } else if (error instanceof Errors.InvalidNameError) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      res.status(400)
      plainTextResponse(res, error.message)
    }
  } else if (error instanceof Errors.NonDeletableEntityError) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      res.status(422)
      plainTextResponse(res, error.message)
    }
  } else if (error instanceof Errors.SAMLSessionDataMissing) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      HttpErrorHandler.badRequest(req, res, error.message)
    }
  } else {
    req.logger.setLevel('error')
    if (shouldSendErrorResponse) {
      serverError(req, res)
    }
  }
  if (!shouldSendErrorResponse) {
    // Pass the error to the default Express error handler, which will close
    // the connection.
    next(error)
  }
}

function handleApiError(err, req, res, next) {
  req.logger.addFields({ err })
  if (err instanceof Errors.NotFoundError) {
    req.logger.setLevel('warn')
    res.sendStatus(404)
  } else if (
    err instanceof URIError &&
    err.message.match(/^Failed to decode param/)
  ) {
    req.logger.setLevel('warn')
    res.sendStatus(400)
  } else if (err instanceof Errors.TooManyRequestsError) {
    req.logger.setLevel('warn')
    res.sendStatus(429)
  } else if (err instanceof Errors.ForbiddenError) {
    req.logger.setLevel('warn')
    res.sendStatus(403)
  } else {
    req.logger.setLevel('error')
    res.sendStatus(500)
  }
}

module.exports = {
  notFound,
  forbidden,
  serverError,
  handleError: expressifyErrorHandler(handleError),
  handleApiError,
}
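handleError follows the Express error-middleware signature (four arguments) and is wrapped with expressifyErrorHandler so the async function can reject safely. A wiring sketch, assuming a standard Express app; how the real router mounts these is not shown in this commit:

// Editor's sketch, not part of the commit.
const express = require('express')
const ErrorController = require('./ErrorController')

const app = express()
// ...routes registered here...
app.use(ErrorController.handleApiError) // bare status codes, for API routers
app.use(ErrorController.handleError) // catch-all: renders the 404/403/500 pages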
359 services/web/app/src/Features/Errors/Errors.js Normal file
@@ -0,0 +1,359 @@
const OError = require('@overleaf/o-error')
const settings = require('@overleaf/settings')

// Error class for legacy errors so they inherit OError while staying
// backward-compatible (can be instantiated with string as argument instead
// of object)
class BackwardCompatibleError extends OError {
  /**
   * @param {string | { message: string, info?: Object }} messageOrOptions
   */
  constructor(messageOrOptions) {
    if (typeof messageOrOptions === 'string') {
      super(messageOrOptions)
    } else if (messageOrOptions) {
      const { message, info } = messageOrOptions
      super(message, info)
    } else {
      super()
    }
  }
}

// Error class that facilitates the migration to OError v3 by providing
// a signature in which the 2nd argument can be an object containing
// the `info` object.
class OErrorV2CompatibleError extends OError {
  constructor(message, options) {
    if (options) {
      super(message, options.info)
    } else {
      super(message)
    }
  }
}

class NotFoundError extends BackwardCompatibleError {}

class ForbiddenError extends BackwardCompatibleError {}

class ServiceNotConfiguredError extends BackwardCompatibleError {}

class TooManyRequestsError extends BackwardCompatibleError {}

class DuplicateNameError extends OError {}

class InvalidNameError extends BackwardCompatibleError {}

class UnsupportedFileTypeError extends BackwardCompatibleError {}

class FileTooLargeError extends BackwardCompatibleError {}

class UnsupportedExportRecordsError extends BackwardCompatibleError {}

class V1HistoryNotSyncedError extends BackwardCompatibleError {}

class ProjectHistoryDisabledError extends BackwardCompatibleError {}

class V1ConnectionError extends BackwardCompatibleError {}

class UnconfirmedEmailError extends BackwardCompatibleError {}

class EmailExistsError extends OErrorV2CompatibleError {
  constructor(options) {
    super('Email already exists', options)
  }
}

class InvalidError extends BackwardCompatibleError {}

class NotInV2Error extends BackwardCompatibleError {}

class SLInV2Error extends BackwardCompatibleError {}

class SAMLCommonsUnavailable extends OError {
  get i18nKey() {
    return 'saml_commons_unavailable'
  }
}

class SAMLIdentityExistsError extends OError {
  get i18nKey() {
    return 'institution_account_tried_to_add_already_registered'
  }
}

class SAMLAlreadyLinkedError extends OError {
  get i18nKey() {
    return 'institution_account_tried_to_add_already_linked'
  }
}

class SAMLEmailNotAffiliatedError extends OError {
  get i18nKey() {
    return 'institution_account_tried_to_add_not_affiliated'
  }
}

class SAMLEmailAffiliatedWithAnotherInstitutionError extends OError {
  get i18nKey() {
    return 'institution_account_tried_to_add_affiliated_with_another_institution'
  }
}

class SAMLAuthenticationError extends OError {
  get i18nKey() {
    return 'saml_auth_error'
  }
}
class SAMLAssertionAudienceMismatch extends SAMLAuthenticationError {}

class SAMLAuthenticationRequiredError extends SAMLAuthenticationError {
  get i18nKey() {
    return 'saml_authentication_required_error'
  }
}

class SAMLGroupSSOLoginIdentityMismatchError extends SAMLAuthenticationError {
  get i18nKey() {
|
||||
return 'saml_login_identity_mismatch_error'
|
||||
}
|
||||
}
|
||||
|
||||
class SAMLGroupSSOLoginIdentityNotFoundError extends SAMLAuthenticationError {
|
||||
get i18nKey() {
|
||||
return 'saml_login_identity_not_found_error'
|
||||
}
|
||||
}
|
||||
|
||||
class SAMLGroupSSODisabledError extends SAMLAuthenticationError {
|
||||
get i18nKey() {
|
||||
return 'saml_login_disabled_error'
|
||||
}
|
||||
}
|
||||
|
||||
class SAMLInvalidSignatureError extends SAMLAuthenticationError {
|
||||
get i18nKey() {
|
||||
return 'saml_invalid_signature_error'
|
||||
}
|
||||
}
|
||||
|
||||
class SAMLMissingSignatureError extends SAMLAuthenticationError {
|
||||
get i18nKey() {
|
||||
return 'saml_missing_signature_error'
|
||||
}
|
||||
}
|
||||
|
||||
class SAMLInvalidUserIdentifierError extends SAMLAuthenticationError {
|
||||
get i18nKey() {
|
||||
return 'saml_authentication_required_error'
|
||||
}
|
||||
}
|
||||
|
||||
class SAMLInvalidUserAttributeError extends SAMLAuthenticationError {
|
||||
get i18nKey() {
|
||||
return 'saml_authentication_required_error'
|
||||
}
|
||||
}
|
||||
|
||||
class SAMLMissingUserIdentifierError extends SAMLAuthenticationError {
|
||||
get i18nKey() {
|
||||
return 'saml_missing_user_attribute'
|
||||
}
|
||||
}
|
||||
|
||||
class SAMLInvalidResponseError extends SAMLAuthenticationError {}
|
||||
|
||||
class SAMLResponseAlreadyProcessedError extends SAMLInvalidResponseError {
|
||||
constructor() {
|
||||
super('saml response already processed')
|
||||
}
|
||||
}
|
||||
|
||||
class SAMLLoginFailureError extends SAMLAuthenticationError {
|
||||
get i18nKey() {
|
||||
return 'saml_login_failure'
|
||||
}
|
||||
}
|
||||
|
||||
class SAMLEmailNotRecognizedError extends SAMLAuthenticationError {
|
||||
get i18nKey() {
|
||||
return 'saml_email_not_recognized'
|
||||
}
|
||||
}
|
||||
|
||||
class SAMLSessionDataMissing extends BackwardCompatibleError {
|
||||
constructor(arg) {
|
||||
super(arg)
|
||||
|
||||
const samlSession =
|
||||
typeof arg === 'object' && arg !== null && arg.samlSession
|
||||
? arg.samlSession
|
||||
: {}
|
||||
this.tryAgain = true
|
||||
const { universityId, universityName, externalUserId, institutionEmail } =
|
||||
samlSession
|
||||
|
||||
if (
|
||||
!universityId &&
|
||||
!universityName &&
|
||||
!externalUserId &&
|
||||
!institutionEmail
|
||||
) {
|
||||
this.message = 'Missing session data.'
|
||||
} else if (
|
||||
!institutionEmail &&
|
||||
samlSession &&
|
||||
samlSession.userEmailAttributeUnreliable
|
||||
) {
|
||||
this.tryAgain = false
|
||||
this.message = `Your account settings at your institution prevent us from accessing your email address. You will need to make your email address public at your institution in order to link with ${settings.appName}. Please contact your IT department if you have any questions.`
|
||||
} else if (!institutionEmail) {
|
||||
this.message =
|
||||
'Unable to confirm your institutional email address. The institutional identity provider did not provide an email address in the expected attribute. Please contact us if this keeps happening.'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class SAMLProviderRequesterError extends SAMLAuthenticationError {}
|
||||
|
||||
class ThirdPartyIdentityExistsError extends BackwardCompatibleError {
|
||||
constructor(arg) {
|
||||
super(arg)
|
||||
if (!this.message) {
|
||||
this.message =
|
||||
'provider and external id already linked to another account'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class ThirdPartyUserNotFoundError extends BackwardCompatibleError {
|
||||
constructor(arg) {
|
||||
super(arg)
|
||||
if (!this.message) {
|
||||
this.message = 'user not found for provider and external id'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class OutputFileFetchFailedError extends OError {}
|
||||
|
||||
class SubscriptionAdminDeletionError extends OErrorV2CompatibleError {
|
||||
constructor(options) {
|
||||
super('subscription admins cannot be deleted', options)
|
||||
}
|
||||
}
|
||||
|
||||
class SubscriptionNotFoundError extends OErrorV2CompatibleError {
|
||||
constructor(options) {
|
||||
super('subscription not found', options)
|
||||
}
|
||||
}
|
||||
|
||||
class ProjectNotFoundError extends OErrorV2CompatibleError {
|
||||
constructor(options) {
|
||||
super('project not found', options)
|
||||
}
|
||||
}
|
||||
|
||||
class UserNotFoundError extends OErrorV2CompatibleError {
|
||||
constructor(options) {
|
||||
super('user not found', options)
|
||||
}
|
||||
}
|
||||
|
||||
class UserNotCollaboratorError extends OErrorV2CompatibleError {
|
||||
constructor(options) {
|
||||
super('user not a collaborator', options)
|
||||
}
|
||||
}
|
||||
|
||||
class DocHasRangesError extends OErrorV2CompatibleError {
|
||||
constructor(options) {
|
||||
super('document has ranges', options)
|
||||
}
|
||||
}
|
||||
|
||||
class InvalidQueryError extends OErrorV2CompatibleError {
|
||||
constructor(options) {
|
||||
super('invalid search query', options)
|
||||
}
|
||||
}
|
||||
|
||||
class AffiliationError extends OError {}
|
||||
|
||||
class InvalidEmailError extends OError {
|
||||
get i18nKey() {
|
||||
return 'invalid_email'
|
||||
}
|
||||
}
|
||||
|
||||
class InvalidInstitutionalEmailError extends OError {
|
||||
get i18nKey() {
|
||||
return 'invalid_institutional_email'
|
||||
}
|
||||
}
|
||||
|
||||
class NonDeletableEntityError extends OError {
|
||||
get i18nKey() {
|
||||
return 'non_deletable_entity'
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
OError,
|
||||
BackwardCompatibleError,
|
||||
NotFoundError,
|
||||
ForbiddenError,
|
||||
ServiceNotConfiguredError,
|
||||
TooManyRequestsError,
|
||||
DuplicateNameError,
|
||||
InvalidNameError,
|
||||
UnsupportedFileTypeError,
|
||||
FileTooLargeError,
|
||||
UnsupportedExportRecordsError,
|
||||
V1HistoryNotSyncedError,
|
||||
ProjectHistoryDisabledError,
|
||||
V1ConnectionError,
|
||||
UnconfirmedEmailError,
|
||||
EmailExistsError,
|
||||
InvalidError,
|
||||
NotInV2Error,
|
||||
OutputFileFetchFailedError,
|
||||
SAMLAssertionAudienceMismatch,
|
||||
SAMLAuthenticationRequiredError,
|
||||
SAMLCommonsUnavailable,
|
||||
SAMLIdentityExistsError,
|
||||
SAMLAlreadyLinkedError,
|
||||
SAMLEmailNotAffiliatedError,
|
||||
SAMLEmailAffiliatedWithAnotherInstitutionError,
|
||||
SAMLSessionDataMissing,
|
||||
SAMLAuthenticationError,
|
||||
SAMLGroupSSOLoginIdentityMismatchError,
|
||||
SAMLGroupSSOLoginIdentityNotFoundError,
|
||||
SAMLGroupSSODisabledError,
|
||||
SAMLInvalidUserAttributeError,
|
||||
SAMLInvalidUserIdentifierError,
|
||||
SAMLInvalidSignatureError,
|
||||
SAMLMissingUserIdentifierError,
|
||||
SAMLMissingSignatureError,
|
||||
SAMLProviderRequesterError,
|
||||
SAMLInvalidResponseError,
|
||||
SAMLLoginFailureError,
|
||||
SAMLEmailNotRecognizedError,
|
||||
SAMLResponseAlreadyProcessedError,
|
||||
SLInV2Error,
|
||||
ThirdPartyIdentityExistsError,
|
||||
ThirdPartyUserNotFoundError,
|
||||
SubscriptionAdminDeletionError,
|
||||
SubscriptionNotFoundError,
|
||||
ProjectNotFoundError,
|
||||
UserNotFoundError,
|
||||
UserNotCollaboratorError,
|
||||
DocHasRangesError,
|
||||
InvalidQueryError,
|
||||
AffiliationError,
|
||||
InvalidEmailError,
|
||||
InvalidInstitutionalEmailError,
|
||||
NonDeletableEntityError,
|
||||
}
|
||||
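A short usage sketch (the subscription id is made up, and the require path is hypothetical): errors built on OErrorV2CompatibleError accept an options object whose `info` field travels with the error, and it can be read back with OError.getFullInfo, assuming the @overleaf/o-error helper of that name:

const OError = require('@overleaf/o-error')
const Errors = require('./app/src/Features/Errors/Errors') // hypothetical path

function requireSubscription(subscription, subscriptionId) {
  if (!subscription) {
    // `info` is attached to the error and surfaces in log lines
    throw new Errors.SubscriptionNotFoundError({ info: { subscriptionId } })
  }
}

try {
  requireSubscription(null, 'sub-123')
} catch (err) {
  if (err instanceof Errors.SubscriptionNotFoundError) {
    console.log(err.message) // 'subscription not found'
    console.log(OError.getFullInfo(err)) // { subscriptionId: 'sub-123' }
  }
}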
160
services/web/app/src/Features/Errors/HttpErrorHandler.js
Normal file
@@ -0,0 +1,160 @@
const logger = require('@overleaf/logger')
const Settings = require('@overleaf/settings')
const { plainTextResponse } = require('../../infrastructure/Response')

function renderJSONError(res, message, info = {}) {
  if (info.message) {
    logger.warn(
      info,
      `http error info shouldn't contain a 'message' field, will be overridden`
    )
  }
  if (message != null) {
    res.json({ ...info, message })
  } else {
    res.json(info)
  }
}

function handleGeneric500Error(req, res, statusCode, message) {
  res.status(statusCode)
  switch (req.accepts(['html', 'json'])) {
    case 'html':
      return res.render('general/500', { title: 'Server Error' })
    case 'json':
      return renderJSONError(res, message)
    default:
      return plainTextResponse(res, 'internal server error')
  }
}

function handleGeneric400Error(req, res, statusCode, message, info = {}) {
  res.status(statusCode)
  switch (req.accepts(['html', 'json'])) {
    case 'html':
      return res.render('general/400', {
        title: 'Client Error',
        message,
      })
    case 'json':
      return renderJSONError(res, message, info)
    default:
      return plainTextResponse(res, 'client error')
  }
}

let HttpErrorHandler
module.exports = HttpErrorHandler = {
  handleErrorByStatusCode(req, res, err, statusCode) {
    const is400Error = statusCode >= 400 && statusCode < 500
    const is500Error = statusCode >= 500 && statusCode < 600

    req.logger.addFields({ err })
    if (is400Error) {
      req.logger.setLevel('warn')
    } else if (is500Error) {
      req.logger.setLevel('error')
    }

    if (statusCode === 403) {
      HttpErrorHandler.forbidden(req, res)
    } else if (statusCode === 404) {
      HttpErrorHandler.notFound(req, res)
    } else if (statusCode === 409) {
      HttpErrorHandler.conflict(req, res, '')
    } else if (statusCode === 422) {
      HttpErrorHandler.unprocessableEntity(req, res)
    } else if (is400Error) {
      handleGeneric400Error(req, res, statusCode)
    } else if (is500Error) {
      handleGeneric500Error(req, res, statusCode)
    } else {
      res.sendStatus(500)
    }
  },

  badRequest(req, res, message, info = {}) {
    handleGeneric400Error(req, res, 400, message, info)
  },

  conflict(req, res, message, info = {}) {
    res.status(409)
    switch (req.accepts(['html', 'json'])) {
      case 'html':
        return res.render('general/400', {
          title: 'Client Error',
          message,
        })
      case 'json':
        return renderJSONError(res, message, info)
      default:
        return plainTextResponse(res, 'conflict')
    }
  },

  forbidden(req, res, message = 'restricted', info = {}) {
    res.status(403)
    switch (req.accepts(['html', 'json'])) {
      case 'html':
        return res.render('user/restricted', { title: 'restricted' })
      case 'json':
        return renderJSONError(res, message, info)
      default:
        return plainTextResponse(res, 'restricted')
    }
  },

  notFound(req, res, message = 'not found', info = {}) {
    res.status(404)
    switch (req.accepts(['html', 'json'])) {
      case 'html':
        return res.render('general/404', { title: 'page_not_found' })
      case 'json':
        return renderJSONError(res, message, info)
      default:
        return plainTextResponse(res, 'not found')
    }
  },

  unprocessableEntity(req, res, message = 'unprocessable entity', info = {}) {
    res.status(422)
    switch (req.accepts(['html', 'json'])) {
      case 'html':
        return res.render('general/400', {
          title: 'Client Error',
          message,
        })
      case 'json':
        return renderJSONError(res, message, info)
      default:
        return plainTextResponse(res, 'unprocessable entity')
    }
  },

  legacyInternal(req, res, message, err) {
    req.logger.addFields({ err })
    req.logger.setLevel('error')
    handleGeneric500Error(req, res, 500, message)
  },

  maintenance(req, res) {
    // load balancer health checks require a success response for /
    if (req.url === '/') {
      res.status(200)
    } else {
      res.status(503)
    }
    let message = `${Settings.appName} is currently down for maintenance.`
    if (Settings.statusPageUrl) {
      message += ` Please check https://${Settings.statusPageUrl} for updates.`
    }
    switch (req.accepts(['html', 'json'])) {
      case 'html':
        return res.render('general/closed', { title: 'maintenance' })
      case 'json':
        return renderJSONError(res, message, {})
      default:
        return plainTextResponse(res, message)
    }
  },
}
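A hypothetical controller showing the intended call pattern: one badRequest() call serves browsers (the rendered 400 page), API clients (`{ message, ...info }` JSON), and everything else (plain text), switched on req.accepts(['html', 'json']). The route and field names here are invented:

const HttpErrorHandler = require('./app/src/Features/Errors/HttpErrorHandler') // hypothetical path

function setNickname(req, res) {
  const { nickname } = req.body
  if (typeof nickname !== 'string' || nickname.length > 64) {
    return HttpErrorHandler.badRequest(req, res, 'invalid nickname', {
      maxLength: 64, // extra info fields are merged into the JSON body
    })
  }
  res.sendStatus(204)
}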
128
services/web/app/src/Features/Exports/ExportsController.mjs
Normal file
@@ -0,0 +1,128 @@
/* eslint-disable
    max-len,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
import ExportsHandler from './ExportsHandler.mjs'

import SessionManager from '../Authentication/SessionManager.js'
import logger from '@overleaf/logger'

export default {
  exportProject(req, res, next) {
    const { project_id: projectId, brand_variation_id: brandVariationId } =
      req.params
    const userId = SessionManager.getLoggedInUserId(req.session)
    const exportParams = {
      project_id: projectId,
      brand_variation_id: brandVariationId,
      user_id: userId,
    }

    if (req.body) {
      if (req.body.firstName) {
        exportParams.first_name = req.body.firstName.trim()
      }
      if (req.body.lastName) {
        exportParams.last_name = req.body.lastName.trim()
      }
      // additional parameters for gallery exports
      if (req.body.title) {
        exportParams.title = req.body.title.trim()
      }
      if (req.body.description) {
        exportParams.description = req.body.description.trim()
      }
      if (req.body.author) {
        exportParams.author = req.body.author.trim()
      }
      if (req.body.license) {
        exportParams.license = req.body.license.trim()
      }
      if (req.body.showSource != null) {
        exportParams.show_source = req.body.showSource
      }
    }

    return ExportsHandler.exportProject(
      exportParams,
      function (err, exportData) {
        if (err != null) {
          if (err.forwardResponse != null) {
            logger.debug(
              { responseError: err.forwardResponse },
              'forwarding response'
            )
            const statusCode = err.forwardResponse.status || 500
            return res.status(statusCode).json(err.forwardResponse)
          } else {
            return next(err)
          }
        }
        logger.debug(
          {
            userId,
            projectId,
            brandVariationId,
            exportV1Id: exportData.v1_id,
          },
          'exported project'
        )
        return res.json({
          export_v1_id: exportData.v1_id,
          message: exportData.message,
        })
      }
    )
  },

  exportStatus(req, res) {
    const { export_id: exportId } = req.params
    return ExportsHandler.fetchExport(exportId, function (err, exportJson) {
      let json
      if (err != null) {
        json = {
          status_summary: 'failed',
          status_detail: err.toString(),
        }
        res.json({ export_json: json })
        return err
      }
      const parsedExport = JSON.parse(exportJson)
      json = {
        status_summary: parsedExport.status_summary,
        status_detail: parsedExport.status_detail,
        partner_submission_id: parsedExport.partner_submission_id,
        v2_user_email: parsedExport.v2_user_email,
        v2_user_first_name: parsedExport.v2_user_first_name,
        v2_user_last_name: parsedExport.v2_user_last_name,
        title: parsedExport.title,
        token: parsedExport.token,
      }
      return res.json({ export_json: json })
    })
  },

  exportDownload(req, res, next) {
    const { type, export_id: exportId } = req.params

    SessionManager.getLoggedInUserId(req.session)
    return ExportsHandler.fetchDownload(
      exportId,
      type,
      function (err, exportFileUrl) {
        if (err != null) {
          return next(err)
        }

        return res.redirect(exportFileUrl)
      }
    )
  },
}
282
services/web/app/src/Features/Exports/ExportsHandler.mjs
Normal file
@@ -0,0 +1,282 @@
/* eslint-disable
    n/handle-callback-err,
    max-len,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
import OError from '@overleaf/o-error'
import ProjectGetter from '../Project/ProjectGetter.js'
import ProjectHistoryHandler from '../Project/ProjectHistoryHandler.js'
import ProjectLocator from '../Project/ProjectLocator.js'
import ProjectRootDocManager from '../Project/ProjectRootDocManager.js'
import UserGetter from '../User/UserGetter.js'
import logger from '@overleaf/logger'
import settings from '@overleaf/settings'
import async from 'async'
import Request from 'request'
let ExportsHandler
const request = Request.defaults()

export default ExportsHandler = {
  exportProject(exportParams, callback) {
    if (callback == null) {
      callback = function () {}
    }
    return ExportsHandler._buildExport(
      exportParams,
      function (err, exportData) {
        if (err != null) {
          return callback(err)
        }
        return ExportsHandler._requestExport(exportData, function (err, body) {
          if (err != null) {
            return callback(err)
          }
          exportData.v1_id = body.exportId
          exportData.message = body.message
          // TODO: possibly store the export data in Mongo
          return callback(null, exportData)
        })
      }
    )
  },

  _buildExport(exportParams, callback) {
    if (callback == null) {
      callback = function () {}
    }
    const {
      project_id: projectId,
      user_id: userId,
      brand_variation_id: brandVariationId,
      title,
      description,
      author,
      license,
      show_source: showSource,
    } = exportParams
    const jobs = {
      project(cb) {
        return ProjectGetter.getProject(projectId, cb)
      },
      rootDoc: [
        'project',
        (results, cb) =>
          ProjectRootDocManager.ensureRootDocumentIsValid(
            projectId,
            function (error) {
              if (error != null) {
                return callback(error)
              }
              return ProjectLocator.findRootDoc(
                { project: results.project, project_id: projectId },
                cb
              )
            }
          ),
      ],
      user(cb) {
        return UserGetter.getUser(
          userId,
          { first_name: 1, last_name: 1, email: 1, overleaf: 1 },
          cb
        )
      },
      historyVersion(cb) {
        return ProjectHistoryHandler.ensureHistoryExistsForProject(
          projectId,
          function (error) {
            if (error != null) {
              return callback(error)
            }
            return ExportsHandler._requestVersion(projectId, cb)
          }
        )
      },
    }

    return async.auto(jobs, function (err, results) {
      if (err != null) {
        OError.tag(err, 'error building project export', {
          project_id: projectId,
          user_id: userId,
          brand_variation_id: brandVariationId,
        })
        return callback(err)
      }

      const { project, rootDoc, user, historyVersion } = results
      if (!rootDoc || rootDoc[1] == null) {
        err = new OError('cannot export project without root doc', {
          project_id: projectId,
        })
        return callback(err)
      }

      if (exportParams.first_name && exportParams.last_name) {
        user.first_name = exportParams.first_name
        user.last_name = exportParams.last_name
      }

      const exportData = {
        project: {
          id: projectId,
          rootDocPath: rootDoc[1] != null ? rootDoc[1].fileSystem : undefined,
          historyId: project.overleaf?.history?.id,
          historyVersion,
          v1ProjectId:
            project.overleaf != null ? project.overleaf.id : undefined,
          metadata: {
            compiler: project.compiler,
            imageName: project.imageName,
            title,
            description,
            author,
            license,
            showSource,
          },
        },
        user: {
          id: userId,
          firstName: user.first_name,
          lastName: user.last_name,
          email: user.email,
          orcidId: null, // until v2 gets ORCID
          v1UserId: user.overleaf != null ? user.overleaf.id : undefined,
        },
        destination: {
          brandVariationId,
        },
        options: {
          callbackUrl: null,
        }, // for now, until we want v1 to call us back
      }
      return callback(null, exportData)
    })
  },

  _requestExport(exportData, callback) {
    if (callback == null) {
      callback = function () {}
    }
    return request.post(
      {
        url: `${settings.apis.v1.url}/api/v1/overleaf/exports`,
        auth: { user: settings.apis.v1.user, pass: settings.apis.v1.pass },
        json: exportData,
        timeout: settings.apis.v1.timeout,
      },
      function (err, res, body) {
        if (err != null) {
          OError.tag(err, 'error making request to v1 export', {
            export: exportData,
          })
          return callback(err)
        } else if (res.statusCode >= 200 && res.statusCode < 300) {
          return callback(null, body)
        } else {
          logger.warn(
            { export: exportData },
            `v1 export returned failure; forwarding: ${body}`
          )
          // pass the v1 error along for the publish modal to handle
          const err = { forwardResponse: body }
          return callback(err)
        }
      }
    )
  },

  _requestVersion(projectId, callback) {
    if (callback == null) {
      callback = function () {}
    }
    return request.get(
      {
        url: `${settings.apis.project_history.url}/project/${projectId}/version`,
        json: true,
      },
      function (err, res, body) {
        if (err != null) {
          OError.tag(err, 'error making request to project history', {
            project_id: projectId,
          })
          return callback(err)
        } else if (res.statusCode >= 200 && res.statusCode < 300) {
          return callback(null, body.version)
        } else {
          err = new OError(
            `project history version returned a failure status code: ${res.statusCode}`,
            { project_id: projectId }
          )
          return callback(err)
        }
      }
    )
  },

  fetchExport(exportId, callback) {
    if (callback == null) {
      callback = function () {}
    }
    return request.get(
      {
        url: `${settings.apis.v1.url}/api/v1/overleaf/exports/${exportId}`,
        auth: { user: settings.apis.v1.user, pass: settings.apis.v1.pass },
        timeout: settings.apis.v1.timeout,
      },
      function (err, res, body) {
        if (err != null) {
          OError.tag(err, 'error making request to v1 export', {
            export: exportId,
          })
          return callback(err)
        } else if (res.statusCode >= 200 && res.statusCode < 300) {
          return callback(null, body)
        } else {
          err = new OError(
            `v1 export returned a failure status code: ${res.statusCode}`,
            { export: exportId }
          )
          return callback(err)
        }
      }
    )
  },

  fetchDownload(exportId, type, callback) {
    if (callback == null) {
      callback = function () {}
    }
    return request.get(
      {
        url: `${settings.apis.v1.url}/api/v1/overleaf/exports/${exportId}/${type}_url`,
        auth: { user: settings.apis.v1.user, pass: settings.apis.v1.pass },
        timeout: settings.apis.v1.timeout,
      },
      function (err, res, body) {
        if (err != null) {
          OError.tag(err, 'error making request to v1 export', {
            export: exportId,
          })
          return callback(err)
        } else if (res.statusCode >= 200 && res.statusCode < 300) {
          return callback(null, body)
        } else {
          err = new OError(
            `v1 export returned a failure status code: ${res.statusCode}`,
            { export: exportId }
          )
          return callback(err)
        }
      }
    )
  },
}
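_buildExport above leans on async.auto: jobs run in parallel unless they declare dependencies, and dependent jobs receive the results object. A self-contained sketch of those semantics (the job names and values here are invented):

const async = require('async')

async.auto(
  {
    project: cb => cb(null, { name: 'demo' }),
    user: cb => cb(null, { id: 42 }), // runs in parallel with 'project'
    // 'rootDoc' waits for 'project' and can read its result
    rootDoc: [
      'project',
      (results, cb) => cb(null, `root doc of ${results.project.name}`),
    ],
  },
  (err, results) => {
    if (err) return console.error(err)
    console.log(results.rootDoc) // 'root doc of demo'
  }
)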
61
services/web/app/src/Features/FileStore/FileHashManager.js
Normal file
@@ -0,0 +1,61 @@
/* eslint-disable
    n/handle-callback-err,
    max-len,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let FileHashManager
const crypto = require('crypto')
const logger = require('@overleaf/logger')
const fs = require('fs')
const _ = require('lodash')

module.exports = FileHashManager = {
  computeHash(filePath, callback) {
    if (callback == null) {
      callback = function () {}
    }
    callback = _.once(callback) // avoid double callbacks

    // taken from v1/history/storage/lib/blob_hash.js
    const getGitBlobHeader = byteLength => `blob ${byteLength}` + '\x00'

    const getByteLengthOfFile = cb =>
      fs.stat(filePath, function (err, stats) {
        if (err != null) {
          return cb(err)
        }
        return cb(null, stats.size)
      })

    return getByteLengthOfFile(function (err, byteLength) {
      if (err != null) {
        return callback(err)
      }

      const input = fs.createReadStream(filePath)
      input.on('error', function (err) {
        logger.warn({ filePath, err }, 'error opening file in computeHash')
        return callback(err)
      })

      const hash = crypto.createHash('sha1')
      hash.setEncoding('hex')
      hash.update(getGitBlobHeader(byteLength))
      hash.on('readable', function () {
        const result = hash.read()
        if (result != null) {
          return callback(null, result.toString('hex'))
        }
      })
      return input.pipe(hash)
    })
  },
}
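computeHash reproduces git's blob hashing: SHA-1 over the header `blob <byteLength>\0` followed by the file contents. A string-based sketch that can be checked against `git hash-object`:

const crypto = require('crypto')

function gitBlobSha1(content) {
  const buf = Buffer.from(content)
  return crypto
    .createHash('sha1')
    .update(`blob ${buf.length}\x00`) // same header as getGitBlobHeader above
    .update(buf)
    .digest('hex')
}

console.log(gitBlobSha1('hello\n'))
// ce013625030ba8dba906f756967f9e9ca394464a
// matches: echo hello | git hash-object --stdin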
207
services/web/app/src/Features/FileStore/FileStoreController.mjs
Normal file
@@ -0,0 +1,207 @@
// @ts-check

import { pipeline } from 'node:stream/promises'
import logger from '@overleaf/logger'
import { expressify } from '@overleaf/promise-utils'
import Metrics from '@overleaf/metrics'
import FileStoreHandler from './FileStoreHandler.js'
import ProjectLocator from '../Project/ProjectLocator.js'
import HistoryManager from '../History/HistoryManager.js'
import Errors from '../Errors/Errors.js'
import Features from '../../infrastructure/Features.js'
import { preparePlainTextResponse } from '../../infrastructure/Response.js'

async function getFile(req, res) {
  const projectId = req.params.Project_id
  const fileId = req.params.File_id
  const queryString = req.query
  const userAgent = req.get('User-Agent')
  req.logger.addFields({ projectId, fileId, queryString })

  let file
  try {
    ;({ element: file } = await ProjectLocator.promises.findElement({
      project_id: projectId,
      element_id: fileId,
      type: 'file',
    }))
  } catch (err) {
    if (err instanceof Errors.NotFoundError) {
      logger.warn(
        { err, projectId, fileId, queryString },
        'entity not found when downloading file'
      )
      // res.sendStatus() sends a description of the status as body.
      // Using res.status().end() avoids sending that fake body.
      return res.status(404).end()
    } else {
      // Instead of using the global error handler, we send an empty response in
      // case the client forgets to check the response status. This is arguably
      // not our responsibility, and it won't work if something else breaks in
      // this endpoint, so it could be revisited in the future.
      logger.err(
        { err, projectId, fileId, queryString },
        'error finding element for downloading file'
      )
      return res.status(500).end()
    }
  }

  // This metric has this name because it used to be recorded in a middleware.
  // It tracks how many files have a hash and can be served by the history
  // system.
  Metrics.inc('fileToBlobRedirectMiddleware', 1, {
    method: 'GET',
    status: Boolean(file?.hash),
  })

  let source, stream, contentLength
  try {
    if (Features.hasFeature('project-history-blobs') && file?.hash) {
      // Get the file from history
      ;({ source, stream, contentLength } =
        await HistoryManager.promises.requestBlobWithFallback(
          projectId,
          file.hash,
          fileId
        ))
    } else {
      // The file-hash is missing. Fall back to filestore.
      stream = await FileStoreHandler.promises.getFileStream(
        projectId,
        fileId,
        queryString
      )
      source = 'filestore'
    }
  } catch (err) {
    if (err instanceof Errors.NotFoundError) {
      return res.status(404).end()
    } else {
      logger.err(
        { err, projectId, fileId, queryString },
        'error finding element for downloading file'
      )
      return res.status(500).end()
    }
  }

  // mobile safari will try to render html files, prevent this
  if (isMobileSafari(userAgent) && isHtml(file)) {
    preparePlainTextResponse(res)
  }
  if (contentLength) {
    res.setHeader('Content-Length', contentLength)
  }
  res.setContentDisposition('attachment', { filename: file.name })
  // allow the browser to cache these immutable files
  // note: both "private" and "max-age" appear to be required for caching
  res.setHeader('Cache-Control', 'private, max-age=3600')
  res.appendHeader('X-Served-By', source)
  try {
    await pipeline(stream, res)
  } catch (err) {
    if (
      err instanceof Error &&
      'code' in err &&
      err.code === 'ERR_STREAM_PREMATURE_CLOSE'
    ) {
      // Ignore clients closing the connection prematurely
      return
    }
    throw err
  }
}

async function getFileHead(req, res) {
  const projectId = req.params.Project_id
  const fileId = req.params.File_id

  let file
  try {
    ;({ element: file } = await ProjectLocator.promises.findElement({
      project_id: projectId,
      element_id: fileId,
      type: 'file',
    }))
  } catch (err) {
    if (err instanceof Errors.NotFoundError) {
      // res.sendStatus() sends a description of the status as body.
      // Using res.status().end() avoids sending that fake body.
      return res.status(404).end()
    } else {
      // Instead of using the global error handler, we send an empty response in
      // case the client forgets to check the response status. This is arguably
      // not our responsibility, and it won't work if something else breaks in
      // this endpoint, so it could be revisited in the future.
      logger.err(
        { err, projectId, fileId },
        'error finding element for downloading file'
      )
      return res.status(500).end()
    }
  }

  // This metric has this name because it used to be recorded in a middleware.
  // It tracks how many files have a hash and can be served by the history
  // system.
  Metrics.inc('fileToBlobRedirectMiddleware', 1, {
    method: 'HEAD',
    status: Boolean(file?.hash),
  })

  let fileSize, source
  try {
    if (Features.hasFeature('project-history-blobs') && file?.hash) {
      ;({ source, contentLength: fileSize } =
        await HistoryManager.promises.requestBlobWithFallback(
          projectId,
          file.hash,
          fileId,
          'HEAD'
        ))
    } else {
      fileSize = await FileStoreHandler.promises.getFileSize(projectId, fileId)
      source = 'filestore'
    }
  } catch (err) {
    if (err instanceof Errors.NotFoundError) {
      return res.status(404).end()
    } else {
      logger.err({ err, projectId, fileId }, 'error obtaining file size')
      return res.status(500).end()
    }
  }

  res.setHeader('Content-Length', fileSize)
  res.appendHeader('X-Served-By', source)
  res.status(200).end()
}

function isHtml(file) {
  return (
    fileEndsWith(file, '.html') ||
    fileEndsWith(file, '.htm') ||
    fileEndsWith(file, '.xhtml')
  )
}

function fileEndsWith(file, ext) {
  return (
    file.name != null &&
    file.name.length > ext.length &&
    file.name.lastIndexOf(ext) === file.name.length - ext.length
  )
}

function isMobileSafari(userAgent) {
  return (
    userAgent &&
    (userAgent.indexOf('iPhone') >= 0 || userAgent.indexOf('iPad') >= 0)
  )
}

export default {
  getFile: expressify(getFile),
  getFileHead: expressify(getFileHead),
}
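These handlers are plain async functions wrapped with expressify, which (per its use elsewhere in this commit) adapts them to Express's callback signature and routes rejections to next(). A hypothetical route registration — `webRouter` and the URL shape are assumptions, though the param names must match the req.params.Project_id / File_id reads above:

import FileStoreController from './app/src/Features/FileStore/FileStoreController.mjs'

webRouter.get('/project/:Project_id/file/:File_id', FileStoreController.getFile)
webRouter.head('/project/:Project_id/file/:File_id', FileStoreController.getFileHead)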
372
services/web/app/src/Features/FileStore/FileStoreHandler.js
Normal file
@@ -0,0 +1,372 @@
const _ = require('lodash')
const logger = require('@overleaf/logger')
const fs = require('fs')
const request = require('request')
const settings = require('@overleaf/settings')
const Async = require('async')
const FileHashManager = require('./FileHashManager')
const HistoryManager = require('../History/HistoryManager')
const ProjectDetailsHandler = require('../Project/ProjectDetailsHandler')
const { File } = require('../../models/File')
const Errors = require('../Errors/Errors')
const OError = require('@overleaf/o-error')
const { promisifyAll } = require('@overleaf/promise-utils')
const Features = require('../../infrastructure/Features')

const ONE_MIN_IN_MS = 60 * 1000
const FIVE_MINS_IN_MS = ONE_MIN_IN_MS * 5

const FileStoreHandler = {
  RETRY_ATTEMPTS: 3,

  uploadFileFromDisk(projectId, fileArgs, fsPath, callback) {
    // Look up the history id for the project if we don't have it already
    ProjectDetailsHandler.getDetails(projectId, function (err, project) {
      if (err) {
        return callback(err)
      }
      const historyId = project.overleaf?.history?.id
      if (!historyId) {
        return callback(new OError('missing history id'))
      }
      FileStoreHandler.uploadFileFromDiskWithHistoryId(
        projectId,
        historyId,
        fileArgs,
        fsPath,
        callback
      )
    })
  },

  _uploadToHistory(historyId, hash, size, fsPath, callback) {
    if (Features.hasFeature('project-history-blobs')) {
      Async.retry(
        FileStoreHandler.RETRY_ATTEMPTS,
        cb =>
          HistoryManager.uploadBlobFromDisk(historyId, hash, size, fsPath, cb),
        error => {
          if (error) return callback(error, false)
          callback(null, true)
        }
      )
    } else {
      callback(null, false)
    }
  },

  _uploadToFileStore(projectId, fileArgs, fsPath, callback) {
    Async.retry(
      FileStoreHandler.RETRY_ATTEMPTS,
      (cb, results) =>
        FileStoreHandler._doUploadFileFromDisk(projectId, fileArgs, fsPath, cb),
      callback
    )
  },

  uploadFileFromDiskWithHistoryId(
    projectId,
    historyId,
    fileArgs,
    fsPath,
    callback
  ) {
    fs.lstat(fsPath, function (err, stat) {
      if (err) {
        logger.warn({ err, projectId, fileArgs, fsPath }, 'error stating file')
        return callback(err)
      }
      if (!stat) {
        logger.warn(
          { projectId, fileArgs, fsPath },
          'stat is not available, can not check file from disk'
        )
        return callback(new Error('error getting stat, not available'))
      }
      if (!stat.isFile()) {
        logger.debug(
          { projectId, fileArgs, fsPath },
          'tried to upload symlink, not continuing'
        )
        return callback(new Error('can not upload symlink'))
      }
      FileHashManager.computeHash(fsPath, function (err, hash) {
        if (err) {
          return callback(err)
        }
        FileStoreHandler._uploadToHistory(
          historyId,
          hash,
          stat.size,
          fsPath,
          function (err, createdBlob) {
            if (err) {
              return callback(err)
            }
            fileArgs = { ...fileArgs, hash }
            FileStoreHandler._uploadToFileStore(
              projectId,
              fileArgs,
              fsPath,
              function (err, result) {
                if (err) {
                  OError.tag(err, 'Error uploading file, retries failed', {
                    projectId,
                    fileArgs,
                  })
                  return callback(err)
                }
                callback(err, result.url, result.fileRef, createdBlob)
              }
            )
          }
        )
      })
    })
  },

  _doUploadFileFromDisk(projectId, fileArgs, fsPath, callback) {
    const callbackOnce = _.once(callback)

    const fileRef = new File(fileArgs)
    const fileId = fileRef._id
    const url = FileStoreHandler._buildUrl(projectId, fileId)

    if (!Features.hasFeature('filestore')) {
      return callbackOnce(null, { url, fileRef })
    }

    const readStream = fs.createReadStream(fsPath)
    readStream.on('error', function (err) {
      logger.warn(
        { err, projectId, fileId, fsPath },
        'something went wrong on the read stream of uploadFileFromDisk'
      )
      callbackOnce(err)
    })
    readStream.on('open', function () {
      const opts = {
        method: 'post',
        uri: url,
        timeout: FIVE_MINS_IN_MS,
        headers: {
          'X-File-Hash-From-Web': fileArgs.hash,
        }, // send the hash to the filestore as a custom header so it can be checked
      }
      const writeStream = request(opts)
      writeStream.on('error', function (err) {
        logger.warn(
          { err, projectId, fileId, fsPath },
          'something went wrong on the write stream of uploadFileFromDisk'
        )
        callbackOnce(err)
      })
      writeStream.on('response', function (response) {
        if (![200, 201].includes(response.statusCode)) {
          const err = new OError(
            `non-ok response from filestore for upload: ${response.statusCode}`,
            { statusCode: response.statusCode }
          )
          return callbackOnce(err)
        }
        callbackOnce(null, { url, fileRef })
      }) // have to pass back an object because async.retry only accepts a single result argument
      readStream.pipe(writeStream)
    })
  },

  getFileStreamNew(project, file, query, callback) {
    const projectId = project._id
    const historyId = project.overleaf?.history?.id
    const fileId = file._id
    const hash = file.hash
    if (historyId && hash && Features.hasFeature('project-history-blobs')) {
      // new behaviour - request from history
      const range = _extractRange(query?.range)
      HistoryManager.requestBlobWithFallback(
        projectId,
        hash,
        fileId,
        'GET',
        range,
        function (err, result) {
          if (err) {
            return callback(err)
          }
          const { stream } = result
          callback(null, stream)
        }
      )
    } else {
      // original behaviour
      FileStoreHandler.getFileStream(projectId, fileId, query, callback)
    }
  },

  getFileStream(projectId, fileId, query, callback) {
    if (!Features.hasFeature('filestore')) {
      return callback(
        new Errors.NotFoundError('filestore is disabled, file not found')
      )
    }

    let queryString = '?from=getFileStream'
    if (query != null && query.format != null) {
      queryString += `&format=${query.format}`
    }
    const opts = {
      method: 'get',
      uri: `${this._buildUrl(projectId, fileId)}${queryString}`,
      timeout: FIVE_MINS_IN_MS,
      headers: {},
    }
    if (query != null && query.range != null) {
      const rangeText = query.range
      if (rangeText && rangeText.match != null && rangeText.match(/\d+-\d+/)) {
        opts.headers.range = `bytes=${query.range}`
      }
    }
    const readStream = request(opts)
    readStream.on('error', err =>
      logger.err(
        { err, projectId, fileId, query, opts },
        'error in file stream'
      )
    )
    callback(null, readStream)
  },

  getFileSize(projectId, fileId, callback) {
    const url = this._buildUrl(projectId, fileId)
    request.head(`${url}?from=getFileSize`, (err, res) => {
      if (err) {
        OError.tag(err, 'failed to get file size from filestore', {
          projectId,
          fileId,
        })
        return callback(err)
      }
      if (res.statusCode === 404) {
        return callback(new Errors.NotFoundError('file not found in filestore'))
      }
      if (res.statusCode !== 200) {
        logger.warn(
          { projectId, fileId, statusCode: res.statusCode },
          'filestore returned non-200 response'
        )
        return callback(new Error('filestore returned non-200 response'))
      }
      const fileSize = res.headers['content-length']
      callback(null, fileSize)
    })
  },

  deleteFile(projectId, fileId, callback) {
    logger.debug({ projectId, fileId }, 'telling file store to delete file')
    const opts = {
      method: 'delete',
      uri: this._buildUrl(projectId, fileId),
      timeout: FIVE_MINS_IN_MS,
    }
    request(opts, function (err, response) {
      if (err) {
        logger.warn(
          { err, projectId, fileId },
          'something went wrong deleting file from filestore'
        )
      }
      callback(err)
    })
  },

  deleteProject(projectId, callback) {
    if (!Features.hasFeature('filestore')) {
      return callback() // if filestore is not in use, we don't need to delete anything
    }
    request(
      {
        method: 'delete',
        uri: this._buildUrl(projectId),
        timeout: FIVE_MINS_IN_MS,
      },
      err => {
        if (err) {
          return callback(
            OError.tag(
              err,
              'something went wrong deleting a project in filestore',
              { projectId }
            )
          )
        }
        callback()
      }
    )
  },

  copyFile(oldProjectId, oldFileId, newProjectId, newFileId, callback) {
    logger.debug(
      { oldProjectId, oldFileId, newProjectId, newFileId },
      'telling filestore to copy a file'
    )
    const opts = {
      method: 'put',
      json: {
        source: {
          project_id: oldProjectId,
          file_id: oldFileId,
        },
      },
      uri: this._buildUrl(newProjectId, newFileId),
      timeout: FIVE_MINS_IN_MS,
    }
    request(opts, function (err, response) {
      if (err) {
        OError.tag(
          err,
          'something went wrong telling filestore api to copy file',
          {
            oldProjectId,
            oldFileId,
            newProjectId,
            newFileId,
          }
        )
        callback(err)
      } else if (response.statusCode >= 200 && response.statusCode < 300) {
        // successful response
        callback(null, opts.uri)
      } else {
        err = new OError(
          `non-ok response from filestore for copyFile: ${response.statusCode}`,
          {
            uri: opts.uri,
            statusCode: response.statusCode,
          }
        )
        callback(err)
      }
    })
  },

  _buildUrl(projectId, fileId) {
    return (
      `${settings.apis.filestore.url}/project/${projectId}` +
      (fileId ? `/file/${fileId}` : '')
    )
  },
}

function _extractRange(range) {
  if (typeof range === 'string' && /\d+-\d+/.test(range)) {
    return `bytes=${range}`
  }
}

module.exports = FileStoreHandler
module.exports.promises = promisifyAll(FileStoreHandler, {
  multiResult: {
    uploadFileFromDisk: ['url', 'fileRef', 'createdBlob'],
    uploadFileFromDiskWithHistoryId: ['url', 'fileRef', 'createdBlob'],
  },
})
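Because uploadFileFromDisk yields three values through its callback, the multiResult configuration above should expose them as named fields on a single resolved object. Roughly, a promise-based caller would look like this (the file name, path, and helper are invented):

const FileStoreHandler = require('./app/src/Features/FileStore/FileStoreHandler') // hypothetical path

async function addFigure(projectId) {
  const { url, fileRef, createdBlob } =
    await FileStoreHandler.promises.uploadFileFromDisk(
      projectId,
      { name: 'figure.png' },
      '/tmp/figure.png'
    )
  return { url, fileId: fileRef._id, createdBlob }
}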
@@ -0,0 +1,38 @@
const { GlobalMetric } = require('../../models/GlobalMetric')
/**
 * A generic collection used to track metrics shared across the entirety of the application
 * examples:
 * - a metric to measure how many signups we have for an expensive labs experiment, so we can stop signups
 * - a metric to measure how many users have been added to a test, so we can stop adding more once a cap is reached
 *
 */

async function getMetric(key, defaultValue = 0) {
  const metric = await GlobalMetric.findById(key)
  if (!metric) {
    return defaultValue
  }
  return metric.value
}

async function setMetric(key, value) {
  return await GlobalMetric.findOneAndUpdate(
    { _id: key },
    { $set: { value } },
    { new: true, upsert: true }
  )
}

async function incrementMetric(key, value = 1) {
  return await GlobalMetric.findOneAndUpdate(
    { _id: key },
    { $inc: { value } },
    { new: true, upsert: true, setDefaultsOnInsert: true }
  )
}

module.exports = {
  getMetric,
  setMetric,
  incrementMetric,
}
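A usage sketch for the cap pattern the comment describes — the metric key and require path are invented. The increment itself is atomic ($inc inside findOneAndUpdate), while the cap check is best-effort under concurrency:

const GlobalMetrics = require('./GlobalMetricsManager') // hypothetical path

async function tryLabsSignup(cap) {
  const count = await GlobalMetrics.getMetric('labs-signups')
  if (count >= cap) return false // stop signups once the cap is reached
  await GlobalMetrics.incrementMetric('labs-signups')
  return true
}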
@@ -0,0 +1,125 @@
import RedisWrapper from '../../infrastructure/RedisWrapper.js'
import settings from '@overleaf/settings'
import logger from '@overleaf/logger'
import UserGetter from '../User/UserGetter.js'
import {
  SmokeTestFailure,
  runSmokeTests,
} from './../../../../test/smoke/src/SmokeTests.js'

const rclient = RedisWrapper.client('health_check')

export default {
  check(req, res, next) {
    if (!settings.siteIsOpen || !settings.editorIsOpen) {
      // always return successful health checks when site is closed
      res.sendStatus(200)
    } else {
      // detach from express for cleaner stack traces
      setTimeout(() => runSmokeTestsDetached(req, res).catch(next))
    }
  },

  checkActiveHandles(req, res, next) {
    if (!(settings.maxActiveHandles > 0) || !process._getActiveHandles) {
      return next()
    }
    const activeHandlesCount = (process._getActiveHandles() || []).length
    if (activeHandlesCount > settings.maxActiveHandles) {
      logger.err(
        { activeHandlesCount, maxActiveHandles: settings.maxActiveHandles },
        'exceeded max active handles, failing health check'
      )
      return res.sendStatus(500)
    } else {
      logger.debug(
        { activeHandlesCount, maxActiveHandles: settings.maxActiveHandles },
        'active handles are below maximum'
      )
      next()
    }
  },

  checkApi(req, res, next) {
    rclient.healthCheck(err => {
      if (err) {
        logger.err({ err }, 'failed api redis health check')
        return res.sendStatus(500)
      }
      if (!settings.smokeTest.userId) {
        logger.err({}, 'smokeTest.userId is undefined in health check')
        return res.sendStatus(404)
      }
      UserGetter.getUserEmail(settings.smokeTest.userId, (err, email) => {
        if (err) {
          logger.err({ err }, 'failed api mongo health check')
          return res.sendStatus(500)
        }
        if (email == null) {
          logger.err({ err }, 'failed api mongo health check (no email)')
          return res.sendStatus(500)
        }
        res.sendStatus(200)
      })
    })
  },

  checkRedis(req, res, next) {
    return rclient.healthCheck(function (error) {
      if (error != null) {
        logger.err({ err: error }, 'failed redis health check')
        return res.sendStatus(500)
      } else {
        return res.sendStatus(200)
      }
    })
  },

  checkMongo(req, res, next) {
    return UserGetter.getUserEmail(
      settings.smokeTest.userId,
      function (err, email) {
        if (err != null) {
          logger.err({ err }, 'mongo health check failed, error present')
          return res.sendStatus(500)
        } else if (email == null) {
          logger.err(
            { err },
            'mongo health check failed, no email present in find result'
          )
          return res.sendStatus(500)
        } else {
          return res.sendStatus(200)
        }
      }
    )
  },
}

async function runSmokeTestsDetached(req, res) {
  function isAborted() {
    return req.destroyed
  }
  const stats = { start: new Date(), steps: [] }
  let status, response
  try {
    try {
      await runSmokeTests({ isAborted, stats })
    } finally {
      stats.end = new Date()
      stats.duration = stats.end - stats.start
    }
    status = 200
    response = { stats }
  } catch (e) {
    let err = e
    if (!(e instanceof SmokeTestFailure)) {
      err = new SmokeTestFailure('low level error', {}, e)
    }
    logger.err({ err, stats }, 'health check failed')
    status = 500
    response = { stats, error: err.message }
  }
  if (isAborted()) return
  res.status(status).json(response)
}
@@ -0,0 +1,19 @@
const Settings = require('@overleaf/settings')

module.exports = {
  hasAdminAccess,
  canRedirectToAdminDomain,
}

function hasAdminAccess(user) {
  if (!Settings.adminPrivilegeAvailable) return false
  if (!user) return false
  return Boolean(user.isAdmin)
}

function canRedirectToAdminDomain(user) {
  if (Settings.adminPrivilegeAvailable) return false
  if (!Settings.adminUrl) return false
  if (!user) return false
  return Boolean(user.isAdmin)
}
17
services/web/app/src/Features/Helpers/AsyncFormHelper.js
Normal file
@@ -0,0 +1,17 @@
const {
  acceptsJson,
} = require('../../infrastructure/RequestContentTypeDetection')

module.exports = {
  redirect,
}

// redirect the request via headers or JSON response depending on the request
// format
function redirect(req, res, redir) {
  if (acceptsJson(req)) {
    res.json({ redir })
  } else {
    res.redirect(redir)
  }
}
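On the client side, a fetch-based form would follow the JSON variant by hand, while a plain HTML form submission just follows the HTTP redirect. An illustrative browser-side counterpart (endpoint and payload invented, and whether a given request is treated as JSON depends on RequestContentTypeDetection):

async function submitForm(url, data) {
  const response = await fetch(url, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', Accept: 'application/json' },
    body: JSON.stringify(data),
  })
  const body = await response.json()
  if (body.redir) {
    window.location.assign(body.redir) // mirror the server-driven redirect
  }
}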
44
services/web/app/src/Features/Helpers/AuthorizationHelper.js
Normal file
@@ -0,0 +1,44 @@
const { UserSchema } = require('../../models/User')
const SplitTestHandler = require('../SplitTests/SplitTestHandler')
const ProjectGetter = require('../Project/ProjectGetter')
const { callbackify } = require('@overleaf/promise-utils')

module.exports = {
  hasAnyStaffAccess,
  isReviewerRoleEnabled: callbackify(isReviewerRoleEnabled),
  promises: {
    isReviewerRoleEnabled,
  },
}

function hasAnyStaffAccess(user) {
  if (!user.staffAccess) {
    return false
  }

  for (const key of Object.keys(UserSchema.obj.staffAccess)) {
    if (user.staffAccess[key]) return true
  }
  return false
}

async function isReviewerRoleEnabled(projectId) {
  const project = await ProjectGetter.promises.getProject(projectId, {
    reviewer_refs: 1,
    owner_ref: 1,
  })

  // if there are reviewers, it means the role is enabled
  if (Object.keys(project.reviewer_refs || {}).length > 0) {
    return true
  }

  // if there are no reviewers, check split test from project owner
  const reviewerRoleAssignment =
    await SplitTestHandler.promises.getAssignmentForUser(
      project.owner_ref,
      'reviewer-role'
    )

  return reviewerRoleAssignment.variant === 'enabled'
}
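
hasAnyStaffAccess scans every flag declared on the schema's staffAccess sub-document, so newly added staff roles are picked up without touching this helper. Illustrative calls (the flag name below is hypothetical):

hasAnyStaffAccess({ staffAccess: { adminMetrics: true } }) // => true
hasAnyStaffAccess({ staffAccess: {} }) // => false
hasAnyStaffAccess({}) // => false (no staffAccess sub-document at all)
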
55  services/web/app/src/Features/Helpers/DiffHelper.js  Normal file
@@ -0,0 +1,55 @@
const MAX_LENGTH = 254

function _calculateRatio(matches, length) {
  if (length) {
    const ratio = (2.0 * matches) / length
    const rounded = Math.floor(ratio * 100) / 100
    return rounded
  }
  return 1.0
}

/**
 * Ported from python's `difflib`:
 * https://github.com/python/cpython/blob/0415cf895f96ae3f896f1f25f0c030a820845e13/Lib/difflib.py#L622-L649
 *
 * Accepts two strings, `a` and `b`, and returns a float ratio
 * corresponding (approximately) to the overlap between the strings.
 * Identical strings produce 1.0, completely different strings produce 0.0
 * */
function stringSimilarity(a, b) {
  if (
    typeof a !== 'string' ||
    typeof b !== 'string' ||
    a.length > MAX_LENGTH ||
    b.length > MAX_LENGTH
  ) {
    throw new Error('Invalid input to stringSimilarity')
  }
  // Count how many times each character occurs in `b`
  const fullBCount = {}
  b.split('').forEach(e => {
    fullBCount[e] = (fullBCount[e] || 0) + 1
  })
  // avail[x] is the number of times x appears in 'b' less the
  // number of times we've seen it in 'a' so far ... kinda
  const avail = {}
  let matches = 0
  a.split('').forEach(e => {
    let n = null
    if (Object.hasOwn(avail, e)) {
      n = avail[e]
    } else {
      n = fullBCount[e] || 0
    }
    avail[e] = n - 1
    if (n > 0) {
      matches = matches + 1
    }
  })
  return _calculateRatio(matches, a.length + b.length)
}

module.exports = {
  stringSimilarity,
}
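
The ratio is 2 * matches / (length(a) + length(b)), floored to two decimal places, so it rewards shared character counts rather than ordering. A few worked calls:

stringSimilarity('abc', 'abc') // => 1    (2 * 3 / 6)
stringSimilarity('kitten', 'sitten') // => 0.83 (2 * 5 / 12, floored)
stringSimilarity('abc', 'xyz') // => 0    (no characters in common)
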
42  services/web/app/src/Features/Helpers/EmailHelper.js  Normal file
@@ -0,0 +1,42 @@
const { parseOneAddress } = require('email-addresses')

// available for frontend in https://github.com/overleaf/internal/blob/19d432c70b173752ee7c6d8978dd6be16b042921/services/web/frontend/js/shared/utils/email.tsx#L4
const EMAIL_REGEXP =
  // eslint-disable-next-line no-useless-escape
  /^([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/

function getDomain(email) {
  email = parseEmail(email)
  return email ? email.split('@').pop() : null
}

function parseEmail(email, parseRfcAddress = false) {
  if (typeof email !== 'string' || !email) {
    return null
  }

  if (parseRfcAddress) {
    const result = parseOneAddress(email)
    if (!result?.address) {
      return null
    }
    email = result.address
  }

  if (email.length > 254) {
    return null
  }
  email = email.trim().toLowerCase()

  const matched = email.match(EMAIL_REGEXP)
  if (matched == null || matched[0] == null) {
    return null
  }

  return matched[0]
}

module.exports = {
  getDomain,
  parseEmail,
}
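
parseEmail normalises to lowercase and returns null instead of throwing, which keeps call sites simple; the optional RFC mode unwraps display-name addresses first. Illustrative calls:

parseEmail('Alice@Example.COM') // => 'alice@example.com'
parseEmail('Alice <alice@example.com>', true) // => 'alice@example.com'
parseEmail('not-an-email') // => null
getDomain('alice@example.com') // => 'example.com'
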
54  services/web/app/src/Features/Helpers/Mongo.js  Normal file
@@ -0,0 +1,54 @@
const OError = require('@overleaf/o-error')
const { ObjectId } = require('mongodb-legacy')
const { ObjectId: MongooseObjectId } = require('mongoose').mongo

function _getObjectIdInstance(id) {
  if (typeof id === 'string') {
    return new ObjectId(id)
  } else if (id instanceof ObjectId) {
    return id
  } else if (id instanceof MongooseObjectId) {
    return new ObjectId(id.toString())
  } else {
    throw new OError('unexpected object id', { id })
  }
}

function normalizeQuery(query) {
  if (!query) {
    throw new Error('no query provided')
  }
  if (
    typeof query === 'string' ||
    query instanceof ObjectId ||
    query instanceof MongooseObjectId
  ) {
    return { _id: _getObjectIdInstance(query) }
  } else if (typeof query._id === 'string') {
    query._id = new ObjectId(query._id)
    return query
  } else {
    return query
  }
}

function normalizeMultiQuery(query) {
  if (query instanceof Set) {
    query = Array.from(query)
  }
  if (Array.isArray(query)) {
    return { _id: { $in: query.map(id => _getObjectIdInstance(id)) } }
  } else {
    return normalizeQuery(query)
  }
}

function isObjectIdInstance(id) {
  return id instanceof ObjectId || id instanceof MongooseObjectId
}

module.exports = {
  isObjectIdInstance,
  normalizeQuery,
  normalizeMultiQuery,
}
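
These helpers accept a string id, an ObjectId from either driver, a filter object, or a Set/Array of ids, and always return a filter that mongodb-legacy accepts. For example (someObjectId below stands in for any existing ObjectId instance):

normalizeQuery('5f0c0d0e0f0a0b0c0d0e0f01')
// => { _id: new ObjectId('5f0c0d0e0f0a0b0c0d0e0f01') }
normalizeMultiQuery(new Set(['5f0c0d0e0f0a0b0c0d0e0f01']))
// => { _id: { $in: [new ObjectId('5f0c0d0e0f0a0b0c0d0e0f01')] } }
normalizeQuery({ owner_ref: someObjectId }) // passed through unchanged
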
@@ -0,0 +1,46 @@
const pug = require('pug-runtime')

const SPLIT_REGEX = /<(\d+)>(.*?)<\/\1>/g

function render(locale, components) {
  const output = []
  function addPlainText(text) {
    if (!text) return
    output.push(pug.escape(text))
  }

  // 'PRE<0>INNER</0>POST' -> ['PRE', '0', 'INNER', 'POST']
  // '<0>INNER</0>' -> ['', '0', 'INNER', '']
  // '<0></0>' -> ['', '0', '', '']
  // '<0>INNER</0><0>INNER2</0>' -> ['', '0', 'INNER', '', '0', 'INNER2', '']
  // '<0><1>INNER</1></0>' -> ['', '0', '<1>INNER</1>', '']
  // 'PLAIN TEXT' -> ['PLAIN TEXT']
  // NOTE: a test suite is verifying these cases: SafeHTMLSubstituteTests
  const chunks = locale.split(SPLIT_REGEX)

  // extract the 'PRE' chunk
  addPlainText(chunks.shift())

  while (chunks.length) {
    // each batch consists of three chunks: ['0', 'INNER', 'POST']
    const [idx, innerChunk, intermediateChunk] = chunks.splice(0, 3)

    const component = components[idx]
    const componentName =
      typeof component === 'string' ? component : component.name
    // pug is doing any necessary escaping on attribute values
    const attributes = (component.attrs && pug.attrs(component.attrs)) || ''
    output.push(
      `<${componentName + attributes}>`,
      ...render(innerChunk, components),
      `</${componentName}>`
    )
    addPlainText(intermediateChunk)
  }
  return output.join('')
}

module.exports = {
  SPLIT_REGEX,
  render,
}
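
render walks the numbered placeholder tags in a translated string and substitutes real HTML elements, while everything else is escaped through pug's runtime so translations cannot inject markup. A worked call (exact attribute spacing comes from pug.attrs):

render('Read the <0>terms</0> first', [{ name: 'a', attrs: { href: '/terms' } }])
// => 'Read the <a href="/terms">terms</a> first'
render('<0>bold</0>', ['b'])
// => '<b>bold</b>'
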
30  services/web/app/src/Features/Helpers/StringHelper.js  Normal file
@@ -0,0 +1,30 @@
/* eslint-disable
    max-len,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
let StringHelper
const JSON_ESCAPE_REGEXP = /[\u2028\u2029&><]/g

const JSON_ESCAPE = {
  '&': '\\u0026',
  '>': '\\u003e',
  '<': '\\u003c',
  '\u2028': '\\u2028',
  '\u2029': '\\u2029',
}

module.exports = StringHelper = {
  // stringifies and escapes a json object for use in a script. This ensures that &, < and > characters are escaped,
  // along with quotes. This ensures that the string can be safely rendered into HTML. See rationale at:
  // https://api.rubyonrails.org/classes/ERB/Util.html#method-c-json_escape
  // and implementation lifted from:
  // https://github.com/ember-fastboot/fastboot/blob/cafd96c48564d8384eb83dc908303dba8ece10fd/src/ember-app.js#L496-L510
  stringifyJsonForScript(object) {
    return JSON.stringify(object).replace(
      JSON_ESCAPE_REGEXP,
      match => JSON_ESCAPE[match]
    )
  },
}
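
The crucial case is a payload containing '</script>', which would otherwise terminate the surrounding script tag early. For instance:

StringHelper.stringifyJsonForScript({ title: '</script><img src=x>' })
// => '{"title":"\u003c/script\u003e\u003cimg src=x\u003e"}'
// Safe to interpolate into an inline <script> block.
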
49  services/web/app/src/Features/Helpers/UrlHelper.js  Normal file
@@ -0,0 +1,49 @@
const Settings = require('@overleaf/settings')
const { URL } = require('url')

const PROTO = new URL(Settings.siteUrl).protocol

function getCanonicalURL(req, url) {
  const origin = `${PROTO}//${req.headers.host}`
  url = new URL(url || req.originalUrl, origin)
  if (url.pathname.endsWith('/')) {
    url.pathname = url.pathname.replace(/\/+$/, '')
  }
  url.search = ''
  url.hash = ''
  return url.href
}

function getSafeRedirectPath(value) {
  const baseURL = Settings.siteUrl // base URL is required to construct URL from path
  const url = new URL(value, baseURL)
  let safePath = `${url.pathname}${url.search}${url.hash}`.replace(/^\/+/, '/')
  if (safePath === '/') {
    safePath = undefined
  }
  return safePath
}

function getSafeAdminDomainRedirect(path) {
  return Settings.adminUrl + (getSafeRedirectPath(path) || '/')
}

module.exports = {
  getCanonicalURL,
  getSafeRedirectPath,
  getSafeAdminDomainRedirect,
  wrapUrlWithProxy(url) {
    // TODO: Consider what to do for Community and Enterprise edition?
    if (!Settings.apis.linkedUrlProxy.url) {
      throw new Error('no linked url proxy configured')
    }
    return `${Settings.apis.linkedUrlProxy.url}?url=${encodeURIComponent(url)}`
  },

  prependHttpIfNeeded(url) {
    if (!url.match('://')) {
      url = `http://${url}`
    }
    return url
  },
}
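
getSafeRedirectPath defuses open redirects by resolving the input against the site URL and keeping only the path, query, and hash, so an absolute URL pointing at another host collapses to its path. For example (hostnames below are illustrative):

getSafeRedirectPath('/project/123?foo=1') // => '/project/123?foo=1'
getSafeRedirectPath('https://evil.example.com/phish') // => '/phish'
getSafeRedirectPath('//evil.example.com/phish') // => '/phish'
getSafeRedirectPath('/') // => undefined (nothing worth redirecting to)
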
@@ -0,0 +1,20 @@
const { fetchNothing } = require('@overleaf/fetch-utils')
const Settings = require('@overleaf/settings')

async function deleteProject(projectId) {
  if (!Settings.apis.historyBackupDeletion.enabled) return

  const url = new URL(Settings.apis.historyBackupDeletion.url)
  url.pathname += `project/${projectId}/backup`
  await fetchNothing(url, {
    method: 'DELETE',
    basicAuth: {
      user: Settings.apis.historyBackupDeletion.user,
      password: Settings.apis.historyBackupDeletion.pass,
    },
  })
}

module.exports = {
  deleteProject,
}
502  services/web/app/src/Features/History/HistoryController.js  Normal file
@@ -0,0 +1,502 @@
// @ts-check

const { setTimeout } = require('timers/promises')
const { pipeline } = require('stream/promises')
const OError = require('@overleaf/o-error')
const logger = require('@overleaf/logger')
const { expressify } = require('@overleaf/promise-utils')
const {
  fetchStream,
  fetchStreamWithResponse,
  fetchJson,
  fetchNothing,
  RequestFailedError,
} = require('@overleaf/fetch-utils')
const settings = require('@overleaf/settings')
const SessionManager = require('../Authentication/SessionManager')
const UserGetter = require('../User/UserGetter')
const ProjectGetter = require('../Project/ProjectGetter')
const Errors = require('../Errors/Errors')
const HistoryManager = require('./HistoryManager')
const ProjectDetailsHandler = require('../Project/ProjectDetailsHandler')
const ProjectEntityUpdateHandler = require('../Project/ProjectEntityUpdateHandler')
const RestoreManager = require('./RestoreManager')
const { prepareZipAttachment } = require('../../infrastructure/Response')
const Features = require('../../infrastructure/Features')

// Number of seconds after which the browser should send a request to revalidate
// blobs
const REVALIDATE_BLOB_AFTER_SECONDS = 86400 // 1 day

// Number of seconds during which the browser can serve a stale response while
// revalidating
const STALE_WHILE_REVALIDATE_SECONDS = 365 * 86400 // 1 year

const MAX_HISTORY_ZIP_ATTEMPTS = 40

async function getBlob(req, res) {
  await requestBlob('GET', req, res)
}

async function headBlob(req, res) {
  await requestBlob('HEAD', req, res)
}

async function requestBlob(method, req, res) {
  const { project_id: projectId, hash } = req.params

  // Handle conditional GET request
  if (req.get('If-None-Match') === hash) {
    setBlobCacheHeaders(res, hash)
    return res.status(304).end()
  }

  const range = req.get('Range')
  let stream, source, contentLength
  try {
    ;({ stream, source, contentLength } =
      await HistoryManager.promises.requestBlobWithFallback(
        projectId,
        hash,
        req.query.fallback,
        method,
        range
      ))
  } catch (err) {
    if (err instanceof Errors.NotFoundError) return res.status(404).end()
    throw err
  }
  res.appendHeader('X-Served-By', source)

  if (contentLength) res.setHeader('Content-Length', contentLength) // set on HEAD
  res.setHeader('Content-Type', 'application/octet-stream')
  setBlobCacheHeaders(res, hash)

  try {
    await pipeline(stream, res)
  } catch (err) {
    // If the downstream request is cancelled, we get an
    // ERR_STREAM_PREMATURE_CLOSE, ignore these "errors".
    if (!isPrematureClose(err)) {
      throw err
    }
  }
}

function setBlobCacheHeaders(res, etag) {
  // Blobs are immutable, so they can in principle be cached indefinitely. Here,
  // we ask the browser to cache them for some time, but then check back
  // regularly in case they changed (even though they shouldn't). This is a
  // precaution in case a bug makes us send bad data through that endpoint.
  res.set(
    'Cache-Control',
    `private, max-age=${REVALIDATE_BLOB_AFTER_SECONDS}, stale-while-revalidate=${STALE_WHILE_REVALIDATE_SECONDS}`
  )
  res.set('ETag', etag)
}

async function proxyToHistoryApi(req, res, next) {
  const userId = SessionManager.getLoggedInUserId(req.session)
  const url = settings.apis.project_history.url + req.url

  const { stream, response } = await fetchStreamWithResponse(url, {
    method: req.method,
    headers: { 'X-User-Id': userId },
  })

  const contentType = response.headers.get('Content-Type')
  const contentLength = response.headers.get('Content-Length')
  if (contentType != null) {
    res.set('Content-Type', contentType)
  }
  if (contentLength != null) {
    res.set('Content-Length', contentLength)
  }

  try {
    await pipeline(stream, res)
  } catch (err) {
    // If the downstream request is cancelled, we get an
    // ERR_STREAM_PREMATURE_CLOSE.
    if (!isPrematureClose(err)) {
      throw err
    }
  }
}

async function proxyToHistoryApiAndInjectUserDetails(req, res, next) {
  const userId = SessionManager.getLoggedInUserId(req.session)
  const url = settings.apis.project_history.url + req.url
  const body = await fetchJson(url, {
    method: req.method,
    headers: { 'X-User-Id': userId },
  })
  const data = await HistoryManager.promises.injectUserDetails(body)
  res.json(data)
}

async function resyncProjectHistory(req, res, next) {
  // increase timeout to 6 minutes
  res.setTimeout(6 * 60 * 1000)
  const projectId = req.params.Project_id
  const opts = {}
  const historyRangesMigration = req.body.historyRangesMigration
  if (historyRangesMigration) {
    opts.historyRangesMigration = historyRangesMigration
  }
  if (req.body.resyncProjectStructureOnly) {
    opts.resyncProjectStructureOnly = req.body.resyncProjectStructureOnly
  }

  try {
    await ProjectEntityUpdateHandler.promises.resyncProjectHistory(
      projectId,
      opts
    )
  } catch (err) {
    if (err instanceof Errors.ProjectHistoryDisabledError) {
      return res.sendStatus(404)
    } else {
      throw err
    }
  }

  res.sendStatus(204)
}

async function restoreFileFromV2(req, res, next) {
  const { project_id: projectId } = req.params
  const { version, pathname } = req.body
  const userId = SessionManager.getLoggedInUserId(req.session)

  const entity = await RestoreManager.promises.restoreFileFromV2(
    userId,
    projectId,
    version,
    pathname
  )

  res.json({
    type: entity.type,
    id: entity._id,
  })
}

async function revertFile(req, res, next) {
  const { project_id: projectId } = req.params
  const { version, pathname } = req.body
  const userId = SessionManager.getLoggedInUserId(req.session)

  const entity = await RestoreManager.promises.revertFile(
    userId,
    projectId,
    version,
    pathname,
    {}
  )

  res.json({
    type: entity.type,
    id: entity._id,
  })
}

async function revertProject(req, res, next) {
  const { project_id: projectId } = req.params
  const { version } = req.body
  const userId = SessionManager.getLoggedInUserId(req.session)

  await RestoreManager.promises.revertProject(userId, projectId, version)

  res.sendStatus(200)
}

async function getLabels(req, res, next) {
  const projectId = req.params.Project_id

  let labels = await fetchJson(
    `${settings.apis.project_history.url}/project/${projectId}/labels`
  )
  labels = await _enrichLabels(labels)

  res.json(labels)
}

async function createLabel(req, res, next) {
  const projectId = req.params.Project_id
  const { comment, version } = req.body
  const userId = SessionManager.getLoggedInUserId(req.session)

  let label = await fetchJson(
    `${settings.apis.project_history.url}/project/${projectId}/labels`,
    {
      method: 'POST',
      json: { comment, version, user_id: userId },
    }
  )
  label = await _enrichLabel(label)

  res.json(label)
}

async function _enrichLabel(label) {
  const newLabel = Object.assign({}, label)
  if (!label.user_id) {
    newLabel.user_display_name = _displayNameForUser(null)
    return newLabel
  }

  const user = await UserGetter.promises.getUser(label.user_id, {
    first_name: 1,
    last_name: 1,
    email: 1,
  })
  newLabel.user_display_name = _displayNameForUser(user)
  return newLabel
}

async function _enrichLabels(labels) {
  if (!labels || !labels.length) {
    return []
  }
  const uniqueUsers = new Set(labels.map(label => label.user_id))

  // For backwards compatibility, and for anonymously created labels in SP
  // expect missing user_id fields
  uniqueUsers.delete(undefined)

  if (!uniqueUsers.size) {
    return labels
  }

  const rawUsers = await UserGetter.promises.getUsers(Array.from(uniqueUsers), {
    first_name: 1,
    last_name: 1,
    email: 1,
  })
  const users = new Map(rawUsers.map(user => [String(user._id), user]))

  labels.forEach(label => {
    const user = users.get(label.user_id)
    label.user_display_name = _displayNameForUser(user)
  })
  return labels
}

function _displayNameForUser(user) {
  if (user == null) {
    return 'Anonymous'
  }
  if (user.name) {
    return user.name
  }
  let name = [user.first_name, user.last_name]
    .filter(n => n != null)
    .join(' ')
    .trim()
  if (name === '') {
    name = user.email.split('@')[0]
  }
  if (!name) {
    return '?'
  }
  return name
}

async function deleteLabel(req, res, next) {
  const { Project_id: projectId, label_id: labelId } = req.params
  const userId = SessionManager.getLoggedInUserId(req.session)

  const project = await ProjectGetter.promises.getProject(projectId, {
    owner_ref: true,
  })

  // If the current user is the project owner, we can use the non-user-specific
  // delete label endpoint. Otherwise, we have to use the user-specific version
  // (which only deletes the label if it is owned by the user)
  const deleteEndpointUrl = project.owner_ref.equals(userId)
    ? `${settings.apis.project_history.url}/project/${projectId}/labels/${labelId}`
    : `${settings.apis.project_history.url}/project/${projectId}/user/${userId}/labels/${labelId}`

  await fetchNothing(deleteEndpointUrl, {
    method: 'DELETE',
  })
  res.sendStatus(204)
}

async function downloadZipOfVersion(req, res, next) {
  const { project_id: projectId, version } = req.params

  const project = await ProjectDetailsHandler.promises.getDetails(projectId)
  const v1Id =
    project.overleaf && project.overleaf.history && project.overleaf.history.id

  if (v1Id == null) {
    logger.error(
      { projectId, version },
      'got request for zip version of non-v1 history project'
    )
    return res.sendStatus(402)
  }

  await _pipeHistoryZipToResponse(
    v1Id,
    version,
    `${project.name} (Version ${version})`,
    req,
    res
  )
}

async function _pipeHistoryZipToResponse(v1ProjectId, version, name, req, res) {
  if (req.destroyed) {
    // client has disconnected -- skip project history api call and download
    return
  }
  // increase timeout to 6 minutes
  res.setTimeout(6 * 60 * 1000)
  const url = `${settings.apis.v1_history.url}/projects/${v1ProjectId}/version/${version}/zip`
  const basicAuth = {
    user: settings.apis.v1_history.user,
    password: settings.apis.v1_history.pass,
  }

  if (!Features.hasFeature('saas')) {
    let stream
    try {
      stream = await fetchStream(url, { basicAuth })
    } catch (err) {
      if (err instanceof RequestFailedError && err.response.status === 404) {
        return res.sendStatus(404)
      } else {
        throw err
      }
    }

    prepareZipAttachment(res, `${name}.zip`)

    try {
      await pipeline(stream, res)
    } catch (err) {
      // If the downstream request is cancelled, we get an
      // ERR_STREAM_PREMATURE_CLOSE.
      if (!isPrematureClose(err)) {
        throw err
      }
    }
    return
  }

  let body
  try {
    body = await fetchJson(url, { method: 'POST', basicAuth })
  } catch (err) {
    if (err instanceof RequestFailedError && err.response.status === 404) {
      throw new Errors.NotFoundError('zip not found')
    } else {
      throw err
    }
  }

  if (req.destroyed) {
    // client has disconnected -- skip delayed s3 download
    return
  }

  if (!body.zipUrl) {
    throw new OError('Missing zipUrl, cannot fetch zip file', {
      v1ProjectId,
      body,
    })
  }

  // retry for about 6 minutes starting with short delay
  let retryDelay = 2000
  let attempt = 0
  while (true) {
    attempt += 1
    await setTimeout(retryDelay)

    if (req.destroyed) {
      // client has disconnected -- skip s3 download
      return
    }

    // increase delay by 1 second up to 10
    if (retryDelay < 10000) {
      retryDelay += 1000
    }

    try {
      const stream = await fetchStream(body.zipUrl)
      prepareZipAttachment(res, `${name}.zip`)
      await pipeline(stream, res)
    } catch (err) {
      if (attempt > MAX_HISTORY_ZIP_ATTEMPTS) {
        throw err
      }

      if (err instanceof RequestFailedError && err.response.status === 404) {
        // File not ready yet. Retry.
        continue
      } else if (isPrematureClose(err)) {
        // Downstream request cancelled. Retry.
        continue
      } else {
        // Unknown error. Log and retry.
        logger.warn(
          { err, v1ProjectId, version, retryAttempt: attempt },
          'history s3 proxying error'
        )
        continue
      }
    }

    // We made it through. No need to retry anymore. Exit loop
    break
  }
}

async function getLatestHistory(req, res, next) {
  const projectId = req.params.project_id
  const history = await HistoryManager.promises.getLatestHistory(projectId)
  res.json(history)
}

async function getChanges(req, res, next) {
  const projectId = req.params.project_id
  const since = req.query.since
  const changes = await HistoryManager.promises.getChanges(projectId, { since })
  res.json(changes)
}

function isPrematureClose(err) {
  return (
    err instanceof Error &&
    'code' in err &&
    err.code === 'ERR_STREAM_PREMATURE_CLOSE'
  )
}

module.exports = {
  getBlob: expressify(getBlob),
  headBlob: expressify(headBlob),
  proxyToHistoryApi: expressify(proxyToHistoryApi),
  proxyToHistoryApiAndInjectUserDetails: expressify(
    proxyToHistoryApiAndInjectUserDetails
  ),
  resyncProjectHistory: expressify(resyncProjectHistory),
  restoreFileFromV2: expressify(restoreFileFromV2),
  revertFile: expressify(revertFile),
  revertProject: expressify(revertProject),
  getLabels: expressify(getLabels),
  createLabel: expressify(createLabel),
  deleteLabel: expressify(deleteLabel),
  downloadZipOfVersion: expressify(downloadZipOfVersion),
  getLatestHistory: expressify(getLatestHistory),
  getChanges: expressify(getChanges),
  _displayNameForUser,
  promises: {
    _pipeHistoryZipToResponse,
  },
}
450  services/web/app/src/Features/History/HistoryManager.js  Normal file
@@ -0,0 +1,450 @@
const { callbackify } = require('util')
const {
  fetchJson,
  fetchNothing,
  fetchStreamWithResponse,
  RequestFailedError,
} = require('@overleaf/fetch-utils')
const fs = require('fs')
const settings = require('@overleaf/settings')
const OError = require('@overleaf/o-error')
const UserGetter = require('../User/UserGetter')
const ProjectGetter = require('../Project/ProjectGetter')
const HistoryBackupDeletionHandler = require('./HistoryBackupDeletionHandler')
const { db, ObjectId } = require('../../infrastructure/mongodb')
const Metrics = require('@overleaf/metrics')
const logger = require('@overleaf/logger')
const { NotFoundError } = require('../Errors/Errors')
const projectKey = require('./project_key')

// BEGIN copy from services/history-v1/storage/lib/blob_store/index.js

const GLOBAL_BLOBS = new Set() // CHANGE FROM SOURCE: only store hashes.

const HISTORY_V1_URL = settings.apis.v1_history.url
const HISTORY_V1_BASIC_AUTH = {
  user: settings.apis.v1_history.user,
  password: settings.apis.v1_history.pass,
}

function makeGlobalKey(hash) {
  return `${hash.slice(0, 2)}/${hash.slice(2, 4)}/${hash.slice(4)}`
}

function makeProjectKey(projectId, hash) {
  return `${projectKey.format(projectId)}/${hash.slice(0, 2)}/${hash.slice(2)}`
}

function getBlobLocation(projectId, hash) {
  if (GLOBAL_BLOBS.has(hash)) {
    return {
      bucket: settings.apis.v1_history.buckets.globalBlobs,
      key: makeGlobalKey(hash),
    }
  } else {
    return {
      bucket: settings.apis.v1_history.buckets.projectBlobs,
      key: makeProjectKey(projectId, hash),
    }
  }
}
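
// Illustration (a sketch added for clarity, not in the original source):
// a 40-character blob hash is sharded into directory-style prefixes,
//   makeGlobalKey('aabbccdd…')             => 'aa/bb/ccdd…'
//   makeProjectKey(projectId, 'aabbccdd…') => '<projectKey>/aa/bbccdd…'
// which spreads blobs evenly across bucket key prefixes.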

async function loadGlobalBlobs() {
  const blobs = db.projectHistoryGlobalBlobs.find()
  for await (const blob of blobs) {
    GLOBAL_BLOBS.add(blob._id) // CHANGE FROM SOURCE: only store hashes.
  }
}

// END copy from services/history-v1/storage/lib/blob_store/index.js

async function initializeProject(projectId) {
  const body = await fetchJson(`${settings.apis.project_history.url}/project`, {
    method: 'POST',
    json: { historyId: projectId.toString() },
  })
  const historyId = body && body.project && body.project.id
  if (!historyId) {
    throw new OError('project-history did not provide an id', { body })
  }
  return historyId
}

async function flushProject(projectId) {
  try {
    await fetchNothing(
      `${settings.apis.project_history.url}/project/${projectId}/flush`,
      { method: 'POST' }
    )
  } catch (err) {
    throw OError.tag(err, 'failed to flush project to project history', {
      projectId,
    })
  }
}

async function deleteProjectHistory(projectId) {
  try {
    await fetchNothing(
      `${settings.apis.project_history.url}/project/${projectId}`,
      { method: 'DELETE' }
    )
  } catch (err) {
    throw OError.tag(err, 'failed to delete project history', {
      projectId,
    })
  }
}

async function resyncProject(projectId, options = {}) {
  const body = {}
  if (options.force) {
    body.force = options.force
  }
  if (options.origin) {
    body.origin = options.origin
  }
  if (options.historyRangesMigration) {
    body.historyRangesMigration = options.historyRangesMigration
  }
  try {
    await fetchNothing(
      `${settings.apis.project_history.url}/project/${projectId}/resync`,
      {
        method: 'POST',
        json: body,
        signal: AbortSignal.timeout(6 * 60 * 1000),
      }
    )
  } catch (err) {
    throw OError.tag(err, 'failed to resync project history', {
      projectId,
    })
  }
}

async function deleteProject(projectId, historyId) {
  const tasks = []
  tasks.push(_deleteProjectInProjectHistory(projectId))
  if (historyId != null) {
    tasks.push(_deleteProjectInFullProjectHistory(historyId))
  }
  await Promise.all(tasks)
  await HistoryBackupDeletionHandler.deleteProject(projectId)
}

async function _deleteProjectInProjectHistory(projectId) {
  try {
    await fetchNothing(
      `${settings.apis.project_history.url}/project/${projectId}`,
      { method: 'DELETE' }
    )
  } catch (err) {
    throw OError.tag(
      err,
      'failed to clear project history in project-history',
      { projectId }
    )
  }
}

async function _deleteProjectInFullProjectHistory(historyId) {
  try {
    await fetchNothing(`${HISTORY_V1_URL}/projects/${historyId}`, {
      method: 'DELETE',
      basicAuth: HISTORY_V1_BASIC_AUTH,
    })
  } catch (err) {
    throw OError.tag(err, 'failed to clear project history', { historyId })
  }
}

async function uploadBlobFromDisk(historyId, hash, byteLength, fsPath) {
  const outStream = fs.createReadStream(fsPath)

  const url = `${HISTORY_V1_URL}/projects/${historyId}/blobs/${hash}`
  await fetchNothing(url, {
    method: 'PUT',
    body: outStream,
    headers: { 'Content-Length': byteLength }, // add the content length to work around problems with chunked encoding in node 18
    signal: AbortSignal.timeout(60 * 1000),
    basicAuth: HISTORY_V1_BASIC_AUTH,
  })
}

async function copyBlob(sourceHistoryId, targetHistoryId, hash) {
  const url = `${HISTORY_V1_URL}/projects/${targetHistoryId}/blobs/${hash}`
  await fetchNothing(
    `${url}?${new URLSearchParams({ copyFrom: sourceHistoryId })}`,
    {
      method: 'POST',
      basicAuth: HISTORY_V1_BASIC_AUTH,
    }
  )
}

async function requestBlobWithFallback(
  projectId,
  hash,
  fileId,
  method = 'GET',
  range = ''
) {
  const project = await ProjectGetter.promises.getProject(projectId, {
    'overleaf.history.id': true,
  })
  // Talk to history-v1 directly to avoid streaming via project-history.
  let url = new URL(HISTORY_V1_URL)
  url.pathname += `/projects/${project.overleaf.history.id}/blobs/${hash}`

  const opts = { method, headers: { Range: range } }
  let stream, response, source
  try {
    ;({ stream, response } = await fetchStreamWithResponse(url, {
      ...opts,
      basicAuth: {
        user: settings.apis.v1_history.user,
        password: settings.apis.v1_history.pass,
      },
    }))
    source = 'history-v1'
  } catch (err) {
    if (err instanceof RequestFailedError && err.response.status === 404) {
      if (ObjectId.isValid(fileId)) {
        url = new URL(settings.apis.filestore.url)
        url.pathname = `/project/${projectId}/file/${fileId}`
        try {
          ;({ stream, response } = await fetchStreamWithResponse(url, opts))
        } catch (err) {
          if (
            err instanceof RequestFailedError &&
            err.response.status === 404
          ) {
            throw new NotFoundError()
          }
          throw err
        }
        logger.warn({ projectId, hash, fileId }, 'missing history blob')
        source = 'filestore'
      } else {
        throw new NotFoundError()
      }
    } else {
      throw err
    }
  }
  Metrics.inc('request_blob', 1, { path: source })
  return {
    url,
    stream,
    source,
    contentLength: response.headers.get('Content-Length'),
  }
}

/**
 * Warning: Don't use this method for large projects. It will eagerly load all
 * the history data and apply all operations.
 * @param {string} projectId
 * @returns Promise<object>
 */
async function getCurrentContent(projectId) {
  const historyId = await getHistoryId(projectId)

  try {
    return await fetchJson(
      `${HISTORY_V1_URL}/projects/${historyId}/latest/content`,
      {
        method: 'GET',
        basicAuth: HISTORY_V1_BASIC_AUTH,
      }
    )
  } catch (err) {
    throw OError.tag(err, 'failed to load project history', { historyId })
  }
}

/**
 * Warning: Don't use this method for large projects. It will eagerly load all
 * the history data and apply all operations.
 * @param {string} projectId
 * @param {number} version
 *
 * @returns Promise<object>
 */
async function getContentAtVersion(projectId, version) {
  const historyId = await getHistoryId(projectId)

  try {
    return await fetchJson(
      `${HISTORY_V1_URL}/projects/${historyId}/versions/${version}/content`,
      {
        method: 'GET',
        basicAuth: HISTORY_V1_BASIC_AUTH,
      }
    )
  } catch (err) {
    throw OError.tag(
      err,
      'failed to load project history snapshot at version',
      { historyId, version }
    )
  }
}

/**
 * Get the latest chunk from history
 *
 * @param {string} projectId
 */
async function getLatestHistory(projectId) {
  const historyId = await getHistoryId(projectId)

  return await fetchJson(
    `${HISTORY_V1_URL}/projects/${historyId}/latest/history`,
    {
      basicAuth: HISTORY_V1_BASIC_AUTH,
    }
  )
}

/**
 * Get history changes since a given version
 *
 * @param {string} projectId
 * @param {object} opts
 * @param {number} opts.since - The start version of changes to get
 */
async function getChanges(projectId, opts = {}) {
  const historyId = await getHistoryId(projectId)

  const url = new URL(`${HISTORY_V1_URL}/projects/${historyId}/changes`)
  if (opts.since) {
    url.searchParams.set('since', opts.since)
  }

  return await fetchJson(url, {
    basicAuth: HISTORY_V1_BASIC_AUTH,
  })
}

async function getHistoryId(projectId) {
  const project = await ProjectGetter.promises.getProject(projectId, {
    overleaf: true,
  })
  const historyId = project?.overleaf?.history?.id
  if (!historyId) {
    throw new OError('project does not have a history id', { projectId })
  }
  return historyId
}

async function injectUserDetails(data) {
  // data can be either:
  // {
  //   diff: [{
  //     i: "foo",
  //     meta: {
  //       users: ["user_id", v1_user_id, ...]
  //       ...
  //     }
  //   }, ...]
  // }
  // or
  // {
  //   updates: [{
  //     pathnames: ["main.tex"]
  //     meta: {
  //       users: ["user_id", v1_user_id, ...]
  //       ...
  //     },
  //     ...
  //   }, ...]
  // }
  // Either way, the top level key points to an array of objects with a meta.users property
  // that we need to replace user_ids with populated user objects.
  // Note that some entries in the users arrays may be v1 ids returned by the v1 history
  // service. v1 ids will be `numbers`
  let userIds = new Set()
  let v1UserIds = new Set()
  const entries = Array.isArray(data.diff)
    ? data.diff
    : Array.isArray(data.updates)
      ? data.updates
      : []
  for (const entry of entries) {
    for (const user of (entry.meta && entry.meta.users) || []) {
      if (typeof user === 'string') {
        userIds.add(user)
      } else if (typeof user === 'number') {
        v1UserIds.add(user)
      }
    }
  }

  userIds = Array.from(userIds)
  v1UserIds = Array.from(v1UserIds)
  const projection = { first_name: 1, last_name: 1, email: 1 }
  const usersArray = await UserGetter.promises.getUsers(userIds, projection)
  const users = {}
  for (const user of usersArray) {
    users[user._id.toString()] = _userView(user)
  }
  projection.overleaf = 1
  const v1IdentifiedUsersArray = await UserGetter.promises.getUsersByV1Ids(
    v1UserIds,
    projection
  )
  for (const user of v1IdentifiedUsersArray) {
    users[user.overleaf.id] = _userView(user)
  }
  for (const entry of entries) {
    if (entry.meta != null) {
      entry.meta.users = ((entry.meta && entry.meta.users) || []).map(user => {
        if (typeof user === 'string' || typeof user === 'number') {
          return users[user]
        } else {
          return user
        }
      })
    }
  }
  return data
}

function _userView(user) {
  const { _id, first_name: firstName, last_name: lastName, email } = user
  return { first_name: firstName, last_name: lastName, email, id: _id }
}

module.exports = {
  getBlobLocation,
  initializeProject: callbackify(initializeProject),
  flushProject: callbackify(flushProject),
  resyncProject: callbackify(resyncProject),
  deleteProject: callbackify(deleteProject),
  deleteProjectHistory: callbackify(deleteProjectHistory),
  injectUserDetails: callbackify(injectUserDetails),
  getCurrentContent: callbackify(getCurrentContent),
  uploadBlobFromDisk: callbackify(uploadBlobFromDisk),
  copyBlob: callbackify(copyBlob),
  requestBlobWithFallback: callbackify(requestBlobWithFallback),
  getLatestHistory: callbackify(getLatestHistory),
  getChanges: callbackify(getChanges),
  promises: {
    loadGlobalBlobs,
    initializeProject,
    flushProject,
    resyncProject,
    deleteProject,
    injectUserDetails,
    deleteProjectHistory,
    getCurrentContent,
    getContentAtVersion,
    uploadBlobFromDisk,
    copyBlob,
    requestBlobWithFallback,
    getLatestHistory,
    getChanges,
  },
}
@@ -0,0 +1,272 @@
// @ts-check

import { callbackify } from 'node:util'
import OError from '@overleaf/o-error'
import logger from '@overleaf/logger'
import HistoryManager from '../History/HistoryManager.js'
import DocumentUpdaterHandler from '../DocumentUpdater/DocumentUpdaterHandler.js'
import DocstoreManager from '../Docstore/DocstoreManager.js'
import ProjectOptionsHandler from '../Project/ProjectOptionsHandler.js'
import mongodb from '../../infrastructure/mongodb.js'

const { db, ObjectId, READ_PREFERENCE_SECONDARY } = mongodb

/**
 * Migrate projects based on a query.
 *
 * @param {object} opts
 * @param {string[]} [opts.projectIds]
 * @param {string[]} [opts.ownerIds]
 * @param {string} [opts.minId]
 * @param {string} [opts.maxId]
 * @param {number} [opts.maxCount]
 * @param {"forwards" | "backwards"} [opts.direction]
 * @param {boolean} [opts.force]
 * @param {boolean} [opts.stopOnError]
 * @param {boolean} [opts.quickOnly]
 * @param {number} [opts.concurrency]
 */
async function migrateProjects(opts = {}) {
  const {
    ownerIds,
    projectIds,
    minId,
    maxId,
    maxCount = Infinity,
    direction = 'forwards',
    force = false,
    stopOnError = false,
    quickOnly = false,
    concurrency = 1,
  } = opts

  const clauses = []

  // skip projects that don't have full project history
  clauses.push({ 'overleaf.history.id': { $exists: true } })

  if (projectIds != null) {
    clauses.push({ _id: { $in: projectIds.map(id => new ObjectId(id)) } })
  }
  if (ownerIds != null) {
    clauses.push({ owner_ref: { $in: ownerIds.map(id => new ObjectId(id)) } })
  }
  if (minId) {
    clauses.push({ _id: { $gte: new ObjectId(minId) } })
  }
  if (maxId) {
    clauses.push({ _id: { $lte: new ObjectId(maxId) } })
  }

  const filter = {}
  if (clauses.length > 0) {
    filter.$and = clauses
  }

  const projects = db.projects
    .find(filter, {
      readPreference: READ_PREFERENCE_SECONDARY,
      projection: { _id: 1, overleaf: 1 },
    })
    .sort({ _id: -1 })

  let terminating = false
  const handleSignal = signal => {
    logger.info({ signal }, 'History ranges support migration received signal')
    terminating = true
  }
  process.on('SIGINT', handleSignal)
  process.on('SIGTERM', handleSignal)

  const projectsProcessed = {
    quick: 0,
    skipped: 0,
    resync: 0,
    total: 0,
  }
  const jobsByProjectId = new Map()
  let errors = 0

  for await (const project of projects) {
    if (projectsProcessed.total >= maxCount) {
      break
    }

    if (errors > 0 && stopOnError) {
      break
    }

    if (terminating) {
      break
    }

    const projectId = project._id.toString()

    if (!force) {
      // Skip projects that are already migrated
      if (
        (direction === 'forwards' &&
          project.overleaf.history.rangesSupportEnabled) ||
        (direction === 'backwards' &&
          !project.overleaf.history.rangesSupportEnabled)
      ) {
        continue
      }
    }

    if (jobsByProjectId.size >= concurrency) {
      // Wait until the next job finishes
      await Promise.race(jobsByProjectId.values())
    }

    const job = processProject(projectId, direction, quickOnly)
      .then(info => {
        jobsByProjectId.delete(projectId)
        projectsProcessed[info.migrationType] += 1
        projectsProcessed.total += 1
        logger.debug(
          {
            projectId,
            direction,
            projectsProcessed,
            errors,
            ...info,
          },
          'History ranges support migration'
        )
        if (projectsProcessed.total % 10000 === 0) {
          logger.info(
            { projectsProcessed, errors, lastProjectId: projectId },
            'History ranges support migration progress'
          )
        }
      })
      .catch(err => {
        jobsByProjectId.delete(projectId)
        errors += 1
        logger.error(
          { err, projectId, direction, projectsProcessed, errors },
          'Failed to migrate history ranges support'
        )
      })

    jobsByProjectId.set(projectId, job)
  }

  // Let the last jobs finish
  await Promise.all(jobsByProjectId.values())
}

/**
 * Migrate a single project
 *
 * @param {string} projectId
 * @param {"forwards" | "backwards"} direction
 * @param {boolean} quickOnly
 */
async function processProject(projectId, direction, quickOnly) {
  const startTimeMs = Date.now()
  const quickMigrationSuccess = await quickMigration(projectId, direction)
  let migrationType
  if (quickMigrationSuccess) {
    migrationType = 'quick'
  } else if (quickOnly) {
    migrationType = 'skipped'
  } else {
    await migrateProject(projectId, direction)
    migrationType = 'resync'
  }
  const elapsedMs = Date.now() - startTimeMs
  return { migrationType, elapsedMs }
}

/**
 * Attempt a quick migration (without resync)
 *
 * @param {string} projectId
 * @param {"forwards" | "backwards"} direction
 * @return {Promise<boolean>} whether or not the quick migration was a success
 */
async function quickMigration(projectId, direction = 'forwards') {
  const blockSuccess =
    await DocumentUpdaterHandler.promises.blockProject(projectId)
  if (!blockSuccess) {
    return false
  }

  let projectHasRanges
  try {
    projectHasRanges =
      await DocstoreManager.promises.projectHasRanges(projectId)
  } catch (err) {
    // Docstore request probably timed out. Assume the project has ranges
    logger.warn(
      { err, projectId },
      'Failed to check if project has ranges; proceeding with a resync migration'
    )
    projectHasRanges = true
  }
  if (projectHasRanges) {
    await DocumentUpdaterHandler.promises.unblockProject(projectId)
    return false
  }

  try {
    await ProjectOptionsHandler.promises.setHistoryRangesSupport(
      projectId,
      direction === 'forwards'
    )
  } catch (err) {
    await DocumentUpdaterHandler.promises.unblockProject(projectId)
    await hardResyncProject(projectId)
    throw err
  }

  let wasBlocked
  try {
    wasBlocked = await DocumentUpdaterHandler.promises.unblockProject(projectId)
  } catch (err) {
    await hardResyncProject(projectId)
    throw err
  }
  if (!wasBlocked) {
    await hardResyncProject(projectId)
    throw new OError('Tried to unblock project but it was not blocked', {
      projectId,
    })
  }

  return true
}

/**
 * Migrate a single project
 *
 * @param {string} projectId
 * @param {"forwards" | "backwards"} direction
 */
async function migrateProject(projectId, direction = 'forwards') {
  await HistoryManager.promises.flushProject(projectId)
  await HistoryManager.promises.resyncProject(projectId, {
    historyRangesMigration: direction,
  })
}

/**
 * Hard resync a project
 *
 * This is used when something goes wrong with the quick migration after we've
 * changed the history ranges support flag on a project.
 *
 * @param {string} projectId
 */
async function hardResyncProject(projectId) {
  await HistoryManager.promises.flushProject(projectId)
  await HistoryManager.promises.resyncProject(projectId, { force: true })
}

export default {
  migrateProjects: callbackify(migrateProjects),
  migrateProject: callbackify(migrateProject),
  promises: { migrateProjects, migrateProject },
}
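
A sketch of driving the migration from a script; the module path in the import is an assumption, not taken from this commit:

import HistoryRangesSupportMigrationHandler from './HistoryRangesSupportMigrationHandler.mjs'

// Migrate at most 1000 not-yet-migrated projects, five at a time,
// attempting the cheap flag-flip path before falling back to a resync.
await HistoryRangesSupportMigrationHandler.promises.migrateProjects({
  direction: 'forwards',
  maxCount: 1000,
  concurrency: 5,
})
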
179  services/web/app/src/Features/History/HistoryRouter.mjs  Normal file
@@ -0,0 +1,179 @@
// @ts-check

import Settings from '@overleaf/settings'
import { Joi, validate } from '../../infrastructure/Validation.js'
import { RateLimiter } from '../../infrastructure/RateLimiter.js'
import AuthenticationController from '../Authentication/AuthenticationController.js'
import AuthorizationMiddleware from '../Authorization/AuthorizationMiddleware.js'
import RateLimiterMiddleware from '../Security/RateLimiterMiddleware.js'
import HistoryController from './HistoryController.js'

const rateLimiters = {
  downloadProjectRevision: new RateLimiter('download-project-revision', {
    points: 30,
    duration: 60 * 60,
  }),
  getProjectBlob: new RateLimiter('get-project-blob', {
    // Download project in full once per hour
    points: Settings.maxEntitiesPerProject,
    duration: 60 * 60,
  }),
  flushHistory: new RateLimiter('flush-project-history', {
    points: 30,
    duration: 60,
  }),
}

function apply(webRouter, privateApiRouter) {
  // Blobs

  webRouter.head(
    '/project/:project_id/blob/:hash',
    validate({
      params: Joi.object({
        project_id: Joi.objectId().required(),
        hash: Joi.string().required().hex().length(40),
      }),
      query: Joi.object({
        fallback: Joi.objectId().optional(),
      }),
    }),
    RateLimiterMiddleware.rateLimit(rateLimiters.getProjectBlob),
    AuthorizationMiddleware.ensureUserCanReadProject,
    HistoryController.headBlob
  )
  webRouter.get(
    '/project/:project_id/blob/:hash',
    validate({
      params: Joi.object({
        project_id: Joi.objectId().required(),
        hash: Joi.string().required().hex().length(40),
      }),
      query: Joi.object({
        fallback: Joi.objectId().optional(),
      }),
    }),
    RateLimiterMiddleware.rateLimit(rateLimiters.getProjectBlob),
    AuthorizationMiddleware.ensureUserCanReadProject,
    HistoryController.getBlob
  )

  // History diffs

  webRouter.get(
    '/project/:Project_id/updates',
    AuthorizationMiddleware.blockRestrictedUserFromProject,
    AuthorizationMiddleware.ensureUserCanReadProject,
    HistoryController.proxyToHistoryApiAndInjectUserDetails
  )
  webRouter.get(
    '/project/:Project_id/doc/:doc_id/diff',
    AuthorizationMiddleware.blockRestrictedUserFromProject,
    AuthorizationMiddleware.ensureUserCanReadProject,
    HistoryController.proxyToHistoryApi
  )
  webRouter.get(
    '/project/:Project_id/diff',
    AuthorizationMiddleware.blockRestrictedUserFromProject,
    AuthorizationMiddleware.ensureUserCanReadProject,
    HistoryController.proxyToHistoryApiAndInjectUserDetails
  )
  webRouter.get(
    '/project/:Project_id/filetree/diff',
    AuthorizationMiddleware.blockRestrictedUserFromProject,
    AuthorizationMiddleware.ensureUserCanReadProject,
    HistoryController.proxyToHistoryApi
  )

  // File and project restore

  webRouter.post(
    '/project/:project_id/restore_file',
    AuthorizationMiddleware.ensureUserCanWriteProjectContent,
    HistoryController.restoreFileFromV2
  )
  webRouter.post(
    '/project/:project_id/revert_file',
    AuthorizationMiddleware.ensureUserCanWriteProjectContent,
    HistoryController.revertFile
  )
  webRouter.post(
    '/project/:project_id/revert-project',
    AuthorizationMiddleware.ensureUserCanWriteProjectContent,
    HistoryController.revertProject
  )

  // History download

  webRouter.get(
    '/project/:project_id/version/:version/zip',
    RateLimiterMiddleware.rateLimit(rateLimiters.downloadProjectRevision),
    AuthorizationMiddleware.blockRestrictedUserFromProject,
    AuthorizationMiddleware.ensureUserCanReadProject,
    HistoryController.downloadZipOfVersion
  )

  // History flush and resync

  webRouter.post(
    '/project/:Project_id/flush',
    RateLimiterMiddleware.rateLimit(rateLimiters.flushHistory),
    AuthorizationMiddleware.blockRestrictedUserFromProject,
    AuthorizationMiddleware.ensureUserCanReadProject,
    HistoryController.proxyToHistoryApi
  )
  privateApiRouter.post(
    '/project/:Project_id/history/resync',
    AuthenticationController.requirePrivateApiAuth(),
    HistoryController.resyncProjectHistory
  )

  // History labels

  webRouter.get(
    '/project/:Project_id/labels',
    AuthorizationMiddleware.blockRestrictedUserFromProject,
    AuthorizationMiddleware.ensureUserCanReadProject,
    HistoryController.getLabels
  )
  webRouter.post(
    '/project/:Project_id/labels',
    AuthorizationMiddleware.ensureUserCanWriteOrReviewProjectContent,
    HistoryController.createLabel
  )
  webRouter.delete(
    '/project/:Project_id/labels/:label_id',
    AuthorizationMiddleware.ensureUserCanWriteOrReviewProjectContent,
    HistoryController.deleteLabel
  )

  // History snapshot

  webRouter.get(
    '/project/:project_id/latest/history',
    validate({
      params: Joi.object({
        project_id: Joi.objectId().required(),
      }),
    }),
    AuthorizationMiddleware.blockRestrictedUserFromProject,
    AuthorizationMiddleware.ensureUserCanReadProject,
    HistoryController.getLatestHistory
  )
  webRouter.get(
    '/project/:project_id/changes',
    validate({
      params: Joi.object({
        project_id: Joi.objectId().required(),
      }),
      query: Joi.object({
        since: Joi.number().integer().min(0).optional(),
      }),
    }),
    AuthorizationMiddleware.blockRestrictedUserFromProject,
    AuthorizationMiddleware.ensureUserCanReadProject,
    HistoryController.getChanges
  )
}

export default { apply }
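
The two router arguments split the surface area: user-facing history routes mount on webRouter, while the resync endpoint is reachable only through the authenticated private API. A mounting sketch (the router instances are assumptions owned by the host app):

import HistoryRouter from './Features/History/HistoryRouter.mjs'

HistoryRouter.apply(webRouter, privateApiRouter)
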
21  services/web/app/src/Features/History/HistoryURLHelper.js  Normal file
@@ -0,0 +1,21 @@
// Pass settings to enable consistent unit tests from .js and .mjs modules
function projectHistoryURLWithFilestoreFallback(
  Settings,
  projectId,
  historyId,
  fileRef,
  origin
) {
  const filestoreURL = `${Settings.apis.filestore.url}/project/${projectId}/file/${fileRef._id}?from=${origin}`
  // TODO: When this file is converted to ES modules we will be able to use Features.hasFeature('project-history-blobs'). Currently we can't stub the feature return value in tests.
  if (fileRef.hash && Settings.enableProjectHistoryBlobs) {
    return {
      url: `${Settings.apis.project_history.url}/project/${historyId}/blob/${fileRef.hash}`,
      fallbackURL: filestoreURL,
    }
  } else {
    return { url: filestoreURL }
  }
}

module.exports = { projectHistoryURLWithFilestoreFallback }
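
projectHistoryURLWithFilestoreFallback prefers the history blob store whenever the file carries a content hash and the blobs feature flag is on, but always keeps a filestore URL at hand. An illustrative call (the identifiers below are placeholders):

const { url, fallbackURL } = projectHistoryURLWithFilestoreFallback(
  Settings,
  projectId,
  historyId,
  { _id: fileId, hash: fileHash }, // hash present => blob URL preferred
  'download'
)
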
Some files were not shown because too many files have changed in this diff.