first commit

2025-04-24 13:11:28 +08:00
commit ff9c54d5e4
5960 changed files with 834111 additions and 0 deletions


@@ -0,0 +1,13 @@
FROM sharelatex/sharelatex:2.0.0
# Patch 1: Fixes project deletion (https://github.com/overleaf/overleaf/issues/644)
ADD disable_project_history.patch /etc/sharelatex/disable_project_history.patch
RUN cd /etc/sharelatex && \
patch < disable_project_history.patch
# Patch 2: Fixes admin creation via CLI (https://github.com/overleaf/overleaf/issues/647)
ADD create_and_destroy_users.patch /var/www/sharelatex/tasks/create_and_destroy_users.patch
RUN cd /var/www/sharelatex/tasks/ && \
patch < create_and_destroy_users.patch
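A hotfix image like this one is rebuilt from the directory holding the Dockerfile and its patches; a minimal sketch, with the tag illustrative:

docker build -t sharelatex/sharelatex:2.0.1 .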


@@ -0,0 +1,11 @@
--- CreateAndDestoryUsers.coffee
+++ CreateAndDestoryUsers.coffee
@@ -21,7 +21,7 @@ module.exports = (grunt) ->
user.save (error) ->
throw error if error?
ONE_WEEK = 7 * 24 * 60 * 60 # seconds
- OneTimeTokenHandler.getNewToken user._id, { expiresIn: ONE_WEEK }, (err, token)->
+ OneTimeTokenHandler.getNewToken "password", { expiresIn: ONE_WEEK, email:user.email, user_id: user._id.toString() }, (err, token)->
return next(err) if err?
console.log ""
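For context, this is the grunt task behind admin creation in CE 2.x; a typical invocation, with container name and email as placeholders, would look roughly like:

docker exec sharelatex /bin/bash -c "cd /var/www/sharelatex && grunt user:create-admin --email=admin@example.com"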


@@ -0,0 +1,11 @@
--- settings.coffee
+++ settings.coffee
@@ -200,6 +200,8 @@ settings =
# is not available
v1:
url: ""
+ project_history:
+ enabled: false
references:{}
notifications:undefined


@@ -0,0 +1,60 @@
--- UploadsRouter.js
+++ UploadsRouter.js
@@ -1,13 +1,3 @@
-/* eslint-disable
- no-unused-vars,
-*/
-// TODO: This file was created by bulk-decaffeinate.
-// Fix any style issues and re-enable lint.
-/*
- * decaffeinate suggestions:
- * DS102: Remove unnecessary code created because of implicit returns
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
const AuthorizationMiddleware = require('../Authorization/AuthorizationMiddleware')
const AuthenticationController = require('../Authentication/AuthenticationController')
const ProjectUploadController = require('./ProjectUploadController')
@@ -28,18 +18,30 @@ module.exports = {
ProjectUploadController.uploadProject
)
- return webRouter.post(
- '/Project/:Project_id/upload',
- RateLimiterMiddleware.rateLimit({
- endpointName: 'file-upload',
- params: ['Project_id'],
- maxRequests: 200,
- timeInterval: 60 * 30
- }),
- AuthenticationController.requireLogin(),
- AuthorizationMiddleware.ensureUserCanWriteProjectContent,
- ProjectUploadController.multerMiddleware,
- ProjectUploadController.uploadFile
- )
+ const fileUploadEndpoint = '/Project/:Project_id/upload'
+ const fileUploadRateLimit = RateLimiterMiddleware.rateLimit({
+ endpointName: 'file-upload',
+ params: ['Project_id'],
+ maxRequests: 200,
+ timeInterval: 60 * 30
+ })
+ if (Settings.allowAnonymousReadAndWriteSharing) {
+ webRouter.post(
+ fileUploadEndpoint,
+ fileUploadRateLimit,
+ AuthorizationMiddleware.ensureUserCanWriteProjectContent,
+ ProjectUploadController.multerMiddleware,
+ ProjectUploadController.uploadFile
+ )
+ } else {
+ webRouter.post(
+ fileUploadEndpoint,
+ fileUploadRateLimit,
+ AuthenticationController.requireLogin(),
+ AuthorizationMiddleware.ensureUserCanWriteProjectContent,
+ ProjectUploadController.multerMiddleware,
+ ProjectUploadController.uploadFile
+ )
+ }
}
}
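The new branch keys off Settings.allowAnonymousReadAndWriteSharing, which Community Edition settings of this era read from an environment variable; a sketch of enabling it at container start, assuming that variable name:

docker run -d --name sharelatex -e SHARELATEX_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING=true sharelatex/sharelatex:2.0.1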


@@ -0,0 +1,11 @@
--- TokenAccessHandler.js
+++ TokenAccessHandler.js
@@ -255,7 +255,7 @@ const TokenAccessHandler = {
getV1DocPublishedInfo(token, callback) {
// default to allowing access
- if (!Settings.apis || !Settings.apis.v1) {
+ if (!Settings.apis.v1 || !Settings.apis.v1.url) {
return callback(null, { allow: true })
}
V1Api.request(


@@ -0,0 +1,11 @@
--- Features.js
+++ Features.js
@@ -53,6 +53,8 @@ module.exports = Features = {
return Settings.apis.references.url != null
case 'saml':
return Settings.enableSaml
+ case 'link-url':
+ return Settings.apis.linkedUrlProxy && Settings.apis.linkedUrlProxy.url
default:
throw new Error(`unknown feature: ${feature}`)
}


@@ -0,0 +1,20 @@
--- new-file-modal.pug
+++ new-file-modal.pug
@@ -21,11 +21,12 @@ script(type='text/ng-template', id='newFileModalTemplate')
i.fa.fa-fw.fa-folder-open
|
| From Another Project
- li(ng-class="type == 'url' ? 'active' : null")
- a(href, ng-click="type = 'url'")
- i.fa.fa-fw.fa-globe
- |
- | From External URL
+ if hasFeature('link-url')
+ li(ng-class="type == 'url' ? 'active' : null")
+ a(href, ng-click="type = 'url'")
+ i.fa.fa-fw.fa-globe
+ |
+ | From External URL
!= moduleIncludes("newFileModal:selector", locals)
td(class="modal-new-file--body modal-new-file--body-{{type}}")


@@ -0,0 +1,26 @@
--- AnalyticsController.js
+++ AnalyticsController.js
@@ -3,9 +3,13 @@ const Errors = require('../Errors/Errors')
const AuthenticationController = require('../Authentication/AuthenticationController')
const InstitutionsAPI = require('../Institutions/InstitutionsAPI')
const GeoIpLookup = require('../../infrastructure/GeoIpLookup')
+const Features = require('../../infrastructure/Features')
module.exports = {
updateEditingSession(req, res, next) {
+ if (!Features.hasFeature('analytics')) {
+ return res.send(204)
+ }
const userId = AuthenticationController.getLoggedInUserId(req)
const { projectId } = req.params
let countryCode = null
@@ -28,6 +32,9 @@ module.exports = {
},
recordEvent(req, res, next) {
+ if (!Features.hasFeature('analytics')) {
+ return res.send(204)
+ }
const userId =
AuthenticationController.getLoggedInUserId(req) || req.sessionID
AnalyticsManager.recordEvent(userId, req.params.event, req.body, error =>


@@ -0,0 +1,10 @@
--- Features.js
+++ Features.js
@@ -41,6 +41,7 @@ module.exports = Features = {
case 'templates-server-pro':
return Settings.overleaf == null
case 'affiliations':
+ case 'analytics':
// Checking both properties is needed for the time being to allow
// enabling the feature in web-api and disabling in Server Pro
// see https://github.com/overleaf/web-internal/pull/2127


@@ -0,0 +1,31 @@
FROM sharelatex/sharelatex:2.0.1
# Patch 1: Fixes anonymous link sharing
ADD 1-anon-upload.patch /var/www/sharelatex/web/app/src/Features/Uploads/1-anon-upload.patch
RUN cd /var/www/sharelatex/web/app/src/Features/Uploads/ && \
patch < 1-anon-upload.patch
# Patch 2: Fixes read-only access
ADD 2-read-only-access.patch /var/www/sharelatex/web/app/src/Features/TokenAccess/3-read-only-access.patch
RUN cd /var/www/sharelatex/web/app/src/Features/TokenAccess/ && \
patch < 3-read-only-access.patch
# Patch 3: Fixes url linking
ADD 3-url-linking-1.patch /var/www/sharelatex/web/app/src/infrastructure/6-url-linking-1.patch
RUN cd /var/www/sharelatex/web/app/src/infrastructure/ && \
patch < 6-url-linking-1.patch
ADD 4-url-linking-2.patch /var/www/sharelatex/web/app/views/project/editor/7-url-linking-2.patch
RUN cd /var/www/sharelatex/web/app/views/project/editor/ && \
patch < 7-url-linking-2.patch
# Patch 4: Disables analytics
ADD 5-disable-analytics-1.patch /var/www/sharelatex/web/app/src/Features/Analytics/8-disable-analytics-1.patch
RUN cd /var/www/sharelatex/web/app/src/Features/Analytics/ && \
patch < 8-disable-analytics-1.patch
ADD 6-disable-analytics-2.patch /var/www/sharelatex/web/app/src/infrastructure/9-disable-analytics-2.patch
RUN cd /var/www/sharelatex/web/app/src/infrastructure/ && \
patch < 9-disable-analytics-2.patch


@@ -0,0 +1,8 @@
FROM sharelatex/sharelatex:2.1.0
# Patch: defines recaptcha config to fix share-related issues
# - https://github.com/overleaf/overleaf/issues/684
ADD add-recaptcha-config.patch /etc/sharelatex/add-recaptcha-config.patch
RUN cd /etc/sharelatex/ && \
patch < add-recaptcha-config.patch


@@ -0,0 +1,14 @@
--- a/settings.coffee
+++ b/settings.coffee
@@ -180,6 +180,11 @@ settings =
# cookie with a secure flag (recommended).
secureCookie: process.env["SHARELATEX_SECURE_COOKIE"]?
+ recaptcha:
+ disabled:
+ invite: true
+ register: true
+
# If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc)
# then set this to true to allow it to correctly detect the forwarded IP
# address and http/https protocol information.


@@ -0,0 +1,7 @@
FROM sharelatex/sharelatex:2.3.0
# Patch: Fixes NPE when invoking synctex (https://github.com/overleaf/overleaf/issues/756)
ADD check-clsi-setting-exists.patch /var/www/sharelatex/clsi/app/js/check-clsi-setting-exists.patch
RUN cd /var/www/sharelatex/clsi/app/js && \
patch < check-clsi-setting-exists.patch


@@ -0,0 +1,11 @@
--- a/app/js/CompileManager.js
+++ b/app/js/CompileManager.js
@@ -536,7 +536,7 @@ module.exports = CompileManager = {
compileName,
command,
directory,
- Settings.clsi != null ? Settings.clsi.docker.image : undefined,
+ Settings.clsi && Settings.clsi.docker ? Settings.clsi.docker.image : undefined,
timeout,
{},
function(error, output) {


@@ -0,0 +1,6 @@
FROM sharelatex/sharelatex:2.4.0
# Patch: Fixes missing dependencies on web startup (https://github.com/overleaf/overleaf/issues/767)
RUN cd /var/www/sharelatex/web && \
npm install i18next@^19.6.3 i18next-fs-backend@^1.0.7 i18next-http-middleware@^3.0.2


@@ -0,0 +1,10 @@
FROM sharelatex/sharelatex:2.4.1
# Patch: Fixes anonymous read/write sharing
COPY anonymous-metadata.patch ${baseDir}
RUN cd ${baseDir} && patch -p0 < anonymous-metadata.patch
# Patch: Fixes left footer with HTML text
COPY left-footer-skip-translation.patch ${baseDir}
RUN cd ${baseDir} && patch -p0 < left-footer-skip-translation.patch


@@ -0,0 +1,43 @@
--- /var/www/sharelatex/web/app/src/router.js 2020-09-14 20:21:39.741433000 +0000
+++ /var/www/sharelatex/web/app/src/router.js 2020-09-14 20:13:08.000000000 +0000
@@ -607,16 +607,17 @@
ProjectDownloadsController.downloadMultipleProjects
)
+ console.log(`allowAnonymousReadAndWriteSharing: ${Settings.allowAnonymousReadAndWriteSharing}`)
webRouter.get(
'/project/:project_id/metadata',
AuthorizationMiddleware.ensureUserCanReadProject,
- AuthenticationController.requireLogin(),
+ Settings.allowAnonymousReadAndWriteSharing ? (req, res, next) => { next() } : AuthenticationController.requireLogin(),
MetaController.getMetadata
- )
+ )
webRouter.post(
'/project/:project_id/doc/:doc_id/metadata',
AuthorizationMiddleware.ensureUserCanReadProject,
- AuthenticationController.requireLogin(),
+ Settings.allowAnonymousReadAndWriteSharing ? (req, res, next) => { next() } : AuthenticationController.requireLogin(),
MetaController.broadcastMetadataForDoc
)
privateApiRouter.post(
--- /var/www/sharelatex/web/app/src/Features/Contacts/ContactRouter.js 2020-09-14 20:21:52.243779000 +0000
+++ /var/www/sharelatex/web/app/src/Features/Contacts/ContactRouter.js 2020-09-14 20:13:08.000000000 +0000
@@ -5,6 +5,8 @@
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
+const Settings = require('settings-sharelatex')
+
const AuthenticationController = require('../Authentication/AuthenticationController')
const ContactController = require('./ContactController')
@@ -12,7 +14,7 @@
apply(webRouter, apiRouter) {
return webRouter.get(
'/user/contacts',
- AuthenticationController.requireLogin(),
+ Settings.allowAnonymousReadAndWriteSharing ? (req, res, next) => { next() } : AuthenticationController.requireLogin(),
ContactController.getContacts
)
}


@@ -0,0 +1,12 @@
--- /var/www/sharelatex/web/app/views/layout/footer.pug
+++ /var/www/sharelatex/web/app/views/layout/footer.pug
@@ -32,7 +32,7 @@ footer.site-footer
if item.url
a(href=item.url, class=item.class) !{translate(item.text)}
else
- | !{translate(item.text)}
+ | !{item.text}
ul.col-md-3.text-right


@@ -0,0 +1,13 @@
FROM sharelatex/sharelatex:2.5.0
# Patch #826: Fixes log path for contacts service to be picked up by logrotate
COPY contacts-run.patch /etc/service/contacts-sharelatex
RUN cd /etc/service/contacts-sharelatex && patch < contacts-run.patch
# Patch #826: delete old logs for the contacts service
COPY delete-old-logs.patch /etc/my_init.d
RUN cd /etc/my_init.d && patch < delete-old-logs.patch \
&& chmod +x /etc/my_init.d/10_delete_old_logs.sh
# Patch #827: fix logrotate file permissions
RUN chmod 644 /etc/logrotate.d/sharelatex


@@ -0,0 +1,8 @@
--- a/run
+++ b/run
@@ -7,4 +7,4 @@ if [ "$DEBUG_NODE" == "true" ]; then
NODE_PARAMS="--inspect=0.0.0.0:30360"
fi
-exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/contacts/app.js >> /var/log/sharelatex/contacts 2>&1
+exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/contacts/app.js >> /var/log/sharelatex/contacts.log 2>&1


@@ -0,0 +1,10 @@
--- /dev/null
+++ b/10_delete_old_logs.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+set -e
+
+# Up to version 2.5.0 the logs of the contacts service were written into a
+# file that was not picked up by logrotate.
+# The service is stable and we can safely discard any logs.
+rm -vf /var/log/sharelatex/contacts


@@ -0,0 +1,29 @@
const Settings = require('settings-sharelatex')
const mongojs = require('mongojs')
const db = mongojs(Settings.mongo.url, ['tokens'])
// eslint-disable-next-line import/no-extraneous-dependencies
const async = require('async')
exports.migrate = (client, done) => {
console.log(`>> Updating 'data.email' to lower case in tokens`)
db.tokens.find({}, { 'data.email': 1 }, (err, tokens) => {
if (err) {
return done(err)
}
async.eachSeries(
tokens,
(token, callback) => {
db.tokens.update(
{ _id: token._id },
{ $set: { 'data.email': token.data.email.toLowerCase() } },
callback
)
},
done
)
})
}
exports.rollback = (client, done) => done()
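The migration's effect can be sanity-checked by counting tokens whose data.email still contains upper-case characters; an illustrative mongo shell one-liner, assuming the database is named sharelatex:

mongo sharelatex --eval 'db.tokens.count({ "data.email": /[A-Z]/ })'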


@@ -0,0 +1,8 @@
FROM sharelatex/sharelatex:2.5.1
# Patch: fixes registration token creation
COPY create-token-lowercase-email.patch ${baseDir}
RUN cd ${baseDir} && patch -p0 < create-token-lowercase-email.patch
# Migration for tokens with invalid email addresses
ADD 12_update_token_email.js /var/www/sharelatex/migrations/12_update_token_email.js


@@ -0,0 +1,11 @@
--- /var/www/sharelatex/web/app/src/Features/User/UserRegistrationHandler.js
+++ /var/www/sharelatex/web/app/src/Features/User/UserRegistrationHandler.js
@@ -122,7 +122,7 @@ const UserRegistrationHandler = {
const ONE_WEEK = 7 * 24 * 60 * 60 // seconds
OneTimeTokenHandler.getNewToken(
'password',
- { user_id: user._id.toString(), email },
+ { user_id: user._id.toString(), email: user.email },
{ expiresIn: ONE_WEEK },
(err, token) => {
if (err != null) {


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:2.6.0-RC1
# Patch: fixes project restore inserting a bad projectId into deletedFiles
COPY document-deleter-object-id.patch ${baseDir}
RUN cd ${baseDir} && patch -p0 < document-deleter-object-id.patch


@@ -0,0 +1,10 @@
--- /var/www/sharelatex/web/app/src/Features/Project/ProjectDeleter.js
+++ /var/www/sharelatex/web/app/src/Features/Project/ProjectDeleter.js
@@ -278,6 +278,7 @@ async function deleteProject(projectId, options = {}) {
}
async function undeleteProject(projectId, options = {}) {
+ projectId = ObjectId(projectId)
let deletedProject = await DeletedProject.findOne({
'deleterData.deletedProjectId': projectId
}).exec()


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:2.6.1
# Patch: fixes overleaf.com onboarding email being sent in CE/SP
COPY onboarding-email.patch ${baseDir}
RUN cd ${baseDir} && patch -p0 < onboarding-email.patch


@@ -0,0 +1,25 @@
--- /var/www/sharelatex/web/app/src/Features/User/UserCreator.js
+++ /var/www/sharelatex/web/app/src/Features/User/UserCreator.js
@@ -85,13 +85,15 @@ async function createNewUser(attributes, options = {}) {
}
Analytics.recordEvent(user._id, 'user-registered')
- try {
- await UserOnboardingEmailQueueManager.scheduleOnboardingEmail(user)
- } catch (error) {
- logger.error(
- `Failed to schedule sending of onboarding email for user '${user._id}'`,
- error
- )
+ if(Features.hasFeature('saas')) {
+ try {
+ await UserOnboardingEmailQueueManager.scheduleOnboardingEmail(user)
+ } catch (error) {
+ logger.error(
+ `Failed to schedule sending of onboarding email for user '${user._id}'`,
+ error
+ )
+ }
}
return user


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:2.7.0
# Patch: removes the /disconnectAllUsers endpoint from the private API
COPY remove-disconnect-endpoint.patch .
RUN patch -p0 < remove-disconnect-endpoint.patch


@@ -0,0 +1,14 @@
--- /var/www/sharelatex/web/app/src/router.js
+++ /var/www/sharelatex/web/app/src/router.js
@@ -995,11 +995,6 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) {
AdminController.unregisterServiceWorker
)
- privateApiRouter.post(
- '/disconnectAllUsers',
- AdminController.disconnectAllUsers
- )
-
privateApiRouter.get('/perfTest', (req, res) => res.send('hello'))
publicApiRouter.get('/status', (req, res) => {


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.0.0
# Patch: removes the /disconnectAllUsers endpoint from the private API
COPY remove-disconnect-endpoint.patch .
RUN patch -p0 < remove-disconnect-endpoint.patch


@@ -0,0 +1,14 @@
--- /var/www/sharelatex/web/app/src/router.js
+++ /var/www/sharelatex/web/app/src/router.js
@@ -995,11 +995,6 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) {
AdminController.unregisterServiceWorker
)
- privateApiRouter.post(
- '/disconnectAllUsers',
- AdminController.disconnectAllUsers
- )
-
privateApiRouter.get('/perfTest', (req, res) => res.send('hello'))
publicApiRouter.get('/status', (req, res) => {


@@ -0,0 +1,11 @@
FROM sharelatex/sharelatex:3.1.0
# Patch: fixes ShareLaTeX history navigation
# https://github.com/overleaf/overleaf/issues/1035
COPY fix-history-navigation.patch .
RUN patch -p0 < fix-history-navigation.patch
# Rebuild client
# --------------
RUN node genScript compile | bash


@@ -0,0 +1,16 @@
--- services/web/frontend/js/ide/history/controllers/HistoryListController.js
+++ services/web/frontend/js/ide/history/controllers/HistoryListController.js
@@ -62,7 +62,12 @@ App.controller('HistoryListController', function ($scope, $modal, ide) {
return (() => {
const result = []
for (const update of Array.from($scope.history.updates)) {
- let inSelection
+
+ // replacing this declaration with `let` introduces a bug in history point selection:
+ // https://github.com/overleaf/overleaf/issues/1035
+ // eslint-disable-next-line no-var
+ var inSelection
+
if (update.selectedTo) {
inSelection = true
beforeSelection = false


@@ -0,0 +1,6 @@
FROM sharelatex/sharelatex:3.2.0
# Patch: fixes broken source editor
# https://github.com/overleaf/overleaf/issues/1043
COPY disable-codemirror.patch .
RUN patch -p0 < disable-codemirror.patch


@@ -0,0 +1,15 @@
--- services/web/app/src/Features/Project/ProjectController.js
+++ services/web/app/src/Features/Project/ProjectController.js
@@ -1134,11 +1134,7 @@ const ProjectController = {
detachRole = req.params.detachRole
}
- const showNewSourceEditorOption =
- (newSourceEditorAssignment &&
- newSourceEditorAssignment.variant === 'codemirror') ||
- user.betaProgram ||
- shouldDisplayFeature('new_source_editor', false) // also allow override via ?new_source_editor=true
+ const showNewSourceEditorOption = false // disabled in CE/SP (Hotfix 3.2.1)
const showSymbolPalette =
!Features.hasFeature('saas') ||


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.2.1
# Fixes compilation errors in embedded compiles
# https://github.com/overleaf/overleaf/issues/1044
ENV PATH="${PATH}:/usr/local/texlive/2022/bin/x86_64-linux"


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.3.0
# Patch: add migration for convert_archived_state script
COPY pr_10442.patch .
RUN patch -p0 < pr_10442.patch


@@ -0,0 +1,132 @@
--- services/web/scripts/convert_archived_state.js
+++ services/web/scripts/convert_archived_state.js
@@ -6,62 +6,77 @@
const { promiseMapWithLimit } = require('../app/src/util/promises')
// $ node scripts/convert_archived_state.js FIRST,SECOND
-const STAGE = process.argv.pop()
-async function main() {
- if (STAGE.includes('FIRST')) {
- await batchedUpdate(
- 'projects',
- { archived: false },
- {
- $set: { archived: [] },
- }
- )
+async function main(STAGE) {
+ for (const FIELD of ['archived', 'trashed']) {
+ if (STAGE.includes('FIRST')) {
+ await batchedUpdate(
+ 'projects',
+ { [FIELD]: false },
+ {
+ $set: { [FIELD]: [] },
+ }
+ )
- console.error('Done, with first part')
- }
+ console.error('Done, with first part for field:', FIELD)
+ }
- if (STAGE.includes('SECOND')) {
- await batchedUpdate('projects', { archived: true }, performUpdate, {
- _id: 1,
- owner_ref: 1,
- collaberator_refs: 1,
- readOnly_refs: 1,
- tokenAccessReadAndWrite_refs: 1,
- tokenAccessReadOnly_refs: 1,
- })
+ if (STAGE.includes('SECOND')) {
+ await batchedUpdate(
+ 'projects',
+ { [FIELD]: true },
+ async function performUpdate(collection, nextBatch) {
+ await promiseMapWithLimit(
+ WRITE_CONCURRENCY,
+ nextBatch,
+ async project => {
+ try {
+ await upgradeFieldToArray({ collection, project, FIELD })
+ } catch (err) {
+ console.error(project._id, err)
+ throw err
+ }
+ }
+ )
+ },
+ {
+ _id: 1,
+ owner_ref: 1,
+ collaberator_refs: 1,
+ readOnly_refs: 1,
+ tokenAccessReadAndWrite_refs: 1,
+ tokenAccessReadOnly_refs: 1,
+ }
+ )
- console.error('Done, with second part')
+ console.error('Done, with second part for field:', FIELD)
+ }
}
}
-main()
- .then(() => {
- process.exit(0)
- })
- .catch(error => {
- console.error({ error })
- process.exit(1)
- })
-
-async function performUpdate(collection, nextBatch) {
- await promiseMapWithLimit(WRITE_CONCURRENCY, nextBatch, project =>
- setArchived(collection, project)
- )
+module.exports = main
+
+if (require.main === module) {
+ main(process.argv.pop())
+ .then(() => {
+ process.exit(0)
+ })
+ .catch(error => {
+ console.error({ error })
+ process.exit(1)
+ })
}
-async function setArchived(collection, project) {
- const archived = calculateArchivedArray(project)
-
+async function upgradeFieldToArray({ collection, project, FIELD }) {
return collection.updateOne(
{ _id: project._id },
{
- $set: { archived },
+ $set: { [FIELD]: getAllUserIds(project) },
}
)
}
-function calculateArchivedArray(project) {
+function getAllUserIds(project) {
return _.unionWith(
[project.owner_ref],
project.collaberator_refs,
--- /dev/null
+++ services/web/migrations/20221111111111_ce_sp_convert_archived_state.js
@@ -0,0 +1,9 @@
+const runScript = require('../scripts/convert_archived_state')
+
+exports.tags = ['server-ce', 'server-pro']
+
+exports.migrate = async () => {
+ await runScript('FIRST,SECOND')
+}
+
+exports.rollback = async () => {}
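The FIRST stage is a plain boolean-to-array rewrite, so its progress can be checked by counting projects whose flags are still booleans; an illustrative mongo shell check, database name assumed:

mongo sharelatex --eval 'db.projects.count({ $or: [ { archived: { $type: "bool" } }, { trashed: { $type: "bool" } } ] })'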


@@ -0,0 +1,6 @@
FROM sharelatex/sharelatex:3.5.0
# Patch: fix German locales
COPY fix_de_locales.patch .
RUN patch -p0 < fix_de_locales.patch
RUN node genScript compile | bash


@@ -0,0 +1,10 @@
--- services/web/locales/de.json
+++ services/web/locales/de.json
@@ -348,7 +348,6 @@
"edit_dictionary_empty": "Dein benutzerdefiniertes Wörterbuch ist leer.",
"edit_dictionary_remove": "Aus Wörterbuch entfernen",
"editing": "Bearbeitung",
- "editor_and_pdf": "Editor &amp; PDF",
"editor_disconected_click_to_reconnect": "Editor wurde getrennt",
"editor_only_hide_pdf": "Nur Editor <0>(PDF ausblenden)</0>",
"editor_resources": "Editor-Literatur",


@@ -0,0 +1,9 @@
FROM sharelatex/sharelatex:3.5.9
# Patch: strip invite tokens from project data sent over the websocket
COPY pr_13427.patch .
RUN patch -p0 < pr_13427.patch
# Patch: https://github.com/Automattic/mongoose/commit/f1efabf350522257364aa5c2cb36e441cf08f1a2
COPY mongoose_proto.patch .
RUN patch -p0 < mongoose_proto.patch


@@ -0,0 +1,12 @@
--- node_modules/mongoose/lib/document.js
+++ node_modules/mongoose/lib/document.js
@@ -689,6 +689,10 @@ function init(self, obj, doc, opts, prefix) {
function _init(index) {
i = keys[index];
+ // avoid prototype pollution
+ if (i === '__proto__' || i === 'constructor') {
+ return;
+ }
path = prefix + i;
schema = self.$__schema.path(path);
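The guard matters because JSON.parse creates an own "__proto__" key that the key loop above would otherwise copy onto the document; a minimal, illustrative demonstration of that class of payload:

node -e 'const d = JSON.parse("{\"__proto__\":{\"polluted\":true}}"); console.log(Object.keys(d))'  # prints [ '__proto__' ]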


@@ -0,0 +1,92 @@
--- services/web/app/src/Features/Editor/EditorHttpController.js
+++ services/web/app/src/Features/Editor/EditorHttpController.js
@@ -73,6 +73,7 @@ async function joinProject(req, res, next) {
if (isRestrictedUser) {
project.owner = { _id: project.owner._id }
project.members = []
+ project.invites = []
}
// Only show the 'renamed or deleted' message once
if (project.deletedByExternalDataSource) {
--- services/web/app/src/Features/Project/ProjectEditorHandler.js
+++ services/web/app/src/Features/Project/ProjectEditorHandler.js
@@ -48,19 +48,13 @@
deletedDocsFromDocstore
),
members: [],
- invites,
+ invites: this.buildInvitesView(invites),
imageName:
project.imageName != null
? Path.basename(project.imageName)
: undefined,
}
- if (result.invites == null) {
- result.invites = []
- }
- result.invites.forEach(invite => {
- delete invite.token
- })
;({ owner, ownerFeatures, members } =
this.buildOwnerAndMembersViews(members))
result.owner = owner
@@ -99,7 +93,7 @@
let owner = null
let ownerFeatures = null
const filteredMembers = []
- for (const member of Array.from(members || [])) {
+ for (const member of members || []) {
if (member.privilegeLevel === 'owner') {
ownerFeatures = member.user.features
owner = this.buildUserModelView(member.user, 'owner')
@@ -128,24 +122,15 @@
},
buildFolderModelView(folder) {
- let file
const fileRefs = _.filter(folder.fileRefs || [], file => file != null)
return {
_id: folder._id,
name: folder.name,
- folders: Array.from(folder.folders || []).map(childFolder =>
+ folders: (folder.folders || []).map(childFolder =>
this.buildFolderModelView(childFolder)
),
- fileRefs: (() => {
- const result = []
- for (file of Array.from(fileRefs)) {
- result.push(this.buildFileModelView(file))
- }
- return result
- })(),
- docs: Array.from(folder.docs || []).map(doc =>
- this.buildDocModelView(doc)
- ),
+ fileRefs: fileRefs.map(file => this.buildFileModelView(file)),
+ docs: (folder.docs || []).map(doc => this.buildDocModelView(doc)),
}
},
@@ -164,4 +149,21 @@
name: doc.name,
}
},
+
+ buildInvitesView(invites) {
+ if (invites == null) {
+ return []
+ }
+ return invites.map(invite =>
+ _.pick(invite, [
+ '_id',
+ 'createdAt',
+ 'email',
+ 'expires',
+ 'privileges',
+ 'projectId',
+ 'sendingUserId',
+ ])
+ )
+ },
}


@@ -0,0 +1,8 @@
FROM sharelatex/sharelatex:3.5.10
# Patch: Drop the old history collections and increase mongo query timeout
ADD clean_sl_history_data.js /overleaf/services/web/scripts/history/clean_sl_history_data.js
# Patch: convert large deleted docs to files
COPY pr_14200.patch .
RUN patch -p0 < pr_14200.patch


@@ -0,0 +1,70 @@
// Increase default mongo query timeout from 1min to 1h
process.env.MONGO_SOCKET_TIMEOUT = process.env.MONGO_SOCKET_TIMEOUT || '3600000'
const { waitForDb, db } = require('../../app/src/infrastructure/mongodb')
async function main() {
await checkAllProjectsAreMigrated()
await setAllowDowngradeToFalse()
await deleteHistoryCollections()
console.log('Legacy history data cleaned up successfully')
process.exit(0)
}
async function checkAllProjectsAreMigrated() {
console.log('checking all projects are migrated to Full Project History')
const count = await db.projects.countDocuments({
'overleaf.history.display': { $ne: true },
})
if (count === 0) {
console.log('All projects are migrated to Full Project History')
} else {
console.error(
`There are ${count} projects that are not migrated to Full Project History` +
` please complete the migration before running this script again.`
)
process.exit(1)
}
}
async function setAllowDowngradeToFalse() {
console.log('unsetting `allowDowngrade` flag in all projects')
await db.projects.updateMany(
{
'overleaf.history.id': { $exists: true },
'overleaf.history.allowDowngrade': true,
},
{ $unset: { 'overleaf.history.allowDowngrade': 1 } }
)
console.log('unsetting `allowDowngrade` flag in all projects - Done')
}
async function deleteHistoryCollections() {
await gracefullyDropCollection(db.docHistory)
await gracefullyDropCollection(db.docHistoryIndex)
await gracefullyDropCollection(db.projectHistoryMetaData)
}
async function gracefullyDropCollection(collection) {
const collectionName = collection.collectionName
console.log(`removing \`${collectionName}\` data`)
try {
await collection.drop()
} catch (err) {
if (err.code === 26) {
// collection already deleted
console.log(`removing \`${collectionName}\` data - Already removed`)
} else {
throw err
}
}
console.log(`removing \`${collectionName}\` data - Done`)
}
waitForDb()
.then(main)
.catch(err => {
console.error(err)
process.exit(1)
})
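The script is meant to be run once, inside the container, after every project has been migrated to full project history; an illustrative invocation using the path the Dockerfile installs it to:

docker exec sharelatex /bin/bash -c "cd /overleaf/services/web && node scripts/history/clean_sl_history_data.js"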


@@ -0,0 +1,95 @@
--- services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js
+++ services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js
@@ -1,6 +1,9 @@
+const _ = require('lodash')
+const fs = require('fs')
const { ReadPreference, ObjectId } = require('mongodb')
const { db } = require('../../../../app/src/infrastructure/mongodb')
const Settings = require('@overleaf/settings')
+const logger = require('@overleaf/logger')
const ProjectHistoryHandler = require('../../../../app/src/Features/Project/ProjectHistoryHandler')
const HistoryManager = require('../../../../app/src/Features/History/HistoryManager')
@@ -8,6 +11,8 @@ const ProjectHistoryController = require('./ProjectHistoryController')
const ProjectEntityHandler = require('../../../../app/src/Features/Project/ProjectEntityHandler')
const ProjectEntityUpdateHandler = require('../../../../app/src/Features/Project/ProjectEntityUpdateHandler')
const DocumentUpdaterHandler = require('../../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler')
+const { Doc } = require('../../../../app/src/models/Doc')
+const FileWriter = require('../../../../app/src/infrastructure/FileWriter')
// Timestamp of when 'Enable history for SL in background' release
const ID_WHEN_FULL_PROJECT_HISTORY_ENABLED =
@@ -340,9 +345,33 @@ async function anyDocHistoryIndexExists(project) {
)
}
+async function convertDeletedDocToFile(projectId, docId, userId, source, doc) {
+ // write the doc to a temporary file and upload to filestore
+ const tmpFilePath = await FileWriter.promises.writeLinesToDisk(
+ projectId,
+ doc.lines
+ )
+ await ProjectEntityUpdateHandler.promises.upsertFileWithPath(
+ projectId,
+ `/_deleted/${docId}/${doc.name}`,
+ tmpFilePath,
+ null,
+ userId,
+ source
+ )
+ // hard delete the original doc, otherwise it will get picked up again
+ // by readDeletedDocs in ProjectHistoryController and the final
+ // resync of the history will fail.
+ await db.docs.deleteOne({ _id: docId })
+ await db.docOps.deleteOne({ doc_id: docId })
+ // clean up the temporary file
+ await fs.promises.unlink(tmpFilePath)
+}
+
async function convertLargeDocsToFile(projectId, userId) {
- const docs = await ProjectEntityHandler.promises.getAllDocs(projectId)
let convertedDocCount = 0
+ const docs = await ProjectEntityHandler.promises.getAllDocs(projectId)
+ // Convert large docs to files
for (const doc of Object.values(docs)) {
const sizeBound = JSON.stringify(doc.lines)
if (docIsTooLarge(sizeBound, doc.lines, Settings.max_doc_length)) {
@@ -355,6 +384,39 @@ async function convertLargeDocsToFile(projectId, userId) {
convertedDocCount++
}
}
+ // Convert deleted docs to files, these cannot be converted by
+ // ProjectEntityUpdateHandler so we do it manually
+ const docsCursor = Doc.find({
+ project_id: ObjectId(projectId),
+ })
+ .lean()
+ .cursor()
+ for await (const doc of docsCursor) {
+ // check whether the doc is present in the filetree instead of
+ // relying on the deletedAt property
+ const docExistsInFiletree = _.find(docs, existingDoc =>
+ existingDoc._id.equals(doc._id)
+ )
+ if (docExistsInFiletree || doc.inS3) {
+ continue
+ }
+ const sizeBound = JSON.stringify(doc.lines)
+ if (docIsTooLarge(sizeBound, doc.lines, Settings.max_doc_length)) {
+ const docId = doc._id.toString()
+ if (!_.isEmpty(doc.ranges)) {
+ throw new Error(`found too large deleted doc with ranges: ${docId}`)
+ }
+ logger.warn({ projectId, docId }, 'converting large deleted doc')
+ await convertDeletedDocToFile(
+ projectId,
+ doc._id,
+ userId,
+ 'history-migration',
+ doc
+ )
+ convertedDocCount++
+ }
+ }
return convertedDocCount
}


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.5.11
# Patch: fix matching version when rewinding history
COPY fix-matching-version-error.patch .
RUN patch -p0 < fix-matching-version-error.patch


@@ -0,0 +1,22 @@
--- services/track-changes/app/js/ZipManager.js
+++ services/track-changes/app/js/ZipManager.js
@@ -95,6 +95,19 @@ async function rewindDoc(projectId, docId, zipfile) {
continue
}
+ if (previousUpdate && update.v >= previousUpdate.v) {
+ logger.warn(
+ {
+ projectId,
+ docId,
+ previousUpdateVersion: previousUpdate.v,
+ updateVersion: update.v,
+ },
+ 'adjusting version for update with matching version'
+ )
+ update.v = previousUpdate.v - 1
+ }
+
const updatePath = `${id}/updates/${update.v}`
try {


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.5.12
# Patch: fix soft history retry in cron job
COPY history_soft_retry.patch .
RUN patch -p0 < history_soft_retry.patch


@@ -0,0 +1,8 @@
--- cron/project-history-retry-soft.sh
+++ cron/project-history-retry-soft.sh
@@ -8,4 +8,4 @@ echo "-----------------------------------"
PROJECT_HISTORY_URL='http://localhost:3054'
-curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=hard&timeout=3600000&limit=10000"
+curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=soft&timeout=3600000&limit=10000"


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.5.1
# Patch: improvements to history migration script
COPY migrate_history_fixes.patch .
RUN patch -p0 < migrate_history_fixes.patch


@@ -0,0 +1,92 @@
--- services/track-changes/app/js/DiffGenerator.js
+++ services/track-changes/app/js/DiffGenerator.js
@@ -63,6 +63,7 @@ module.exports = DiffGenerator = {
if (p > max_p) {
logger.warn({ max_p, p }, 'truncating position to content length')
p = max_p
+ op.p = p // fix out of range offsets to avoid invalid history exports in ZipManager
}
const textToBeRemoved = content.slice(p, p + op.i.length)
@@ -74,6 +75,9 @@ module.exports = DiffGenerator = {
return content.slice(0, p) + content.slice(p + op.i.length)
} else if (op.d != null) {
+ if (op.p > content.length) {
+ op.p = content.length // fix out of range offsets to avoid invalid history exports in ZipManager
+ }
return content.slice(0, op.p) + op.d + content.slice(op.p)
} else {
return content
--- services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js
+++ services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js
@@ -107,6 +107,15 @@ async function upgradeProject(project, options) {
if (!upgradeFn) {
return { error: 'unsupported history type' }
}
+ if (options.forceClean) {
+ try {
+ const projectId = project._id
+ // delete any existing history stored in the mongo backend
+ await HistoryManager.promises.deleteProject(projectId, projectId)
+ } catch (err) {
+ // failed to delete existing history, but we can try to continue
+ }
+ }
const result = await upgradeFn(project, options)
result.historyType = historyType
return result
--- services/web/scripts/history/migrate_history.js
+++ services/web/scripts/history/migrate_history.js
@@ -2,6 +2,25 @@
process.env.MONGO_SOCKET_TIMEOUT =
parseInt(process.env.MONGO_SOCKET_TIMEOUT, 10) || 3600000
+const fs = require('fs')
+
+if (fs.existsSync('/etc/container_environment.json')) {
+ try {
+ const envData = JSON.parse(
+ fs.readFileSync('/etc/container_environment.json', 'utf8')
+ )
+ for (const [key, value] of Object.entries(envData)) {
+ process.env[key] = value
+ }
+ } catch (err) {
+ console.error(
+ 'cannot read /etc/container_environment.json, the script needs to be run as root',
+ err
+ )
+ process.exit(1)
+ }
+}
+
const VERSION = '0.9.0-cli'
const {
countProjects,
@@ -11,7 +30,6 @@ const {
} = require('../../modules/history-migration/app/src/HistoryUpgradeHelper')
const { waitForDb } = require('../../app/src/infrastructure/mongodb')
const minimist = require('minimist')
-const fs = require('fs')
const util = require('util')
const pLimit = require('p-limit')
const logger = require('@overleaf/logger')
@@ -34,6 +52,7 @@ const argv = minimist(process.argv.slice(2), {
'use-query-hint',
'retry-failed',
'archive-on-failure',
+ 'force-clean',
],
string: ['output', 'user-id'],
alias: {
@@ -168,6 +187,7 @@ async function migrateProjects(projectsToMigrate) {
convertLargeDocsToFile: argv['convert-large-docs-to-file'],
userId: argv['user-id'],
reason: VERSION,
+ forceClean: argv['force-clean'],
}
async function _migrateProject(project) {
if (INTERRUPT) {
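With the flag wired into minimist, a failed migration can be retried while discarding partial history state; an illustrative invocation (other flags accepted by the script are elided):

docker exec sharelatex /bin/bash -c "cd /overleaf/services/web && node scripts/history/migrate_history.js --force-clean"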


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.5.2
# Patch: remove stats collection from history migration script
COPY remove_stats_collection.patch .
RUN patch -p0 < remove_stats_collection.patch


@@ -0,0 +1,16 @@
--- services/web/scripts/history/migrate_history.js
+++ services/web/scripts/history/migrate_history.js
@@ -110,14 +110,6 @@ async function findProjectsToMigrate() {
process.exit(1)
}
- // Find the total number of history records for the projects we need to migrate
- let docHistoryCount = 0
- for await (const project of projectsToMigrate) {
- const count = await countDocHistory({ project_id: project._id })
- docHistoryCount += count
- }
-
- console.log('Total history records to migrate:', docHistoryCount)
return projectsToMigrate
}


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.5.3
# Patch: run primary email check in SaaS only
COPY primary_email_check_saas.patch .
RUN patch -p0 < primary_email_check_saas.patch


@@ -0,0 +1,10 @@
--- services/web/app/src/Features/Project/ProjectController.js
+++ services/web/app/src/Features/Project/ProjectController.js
@@ -535,6 +535,7 @@ const ProjectController = {
if (
user &&
+ Features.hasFeature('saas') &&
UserPrimaryEmailCheckHandler.requiresPrimaryEmailCheck(user)
) {
return res.redirect('/user/emails/primary-email-check')


@@ -0,0 +1,7 @@
FROM sharelatex/sharelatex:3.5.4
# Patch: fix shutdown sequence: flush document-updater before history services.
RUN cd /etc/my_init.pre_shutdown.d \
&& mv 02_flush_document_updater 01_flush_document_updater \
&& mv 01_flush_project_history 02_flush_project_history \
&& mv 01_flush_track_changes 02_flush_track_changes


@@ -0,0 +1,8 @@
FROM sharelatex/sharelatex:3.5.5
# Patch: support trustProxyIps in Overleaf Community Edition/Server Pro
COPY trusted_proxy_ips.patch .
RUN patch -p0 --directory=/etc/sharelatex < trusted_proxy_ips.patch
# Patch: add script to cleanup legacy history data
ADD clean_sl_history_data.js /overleaf/services/web/scripts/history/clean_sl_history_data.js


@@ -0,0 +1,60 @@
const { waitForDb, db } = require('../../app/src/infrastructure/mongodb')
async function main() {
await checkAllProjectsAreMigrated()
await setAllowDowngradeToFalse()
await deleteHistoryCollections()
console.log('Legacy history data cleaned up successfully')
process.exit(0)
}
async function checkAllProjectsAreMigrated() {
console.log('checking all projects are migrated to Full Project History')
const count = await db.projects.countDocuments({
'overleaf.history.display': { $ne: true },
})
if (count === 0) {
console.log('All projects are migrated to Full Project History')
} else {
console.error(
`There are ${count} projects that are not migrated to Full Project History` +
` please complete the migration before running this script again.`
)
process.exit(1)
}
}
async function setAllowDowngradeToFalse() {
console.log('unsetting `allowDowngrade` flag in all projects')
await db.projects.updateMany(
{
'overleaf.history.id': { $exists: true },
'overleaf.history.allowDowngrade': true,
},
{ $unset: { 'overleaf.history.allowDowngrade': 1 } }
)
console.log('unsetting `allowDowngrade` flag in all projects - Done')
}
async function deleteHistoryCollections() {
console.log('removing `docHistory` data')
await db.docHistory.deleteMany({})
console.log('removing `docHistory` data - Done')
console.log('removing `docHistoryIndex` data')
await db.docHistoryIndex.deleteMany({})
console.log('removing `docHistoryIndex` data - Done')
console.log('removing `projectHistoryMetaData` data')
await db.projectHistoryMetaData.deleteMany({})
console.log('removing `projectHistoryMetaData` data - Done')
}
waitForDb()
.then(main)
.catch(err => {
console.error(err)
process.exit(1)
})


@@ -0,0 +1,10 @@
--- settings.js
+++ settings.js
@@ -245,6 +245,7 @@ const settings = {
// address and http/https protocol information.
behindProxy: process.env.SHARELATEX_BEHIND_PROXY || false,
+ trustedProxyIps: process.env.SHARELATEX_TRUSTED_PROXY_IPS,
i18n: {
subdomainLang: {


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.5.6
# Patch: clean up history id on `migrate_history.js --force-clean`
COPY force_clean_fix.patch .
RUN patch -p0 < force_clean_fix.patch


@@ -0,0 +1,40 @@
--- services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js
+++ services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js
@@ -115,6 +115,11 @@ async function upgradeProject(project, options) {
const projectId = project._id
// delete any existing history stored in the mongo backend
await HistoryManager.promises.deleteProject(projectId, projectId)
+ // unset overleaf.history.id to prevent the migration script from failing on checks
+ await db.projects.updateOne(
+ { _id: projectId },
+ { $unset: { 'overleaf.history.id': '' } }
+ )
} catch (err) {
// failed to delete existing history, but we can try to continue
}
--- services/web/scripts/history/migrate_history.js
+++ services/web/scripts/history/migrate_history.js
@@ -147,7 +147,7 @@ async function migrateProjects(projectsToMigrate) {
}
// send log output for each migration to a file
const output = fs.createWriteStream(argv.output, { flags: 'a' })
- console.log(`Writing log output to ${argv.output}`)
+ console.log(`Writing log output to ${process.cwd()}/${argv.output}`)
const logger = new console.Console({ stdout: output })
function logJson(obj) {
logger.log(JSON.stringify(obj))
@@ -253,8 +253,12 @@ async function main() {
console.log('Projects migrated: ', projectsMigrated)
console.log('Projects failed: ', projectsFailed)
if (projectsFailed > 0) {
- console.log(`Log output written to ${argv.output}`)
- console.log('Please check the log for errors.')
+ console.log('------------------------------------------------------')
+ console.log(`Log output written to ${process.cwd()}/${argv.output}`)
+ console.log(
+ 'Please check the log for errors. Attach the content of the file when contacting support.'
+ )
+ console.log('------------------------------------------------------')
}
if (INTERRUPT) {
console.log('Migration interrupted, please run again to continue.')


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.5.7
# Patch: fixes anonymous edits breaking history
COPY pr_13574.patch .
RUN patch -p0 < pr_13574.patch


@@ -0,0 +1,22 @@
--- services/project-history/app/js/UpdateTranslator.js
+++ services/project-history/app/js/UpdateTranslator.js
@@ -73,9 +73,18 @@ function _convertToChange(projectId, updateWithBlob) {
throw error
}
+ let v2Authors
+ if (update.meta.user_id === 'anonymous-user') {
+ // history-v1 uses null to represent an anonymous author
+ v2Authors = [null]
+ } else {
+ // user_id is missing on resync operations that update the contents of a doc
+ v2Authors = _.compact([update.meta.user_id])
+ }
+
const rawChange = {
operations,
- v2Authors: _.compact([update.meta.user_id]),
+ v2Authors,
timestamp: new Date(update.meta.ts).toISOString(),
projectVersion,
v2DocVersions: Object.keys(v2DocVersions).length ? v2DocVersions : null,


@@ -0,0 +1,10 @@
FROM sharelatex/sharelatex:3.5.8
# Node update
RUN curl -sSL https://deb.nodesource.com/setup_16.x | bash - \
&& apt-get install -y nodejs
# Patch: fetch access tokens via endpoint
COPY pr_13485.patch .
RUN patch -p0 < pr_13485.patch
RUN node genScript compile | bash


@@ -0,0 +1,389 @@
--- services/web/app/src/Features/Collaborators/CollaboratorsController.js
+++ services/web/app/src/Features/Collaborators/CollaboratorsController.js
@@ -11,6 +11,7 @@ const Errors = require('../Errors/Errors')
const logger = require('@overleaf/logger')
const { expressify } = require('../../util/promises')
const { hasAdminAccess } = require('../Helpers/AdminAuthorizationHelper')
+const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler')
module.exports = {
removeUserFromProject: expressify(removeUserFromProject),
@@ -18,6 +19,7 @@ module.exports = {
getAllMembers: expressify(getAllMembers),
setCollaboratorInfo: expressify(setCollaboratorInfo),
transferOwnership: expressify(transferOwnership),
+ getShareTokens: expressify(getShareTokens),
}
async function removeUserFromProject(req, res, next) {
@@ -114,3 +116,37 @@ async function _removeUserIdFromProject(projectId, userId) {
)
await TagsHandler.promises.removeProjectFromAllTags(userId, projectId)
}
+
+async function getShareTokens(req, res) {
+ const projectId = req.params.Project_id
+ const userId = SessionManager.getLoggedInUserId(req.session)
+
+ let tokens
+ if (userId) {
+ tokens = await CollaboratorsGetter.promises.getPublicShareTokens(
+ ObjectId(userId),
+ ObjectId(projectId)
+ )
+ } else {
+ // anonymous access, the token is already available in the session
+ const readOnly = TokenAccessHandler.getRequestToken(req, projectId)
+ tokens = { readOnly }
+ }
+ if (!tokens) {
+ return res.sendStatus(403)
+ }
+
+ if (tokens.readOnly || tokens.readAndWrite) {
+ logger.info(
+ {
+ projectId,
+ userId: userId || 'anonymous',
+ ip: req.ip,
+ tokens: Object.keys(tokens),
+ },
+ 'project tokens accessed'
+ )
+ }
+
+ res.json(tokens)
+}
--- services/web/app/src/Features/Collaborators/CollaboratorsGetter.js
+++ services/web/app/src/Features/Collaborators/CollaboratorsGetter.js
@@ -25,6 +25,7 @@ module.exports = {
getInvitedCollaboratorCount: callbackify(getInvitedCollaboratorCount),
getProjectsUserIsMemberOf: callbackify(getProjectsUserIsMemberOf),
isUserInvitedMemberOfProject: callbackify(isUserInvitedMemberOfProject),
+ getPublicShareTokens: callbackify(getPublicShareTokens),
userIsTokenMember: callbackify(userIsTokenMember),
getAllInvitedMembers: callbackify(getAllInvitedMembers),
promises: {
@@ -37,6 +38,7 @@ module.exports = {
getInvitedCollaboratorCount,
getProjectsUserIsMemberOf,
isUserInvitedMemberOfProject,
+ getPublicShareTokens,
userIsTokenMember,
getAllInvitedMembers,
},
@@ -133,6 +135,40 @@ async function isUserInvitedMemberOfProject(userId, projectId) {
return false
}
+async function getPublicShareTokens(userId, projectId) {
+ const memberInfo = await Project.findOne(
+ {
+ _id: projectId,
+ },
+ {
+ isOwner: { $eq: ['$owner_ref', userId] },
+ hasTokenReadOnlyAccess: {
+ $and: [
+ { $in: [userId, '$tokenAccessReadOnly_refs'] },
+ { $eq: ['$publicAccesLevel', PublicAccessLevels.TOKEN_BASED] },
+ ],
+ },
+ tokens: 1,
+ }
+ )
+ .lean()
+ .exec()
+
+ if (!memberInfo) {
+ return null
+ }
+
+ if (memberInfo.isOwner) {
+ return memberInfo.tokens
+ } else if (memberInfo.hasTokenReadOnlyAccess) {
+ return {
+ readOnly: memberInfo.tokens.readOnly,
+ }
+ } else {
+ return {}
+ }
+}
+
async function getProjectsUserIsMemberOf(userId, fields) {
const limit = pLimit(2)
const [readAndWrite, readOnly, tokenReadAndWrite, tokenReadOnly] =
--- services/web/app/src/Features/Collaborators/CollaboratorsRouter.js
+++ services/web/app/src/Features/Collaborators/CollaboratorsRouter.js
@@ -22,6 +22,10 @@ const rateLimiters = {
points: 200,
duration: 60 * 10,
}),
+ getProjectTokens: new RateLimiter('get-project-tokens', {
+ points: 200,
+ duration: 60 * 10,
+ }),
}
module.exports = {
@@ -139,5 +143,12 @@ module.exports = {
CollaboratorsInviteController.acceptInvite,
AnalyticsRegistrationSourceMiddleware.clearSource()
)
+
+ webRouter.get(
+ '/project/:Project_id/tokens',
+ RateLimiterMiddleware.rateLimit(rateLimiters.getProjectTokens),
+ AuthorizationMiddleware.ensureUserCanReadProject,
+ CollaboratorsController.getShareTokens
+ )
},
}
--- services/web/app/src/Features/Editor/EditorController.js
+++ services/web/app/src/Features/Editor/EditorController.js
@@ -581,20 +581,7 @@ const EditorController = {
{ newAccessLevel }
)
if (newAccessLevel === PublicAccessLevels.TOKEN_BASED) {
- ProjectDetailsHandler.ensureTokensArePresent(
- projectId,
- function (err, tokens) {
- if (err) {
- return callback(err)
- }
- EditorRealTimeController.emitToRoom(
- projectId,
- 'project:tokens:changed',
- { tokens }
- )
- callback()
- }
- )
+ ProjectDetailsHandler.ensureTokensArePresent(projectId, callback)
} else {
callback()
}
--- services/web/app/src/Features/Editor/EditorHttpController.js
+++ services/web/app/src/Features/Editor/EditorHttpController.js
@@ -67,8 +67,6 @@ async function joinProject(req, res, next) {
if (!project) {
return res.sendStatus(403)
}
- // Hide access tokens if this is not the project owner
- TokenAccessHandler.protectTokens(project, privilegeLevel)
// Hide sensitive data if the user is restricted
if (isRestrictedUser) {
project.owner = { _id: project.owner._id }
--- services/web/app/src/Features/Project/ProjectController.js
+++ services/web/app/src/Features/Project/ProjectController.js
@@ -343,7 +343,7 @@ const ProjectController = {
const userId = SessionManager.getLoggedInUserId(req.session)
ProjectGetter.findAllUsersProjects(
userId,
- 'name lastUpdated publicAccesLevel archived trashed owner_ref tokens',
+ 'name lastUpdated publicAccesLevel archived trashed owner_ref',
(err, projects) => {
if (err != null) {
return next(err)
@@ -1072,7 +1072,6 @@ const ProjectController = {
// If a project is simultaneously trashed and archived, we will consider it archived but not trashed.
const trashed = ProjectHelper.isTrashed(project, userId) && !archived
- TokenAccessHandler.protectTokens(project, accessLevel)
const model = {
id: project._id,
name: project.name,
--- services/web/app/src/Features/Project/ProjectDetailsHandler.js
+++ services/web/app/src/Features/Project/ProjectDetailsHandler.js
@@ -207,14 +207,13 @@ async function ensureTokensArePresent(projectId) {
project.tokens.readOnly != null &&
project.tokens.readAndWrite != null
) {
- return project.tokens
+ return
}
await _generateTokens(project)
await Project.updateOne(
{ _id: projectId },
{ $set: { tokens: project.tokens } }
).exec()
- return project.tokens
}
async function clearTokens(projectId) {
--- services/web/app/src/Features/Project/ProjectEditorHandler.js
+++ services/web/app/src/Features/Project/ProjectEditorHandler.js
@@ -49,7 +49,6 @@ module.exports = ProjectEditorHandler = {
),
members: [],
invites,
- tokens: project.tokens,
imageName:
project.imageName != null
? Path.basename(project.imageName)
--- services/web/app/src/Features/TokenAccess/TokenAccessHandler.js
+++ services/web/app/src/Features/TokenAccess/TokenAccessHandler.js
@@ -246,22 +246,6 @@ const TokenAccessHandler = {
})
},
- protectTokens(project, privilegeLevel) {
- if (!project || !project.tokens) {
- return
- }
- if (privilegeLevel === PrivilegeLevels.OWNER) {
- return
- }
- if (privilegeLevel !== PrivilegeLevels.READ_AND_WRITE) {
- project.tokens.readAndWrite = ''
- project.tokens.readAndWritePrefix = ''
- }
- if (privilegeLevel !== PrivilegeLevels.READ_ONLY) {
- project.tokens.readOnly = ''
- }
- },
-
getV1DocPublishedInfo(token, callback) {
// default to allowing access
if (!Settings.apis.v1 || !Settings.apis.v1.url) {
@@ -304,7 +288,6 @@ TokenAccessHandler.promises = promisifyAll(TokenAccessHandler, {
'_projectFindOne',
'grantSessionTokenAccess',
'getRequestToken',
- 'protectTokens',
],
multiResult: {
validateTokenForAnonymousAccess: ['isValidReadAndWrite', 'isValidReadOnly'],
--- services/web/frontend/js/features/share-project-modal/components/link-sharing.js
+++ services/web/frontend/js/features/share-project-modal/components/link-sharing.js
@@ -1,4 +1,4 @@
-import { useCallback, useState } from 'react'
+import { useCallback, useState, useEffect } from 'react'
import PropTypes from 'prop-types'
import { Button, Col, Row } from 'react-bootstrap'
import { Trans } from 'react-i18next'
@@ -10,6 +10,8 @@ import CopyLink from '../../../shared/components/copy-link'
import { useProjectContext } from '../../../shared/context/project-context'
import * as eventTracking from '../../../infrastructure/event-tracking'
import { useUserContext } from '../../../shared/context/user-context'
+import { getJSON } from '../../../infrastructure/fetch-json'
+import useAbortController from '../../../shared/hooks/use-abort-controller'
export default function LinkSharing({ canAddCollaborators }) {
const [inflight, setInflight] = useState(false)
@@ -27,8 +29,7 @@ export default function LinkSharing({ canAddCollaborators }) {
)
.then(() => {
// NOTE: not calling `updateProject` here as it receives data via
- // project:publicAccessLevel:changed and project:tokens:changed
- // over the websocket connection
+ // project:publicAccessLevel:changed over the websocket connection
// TODO: eventTracking.sendMB('project-make-token-based') when newPublicAccessLevel is 'tokenBased'
})
.finally(() => {
@@ -106,7 +107,17 @@ PrivateSharing.propTypes = {
}
function TokenBasedSharing({ setAccessLevel, inflight, canAddCollaborators }) {
- const { tokens } = useProjectContext()
+ const { _id: projectId } = useProjectContext()
+
+ const [tokens, setTokens] = useState(null)
+
+ const { signal } = useAbortController()
+
+ useEffect(() => {
+ getJSON(`/project/${projectId}/tokens`, { signal })
+ .then(data => setTokens(data))
+ .catch(error => console.error(error))
+ }, [projectId, signal])
return (
<Row className="public-access-level">
@@ -194,7 +205,17 @@ LegacySharing.propTypes = {
}
export function ReadOnlyTokenLink() {
- const { tokens } = useProjectContext()
+ const { _id: projectId } = useProjectContext()
+
+ const [tokens, setTokens] = useState(null)
+
+ const { signal } = useAbortController()
+
+ useEffect(() => {
+ getJSON(`/project/${projectId}/tokens`, { signal })
+ .then(data => setTokens(data))
+ .catch(error => console.error(error))
+ }, [projectId, signal])
return (
<Row className="public-access-level">
--- services/web/frontend/js/features/share-project-modal/controllers/react-share-project-modal-controller.js
+++ services/web/frontend/js/features/share-project-modal/controllers/react-share-project-modal-controller.js
@@ -31,16 +31,6 @@ export default App.controller(
})
}
- /* tokens */
-
- ide.socket.on('project:tokens:changed', data => {
- if (data.tokens != null) {
- $scope.$applyAsync(() => {
- $scope.project.tokens = data.tokens
- })
- }
- })
-
ide.socket.on('project:membership:changed', data => {
if (data.members) {
listProjectMembers($scope.project._id)
--- services/web/frontend/js/shared/context/mock/mock-ide.js
+++ services/web/frontend/js/shared/context/mock/mock-ide.js
@@ -27,10 +27,6 @@ export const getMockIde = () => {
zotero: false,
},
publicAccessLevel: '',
- tokens: {
- readOnly: '',
- readAndWrite: '',
- },
owner: {
_id: '',
email: '',
--- services/web/frontend/js/shared/context/project-context.js
+++ services/web/frontend/js/shared/context/project-context.js
@@ -28,10 +28,6 @@ export const projectShape = {
versioning: PropTypes.bool,
}),
publicAccessLevel: PropTypes.string,
- tokens: PropTypes.shape({
- readOnly: PropTypes.string,
- readAndWrite: PropTypes.string,
- }),
owner: PropTypes.shape({
_id: PropTypes.string.isRequired,
email: PropTypes.string.isRequired,
@@ -81,7 +77,6 @@ export function ProjectProvider({ children }) {
invites,
features,
publicAccesLevel: publicAccessLevel,
- tokens,
owner,
} = project || projectFallback
@@ -94,7 +89,6 @@ export function ProjectProvider({ children }) {
invites,
features,
publicAccessLevel,
- tokens,
owner,
}
}, [
@@ -105,7 +99,6 @@ export function ProjectProvider({ children }) {
invites,
features,
publicAccessLevel,
- tokens,
owner,
])
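The share modal now fetches tokens lazily from the new route, which can also be exercised directly with an authenticated request; the cookie value and project id below are placeholders:

curl -b "sharelatex.sid=PLACEHOLDER" http://localhost/project/PROJECT_ID/tokens

Per getPublicShareTokens above, the owner gets both tokens, a read-only token member gets only readOnly, and anyone else gets an empty object.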


@@ -0,0 +1,9 @@
FROM sharelatex/sharelatex:4.0.0
# Patch: Block access to metrics endpoint
COPY pr_13229_ce_only.patch .
RUN patch -p0 --directory=/ < pr_13229_ce_only.patch
# Patch: Remove documentation link from editor outline
COPY outline_doc_icon.patch .
RUN patch -p0 < outline_doc_icon.patch


@@ -0,0 +1,9 @@
--- services/web/app/views/project/editor/file-tree-react.pug
+++ services/web/app/views/project/editor/file-tree-react.pug
@@ -38,5 +38,3 @@ aside.editor-sidebar.full-size
highlighted-line="highlightedLine"
show="show"
)
-
- documentation-button
\ No newline at end of file


@@ -0,0 +1,14 @@
--- etc/nginx/sites-enabled/sharelatex.conf
+++ etc/nginx/sites-enabled/sharelatex.conf
@@ -4,6 +4,11 @@ server {
root /overleaf/services/web/public/;
+ # block external access to prometheus /metrics
+ location /metrics {
+ internal;
+ }
+
location / {
proxy_pass http://127.0.0.1:3000;
proxy_http_version 1.1;
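Because the location is marked internal, nginx answers external requests for /metrics with 404 while internal redirects still work; a quick check, host and port illustrative:

curl -i http://localhost/metrics  # expect HTTP/1.1 404 Not Found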


@@ -0,0 +1,7 @@
# 4.0.1 was tagged as 4.0.2 in dockerhub to keep parity with Server Pro
FROM sharelatex/sharelatex:4.0.1
# Patch: fixes anonymous edits breaking history
COPY pr_13574.patch .
RUN patch -p0 < pr_13574.patch

View File

@@ -0,0 +1,22 @@
--- services/project-history/app/js/UpdateTranslator.js
+++ services/project-history/app/js/UpdateTranslator.js
@@ -73,9 +73,18 @@ function _convertToChange(projectId, updateWithBlob) {
throw error
}
+ let v2Authors
+ if (update.meta.user_id === 'anonymous-user') {
+ // history-v1 uses null to represent an anonymous author
+ v2Authors = [null]
+ } else {
+ // user_id is missing on resync operations that update the contents of a doc
+ v2Authors = _.compact([update.meta.user_id])
+ }
+
const rawChange = {
operations,
- v2Authors: _.compact([update.meta.user_id]),
+ v2Authors,
timestamp: new Date(update.meta.ts).toISOString(),
projectVersion,
v2DocVersions: Object.keys(v2DocVersions).length ? v2DocVersions : null,
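
The failure mode is easy to reproduce: `_.compact` keeps the 'anonymous-user' marker, which history-v1 rejects as an author id. A minimal sketch of the mapping the patch introduces (function name is illustrative):

const _ = require('lodash')

function v2AuthorsFor(userId) {
  if (userId === 'anonymous-user') return [null] // history-v1 represents anonymous authors as null
  return _.compact([userId]) // [] when user_id is missing, e.g. on resync operations
}

console.log(v2AuthorsFor('anonymous-user')) // [ null ]
console.log(v2AuthorsFor(undefined))        // []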

View File

@@ -0,0 +1,10 @@
FROM sharelatex/sharelatex:4.0.3
# Node update
RUN curl -sSL https://deb.nodesource.com/setup_16.x | bash - \
&& apt-get install -y nodejs
# Patch: fetch access tokens via endpoint
COPY pr_13485.patch .
RUN patch -p0 < pr_13485.patch
RUN node genScript compile | bash

View File

@@ -0,0 +1,389 @@
--- services/web/app/src/Features/Collaborators/CollaboratorsController.js
+++ services/web/app/src/Features/Collaborators/CollaboratorsController.js
@@ -11,6 +11,7 @@ const Errors = require('../Errors/Errors')
const logger = require('@overleaf/logger')
const { expressify } = require('../../util/promises')
const { hasAdminAccess } = require('../Helpers/AdminAuthorizationHelper')
+const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler')
module.exports = {
removeUserFromProject: expressify(removeUserFromProject),
@@ -18,6 +19,7 @@ module.exports = {
getAllMembers: expressify(getAllMembers),
setCollaboratorInfo: expressify(setCollaboratorInfo),
transferOwnership: expressify(transferOwnership),
+ getShareTokens: expressify(getShareTokens),
}
async function removeUserFromProject(req, res, next) {
@@ -114,3 +116,37 @@ async function _removeUserIdFromProject(projectId, userId) {
)
await TagsHandler.promises.removeProjectFromAllTags(userId, projectId)
}
+
+async function getShareTokens(req, res) {
+ const projectId = req.params.Project_id
+ const userId = SessionManager.getLoggedInUserId(req.session)
+
+ let tokens
+ if (userId) {
+ tokens = await CollaboratorsGetter.promises.getPublicShareTokens(
+ ObjectId(userId),
+ ObjectId(projectId)
+ )
+ } else {
+ // anonymous access, the token is already available in the session
+ const readOnly = TokenAccessHandler.getRequestToken(req, projectId)
+ tokens = { readOnly }
+ }
+ if (!tokens) {
+ return res.sendStatus(403)
+ }
+
+ if (tokens.readOnly || tokens.readAndWrite) {
+ logger.info(
+ {
+ projectId,
+ userId: userId || 'anonymous',
+ ip: req.ip,
+ tokens: Object.keys(tokens),
+ },
+ 'project tokens accessed'
+ )
+ }
+
+ res.json(tokens)
+}
--- services/web/app/src/Features/Collaborators/CollaboratorsGetter.js
+++ services/web/app/src/Features/Collaborators/CollaboratorsGetter.js
@@ -25,6 +25,7 @@ module.exports = {
getInvitedCollaboratorCount: callbackify(getInvitedCollaboratorCount),
getProjectsUserIsMemberOf: callbackify(getProjectsUserIsMemberOf),
isUserInvitedMemberOfProject: callbackify(isUserInvitedMemberOfProject),
+ getPublicShareTokens: callbackify(getPublicShareTokens),
userIsTokenMember: callbackify(userIsTokenMember),
getAllInvitedMembers: callbackify(getAllInvitedMembers),
promises: {
@@ -37,6 +38,7 @@ module.exports = {
getInvitedCollaboratorCount,
getProjectsUserIsMemberOf,
isUserInvitedMemberOfProject,
+ getPublicShareTokens,
userIsTokenMember,
getAllInvitedMembers,
},
@@ -133,6 +135,40 @@ async function isUserInvitedMemberOfProject(userId, projectId) {
return false
}
+async function getPublicShareTokens(userId, projectId) {
+ const memberInfo = await Project.findOne(
+ {
+ _id: projectId,
+ },
+ {
+ isOwner: { $eq: ['$owner_ref', userId] },
+ hasTokenReadOnlyAccess: {
+ $and: [
+ { $in: [userId, '$tokenAccessReadOnly_refs'] },
+ { $eq: ['$publicAccesLevel', PublicAccessLevels.TOKEN_BASED] },
+ ],
+ },
+ tokens: 1,
+ }
+ )
+ .lean()
+ .exec()
+
+ if (!memberInfo) {
+ return null
+ }
+
+ if (memberInfo.isOwner) {
+ return memberInfo.tokens
+ } else if (memberInfo.hasTokenReadOnlyAccess) {
+ return {
+ readOnly: memberInfo.tokens.readOnly,
+ }
+ } else {
+ return {}
+ }
+}
+
async function getProjectsUserIsMemberOf(userId, fields) {
const limit = pLimit(2)
const [readAndWrite, readOnly, tokenReadAndWrite, tokenReadOnly] =
--- services/web/app/src/Features/Collaborators/CollaboratorsRouter.js
+++ services/web/app/src/Features/Collaborators/CollaboratorsRouter.js
@@ -22,6 +22,10 @@ const rateLimiters = {
points: 200,
duration: 60 * 10,
}),
+ getProjectTokens: new RateLimiter('get-project-tokens', {
+ points: 200,
+ duration: 60 * 10,
+ }),
}
module.exports = {
@@ -139,5 +143,12 @@ module.exports = {
CollaboratorsInviteController.acceptInvite,
AnalyticsRegistrationSourceMiddleware.clearSource()
)
+
+ webRouter.get(
+ '/project/:Project_id/tokens',
+ RateLimiterMiddleware.rateLimit(rateLimiters.getProjectTokens),
+ AuthorizationMiddleware.ensureUserCanReadProject,
+ CollaboratorsController.getShareTokens
+ )
},
}
--- services/web/app/src/Features/Editor/EditorController.js
+++ services/web/app/src/Features/Editor/EditorController.js
@@ -581,20 +581,7 @@ const EditorController = {
{ newAccessLevel }
)
if (newAccessLevel === PublicAccessLevels.TOKEN_BASED) {
- ProjectDetailsHandler.ensureTokensArePresent(
- projectId,
- function (err, tokens) {
- if (err) {
- return callback(err)
- }
- EditorRealTimeController.emitToRoom(
- projectId,
- 'project:tokens:changed',
- { tokens }
- )
- callback()
- }
- )
+ ProjectDetailsHandler.ensureTokensArePresent(projectId, callback)
} else {
callback()
}
--- services/web/app/src/Features/Editor/EditorHttpController.js
+++ services/web/app/src/Features/Editor/EditorHttpController.js
@@ -67,8 +67,6 @@ async function joinProject(req, res, next) {
if (!project) {
return res.sendStatus(403)
}
- // Hide access tokens if this is not the project owner
- TokenAccessHandler.protectTokens(project, privilegeLevel)
// Hide sensitive data if the user is restricted
if (isRestrictedUser) {
project.owner = { _id: project.owner._id }
--- services/web/app/src/Features/Project/ProjectController.js
+++ services/web/app/src/Features/Project/ProjectController.js
@@ -343,7 +343,7 @@ const ProjectController = {
const userId = SessionManager.getLoggedInUserId(req.session)
ProjectGetter.findAllUsersProjects(
userId,
- 'name lastUpdated publicAccesLevel archived trashed owner_ref tokens',
+ 'name lastUpdated publicAccesLevel archived trashed owner_ref',
(err, projects) => {
if (err != null) {
return next(err)
@@ -1072,7 +1072,6 @@ const ProjectController = {
// If a project is simultaneously trashed and archived, we will consider it archived but not trashed.
const trashed = ProjectHelper.isTrashed(project, userId) && !archived
- TokenAccessHandler.protectTokens(project, accessLevel)
const model = {
id: project._id,
name: project.name,
--- services/web/app/src/Features/Project/ProjectDetailsHandler.js
+++ services/web/app/src/Features/Project/ProjectDetailsHandler.js
@@ -207,14 +207,13 @@ async function ensureTokensArePresent(projectId) {
project.tokens.readOnly != null &&
project.tokens.readAndWrite != null
) {
- return project.tokens
+ return
}
await _generateTokens(project)
await Project.updateOne(
{ _id: projectId },
{ $set: { tokens: project.tokens } }
).exec()
- return project.tokens
}
async function clearTokens(projectId) {
--- services/web/app/src/Features/Project/ProjectEditorHandler.js
+++ services/web/app/src/Features/Project/ProjectEditorHandler.js
@@ -49,7 +49,6 @@ module.exports = ProjectEditorHandler = {
),
members: [],
invites,
- tokens: project.tokens,
imageName:
project.imageName != null
? Path.basename(project.imageName)
--- services/web/app/src/Features/TokenAccess/TokenAccessHandler.js
+++ services/web/app/src/Features/TokenAccess/TokenAccessHandler.js
@@ -246,22 +246,6 @@ const TokenAccessHandler = {
})
},
- protectTokens(project, privilegeLevel) {
- if (!project || !project.tokens) {
- return
- }
- if (privilegeLevel === PrivilegeLevels.OWNER) {
- return
- }
- if (privilegeLevel !== PrivilegeLevels.READ_AND_WRITE) {
- project.tokens.readAndWrite = ''
- project.tokens.readAndWritePrefix = ''
- }
- if (privilegeLevel !== PrivilegeLevels.READ_ONLY) {
- project.tokens.readOnly = ''
- }
- },
-
getV1DocPublishedInfo(token, callback) {
// default to allowing access
if (!Settings.apis.v1 || !Settings.apis.v1.url) {
@@ -304,7 +288,6 @@ TokenAccessHandler.promises = promisifyAll(TokenAccessHandler, {
'_projectFindOne',
'grantSessionTokenAccess',
'getRequestToken',
- 'protectTokens',
],
multiResult: {
validateTokenForAnonymousAccess: ['isValidReadAndWrite', 'isValidReadOnly'],
--- services/web/frontend/js/features/share-project-modal/components/link-sharing.js
+++ services/web/frontend/js/features/share-project-modal/components/link-sharing.js
@@ -1,4 +1,4 @@
-import { useCallback, useState } from 'react'
+import { useCallback, useState, useEffect } from 'react'
import PropTypes from 'prop-types'
import { Button, Col, Row } from 'react-bootstrap'
import { Trans } from 'react-i18next'
@@ -10,6 +10,8 @@ import CopyLink from '../../../shared/components/copy-link'
import { useProjectContext } from '../../../shared/context/project-context'
import * as eventTracking from '../../../infrastructure/event-tracking'
import { useUserContext } from '../../../shared/context/user-context'
+import { getJSON } from '../../../infrastructure/fetch-json'
+import useAbortController from '../../../shared/hooks/use-abort-controller'
export default function LinkSharing({ canAddCollaborators }) {
const [inflight, setInflight] = useState(false)
@@ -27,8 +29,7 @@ export default function LinkSharing({ canAddCollaborators }) {
)
.then(() => {
// NOTE: not calling `updateProject` here as it receives data via
- // project:publicAccessLevel:changed and project:tokens:changed
- // over the websocket connection
+ // project:publicAccessLevel:changed over the websocket connection
// TODO: eventTracking.sendMB('project-make-token-based') when newPublicAccessLevel is 'tokenBased'
})
.finally(() => {
@@ -106,7 +107,17 @@ PrivateSharing.propTypes = {
}
function TokenBasedSharing({ setAccessLevel, inflight, canAddCollaborators }) {
- const { tokens } = useProjectContext()
+ const { _id: projectId } = useProjectContext()
+
+ const [tokens, setTokens] = useState(null)
+
+ const { signal } = useAbortController()
+
+ useEffect(() => {
+ getJSON(`/project/${projectId}/tokens`, { signal })
+ .then(data => setTokens(data))
+ .catch(error => console.error(error))
+ }, [projectId, signal])
return (
<Row className="public-access-level">
@@ -194,7 +205,17 @@ LegacySharing.propTypes = {
}
export function ReadOnlyTokenLink() {
- const { tokens } = useProjectContext()
+ const { _id: projectId } = useProjectContext()
+
+ const [tokens, setTokens] = useState(null)
+
+ const { signal } = useAbortController()
+
+ useEffect(() => {
+ getJSON(`/project/${projectId}/tokens`, { signal })
+ .then(data => setTokens(data))
+ .catch(error => console.error(error))
+ }, [projectId, signal])
return (
<Row className="public-access-level">
--- services/web/frontend/js/features/share-project-modal/controllers/react-share-project-modal-controller.js
+++ services/web/frontend/js/features/share-project-modal/controllers/react-share-project-modal-controller.js
@@ -31,16 +31,6 @@ export default App.controller(
})
}
- /* tokens */
-
- ide.socket.on('project:tokens:changed', data => {
- if (data.tokens != null) {
- $scope.$applyAsync(() => {
- $scope.project.tokens = data.tokens
- })
- }
- })
-
ide.socket.on('project:membership:changed', data => {
if (data.members) {
listProjectMembers($scope.project._id)
--- services/web/frontend/js/shared/context/mock/mock-ide.js
+++ services/web/frontend/js/shared/context/mock/mock-ide.js
@@ -27,10 +27,6 @@ export const getMockIde = () => {
zotero: false,
},
publicAccessLevel: '',
- tokens: {
- readOnly: '',
- readAndWrite: '',
- },
owner: {
_id: '',
email: '',
--- services/web/frontend/js/shared/context/project-context.js
+++ services/web/frontend/js/shared/context/project-context.js
@@ -28,10 +28,6 @@ export const projectShape = {
versioning: PropTypes.bool,
}),
publicAccessLevel: PropTypes.string,
- tokens: PropTypes.shape({
- readOnly: PropTypes.string,
- readAndWrite: PropTypes.string,
- }),
owner: PropTypes.shape({
_id: PropTypes.string.isRequired,
email: PropTypes.string.isRequired,
@@ -81,7 +77,6 @@ export function ProjectProvider({ children }) {
invites,
features,
publicAccesLevel: publicAccessLevel,
- tokens,
owner,
} = project || projectFallback
@@ -94,7 +89,6 @@ export function ProjectProvider({ children }) {
invites,
features,
publicAccessLevel,
- tokens,
owner,
}
}, [
@@ -105,7 +99,6 @@ export function ProjectProvider({ children }) {
invites,
features,
publicAccessLevel,
- tokens,
owner,
])
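
For reference, a sketch of how a client could call the endpoint added above, and what `getShareTokens` returns per role (placeholder project id; response shape inferred from the controller code):

const projectId = 'PROJECT_ID' // placeholder

fetch(`/project/${projectId}/tokens`, { credentials: 'same-origin' })
  .then(res => res.json())
  .then(tokens => {
    // owner:                  { readOnly: '...', readAndWrite: '...' }
    // read-only token member: { readOnly: '...' }
    // other invited members:  {}
    console.log(tokens)
  })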

View File

@@ -0,0 +1,13 @@
FROM sharelatex/sharelatex:4.0.4
# Patch: clear invites and invite tokens sent through the websocket
COPY pr_13427.patch .
RUN patch -p0 < pr_13427.patch
# Patch: https://github.com/Automattic/mongoose/commit/f1efabf350522257364aa5c2cb36e441cf08f1a2
COPY mongoose_proto.patch .
RUN patch -p0 < mongoose_proto.patch
# Patch: Allow digits in PDF filenames
COPY pr_13122.patch .
RUN patch -p0 < pr_13122.patch

View File

@@ -0,0 +1,12 @@
--- services/web/node_modules/mongoose/lib/document.js
+++ services/web/node_modules/mongoose/lib/document.js
@@ -739,6 +739,10 @@ function init(self, obj, doc, opts, prefix) {
function _init(index) {
i = keys[index];
+ // avoid prototype pollution
+ if (i === '__proto__' || i === 'constructor') {
+ return;
+ }
path = prefix + i;
schemaType = docSchema.path(path);
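
The guard matters because recursively copying attacker-controlled keys such as `__proto__` pollutes `Object.prototype` for the whole process. A generic illustration of the vulnerability class (not Overleaf code):

// A naive recursive merge, vulnerable to prototype pollution.
function merge(target, source) {
  for (const key of Object.keys(source)) {
    if (source[key] !== null && typeof source[key] === 'object') {
      if (!target[key]) target[key] = {}
      merge(target[key], source[key]) // target['__proto__'] resolves to Object.prototype
    } else {
      target[key] = source[key]
    }
  }
  return target
}

// JSON.parse creates '__proto__' as an own key, so Object.keys() returns it.
merge({}, JSON.parse('{"__proto__": {"isAdmin": true}}'))
console.log({}.isAdmin) // true: every object in the process is now polluted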

View File

@@ -0,0 +1,11 @@
--- services/web/app/src/Features/Compile/CompileController.js
+++ services/web/app/src/Features/Compile/CompileController.js
@@ -371,7 +371,7 @@ module.exports = CompileController = {
},
_getSafeProjectName(project) {
- return project.name.replace(/\P{L}/gu, '_')
+ return project.name.replace(/[^\p{L}\p{Nd}]/gu, '_')
},
deleteAuxFiles(req, res, next) {
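
The effect of the widened character class (letters plus decimal digits) is easiest to see on a concrete project name:

const name = 'Report 2023'
console.log(name.replace(/\P{L}/gu, '_'))           // 'Report_____' (old: digits mangled)
console.log(name.replace(/[^\p{L}\p{Nd}]/gu, '_'))  // 'Report_2023' (new: digits preserved)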

View File

@@ -0,0 +1,92 @@
--- services/web/app/src/Features/Editor/EditorHttpController.js
+++ services/web/app/src/Features/Editor/EditorHttpController.js
@@ -73,6 +73,7 @@ async function joinProject(req, res, next) {
if (isRestrictedUser) {
project.owner = { _id: project.owner._id }
project.members = []
+ project.invites = []
}
// Only show the 'renamed or deleted' message once
if (project.deletedByExternalDataSource) {
--- services/web/app/src/Features/Project/ProjectEditorHandler.js
+++ services/web/app/src/Features/Project/ProjectEditorHandler.js
@@ -48,19 +48,13 @@
deletedDocsFromDocstore
),
members: [],
- invites,
+ invites: this.buildInvitesView(invites),
imageName:
project.imageName != null
? Path.basename(project.imageName)
: undefined,
}
- if (result.invites == null) {
- result.invites = []
- }
- result.invites.forEach(invite => {
- delete invite.token
- })
;({ owner, ownerFeatures, members } =
this.buildOwnerAndMembersViews(members))
result.owner = owner
@@ -99,7 +93,7 @@
let owner = null
let ownerFeatures = null
const filteredMembers = []
- for (const member of Array.from(members || [])) {
+ for (const member of members || []) {
if (member.privilegeLevel === 'owner') {
ownerFeatures = member.user.features
owner = this.buildUserModelView(member.user, 'owner')
@@ -128,24 +122,15 @@
},
buildFolderModelView(folder) {
- let file
const fileRefs = _.filter(folder.fileRefs || [], file => file != null)
return {
_id: folder._id,
name: folder.name,
- folders: Array.from(folder.folders || []).map(childFolder =>
+ folders: (folder.folders || []).map(childFolder =>
this.buildFolderModelView(childFolder)
),
- fileRefs: (() => {
- const result = []
- for (file of Array.from(fileRefs)) {
- result.push(this.buildFileModelView(file))
- }
- return result
- })(),
- docs: Array.from(folder.docs || []).map(doc =>
- this.buildDocModelView(doc)
- ),
+ fileRefs: fileRefs.map(file => this.buildFileModelView(file)),
+ docs: (folder.docs || []).map(doc => this.buildDocModelView(doc)),
}
},
@@ -164,4 +149,21 @@
name: doc.name,
}
},
+
+ buildInvitesView(invites) {
+ if (invites == null) {
+ return []
+ }
+ return invites.map(invite =>
+ _.pick(invite, [
+ '_id',
+ 'createdAt',
+ 'email',
+ 'expires',
+ 'privileges',
+ 'projectId',
+ 'sendingUserId',
+ ])
+ )
+ },
}
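
The net effect of `buildInvitesView` is a whitelist: any field outside the picked list, in particular the invite `token`, no longer reaches the editor payload. A small sketch with made-up data:

const _ = require('lodash')

const invite = {
  _id: '1',
  email: 'invitee@example.com',
  privileges: 'readOnly',
  token: 'secret-invite-token', // must not be exposed to other collaborators
}
const view = _.pick(invite, ['_id', 'createdAt', 'email', 'expires', 'privileges', 'projectId', 'sendingUserId'])
console.log(view.token) // undefined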

View File

@@ -0,0 +1,4 @@
FROM sharelatex/sharelatex:4.0.5
# Patch: Drop the old history collections and increase mongo query timeout
ADD clean_sl_history_data.js /overleaf/services/web/scripts/history/clean_sl_history_data.js

View File

@@ -0,0 +1,70 @@
// Increase default mongo query timeout from 1min to 1h
process.env.MONGO_SOCKET_TIMEOUT = process.env.MONGO_SOCKET_TIMEOUT || '3600000'
const { waitForDb, db } = require('../../app/src/infrastructure/mongodb')
async function main() {
await checkAllProjectsAreMigrated()
await setAllowDowngradeToFalse()
await deleteHistoryCollections()
console.log('Legacy history data cleaned up successfully')
process.exit(0)
}
async function checkAllProjectsAreMigrated() {
console.log('checking all projects are migrated to Full Project History')
const count = await db.projects.countDocuments({
'overleaf.history.display': { $ne: true },
})
if (count === 0) {
console.log('All projects are migrated to Full Project History')
} else {
console.error(
`There are ${count} projects that are not migrated to Full Project History.` +
` Please complete the migration before running this script again.`
)
process.exit(1)
}
}
async function setAllowDowngradeToFalse() {
console.log('unsetting `allowDowngrade` flag in all projects')
await db.projects.updateMany(
{
'overleaf.history.id': { $exists: true },
'overleaf.history.allowDowngrade': true,
},
{ $unset: { 'overleaf.history.allowDowngrade': 1 } }
)
console.log('unsetting `allowDowngrade` flag in all projects - Done')
}
async function deleteHistoryCollections() {
await gracefullyDropCollection(db.docHistory)
await gracefullyDropCollection(db.docHistoryIndex)
await gracefullyDropCollection(db.projectHistoryMetaData)
}
async function gracefullyDropCollection(collection) {
const collectionName = collection.collectionName
console.log(`removing \`${collectionName}\` data`)
try {
await collection.drop()
} catch (err) {
if (err.code === 26) {
// MongoDB error 26 (NamespaceNotFound): the collection was already removed
console.log(`removing \`${collectionName}\` data - Already removed`)
} else {
throw err
}
}
console.log(`removing \`${collectionName}\` data - Done`)
}
waitForDb()
.then(main)
.catch(err => {
console.error(err)
process.exit(1)
})
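
Assuming the standard container name, the script can then be run in place with `docker exec sharelatex node /overleaf/services/web/scripts/history/clean_sl_history_data.js`; it refuses to drop anything (and exits non-zero) while any project is still on the legacy history system.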

View File

@@ -0,0 +1 @@
FROM sharelatex/sharelatex:4.1.0

View File

@@ -0,0 +1 @@
FROM sharelatex/sharelatex:4.1.1

View File

@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:4.1.2
# Patch: fix soft history retry in cron job
COPY history_soft_retry.patch .
RUN patch -p0 < history_soft_retry.patch

View File

@@ -0,0 +1,8 @@
--- cron/project-history-retry-soft.sh
+++ cron/project-history-retry-soft.sh
@@ -8,4 +8,4 @@ echo "-----------------------------------"
PROJECT_HISTORY_URL='http://localhost:3054'
-curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=hard&timeout=3600000&limit=10000"
+curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=soft&timeout=3600000&limit=10000"

View File

@@ -0,0 +1,9 @@
FROM sharelatex/sharelatex:4.1.3
# Patch: Make history-v1 http request timeout configurable
COPY pr_15409.patch /
RUN cd / && patch -p0 < pr_15409.patch
# Patch: Add verbose logging for I/O in history-v1
COPY pr_15410.patch .
RUN patch -p0 < pr_15410.patch

View File

@@ -0,0 +1,90 @@
--- overleaf/services/history-v1/config/custom-environment-variables.json
+++ overleaf/services/history-v1/config/custom-environment-variables.json
@@ -43,5 +43,6 @@
},
"clusterWorkers": "CLUSTER_WORKERS",
"maxFileUploadSize": "MAX_FILE_UPLOAD_SIZE",
- "httpsOnly": "HTTPS_ONLY"
+ "httpsOnly": "HTTPS_ONLY",
+ "httpRequestTimeout": "SHARELATEX_HISTORY_V1_HTTP_REQUEST_TIMEOUT"
}
--- etc/sharelatex/settings.js
+++ etc/sharelatex/settings.js
@@ -261,6 +261,10 @@ const settings = {
url: process.env.V1_HISTORY_URL || 'http://localhost:3100/api',
user: 'staging',
pass: process.env.STAGING_PASSWORD,
+ requestTimeout: parseInt(
+ process.env.SHARELATEX_HISTORY_V1_HTTP_REQUEST_TIMEOUT || '300000', // default is 5min
+ 10
+ ),
},
},
references: {},
diff --git a/services/history-v1/app.js b/services/history-v1/app.js
index 6b3a2ba8f89..2ad490fb6b6 100644
--- overleaf/services/history-v1/app.js
+++ overleaf/services/history-v1/app.js
@@ -5,6 +5,7 @@
// Metrics must be initialized before importing anything else
require('@overleaf/metrics/initialize')
+const config = require('config')
const Events = require('events')
const BPromise = require('bluebird')
const express = require('express')
@@ -47,9 +48,9 @@ app.use(cors())
security.setupSSL(app)
security.setupBasicHttpAuthForSwaggerDocs(app)
+const HTTP_REQUEST_TIMEOUT = parseInt(config.get('httpRequestTimeout'), 10)
app.use(function (req, res, next) {
- // use a 5 minute timeout on all responses
- res.setTimeout(5 * 60 * 1000)
+ res.setTimeout(HTTP_REQUEST_TIMEOUT)
next()
})
--- overleaf/services/history-v1/config/default.json
+++ overleaf/services/history-v1/config/default.json
@@ -25,5 +25,6 @@
"maxFileUploadSize": "52428800",
"databasePoolMin": "2",
"databasePoolMax": "10",
- "httpsOnly": "false"
+ "httpsOnly": "false",
+ "httpRequestTimeout": "300000"
}
--- overleaf/services/project-history/app/js/HistoryStoreManager.js
+++ overleaf/services/project-history/app/js/HistoryStoreManager.js
@@ -17,7 +17,7 @@ import * as Errors from './Errors.js'
import * as LocalFileWriter from './LocalFileWriter.js'
import * as HashManager from './HashManager.js'
-const HTTP_REQUEST_TIMEOUT = 300 * 1000 // 5 minutes
+const HTTP_REQUEST_TIMEOUT = Settings.apis.history_v1.requestTimeout
/**
* Container for functions that need to be mocked in tests
--- overleaf/services/project-history/config/settings.defaults.cjs
+++ overleaf/services/project-history/config/settings.defaults.cjs
@@ -20,6 +20,9 @@ module.exports = {
filestore: {
url: `http://${process.env.FILESTORE_HOST || 'localhost'}:3009`,
},
+ history_v1: {
+ requestTimeout: parseInt(process.env.V1_REQUEST_TIMEOUT || '300000', 10),
+ },
web: {
url: `http://${
process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost'
--- overleaf/services/project-history/test/unit/js/HistoryStoreManager/HistoryStoreManagerTests.js
+++ overleaf/services/project-history/test/unit/js/HistoryStoreManager/HistoryStoreManagerTests.js
@@ -23,6 +23,7 @@ describe('HistoryStoreManager', function () {
filestore: {
url: 'http://filestore.sharelatex.production',
},
+ history_v1: { requestTimeout: 123 },
},
}
this.latestChunkRequestArgs = sinon.match({
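
With the patch applied, the timeout becomes a knob instead of a hard-coded 5 minutes: history-v1 and the web settings read `SHARELATEX_HISTORY_V1_HTTP_REQUEST_TIMEOUT` and project-history reads `V1_REQUEST_TIMEOUT`, both in milliseconds (e.g. `600000` for ten minutes).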

View File

@@ -0,0 +1,153 @@
--- services/history-v1/api/controllers/projects.js
+++ services/history-v1/api/controllers/projects.js
@@ -194,18 +194,23 @@ async function getProjectBlob(req, res, next) {
const hash = req.swagger.params.hash.value
const blobStore = new BlobStore(projectId)
- let stream
+ logger.debug({ projectId, hash }, 'getProjectBlob started')
try {
- stream = await blobStore.getStream(hash)
- } catch (err) {
- if (err instanceof Blob.NotFoundError) {
- return render.notFound(res)
- } else {
- throw err
+ let stream
+ try {
+ stream = await blobStore.getStream(hash)
+ } catch (err) {
+ if (err instanceof Blob.NotFoundError) {
+ return render.notFound(res)
+ } else {
+ throw err
+ }
}
+ res.set('Content-Type', 'application/octet-stream')
+ await pipeline(stream, res)
+ } finally {
+ logger.debug({ projectId, hash }, 'getProjectBlob finished')
}
- res.set('Content-Type', 'application/octet-stream')
- await pipeline(stream, res)
}
async function getSnapshotAtVersion(projectId, version) {
--- services/history-v1/storage/lib/blob_store/index.js
+++ services/history-v1/storage/lib/blob_store/index.js
@@ -20,6 +20,7 @@ const projectKey = require('../project_key')
const streams = require('../streams')
const postgresBackend = require('./postgres')
const mongoBackend = require('./mongo')
+const logger = require('@overleaf/logger')
const GLOBAL_BLOBS = new Map()
@@ -34,9 +35,14 @@ function makeProjectKey(projectId, hash) {
async function uploadBlob(projectId, blob, stream) {
const bucket = config.get('blobStore.projectBucket')
const key = makeProjectKey(projectId, blob.getHash())
- await persistor.sendStream(bucket, key, stream, {
- contentType: 'application/octet-stream',
- })
+ logger.debug({ projectId, blob }, 'uploadBlob started')
+ try {
+ await persistor.sendStream(bucket, key, stream, {
+ contentType: 'application/octet-stream',
+ })
+ } finally {
+ logger.debug({ projectId, blob }, 'uploadBlob finished')
+ }
}
function getBlobLocation(projectId, hash) {
@@ -109,7 +115,12 @@ async function getStringLengthOfFile(byteLength, pathname) {
async function deleteBlobsInBucket(projectId) {
const bucket = config.get('blobStore.projectBucket')
const prefix = `${projectKey.format(projectId)}/`
- await persistor.deleteDirectory(bucket, prefix)
+ logger.debug({ projectId }, 'deleteBlobsInBucket started')
+ try {
+ await persistor.deleteDirectory(bucket, prefix)
+ } finally {
+ logger.debug({ projectId }, 'deleteBlobsInBucket finished')
+ }
}
async function loadGlobalBlobs() {
@@ -202,9 +213,15 @@ class BlobStore {
async getString(hash) {
assert.blobHash(hash, 'bad hash')
- const stream = await this.getStream(hash)
- const buffer = await streams.readStreamToBuffer(stream)
- return buffer.toString()
+ const projectId = this.projectId
+ logger.debug({ projectId, hash }, 'getString started')
+ try {
+ const stream = await this.getStream(hash)
+ const buffer = await streams.readStreamToBuffer(stream)
+ return buffer.toString()
+ } finally {
+ logger.debug({ projectId, hash }, 'getString finished')
+ }
}
/**
--- services/history-v1/storage/lib/history_store.js
+++ services/history-v1/storage/lib/history_store.js
@@ -8,6 +8,7 @@ const path = require('path')
const OError = require('@overleaf/o-error')
const objectPersistor = require('@overleaf/object-persistor')
+const logger = require('@overleaf/logger')
const assert = require('./assert')
const persistor = require('./persistor')
@@ -70,6 +71,7 @@ HistoryStore.prototype.loadRaw = function historyStoreLoadRaw(
const key = getKey(projectId, chunkId)
+ logger.debug({ projectId, chunkId }, 'loadRaw started')
return BPromise.resolve()
.then(() => persistor.getObjectStream(BUCKET, key))
.then(streams.gunzipStreamToBuffer)
@@ -80,6 +82,7 @@ HistoryStore.prototype.loadRaw = function historyStoreLoadRaw(
}
throw new HistoryStore.LoadError(projectId, chunkId).withCause(err)
})
+ .finally(() => logger.debug({ projectId, chunkId }, 'loadRaw finished'))
}
/**
@@ -102,6 +105,7 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw(
const key = getKey(projectId, chunkId)
const stream = streams.gzipStringToStream(JSON.stringify(rawHistory))
+ logger.debug({ projectId, chunkId }, 'storeRaw started')
return BPromise.resolve()
.then(() =>
persistor.sendStream(BUCKET, key, stream, {
@@ -112,6 +116,7 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw(
.catch(err => {
throw new HistoryStore.StoreError(projectId, chunkId).withCause(err)
})
+ .finally(() => logger.debug({ projectId, chunkId }, 'storeRaw finished'))
}
/**
@@ -121,12 +126,13 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw(
* @return {Promise}
*/
HistoryStore.prototype.deleteChunks = function historyDeleteChunks(chunks) {
+ logger.debug({ chunks }, 'deleteChunks started')
return BPromise.all(
chunks.map(chunk => {
const key = getKey(chunk.projectId, chunk.chunkId)
return persistor.deleteObject(BUCKET, key)
})
- )
+ ).finally(() => logger.debug({ chunks }, 'deleteChunks finished'))
}
module.exports = new HistoryStore()

View File

@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:4.1.4
# Patch: Change streaming compression in history-v1
COPY pr_15445.patch .
RUN patch -p0 < pr_15445.patch

View File

@@ -0,0 +1,44 @@
--- services/history-v1/storage/lib/history_store.js
+++ services/history-v1/storage/lib/history_store.js
@@ -103,11 +103,11 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw(
assert.object(rawHistory, 'bad rawHistory')
const key = getKey(projectId, chunkId)
- const stream = streams.gzipStringToStream(JSON.stringify(rawHistory))
logger.debug({ projectId, chunkId }, 'storeRaw started')
return BPromise.resolve()
- .then(() =>
+ .then(() => streams.gzipStringToStream(JSON.stringify(rawHistory)))
+ .then(stream =>
persistor.sendStream(BUCKET, key, stream, {
contentType: 'application/json',
contentEncoding: 'gzip',
--- services/history-v1/storage/lib/streams.js
+++ services/history-v1/storage/lib/streams.js
@@ -79,8 +79,15 @@ function gunzipStreamToBuffer(readStream) {
exports.gunzipStreamToBuffer = gunzipStreamToBuffer
function gzipStringToStream(string) {
- const gzip = zlib.createGzip()
- return new ReadableString(string).pipe(gzip)
+ return new BPromise(function (resolve, reject) {
+ zlib.gzip(Buffer.from(string), function (error, result) {
+ if (error) {
+ reject(error)
+ } else {
+ resolve(new ReadableString(result))
+ }
+ })
+ })
}
/**
@@ -88,6 +95,6 @@ function gzipStringToStream(string) {
*
* @function
* @param {string} string
- * @return {stream.Writable}
+ * @return {Promise.<stream.Readable>}
*/
exports.gzipStringToStream = gzipStringToStream
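
Call sites now have to resolve the promise before piping; a minimal sketch of the new contract (paths are illustrative):

const streams = require('./streams') // the patched history-v1 helper

async function store(rawHistory) {
  // gzipStringToStream now resolves to a readable stream over the gzipped buffer
  const stream = await streams.gzipStringToStream(JSON.stringify(rawHistory))
  // ...hand `stream` to persistor.sendStream(...) as storeRaw does above
}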

View File

@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:4.1.5
# Adds missing dependency patches
ADD patches /overleaf/patches
RUN npm run postinstall

View File

@@ -0,0 +1,30 @@
diff --git a/node_modules/@google-cloud/storage/node_modules/retry-request/index.js b/node_modules/@google-cloud/storage/node_modules/retry-request/index.js
index a293298..df21af6 100644
--- a/node_modules/@google-cloud/storage/node_modules/retry-request/index.js
+++ b/node_modules/@google-cloud/storage/node_modules/retry-request/index.js
@@ -1,6 +1,6 @@
'use strict';
-const {PassThrough} = require('stream');
+const { PassThrough, pipeline } = require('stream');
const debug = require('debug')('retry-request');
const extend = require('extend');
@@ -166,7 +166,7 @@ function retryRequest(requestOpts, opts, callback) {
})
.on('complete', retryStream.emit.bind(retryStream, 'complete'));
- requestStream.pipe(delayStream);
+ pipeline(requestStream, delayStream, () => {});
} else {
activeRequest = opts.request(requestOpts, onResponse);
}
@@ -232,7 +232,7 @@ function retryRequest(requestOpts, opts, callback) {
// No more attempts need to be made, just continue on.
if (streamMode) {
retryStream.emit('response', response);
- delayStream.pipe(retryStream);
+ pipeline(delayStream, retryStream, () => {});
requestStream.on('error', err => {
retryStream.destroy(err);
});
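
All of the dependency patches that follow share one idea: `a.pipe(b)` does not forward errors, so a failing source can leave the destination dangling, while `stream.pipeline` destroys both ends and reports the error. A standalone comparison:

const { PassThrough, pipeline } = require('stream')

const source = new PassThrough()
const sink = new PassThrough()

// source.pipe(sink) would swallow the error below as far as `sink` is concerned.
pipeline(source, sink, err => {
  console.log('pipeline finished:', err ? err.message : 'ok')
})

source.destroy(new Error('upstream failed')) // -> pipeline finished: upstream failed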

View File

@@ -0,0 +1,50 @@
diff --git a/node_modules/@google-cloud/storage/node_modules/teeny-request/build/src/index.js b/node_modules/@google-cloud/storage/node_modules/teeny-request/build/src/index.js
index a2251ca..e29e796 100644
--- a/node_modules/@google-cloud/storage/node_modules/teeny-request/build/src/index.js
+++ b/node_modules/@google-cloud/storage/node_modules/teeny-request/build/src/index.js
@@ -166,27 +166,27 @@ function teenyRequest(reqOpts, callback) {
}
if (callback === undefined) {
// Stream mode
- const requestStream = streamEvents(new stream_1.PassThrough());
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- let responseStream;
- requestStream.once('reading', () => {
- if (responseStream) {
- responseStream.pipe(requestStream);
- }
- else {
- requestStream.once('response', () => {
- responseStream.pipe(requestStream);
- });
- }
- });
+ const requestStream = new stream_1.PassThrough();
+ // // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ // let responseStream;
+ // requestStream.once('reading', () => {
+ // if (responseStream) {
+ // responseStream.pipe(requestStream);
+ // }
+ // else {
+ // requestStream.once('response', () => {
+ // responseStream.pipe(requestStream);
+ // });
+ // }
+ // });
options.compress = false;
teenyRequest.stats.requestStarting();
(0, node_fetch_1.default)(uri, options).then(res => {
- teenyRequest.stats.requestFinished();
- responseStream = res.body;
- responseStream.on('error', (err) => {
- requestStream.emit('error', err);
- });
+ teenyRequest.stats.requestFinished(); stream_1.pipeline(res.body, requestStream, () => {});
+ // responseStream = res.body;
+ // responseStream.on('error', (err) => {
+ // requestStream.emit('error', err);
+ // });
const response = fetchToRequestResponse(options, res);
requestStream.emit('response', response);
}, err => {

View File

@@ -0,0 +1 @@
The patches in this folder are applied by `patch-package` to dependencies, particularly those needing changes that are difficult to apply upstream.
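
In practice each patch here is produced by editing the dependency in place under node_modules and running `npx patch-package <package-name>`; the `postinstall` script (triggered by the Dockerfile's `npm run postinstall` above) then replays them on every install.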

View File

@@ -0,0 +1,44 @@
diff --git a/node_modules/body-parser/lib/read.js b/node_modules/body-parser/lib/read.js
index fce6283..6131c31 100644
--- a/node_modules/body-parser/lib/read.js
+++ b/node_modules/body-parser/lib/read.js
@@ -18,7 +18,7 @@ var iconv = require('iconv-lite')
var onFinished = require('on-finished')
var unpipe = require('unpipe')
var zlib = require('zlib')
-
+var Stream = require('stream')
/**
* Module exports.
*/
@@ -166,25 +166,25 @@ function contentstream (req, debug, inflate) {
case 'deflate':
stream = zlib.createInflate()
debug('inflate body')
- req.pipe(stream)
+ // req.pipe(stream)
break
case 'gzip':
stream = zlib.createGunzip()
debug('gunzip body')
- req.pipe(stream)
+ // req.pipe(stream)
break
case 'identity':
stream = req
stream.length = length
- break
+ return req
default:
throw createError(415, 'unsupported content encoding "' + encoding + '"', {
encoding: encoding,
type: 'encoding.unsupported'
})
}
-
- return stream
+ var pass = new Stream.PassThrough(); Stream.pipeline(req, stream, pass, () => {})
+ return pass
}
/**

View File

@@ -0,0 +1,13 @@
diff --git a/node_modules/express/node_modules/finalhandler/index.js b/node_modules/express/node_modules/finalhandler/index.js
index f628e42..72f17d6 100644
--- a/node_modules/express/node_modules/finalhandler/index.js
+++ b/node_modules/express/node_modules/finalhandler/index.js
@@ -125,7 +125,7 @@ function finalhandler (req, res, options) {
// cannot actually respond
if (headersSent(res)) {
debug('cannot %d after headers sent', status)
- req.socket.destroy()
+ if (req.socket) req.socket.destroy()
return
}

View File

@@ -0,0 +1,57 @@
diff --git a/node_modules/express/node_modules/send/index.js b/node_modules/express/node_modules/send/index.js
index 89afd7e..de56daf 100644
--- a/node_modules/express/node_modules/send/index.js
+++ b/node_modules/express/node_modules/send/index.js
@@ -789,29 +789,29 @@ SendStream.prototype.stream = function stream (path, options) {
// pipe
var stream = fs.createReadStream(path, options)
this.emit('stream', stream)
- stream.pipe(res)
-
- // cleanup
- function cleanup () {
- destroy(stream, true)
- }
-
- // response finished, cleanup
- onFinished(res, cleanup)
-
- // error handling
- stream.on('error', function onerror (err) {
- // clean up stream early
- cleanup()
-
- // error
- self.onStatError(err)
- })
-
- // end
- stream.on('end', function onend () {
- self.emit('end')
- })
+ Stream.pipeline(stream, res, err => { if (err) { self.onStatError(err) } else { self.emit('end') } })
+
+ // // cleanup
+ // function cleanup () {
+ // destroy(stream, true)
+ // }
+ //
+ // // response finished, cleanup
+ // onFinished(res, cleanup)
+ //
+ // // error handling
+ // stream.on('error', function onerror (err) {
+ // // clean up stream early
+ // cleanup()
+ //
+ // // error
+ // self.onStatError(err)
+ // })
+ //
+ // // end
+ // stream.on('end', function onend () {
+ // self.emit('end')
+ // })
}
/**

View File

@@ -0,0 +1,13 @@
diff --git a/node_modules/finalhandler/index.js b/node_modules/finalhandler/index.js
index 5673507..40f4684 100644
--- a/node_modules/finalhandler/index.js
+++ b/node_modules/finalhandler/index.js
@@ -125,7 +125,7 @@ function finalhandler (req, res, options) {
// cannot actually respond
if (headersSent(res)) {
debug('cannot %d after headers sent', status)
- req.socket.destroy()
+ if (req.socket) req.socket.destroy()
return
}

View File

@@ -0,0 +1,13 @@
diff --git a/node_modules/forwarded/index.js b/node_modules/forwarded/index.js
index b2b6bdd..75e6254 100644
--- a/node_modules/forwarded/index.js
+++ b/node_modules/forwarded/index.js
@@ -46,7 +46,7 @@ function forwarded (req) {
function getSocketAddr (req) {
return req.socket
? req.socket.remoteAddress
- : req.connection.remoteAddress
+ : req.connection && req.connection.remoteAddress
}
/**

View File

@@ -0,0 +1,9 @@
diff --git a/node_modules/ngcomponent/index.ts b/node_modules/ngcomponent/index.ts
index 5fe33c5..8e1c6fc 100644
--- a/node_modules/ngcomponent/index.ts
+++ b/node_modules/ngcomponent/index.ts
@@ -1,3 +1,4 @@
+// @ts-nocheck
import { IChangesObject } from 'angular'
import assign = require('lodash/assign')
import mapValues = require('lodash/mapValues')

View File

@@ -0,0 +1,76 @@
diff --git a/node_modules/node-fetch/lib/index.js b/node_modules/node-fetch/lib/index.js
index e5b04f1..8c80924 100644
--- a/node_modules/node-fetch/lib/index.js
+++ b/node_modules/node-fetch/lib/index.js
@@ -545,8 +545,8 @@ function clone(instance) {
// tee instance body
p1 = new PassThrough();
p2 = new PassThrough();
- body.pipe(p1);
- body.pipe(p2);
+ Stream.pipeline(body, p1, () => {});
+ Stream.pipeline(body, p2, () => {});
// set instance body to teed body and return the other teed body
instance[INTERNALS].body = p1;
body = p2;
@@ -648,14 +648,14 @@ function writeToStream(dest, instance) {
// body is null
dest.end();
} else if (isBlob(body)) {
- body.stream().pipe(dest);
+ Stream.pipeline(body.stream(), dest, () => {});
} else if (Buffer.isBuffer(body)) {
// body is buffer
dest.write(body);
dest.end();
} else {
// body is stream
- body.pipe(dest);
+ Stream.pipeline(body, dest, () => {});
}
}
@@ -1594,7 +1594,7 @@ function fetch(url, opts) {
res.once('end', function () {
if (signal) signal.removeEventListener('abort', abortAndFinalize);
});
- let body = res.pipe(new PassThrough$1());
+ let body = new PassThrough$1(); setTimeout(() => Stream.pipeline(res, body, (err) => { if (err) req.abort() }), 0); // Note: let the call-site attach event handler to "body" before we start streaming.
const response_options = {
url: request.url,
@@ -1635,7 +1635,7 @@ function fetch(url, opts) {
// for gzip
if (codings == 'gzip' || codings == 'x-gzip') {
- body = body.pipe(zlib.createGunzip(zlibOptions));
+ const bodyGzip = zlib.createGunzip(zlibOptions); Stream.pipeline(body, bodyGzip, () => {}); body = bodyGzip;
response = new Response(body, response_options);
resolve(response);
return;
@@ -1645,13 +1645,13 @@ function fetch(url, opts) {
if (codings == 'deflate' || codings == 'x-deflate') {
// handle the infamous raw deflate response from old servers
// a hack for old IIS and Apache servers
- const raw = res.pipe(new PassThrough$1());
+ const raw = new PassThrough$1(); setTimeout(() => Stream.pipeline(res, raw, () => {}), 0); // Note: delay piping into "raw" until we start piping into "body".
raw.once('data', function (chunk) {
// see http://stackoverflow.com/questions/37519828
if ((chunk[0] & 0x0F) === 0x08) {
- body = body.pipe(zlib.createInflate());
+ const bodyDeflate = zlib.createInflate(); Stream.pipeline(body, bodyDeflate, () => {}); body = bodyDeflate;
} else {
- body = body.pipe(zlib.createInflateRaw());
+ const bodyDeflate = zlib.createInflateRaw(); Stream.pipeline(body, bodyDeflate, () => {}); body = bodyDeflate;
}
response = new Response(body, response_options);
resolve(response);
@@ -1661,7 +1661,7 @@ function fetch(url, opts) {
// for br
if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
- body = body.pipe(zlib.createBrotliDecompress());
+ const bodyBrotli = zlib.createBrotliDecompress(); Stream.pipeline(body, bodyBrotli, () => {}); body = bodyBrotli;
response = new Response(body, response_options);
resolve(response);
return;

Some files were not shown because too many files have changed in this diff