first commit

commit ff9c54d5e4
2025-04-24 13:11:28 +08:00
5960 changed files with 834111 additions and 0 deletions

server-ce/.editorconfig Normal file

@@ -0,0 +1,9 @@
root = true
[*]
charset = utf-8
indent_style = space
indent_size = 2
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

server-ce/.eslintrc Normal file

@@ -0,0 +1,25 @@
{
  "extends": [
    "eslint:recommended",
    "standard",
    "prettier"
  ],
  "plugins": [
    "unicorn"
  ],
  "parserOptions": {
    "ecmaVersion": 2020
  },
  "env": {
    "node": true
  },
  "rules": {
    // Do not allow importing of implicit dependencies.
    "import/no-extraneous-dependencies": "error",
    "unicorn/prefer-node-protocol": "error"
  },
  "overrides": [
    // Extra rules for Cypress tests
    { "files": ["**/*.spec.ts"], "extends": ["plugin:cypress/recommended"] }
  ]
}

server-ce/Dockerfile Normal file

@@ -0,0 +1,143 @@
# ---------------------------------------------
# Overleaf Community Edition (overleaf/overleaf)
# ---------------------------------------------
ARG OVERLEAF_BASE_TAG=sharelatex/sharelatex-base:arm64
FROM $OVERLEAF_BASE_TAG
WORKDIR /overleaf
# Add required source files
# -------------------------
ADD server-ce/genScript.js /overleaf/genScript.js
ADD server-ce/services.js /overleaf/services.js
ADD package.json package-lock.json /overleaf/
ADD libraries/ /overleaf/libraries/
ADD services/ /overleaf/services/
# Add npm patches
# -----------------------
ADD patches/ /overleaf/patches
# Install npm dependencies and build webpack assets
# ------------------------
RUN --mount=type=cache,target=/root/.cache \
--mount=type=cache,target=/root/.npm \
--mount=type=cache,target=/overleaf/services/web/node_modules/.cache,id=server-ce-webpack-cache \
--mount=type=tmpfs,target=/tmp true \
&& node genScript install | bash \
&& node genScript compile | bash
# Copy runit service startup scripts to their location
# --------------------------------------------------
ADD server-ce/runit /etc/service
# Copy runit global settings to its location
# ------------------------------------------
ADD server-ce/config/env.sh /etc/overleaf/env.sh
# Configure nginx
# ---------------
ADD server-ce/nginx/nginx.conf.template /etc/nginx/templates/nginx.conf.template
ADD server-ce/nginx/overleaf.conf /etc/nginx/sites-enabled/overleaf.conf
ADD server-ce/nginx/clsi-nginx.conf /etc/nginx/sites-enabled/clsi-nginx.conf
# Configure log rotation
# ----------------------
ADD server-ce/logrotate/overleaf /etc/logrotate.d/overleaf
RUN chmod 644 /etc/logrotate.d/overleaf
# Configure cron tasks
# ----------------------
ADD server-ce/cron /overleaf/cron
ADD server-ce/config/crontab-history /etc/cron.d/crontab-history
RUN chmod 600 /etc/cron.d/crontab-history
ADD server-ce/config/crontab-deletion /etc/cron.d/crontab-deletion
RUN chmod 600 /etc/cron.d/crontab-deletion
# Copy Phusion Image startup and shutdown scripts to their locations
# ------------------------------------------------------------------
COPY server-ce/init_scripts/ /etc/my_init.d/
COPY server-ce/init_preshutdown_scripts/ /etc/my_init.pre_shutdown.d/
# Copy app settings files
# -----------------------
COPY server-ce/config/settings.js /etc/overleaf/settings.js
# Copy history-v1 files
# -----------------------
COPY server-ce/config/production.json /overleaf/services/history-v1/config/production.json
COPY server-ce/config/custom-environment-variables.json /overleaf/services/history-v1/config/custom-environment-variables.json
# Copy grunt thin wrapper
# -----------------------
ADD server-ce/bin/grunt /usr/local/bin/grunt
RUN chmod +x /usr/local/bin/grunt
# Fix error with envsubst
# -----------------------
RUN rm -rf /etc/apt/sources.list.d/ubuntu.sources
COPY server-ce/ubuntu.sources /etc/apt/sources.list.d/ubuntu.sources
RUN apt-get update \
&& apt-get install -y \
gettext-base
# Install full texlive
RUN apt-get install -y texlive-full
# Install pygments for minted
RUN apt-get install -y python3-pygments
# Node fails to start when this native module is present
# -----------------------
RUN find / -name 'crc32c.node' -type f -delete
# Copy history helper scripts
# ---------------------------
ADD server-ce/bin/flush-history-queues /overleaf/bin/flush-history-queues
RUN chmod +x /overleaf/bin/flush-history-queues
ADD server-ce/bin/force-history-resyncs /overleaf/bin/force-history-resyncs
RUN chmod +x /overleaf/bin/force-history-resyncs
# Copy Latexmkrc
# -----------------------
COPY server-ce/config/latexmkrc /usr/local/share/latexmk/LatexMk
# File that controls open|closed status of the site
# -------------------------------------------------
ENV SITE_MAINTENANCE_FILE="/etc/overleaf/site_status"
RUN touch $SITE_MAINTENANCE_FILE
# Set Environment Variables
# --------------------------------
ENV OVERLEAF_CONFIG=/etc/overleaf/settings.js
ENV WEB_API_USER="overleaf"
ENV ADMIN_PRIVILEGE_AVAILABLE="true"
ENV OVERLEAF_APP_NAME="Overleaf Community Edition"
ENV OPTIMISE_PDF="true"
# Phusion Image timeouts before sending SIGKILL to processes
# ----------------------------------------------------------
ENV KILL_PROCESS_TIMEOUT=55
ENV KILL_ALL_PROCESSES_TIMEOUT=55
ENV GRACEFUL_SHUTDOWN_DELAY_SECONDS=1
ENV NODE_ENV="production"
ENV LOG_LEVEL="info"
EXPOSE 80
ENTRYPOINT ["/sbin/my_init"]
# Store the revision
# ------------------
# This should be the last step to optimize docker image caching.
ARG MONOREPO_REVISION
RUN echo "monorepo-server-ce,$MONOREPO_REVISION" > /var/www/revisions.txt

server-ce/Dockerfile-base Normal file

@@ -0,0 +1,91 @@
# --------------------------------------------------
# Overleaf Base Image (sharelatex/sharelatex-base)
# --------------------------------------------------
FROM phusion-baseimage:offline
# Makes sure LuaTex cache is writable
# -----------------------------------
ENV TEXMFVAR=/var/lib/overleaf/tmp/texmf-var
# Bump this date to force a rebuild with refreshed dependencies
# ------------------------------------------
ENV REBUILT_AFTER="2025-03-27"
# Install dependencies
# --------------------
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
# Technically, we are using potentially stale package-lists with the below line.
# Practically, apt refreshes the lists as needed and release builds run in fresh CI VMs without the cache.
--mount=type=cache,target=/var/lib/apt/lists,sharing=locked true \
# Enable caching: https://docs.docker.com/reference/dockerfile/#example-cache-apt-packages
&& rm -f /etc/apt/apt.conf.d/docker-clean && echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache \
&& apt-get update \
&& apt-get install -y \
unattended-upgrades \
build-essential wget net-tools unzip time imagemagick optipng strace nginx git python3 python-is-python3 zlib1g-dev libpcre3-dev gettext-base libwww-perl ca-certificates curl gnupg \
qpdf texlive-base latexmk texlive-extra-utils \
# upgrade base-image, batch all the upgrades together, rather than installing them one-by-one (which is slow!)
&& unattended-upgrade --verbose --no-minimal-upgrade-steps \
# install Node.js https://github.com/nodesource/distributions#nodejs
&& mkdir -p /etc/apt/keyrings \
&& curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg \
&& echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list \
&& apt-get update \
&& apt-get install -y nodejs \
\
&& rm -rf \
# We are adding a custom nginx config in the main Dockerfile.
/etc/nginx/nginx.conf \
/etc/nginx/sites-enabled/default
# Install TexLive
# ---------------
# CTAN mirrors occasionally fail; in that case, install TexLive using a
# different server, for example https://ctan.crest.fr
#
# # docker build \
# --build-arg TEXLIVE_MIRROR=https://ctan.crest.fr/tex-archive/systems/texlive/tlnet \
# -f Dockerfile-base -t sharelatex/sharelatex-base .
# ARG TEXLIVE_MIRROR=https://mirror.ox.ac.uk/sites/ctan.org/systems/texlive/tlnet
# RUN mkdir /install-tl-unx \
# && wget --quiet https://tug.org/texlive/files/texlive.asc \
# && gpg --import texlive.asc \
# && rm texlive.asc \
# && wget --quiet ${TEXLIVE_MIRROR}/install-tl-unx.tar.gz \
# && wget --quiet ${TEXLIVE_MIRROR}/install-tl-unx.tar.gz.sha512 \
# && wget --quiet ${TEXLIVE_MIRROR}/install-tl-unx.tar.gz.sha512.asc \
# && gpg --verify install-tl-unx.tar.gz.sha512.asc \
# && sha512sum -c install-tl-unx.tar.gz.sha512 \
# && tar -xz -C /install-tl-unx --strip-components=1 -f install-tl-unx.tar.gz \
# && rm install-tl-unx.tar.gz* \
# && echo "tlpdbopt_autobackup 0" >> /install-tl-unx/texlive.profile \
# && echo "tlpdbopt_install_docfiles 0" >> /install-tl-unx/texlive.profile \
# && echo "tlpdbopt_install_srcfiles 0" >> /install-tl-unx/texlive.profile \
# && echo "selected_scheme scheme-basic" >> /install-tl-unx/texlive.profile \
# \
# && /install-tl-unx/install-tl \
# -profile /install-tl-unx/texlive.profile \
# -repository ${TEXLIVE_MIRROR} \
# \
# && $(find /usr/local/texlive -name tlmgr) path add \
# && tlmgr install --repository ${TEXLIVE_MIRROR} \
# latexmk \
# texcount \
# synctex \
# etoolbox \
# xetex \
# && tlmgr path add \
# && rm -rf /install-tl-unx
# Set up overleaf user and home directory
# -----------------------------------------
RUN adduser --system --group --home /overleaf --no-create-home overleaf && \
mkdir -p /var/lib/overleaf && \
chown www-data:www-data /var/lib/overleaf && \
mkdir -p /var/log/overleaf && \
chown www-data:www-data /var/log/overleaf && \
mkdir -p /var/lib/overleaf/data/template_files && \
chown www-data:www-data /var/lib/overleaf/data/template_files

server-ce/Makefile Normal file

@@ -0,0 +1,60 @@
# Makefile

MONOREPO_ROOT := ../
HERE=$(shell pwd)

export MONOREPO_REVISION := $(shell git rev-parse HEAD)
export BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
export OVERLEAF_BASE_BRANCH ?= sharelatex/sharelatex-base:$(BRANCH_NAME)
export OVERLEAF_BASE_LATEST ?= sharelatex/sharelatex-base
export OVERLEAF_BASE_TAG ?= sharelatex/sharelatex-base:arm64
export OVERLEAF_BRANCH ?= sharelatex/sharelatex:$(BRANCH_NAME)
export OVERLEAF_LATEST ?= sharelatex/sharelatex
export OVERLEAF_TAG ?= sharelatex/sharelatex:arm64

all: build-base build-community

build-base:
	cp .dockerignore $(MONOREPO_ROOT)
	docker build \
		--build-arg BUILDKIT_INLINE_CACHE=1 \
		--progress=plain \
		--file Dockerfile-base \
		--tag $(OVERLEAF_BASE_TAG) \
		--tag $(OVERLEAF_BASE_BRANCH) \
		$(MONOREPO_ROOT)

build-community:
	cp .dockerignore $(MONOREPO_ROOT)
	docker build \
		--build-arg BUILDKIT_INLINE_CACHE=1 \
		--progress=plain \
		--build-arg OVERLEAF_BASE_TAG \
		--build-arg MONOREPO_REVISION \
		--file Dockerfile \
		--tag $(OVERLEAF_TAG) \
		--tag $(OVERLEAF_BRANCH) \
		$(MONOREPO_ROOT)

SHELLCHECK_OPTS = \
	--shell=bash \
	--external-sources \
	--exclude=SC1091
SHELLCHECK_COLOR := $(if $(CI),--color=never,--color)
SHELLCHECK_FILES := { git ls-files "*.sh" -z; git grep -Plz "\A\#\!.*bash"; } | sort -zu

shellcheck:
	@$(SHELLCHECK_FILES) | xargs -0 -r docker run --rm -v $(HERE):/mnt -w /mnt \
		koalaman/shellcheck:stable $(SHELLCHECK_OPTS) $(SHELLCHECK_COLOR)

shellcheck_fix:
	@$(SHELLCHECK_FILES) | while IFS= read -r -d '' file; do \
		diff=$$(docker run --rm -v $(HERE):/mnt -w /mnt koalaman/shellcheck:stable $(SHELLCHECK_OPTS) --format=diff "$$file" 2>/dev/null); \
		if [ -n "$$diff" ] && ! echo "$$diff" | patch -p1 >/dev/null 2>&1; then echo "\033[31m$$file\033[0m"; \
		elif [ -n "$$diff" ]; then echo "$$file"; \
		else echo "\033[2m$$file\033[0m"; fi \
	done

.PHONY: all \
	build-base build-community \
	shellcheck shellcheck_fix

server-ce/bin/flush-history-queues Executable file

@@ -0,0 +1,8 @@
#!/bin/bash
set -euo pipefail
source /etc/container_environment.sh
source /etc/overleaf/env.sh
cd /overleaf/services/project-history
node scripts/flush_all.js 100000

server-ce/bin/force-history-resyncs Executable file

@@ -0,0 +1,8 @@
#!/bin/bash
set -euo pipefail
source /etc/container_environment.sh
source /etc/overleaf/env.sh
cd /overleaf/services/project-history
node scripts/force_resync.js 1000 force

server-ce/bin/grunt Executable file

@@ -0,0 +1,36 @@
#!/bin/bash
# Thin wrapper around old grunt tasks to ease migration.
set -e
set -x
TASK="$1"
shift 1
cd /overleaf/services/web
case "$TASK" in
user:create-admin)
echo "The grunt command is deprecated, run the create-user script using node instead"
node modules/server-ce-scripts/scripts/create-user.mjs --admin "$@"
;;
user:delete)
echo "The grunt command is deprecated, run the delete-user script using node instead"
node modules/server-ce-scripts/scripts/delete-user.mjs "$@"
;;
check:mongo)
echo "The grunt command is deprecated, run the check-mongodb script using node instead"
node modules/server-ce-scripts/scripts/check-mongodb.mjs
;;
check:redis)
echo "The grunt command is deprecated, run the check-redis script using node instead"
node modules/server-ce-scripts/scripts/check-redis.mjs
;;
*)
echo "Unknown task $TASK"
exit 1
;;
esac
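
A hypothetical invocation of this wrapper inside a running container; the task names are defined above, while the container name and the --email flag are assumptions about the deployment and about what create-user.mjs accepts:

docker exec sharelatex /usr/local/bin/grunt user:create-admin --email admin@example.com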


@@ -0,0 +1,18 @@
#!/bin/bash
set -euo pipefail
FILE=${1:-docker-compose.yml}
if [[ ! -f "$FILE" ]]; then
echo "Expected to find $FILE, are you in the wrong directory?"
exit 2
fi
BACKUP_FILE="$FILE.$(date '+%Y.%m.%d-%H.%M.%S')"
echo "Creating backup file $BACKUP_FILE"
cp "$FILE" "$BACKUP_FILE"
echo "Replacing 'SHARELATEX_' with 'OVERLEAF_' in $FILE"
sed -i "s/SHARELATEX_/OVERLEAF_/g" "$FILE"
echo "Done."

server-ce/bin/shared Symbolic link

@@ -0,0 +1 @@
../../bin/shared/


@@ -0,0 +1,77 @@
---
steps:
  - id: build_base
    name: us-east1-docker.pkg.dev/overleaf-ops/ol-docker/cloud-builder
    dir: server-ce
    args:
      - 'build-base'
  - id: tag_main_latest_base
    name: gcr.io/cloud-builders/docker
    script: |
      #!/usr/bin/env bash
      set -e
      if [ "${BRANCH_NAME}" == "main" ]; then
        docker tag ${_IMAGE_BASE} ${_IMAGE_BASE_LATEST};
        docker push ${_IMAGE_BASE_LATEST};
      fi
    automapSubstitutions: true
  - id: prefetch_ce
    name: gcr.io/cloud-builders/docker
    script: |
      #!/usr/bin/env bash
      set +e # Ignore pull failures
      docker pull ${_IMAGE_TAG_BRANCH}
      docker pull ${_IMAGE_TAG_LATEST}
      exit 0
    automapSubstitutions: true
    waitFor:
      - '-'
  - id: build_community
    name: us-east1-docker.pkg.dev/overleaf-ops/ol-docker/cloud-builder
    dir: server-ce
    args:
      - build-community
    waitFor:
      # do not wait for prefetch_ce, docker buildx will pull it as needed
      - build_base
  - id: tag_main_latest
    name: gcr.io/cloud-builders/docker
    script: |
      #!/usr/bin/env bash
      set -e
      if [ "${BRANCH_NAME}" == "main" ]; then
        docker tag ${_IMAGE_TAG} ${_IMAGE_TAG_LATEST};
        docker push ${_IMAGE_TAG_LATEST};
      fi
    automapSubstitutions: true
    waitFor:
      - build_community
timeout: 3600s
options:
  machineType: E2_HIGHCPU_32
  env:
    - 'BRANCH_NAME=${BRANCH_NAME}'
    # docker build
    - 'OVERLEAF_BASE_BRANCH=${_IMAGE_BASE_BRANCH}'
    - 'OVERLEAF_BASE_LATEST=${_IMAGE_BASE_LATEST}'
    - 'OVERLEAF_BASE_TAG=${_IMAGE_BASE}'
    - 'OVERLEAF_BRANCH=${_IMAGE_TAG_BRANCH}'
    - 'OVERLEAF_LATEST=${_IMAGE_TAG_LATEST}'
    - 'OVERLEAF_TAG=${_IMAGE_TAG}'
images:
  - '${_IMAGE_BASE}'
  - '${_IMAGE_BASE_BRANCH}'
  - '${_IMAGE_TAG}'
  - '${_IMAGE_TAG_BRANCH}'
substitutions:
  _IMAGE_BASE: 'us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-base:${BRANCH_NAME}-${SHORT_SHA}_${BUILD_ID}'
  _IMAGE_BASE_BRANCH: 'us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-base:${BRANCH_NAME}'
  _IMAGE_BASE_LATEST: 'us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-base:latest'
  _IMAGE_TAG_BRANCH: 'us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf:${BRANCH_NAME}'
  _IMAGE_TAG_LATEST: 'us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf:latest'
  _IMAGE_TAG: 'us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf:${BRANCH_NAME}-${SHORT_SHA}_${BUILD_ID}'
tags:
  - 'overleaf-public'
  - '${BRANCH_NAME}'
  - '${SHORT_SHA}'
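
Assuming this is a Google Cloud Build config (its path is not shown above), it is normally run by a build trigger that supplies BRANCH_NAME and SHORT_SHA; a manual submission sketch would look like:

gcloud builds submit \
  --config cloudbuild.yaml \
  --substitutions BRANCH_NAME=main,SHORT_SHA=abc1234 \
  .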

server-ce/config/crontab-deletion Normal file

@@ -0,0 +1,3 @@
5 * * * * root /overleaf/cron/deactivate-projects.sh >> /var/log/overleaf/cron-deactivate-projects.log 2>&1
15 * * * * root /overleaf/cron/delete-users.sh >> /var/log/overleaf/cron-delete-users.log 2>&1
20 * * * * root /overleaf/cron/delete-projects.sh >> /var/log/overleaf/cron-delete-projects.log 2>&1

server-ce/config/crontab-history Normal file

@@ -0,0 +1,3 @@
*/20 * * * * root /overleaf/cron/project-history-periodic-flush.sh >> /var/log/overleaf/cron-project-history-periodic-flush.log 2>&1
30 * * * * root /overleaf/cron/project-history-retry-soft.sh >> /var/log/overleaf/project-history-retry-soft.log 2>&1
45 * * * * root /overleaf/cron/project-history-retry-hard.sh >> /var/log/overleaf/project-history-retry-hard.log 2>&1

server-ce/config/custom-environment-variables.json Normal file

@@ -0,0 +1,61 @@
{
  "databaseUrl": "HISTORY_CONNECTION_STRING",
  "databaseUrlReadOnly": "HISTORY_FOLLOWER_CONNECTION_STRING",
  "herokuDatabaseUrl": "DATABASE_URL",
  "databasePoolMin": "DATABASE_POOL_MIN",
  "databasePoolMax": "DATABASE_POOL_MAX",
  "persistor": {
    "backend": "OVERLEAF_HISTORY_BACKEND",
    "s3": {
      "key": "OVERLEAF_HISTORY_S3_ACCESS_KEY_ID",
      "secret": "OVERLEAF_HISTORY_S3_SECRET_ACCESS_KEY",
      "maxRetries": "OVERLEAF_HISTORY_S3_MAX_RETRIES",
      "endpoint": "OVERLEAF_HISTORY_S3_ENDPOINT",
      "pathStyle": "OVERLEAF_HISTORY_S3_PATH_STYLE",
      "region": "OVERLEAF_HISTORY_S3_REGION",
      "httpOptions": {
        "timeout": "OVERLEAF_HISTORY_S3_TIMEOUT"
      }
    }
  },
  "blobStore": {
    "globalBucket": "OVERLEAF_HISTORY_BLOBS_BUCKET",
    "projectBucket": "OVERLEAF_HISTORY_PROJECT_BLOBS_BUCKET"
  },
  "chunkStore": {
    "historyStoreConcurrency": "HISTORY_STORE_CONCURRENCY",
    "bucket": "OVERLEAF_HISTORY_CHUNKS_BUCKET"
  },
  "zipStore": {
    "bucket": "OVERLEAF_HISTORY_ZIPS_BUCKET",
    "zipTimeoutMs": "ZIP_STORE_ZIP_TIMEOUT_MS"
  },
  "mongo": {
    "uri": "OVERLEAF_MONGO_URL"
  },
  "basicHttpAuth": {
    "password": "STAGING_PASSWORD",
    "oldPassword": "BASIC_HTTP_AUTH_OLD_PASSWORD"
  },
  "jwtAuth": {
    "key": "OT_JWT_AUTH_KEY",
    "oldKey": "OT_JWT_AUTH_OLD_KEY",
    "algorithm": "OT_JWT_AUTH_ALG"
  },
  "clusterWorkers": "CLUSTER_WORKERS",
  "maxFileUploadSize": "MAX_FILE_UPLOAD_SIZE",
  "httpsOnly": "HTTPS_ONLY",
  "httpRequestTimeout": "OVERLEAF_HISTORY_V1_HTTP_REQUEST_TIMEOUT",
  "redis": {
    "history": {
      "host": "OVERLEAF_REDIS_HOST",
      "password": "OVERLEAF_REDIS_PASS",
      "port": "OVERLEAF_REDIS_PORT"
    },
    "lock": {
      "host": "OVERLEAF_REDIS_HOST",
      "password": "OVERLEAF_REDIS_PASS",
      "port": "OVERLEAF_REDIS_PORT"
    }
  }
}
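
This file uses the node-config custom-environment-variables convention: each string value names an environment variable whose runtime value overrides the corresponding key in production.json (below). For example, a sketch of pointing history-v1 at S3, with purely illustrative values:

export OVERLEAF_HISTORY_BACKEND=s3
export OVERLEAF_HISTORY_S3_ACCESS_KEY_ID=AKIAEXAMPLE          # placeholder credential
export OVERLEAF_HISTORY_S3_SECRET_ACCESS_KEY=secret-example   # placeholder credential
export OVERLEAF_HISTORY_BLOBS_BUCKET=overleaf-blobs           # illustrative bucket name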

server-ce/config/env.sh Normal file

@@ -0,0 +1,13 @@
export CHAT_HOST=127.0.0.1
export CLSI_HOST=127.0.0.1
export CONTACTS_HOST=127.0.0.1
export DOCSTORE_HOST=127.0.0.1
export DOCUMENT_UPDATER_HOST=127.0.0.1
export DOCUPDATER_HOST=127.0.0.1
export FILESTORE_HOST=127.0.0.1
export HISTORY_V1_HOST=127.0.0.1
export NOTIFICATIONS_HOST=127.0.0.1
export PROJECT_HISTORY_HOST=127.0.0.1
export REALTIME_HOST=127.0.0.1
export WEB_HOST=127.0.0.1
export WEB_API_HOST=127.0.0.1

server-ce/config/latexmkrc Normal file

@@ -0,0 +1,3 @@
# equivalent to -gt option. Used to prevent latexmk from skipping recompilation
# of output.log and output.pdf
$go_mode = 3;

server-ce/config/production.json Normal file

@@ -0,0 +1,24 @@
{
  "persistor": {
    "backend": "fs",
    "useSubdirectories": true
  },
  "basicHttpAuth": {
    "password": "password"
  },
  "useDeleteObjects": "false",
  "jwtAuth": {
    "algorithm": "HS256"
  },
  "mongo": {},
  "blobStore": {
    "globalBucket": "/var/lib/overleaf/data/history/overleaf-global-blobs",
    "projectBucket": "/var/lib/overleaf/data/history/overleaf-project-blobs"
  },
  "chunkStore": {
    "bucket": "/var/lib/overleaf/data/history/overleaf-chunks"
  },
  "zipStore": {
    "bucket": "/var/lib/overleaf/data/history/overleaf-zips"
  }
}

server-ce/config/settings.js Normal file

@@ -0,0 +1,473 @@
/* eslint-disable
camelcase,
no-cond-assign,
no-dupe-keys,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS205: Consider reworking code to avoid use of IIFEs
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let redisConfig, siteUrl
let e
const Path = require('path')
// These credentials are used for authenticating api requests
// between services that may need to go over public channels
const httpAuthUser = process.env.WEB_API_USER
const httpAuthPass = process.env.WEB_API_PASSWORD
const httpAuthUsers = {}
if (httpAuthUser && httpAuthPass) {
httpAuthUsers[httpAuthUser] = httpAuthPass
}
const parse = function (option) {
if (option != null) {
try {
const opt = JSON.parse(option)
return opt
} catch (err) {
throw new Error(`problem parsing ${option}, invalid JSON`)
}
}
}
const parseIntOrFail = function (value) {
const parsedValue = parseInt(value, 10)
if (isNaN(parsedValue)) {
throw new Error(`'${value}' is an invalid integer`)
}
return parsedValue
}
const DATA_DIR = '/var/lib/overleaf/data'
const TMP_DIR = '/var/lib/overleaf/tmp'
const settings = {
clsi: {
optimiseInDocker: process.env.OPTIMISE_PDF === 'true',
},
brandPrefix: '',
allowAnonymousReadAndWriteSharing:
process.env.OVERLEAF_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING === 'true',
// Databases
// ---------
// Overleaf Community Edition's main persistent data store is MongoDB (http://www.mongodb.org/)
// Documentation about the URL connection string format can be found at:
//
// http://docs.mongodb.org/manual/reference/connection-string/
//
// The following works out of the box with Mongo's default settings:
mongo: {
url: process.env.OVERLEAF_MONGO_URL || 'mongodb://dockerhost/sharelatex',
},
// Redis is used in Overleaf Community Edition for high volume queries, like real-time
// editing, and session management.
//
// The following config will work with Redis's default settings:
redis: {
web: (redisConfig = {
host: process.env.OVERLEAF_REDIS_HOST || 'dockerhost',
port: process.env.OVERLEAF_REDIS_PORT || '6379',
password: process.env.OVERLEAF_REDIS_PASS || undefined,
key_schema: {
// document-updater
blockingKey({ doc_id }) {
return `Blocking:${doc_id}`
},
docLines({ doc_id }) {
return `doclines:${doc_id}`
},
docOps({ doc_id }) {
return `DocOps:${doc_id}`
},
docVersion({ doc_id }) {
return `DocVersion:${doc_id}`
},
docHash({ doc_id }) {
return `DocHash:${doc_id}`
},
projectKey({ doc_id }) {
return `ProjectId:${doc_id}`
},
docsInProject({ project_id }) {
return `DocsIn:${project_id}`
},
ranges({ doc_id }) {
return `Ranges:${doc_id}`
},
// document-updater:realtime
pendingUpdates({ doc_id }) {
return `PendingUpdates:${doc_id}`
},
// document-updater:history
uncompressedHistoryOps({ doc_id }) {
return `UncompressedHistoryOps:${doc_id}`
},
docsWithHistoryOps({ project_id }) {
return `DocsWithHistoryOps:${project_id}`
},
// document-updater:lock
blockingKey({ doc_id }) {
return `Blocking:${doc_id}`
},
// realtime
clientsInProject({ project_id }) {
return `clients_in_project:${project_id}`
},
connectedUser({ project_id, client_id }) {
return `connected_user:${project_id}:${client_id}`
},
},
}),
fairy: redisConfig,
// document-updater
realtime: redisConfig,
documentupdater: redisConfig,
lock: redisConfig,
history: redisConfig,
websessions: redisConfig,
api: redisConfig,
pubsub: redisConfig,
project_history: redisConfig,
project_history_migration: {
host: redisConfig.host,
port: redisConfig.port,
password: redisConfig.password,
maxRetriesPerRequest: parseInt(
process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
),
key_schema: {
projectHistoryOps({ projectId }) {
return `ProjectHistory:Ops:{${projectId}}` // NOTE: the extra braces are intentional
},
},
},
},
// Local disk caching
// ------------------
path: {
// If we ever need to write something to disk (e.g. incoming requests
// that need processing but may be too big for memory), then write
// them to disk here:
dumpFolder: Path.join(TMP_DIR, 'dumpFolder'),
// Where to write uploads before they are processed
uploadFolder: Path.join(TMP_DIR, 'uploads'),
// Where to write intermediate file for full project history migration
projectHistories: Path.join(TMP_DIR, 'projectHistories'),
// Where to write the project to disk before running LaTeX on it
compilesDir: Path.join(DATA_DIR, 'compiles'),
// Where to cache downloaded URLs for the CLSI
clsiCacheDir: Path.join(DATA_DIR, 'cache'),
// Where to write the output files to disk after running LaTeX
outputDir: Path.join(DATA_DIR, 'output'),
},
// Server Config
// -------------
// Where your instance of Overleaf Community Edition can be found publicly. This is used
// when emails are sent out and in generated links:
siteUrl: (siteUrl = process.env.OVERLEAF_SITE_URL || 'http://localhost'),
// Status page URL as displayed on the maintenance/500 pages.
statusPageUrl: process.env.OVERLEAF_STATUS_PAGE_URL,
// The name used to describe your Overleaf Community Edition installation
appName: process.env.OVERLEAF_APP_NAME || 'Overleaf Community Edition',
restrictInvitesToExistingAccounts:
process.env.OVERLEAF_RESTRICT_INVITES_TO_EXISTING_ACCOUNTS === 'true',
nav: {
title:
process.env.OVERLEAF_NAV_TITLE ||
process.env.OVERLEAF_APP_NAME ||
'Overleaf Community Edition',
},
// The email address which users will be directed to as the main point of
// contact for this installation of Overleaf Community Edition.
adminEmail: process.env.OVERLEAF_ADMIN_EMAIL || 'placeholder@example.com',
// If provided, a sessionSecret is used to sign cookies so that they cannot be
// spoofed. This is recommended.
security: {
sessionSecret:
process.env.OVERLEAF_SESSION_SECRET || process.env.CRYPTO_RANDOM,
},
csp: {
enabled: process.env.OVERLEAF_CSP_ENABLED !== 'false',
},
rateLimit: {
subnetRateLimiterDisabled:
process.env.SUBNET_RATE_LIMITER_DISABLED !== 'false',
},
// These credentials are used for authenticating api requests
// between services that may need to go over public channels
httpAuthUsers,
// Should javascript assets be served minified or not.
useMinifiedJs: true,
// Should static assets be sent with a header to tell the browser to cache
// them. This should be false in development where changes are being made,
// but should be set to true in production.
cacheStaticAssets: true,
// If you are running Overleaf Community Edition over https, set this to true to send the
// cookie with a secure flag (recommended).
secureCookie: process.env.OVERLEAF_SECURE_COOKIE != null,
// If you are running Overleaf Community Edition behind a proxy (like Apache, Nginx, etc)
// then set this to true to allow it to correctly detect the forwarded IP
// address and http/https protocol information.
behindProxy: process.env.OVERLEAF_BEHIND_PROXY || false,
trustedProxyIps: process.env.OVERLEAF_TRUSTED_PROXY_IPS,
// The amount of time, in milliseconds, until the (rolling) cookie session expires
cookieSessionLength: parseInt(
process.env.OVERLEAF_COOKIE_SESSION_LENGTH || 5 * 24 * 60 * 60 * 1000, // default 5 days
10
),
redisLockTTLSeconds: parseInt(
process.env.OVERLEAF_REDIS_LOCK_TTL_SECONDS || '60',
10
),
i18n: {
subdomainLang: {
www: {
lngCode: process.env.OVERLEAF_SITE_LANGUAGE || 'en',
url: siteUrl,
},
},
defaultLng: process.env.OVERLEAF_SITE_LANGUAGE || 'en',
},
currentImageName: process.env.TEX_LIVE_DOCKER_IMAGE,
apis: {
web: {
url: 'http://127.0.0.1:3000',
user: httpAuthUser,
pass: httpAuthPass,
},
project_history: {
sendProjectStructureOps: true,
url: 'http://127.0.0.1:3054',
},
v1_history: {
url: process.env.V1_HISTORY_URL || 'http://127.0.0.1:3100/api',
user: 'staging',
pass: process.env.STAGING_PASSWORD,
requestTimeout: parseInt(
process.env.OVERLEAF_HISTORY_V1_HTTP_REQUEST_TIMEOUT || '300000', // default is 5min
10
),
},
},
references: {},
notifications: undefined,
defaultFeatures: {
collaborators: -1,
dropbox: true,
versioning: true,
compileTimeout: parseIntOrFail(process.env.COMPILE_TIMEOUT || 180),
compileGroup: 'standard',
trackChanges: true,
references: true,
},
}
// # OPTIONAL CONFIGURABLE SETTINGS
if (process.env.OVERLEAF_LEFT_FOOTER != null) {
try {
settings.nav.left_footer = JSON.parse(process.env.OVERLEAF_LEFT_FOOTER)
} catch (error) {
e = error
console.error('could not parse OVERLEAF_LEFT_FOOTER, not valid JSON')
}
}
if (process.env.OVERLEAF_RIGHT_FOOTER != null) {
settings.nav.right_footer = process.env.OVERLEAF_RIGHT_FOOTER
try {
settings.nav.right_footer = JSON.parse(process.env.OVERLEAF_RIGHT_FOOTER)
} catch (error1) {
e = error1
console.error('could not parse OVERLEAF_RIGHT_FOOTER, not valid JSON')
}
}
if (process.env.OVERLEAF_HEADER_IMAGE_URL != null) {
settings.nav.custom_logo = process.env.OVERLEAF_HEADER_IMAGE_URL
}
if (process.env.OVERLEAF_HEADER_EXTRAS != null) {
try {
settings.nav.header_extras = JSON.parse(process.env.OVERLEAF_HEADER_EXTRAS)
} catch (error2) {
e = error2
console.error('could not parse OVERLEAF_HEADER_EXTRAS, not valid JSON')
}
}
if (process.env.OVERLEAF_LOGIN_SUPPORT_TEXT != null) {
settings.nav.login_support_text = process.env.OVERLEAF_LOGIN_SUPPORT_TEXT
}
if (process.env.OVERLEAF_LOGIN_SUPPORT_TITLE != null) {
settings.nav.login_support_title = process.env.OVERLEAF_LOGIN_SUPPORT_TITLE
}
// Sending Email
// -------------
//
// You must configure a mail server to be able to send invite emails from
// Overleaf Community Edition. The config settings are passed to nodemailer. See the nodemailer
// documentation for available options:
//
// http://www.nodemailer.com/docs/transports
if (process.env.OVERLEAF_EMAIL_FROM_ADDRESS != null) {
settings.email = {
fromAddress: process.env.OVERLEAF_EMAIL_FROM_ADDRESS,
replyTo: process.env.OVERLEAF_EMAIL_REPLY_TO || '',
driver: process.env.OVERLEAF_EMAIL_DRIVER,
parameters: {
// AWS Creds
AWSAccessKeyID: process.env.OVERLEAF_EMAIL_AWS_SES_ACCESS_KEY_ID,
AWSSecretKey: process.env.OVERLEAF_EMAIL_AWS_SES_SECRET_KEY,
// SMTP Creds
host: process.env.OVERLEAF_EMAIL_SMTP_HOST,
port: process.env.OVERLEAF_EMAIL_SMTP_PORT,
secure: parse(process.env.OVERLEAF_EMAIL_SMTP_SECURE),
ignoreTLS: parse(process.env.OVERLEAF_EMAIL_SMTP_IGNORE_TLS),
name: process.env.OVERLEAF_EMAIL_SMTP_NAME,
logger: process.env.OVERLEAF_EMAIL_SMTP_LOGGER === 'true',
},
textEncoding: process.env.OVERLEAF_EMAIL_TEXT_ENCODING,
template: {
customFooter: process.env.OVERLEAF_CUSTOM_EMAIL_FOOTER,
},
}
if (process.env.OVERLEAF_EMAIL_AWS_SES_REGION != null) {
settings.email.parameters.region = process.env.OVERLEAF_EMAIL_AWS_SES_REGION
}
if (
process.env.OVERLEAF_EMAIL_SMTP_USER != null ||
process.env.OVERLEAF_EMAIL_SMTP_PASS != null
) {
settings.email.parameters.auth = {
user: process.env.OVERLEAF_EMAIL_SMTP_USER,
pass: process.env.OVERLEAF_EMAIL_SMTP_PASS,
}
}
if (process.env.OVERLEAF_EMAIL_SMTP_TLS_REJECT_UNAUTH != null) {
settings.email.parameters.tls = {
rejectUnauthorized: parse(
process.env.OVERLEAF_EMAIL_SMTP_TLS_REJECT_UNAUTH
),
}
}
}
// i18n
if (process.env.OVERLEAF_LANG_DOMAIN_MAPPING != null) {
settings.i18n.subdomainLang = parse(process.env.OVERLEAF_LANG_DOMAIN_MAPPING)
}
// Password Settings
// -----------
// These restrict the passwords users can use when registering
// opts are from http://antelle.github.io/passfield
if (
process.env.OVERLEAF_PASSWORD_VALIDATION_PATTERN ||
process.env.OVERLEAF_PASSWORD_VALIDATION_MIN_LENGTH ||
process.env.OVERLEAF_PASSWORD_VALIDATION_MAX_LENGTH
) {
settings.passwordStrengthOptions = {
pattern: process.env.OVERLEAF_PASSWORD_VALIDATION_PATTERN || 'aA$3',
length: {
min: process.env.OVERLEAF_PASSWORD_VALIDATION_MIN_LENGTH || 8,
max: process.env.OVERLEAF_PASSWORD_VALIDATION_MAX_LENGTH || 72,
},
}
}
// /References
// -----------
if (process.env.OVERLEAF_ELASTICSEARCH_URL != null) {
settings.references.elasticsearch = {
host: process.env.OVERLEAF_ELASTICSEARCH_URL,
}
}
// filestore
switch (process.env.OVERLEAF_FILESTORE_BACKEND) {
case 's3':
settings.filestore = {
backend: 's3',
stores: {
user_files: process.env.OVERLEAF_FILESTORE_USER_FILES_BUCKET_NAME,
template_files:
process.env.OVERLEAF_FILESTORE_TEMPLATE_FILES_BUCKET_NAME,
},
s3: {
key:
process.env.OVERLEAF_FILESTORE_S3_ACCESS_KEY_ID ||
process.env.AWS_ACCESS_KEY_ID,
secret:
process.env.OVERLEAF_FILESTORE_S3_SECRET_ACCESS_KEY ||
process.env.AWS_SECRET_ACCESS_KEY,
endpoint: process.env.OVERLEAF_FILESTORE_S3_ENDPOINT,
pathStyle: process.env.OVERLEAF_FILESTORE_S3_PATH_STYLE === 'true',
region:
process.env.OVERLEAF_FILESTORE_S3_REGION ||
process.env.AWS_DEFAULT_REGION,
},
}
break
default:
settings.filestore = {
backend: 'fs',
stores: {
user_files: Path.join(DATA_DIR, 'user_files'),
template_files: Path.join(DATA_DIR, 'template_files'),
},
}
}
// With lots of incoming and outgoing HTTP connections to different services,
// sometimes long running, it is a good idea to increase the default number
// of sockets that Node will hold open.
const http = require('http')
http.globalAgent.maxSockets = 300
const https = require('https')
https.globalAgent.maxSockets = 300
module.exports = settings
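
Almost everything above is driven by OVERLEAF_* environment variables; a minimal production configuration sketch, with variable names taken from this file and purely illustrative values:

export OVERLEAF_SITE_URL='https://overleaf.example.com'
export OVERLEAF_ADMIN_EMAIL='admin@example.com'
export OVERLEAF_APP_NAME='Example Overleaf'
export OVERLEAF_SESSION_SECRET="$(openssl rand -hex 32)"
# Footer settings must be valid JSON or the parsers above log an error and ignore them.
export OVERLEAF_LEFT_FOOTER='[{"text": "Powered by Overleaf Community Edition"}]'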

server-ce/cron/deactivate-projects.sh Executable file

@@ -0,0 +1,28 @@
#!/usr/bin/env bash
set -eux
echo "-------------------------"
echo "Deactivating old projects"
echo "-------------------------"
date
ENABLE_CRON_RESOURCE_DELETION=$(cat /etc/container_environment/ENABLE_CRON_RESOURCE_DELETION)
if [[ "${ENABLE_CRON_RESOURCE_DELETION:-null}" != "true" ]]; then
echo "Skipping old project deactivation due to ENABLE_CRON_RESOURCE_DELETION not set to true"
exit 0
fi
WEB_URL='http://127.0.0.1:3000'
USER=$(cat /etc/container_environment/WEB_API_USER)
PASS=$(cat /etc/container_environment/WEB_API_PASSWORD)
curl -v -X POST \
-u "${USER}:${PASS}" \
-H "Content-Type: application/json" \
-d '{"numberOfProjectsToArchive":"720","ageOfProjects":"7"}' \
"${WEB_URL}/internal/deactivateOldProjects"
echo "Done."

server-ce/cron/delete-projects.sh Executable file

@@ -0,0 +1,25 @@
#!/usr/bin/env bash
set -eux
echo "-------------------------"
echo "Expiring deleted projects"
echo "-------------------------"
date
ENABLE_CRON_RESOURCE_DELETION=$(cat /etc/container_environment/ENABLE_CRON_RESOURCE_DELETION)
if [[ "${ENABLE_CRON_RESOURCE_DELETION:-null}" != "true" ]]; then
echo "Skipping project expiration due to ENABLE_CRON_RESOURCE_DELETION not set to true"
exit 0
fi
WEB_URL='http://127.0.0.1:3000'
USER=$(cat /etc/container_environment/WEB_API_USER)
PASS=$(cat /etc/container_environment/WEB_API_PASSWORD)
curl -X POST -v -u "${USER}:${PASS}" \
"${WEB_URL}/internal/expire-deleted-projects-after-duration"
echo "Done."

server-ce/cron/delete-users.sh Executable file

@@ -0,0 +1,25 @@
#!/usr/bin/env bash
set -eux
echo "----------------------"
echo "Expiring deleted users"
echo "----------------------"
date
ENABLE_CRON_RESOURCE_DELETION=$(cat /etc/container_environment/ENABLE_CRON_RESOURCE_DELETION)
if [[ "${ENABLE_CRON_RESOURCE_DELETION:-null}" != "true" ]]; then
echo "Skipping user expiration due to ENABLE_CRON_RESOURCE_DELETION not set to true"
exit 0
fi
WEB_URL='http://127.0.0.1:3000'
USER=$(cat /etc/container_environment/WEB_API_USER)
PASS=$(cat /etc/container_environment/WEB_API_PASSWORD)
curl -X POST -v -u "${USER}:${PASS}" \
"${WEB_URL}/internal/expire-deleted-users-after-duration"
echo "Done."

server-ce/cron/project-history-periodic-flush.sh Executable file

@@ -0,0 +1,12 @@
#!/usr/bin/env bash
set -eux
echo "--------------------------"
echo "Flush project-history queue"
echo "--------------------------"
date
PROJECT_HISTORY_URL='http://127.0.0.1:3054'
curl -X POST "${PROJECT_HISTORY_URL}/flush/old?timeout=3600000&limit=5000&background=1"

server-ce/cron/project-history-retry-hard.sh Executable file

@@ -0,0 +1,12 @@
#!/usr/bin/env bash
set -eux
echo "-----------------------------------"
echo "Retry project-history errors (hard)"
echo "-----------------------------------"
date
PROJECT_HISTORY_URL='http://127.0.0.1:3054'
curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=hard&timeout=3600000&limit=10000"

server-ce/cron/project-history-retry-soft.sh Executable file

@@ -0,0 +1,11 @@
#!/usr/bin/env bash
set -eux
echo "-----------------------------------"
echo "Retry project-history errors (soft)"
echo "-----------------------------------"
PROJECT_HISTORY_URL='http://127.0.0.1:3054'
curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=soft&timeout=3600000&limit=10000"

server-ce/genScript.js Normal file

@@ -0,0 +1,42 @@
const services = require('./services')
console.log('#!/bin/bash')
console.log('set -ex')
switch (process.argv.pop()) {
case 'install':
console.log('npm install --omit=dev')
break
case 'compile':
for (const service of services) {
console.log('pushd', `services/${service.name}`)
switch (service.name) {
case 'web':
// precompile pug in background
console.log('npm run precompile-pug &')
console.log('pug_precompile=$!')
// Avoid downloading of cypress
console.log('export CYPRESS_INSTALL_BINARY=0')
// install webpack and frontend dependencies
console.log('npm install --include=dev')
// run webpack
console.log('npm run webpack:production')
// uninstall webpack and frontend dependencies
console.log('npm install --omit=dev')
// Wait for pug precompile to finish
console.log('wait "$pug_precompile"')
break
default:
console.log(`echo ${service.name} does not require a compilation`)
}
console.log('popd')
}
break
default:
console.error('unknown command')
console.log('exit 101')
process.exit(101)
}
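
genScript.js only prints bash to stdout, which is why the Dockerfile pipes it into bash; for the install case the generated script should look like this (derived from the code above):

$ node genScript install
#!/bin/bash
set -ex
npm install --omit=dev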


@@ -0,0 +1,13 @@
FROM sharelatex/sharelatex:2.0.0
# Patch 1: Fixes project deletion (https://github.com/overleaf/overleaf/issues/644)
ADD disable_project_history.patch /etc/sharelatex/disable_project_history.patch
RUN cd /etc/sharelatex && \
patch < disable_project_history.patch
# Patch 2: Fixes admin creation via CLI (https://github.com/overleaf/overleaf/issues/647)
ADD create_and_destroy_users.patch /var/www/sharelatex/tasks/create_and_destroy_users.patch
RUN cd /var/www/sharelatex/tasks/ && \
patch < create_and_destroy_users.patch


@@ -0,0 +1,11 @@
--- CreateAndDestoryUsers.coffee
+++ CreateAndDestoryUsers.coffee
@@ -21,7 +21,7 @@ module.exports = (grunt) ->
user.save (error) ->
throw error if error?
ONE_WEEK = 7 * 24 * 60 * 60 # seconds
- OneTimeTokenHandler.getNewToken user._id, { expiresIn: ONE_WEEK }, (err, token)->
+ OneTimeTokenHandler.getNewToken "password", { expiresIn: ONE_WEEK, email:user.email, user_id: user._id.toString() }, (err, token)->
return next(err) if err?
console.log ""


@@ -0,0 +1,11 @@
--- settings.coffee
+++ settings.coffee
@@ -200,6 +200,8 @@ settings =
# is not available
v1:
url: ""
+ project_history:
+ enabled: false
references:{}
notifications:undefined


@@ -0,0 +1,60 @@
--- UploadsRouter.js
+++ UploadsRouter.js
@@ -1,13 +1,3 @@
-/* eslint-disable
- no-unused-vars,
-*/
-// TODO: This file was created by bulk-decaffeinate.
-// Fix any style issues and re-enable lint.
-/*
- * decaffeinate suggestions:
- * DS102: Remove unnecessary code created because of implicit returns
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
const AuthorizationMiddleware = require('../Authorization/AuthorizationMiddleware')
const AuthenticationController = require('../Authentication/AuthenticationController')
const ProjectUploadController = require('./ProjectUploadController')
@@ -28,18 +18,30 @@ module.exports = {
ProjectUploadController.uploadProject
)
- return webRouter.post(
- '/Project/:Project_id/upload',
- RateLimiterMiddleware.rateLimit({
- endpointName: 'file-upload',
- params: ['Project_id'],
- maxRequests: 200,
- timeInterval: 60 * 30
- }),
- AuthenticationController.requireLogin(),
- AuthorizationMiddleware.ensureUserCanWriteProjectContent,
- ProjectUploadController.multerMiddleware,
- ProjectUploadController.uploadFile
- )
+ const fileUploadEndpoint = '/Project/:Project_id/upload'
+ const fileUploadRateLimit = RateLimiterMiddleware.rateLimit({
+ endpointName: 'file-upload',
+ params: ['Project_id'],
+ maxRequests: 200,
+ timeInterval: 60 * 30
+ })
+ if (Settings.allowAnonymousReadAndWriteSharing) {
+ webRouter.post(
+ fileUploadEndpoint,
+ fileUploadRateLimit,
+ AuthorizationMiddleware.ensureUserCanWriteProjectContent,
+ ProjectUploadController.multerMiddleware,
+ ProjectUploadController.uploadFile
+ )
+ } else {
+ webRouter.post(
+ fileUploadEndpoint,
+ fileUploadRateLimit,
+ AuthenticationController.requireLogin(),
+ AuthorizationMiddleware.ensureUserCanWriteProjectContent,
+ ProjectUploadController.multerMiddleware,
+ ProjectUploadController.uploadFile
+ )
+ }
}
}


@@ -0,0 +1,11 @@
--- TokenAccessHandler.js
+++ TokenAccessHandler.js
@@ -255,7 +255,7 @@ const TokenAccessHandler = {
getV1DocPublishedInfo(token, callback) {
// default to allowing access
- if (!Settings.apis || !Settings.apis.v1) {
+ if (!Settings.apis.v1 || !Settings.apis.v1.url) {
return callback(null, { allow: true })
}
V1Api.request(


@@ -0,0 +1,11 @@
--- Features.js
+++ Features.js
@@ -53,6 +53,8 @@ module.exports = Features = {
return Settings.apis.references.url != null
case 'saml':
return Settings.enableSaml
+ case 'link-url':
+ return Settings.apis.linkedUrlProxy && Settings.apis.linkedUrlProxy.url
default:
throw new Error(`unknown feature: ${feature}`)
}


@@ -0,0 +1,20 @@
--- new-file-modal.pug
+++ new-file-modal.pug
@@ -21,11 +21,12 @@ script(type='text/ng-template', id='newFileModalTemplate')
i.fa.fa-fw.fa-folder-open
|
| From Another Project
- li(ng-class="type == 'url' ? 'active' : null")
- a(href, ng-click="type = 'url'")
- i.fa.fa-fw.fa-globe
- |
- | From External URL
+ if hasFeature('link-url')
+ li(ng-class="type == 'url' ? 'active' : null")
+ a(href, ng-click="type = 'url'")
+ i.fa.fa-fw.fa-globe
+ |
+ | From External URL
!= moduleIncludes("newFileModal:selector", locals)
td(class="modal-new-file--body modal-new-file--body-{{type}}")


@@ -0,0 +1,26 @@
--- AnalyticsController.js
+++ AnalyticsController.js
@@ -3,9 +3,13 @@ const Errors = require('../Errors/Errors')
const AuthenticationController = require('../Authentication/AuthenticationController')
const InstitutionsAPI = require('../Institutions/InstitutionsAPI')
const GeoIpLookup = require('../../infrastructure/GeoIpLookup')
+const Features = require('../../infrastructure/Features')
module.exports = {
updateEditingSession(req, res, next) {
+ if (!Features.hasFeature('analytics')) {
+ return res.send(204)
+ }
const userId = AuthenticationController.getLoggedInUserId(req)
const { projectId } = req.params
let countryCode = null
@@ -28,6 +32,9 @@ module.exports = {
},
recordEvent(req, res, next) {
+ if (!Features.hasFeature('analytics')) {
+ return res.send(204)
+ }
const userId =
AuthenticationController.getLoggedInUserId(req) || req.sessionID
AnalyticsManager.recordEvent(userId, req.params.event, req.body, error =>


@@ -0,0 +1,10 @@
--- Features.js
+++ Features.js
@@ -41,6 +41,7 @@ module.exports = Features = {
case 'templates-server-pro':
return Settings.overleaf == null
case 'affiliations':
+ case 'analytics':
// Checking both properties is needed for the time being to allow
// enabling the feature in web-api and disabling in Server Pro
// see https://github.com/overleaf/web-internal/pull/2127


@@ -0,0 +1,31 @@
FROM sharelatex/sharelatex:2.0.1
# Patch 1: Fixes anonymous link sharing
ADD 1-anon-upload.patch /var/www/sharelatex/web/app/src/Features/Uploads/1-anon-upload.patch
RUN cd /var/www/sharelatex/web/app/src/Features/Uploads/ && \
patch < 1-anon-upload.patch
# Patch 2: Fixes read-only access
ADD 2-read-only-access.patch /var/www/sharelatex/web/app/src/Features/TokenAccess/3-read-only-access.patch
RUN cd /var/www/sharelatex/web/app/src/Features/TokenAccess/ && \
patch < 3-read-only-access.patch
# Patch 3: Fixes url linking
ADD 3-url-linking-1.patch /var/www/sharelatex/web/app/src/infrastructure/6-url-linking-1.patch
RUN cd /var/www/sharelatex/web/app/src/infrastructure/ && \
patch < 6-url-linking-1.patch
ADD 4-url-linking-2.patch /var/www/sharelatex/web/app/views/project/editor/7-url-linking-2.patch
RUN cd /var/www/sharelatex/web/app/views/project/editor/ && \
patch < 7-url-linking-2.patch
# Patch 4: Disables analytics
ADD 5-disable-analytics-1.patch /var/www/sharelatex/web/app/src/Features/Analytics/8-disable-analytics-1.patch
RUN cd /var/www/sharelatex/web/app/src/Features/Analytics/ && \
patch < 8-disable-analytics-1.patch
ADD 6-disable-analytics-2.patch /var/www/sharelatex/web/app/src/infrastructure/9-disable-analytics-2.patch
RUN cd /var/www/sharelatex/web/app/src/infrastructure/ && \
patch < 9-disable-analytics-2.patch


@@ -0,0 +1,8 @@
FROM sharelatex/sharelatex:2.1.0
# Patch: defines recaptcha config to fix share-related issues
# - https://github.com/overleaf/overleaf/issues/684
ADD add-recaptcha-config.patch /etc/sharelatex/add-recaptcha-config.patch
RUN cd /etc/sharelatex/ && \
patch < add-recaptcha-config.patch


@@ -0,0 +1,14 @@
--- a/settings.coffee
+++ b/settings.coffee
@@ -180,6 +180,11 @@ settings =
# cookie with a secure flag (recommended).
secureCookie: process.env["SHARELATEX_SECURE_COOKIE"]?
+ recaptcha:
+ disabled:
+ invite: true
+ register: true
+
# If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc)
# then set this to true to allow it to correctly detect the forwarded IP
# address and http/https protocol information.


@@ -0,0 +1,7 @@
FROM sharelatex/sharelatex:2.3.0
# Patch: Fixes NPE when invoking synctex (https://github.com/overleaf/overleaf/issues/756)
ADD check-clsi-setting-exists.patch /var/www/sharelatex/clsi/app/js/check-clsi-setting-exists.patch
RUN cd /var/www/sharelatex/clsi/app/js && \
patch < check-clsi-setting-exists.patch


@@ -0,0 +1,11 @@
--- a/app/js/CompileManager.js
+++ b/app/js/CompileManager.js
@@ -536,7 +536,7 @@ module.exports = CompileManager = {
compileName,
command,
directory,
- Settings.clsi != null ? Settings.clsi.docker.image : undefined,
+ Settings.clsi && Settings.clsi.docker ? Settings.clsi.docker.image : undefined,
timeout,
{},
function(error, output) {


@@ -0,0 +1,6 @@
FROM sharelatex/sharelatex:2.4.0
# Patch: Fixes missing dependencies on web startup (https://github.com/overleaf/overleaf/issues/767)
RUN cd /var/www/sharelatex/web && \
npm install i18next@^19.6.3 i18next-fs-backend@^1.0.7 i18next-http-middleware@^3.0.2


@@ -0,0 +1,10 @@
FROM sharelatex/sharelatex:2.4.1
# Patch: Fixes anonymous read/write sharing
COPY anonymous-metadata.patch ${baseDir}
RUN cd ${baseDir} && patch -p0 < anonymous-metadata.patch
# Patch: Fixes left footer with html text
COPY left-footer-skip-translation.patch ${baseDir}
RUN cd ${baseDir} && patch -p0 < left-footer-skip-translation.patch


@@ -0,0 +1,43 @@
--- /var/www/sharelatex/web/app/src/router.js 2020-09-14 20:21:39.741433000 +0000
+++ /var/www/sharelatex/web/app/src/router.js 2020-09-14 20:13:08.000000000 +0000
@@ -607,16 +607,17 @@
ProjectDownloadsController.downloadMultipleProjects
)
+ console.log(`allowAnonymousReadAndWriteSharing: ${Settings.allowAnonymousReadAndWriteSharing}`)
webRouter.get(
'/project/:project_id/metadata',
AuthorizationMiddleware.ensureUserCanReadProject,
- AuthenticationController.requireLogin(),
+ Settings.allowAnonymousReadAndWriteSharing ? (req, res, next) => { next() } : AuthenticationController.requireLogin(),
MetaController.getMetadata
- )
+ )
webRouter.post(
'/project/:project_id/doc/:doc_id/metadata',
AuthorizationMiddleware.ensureUserCanReadProject,
- AuthenticationController.requireLogin(),
+ Settings.allowAnonymousReadAndWriteSharing ? (req, res, next) => { next() } : AuthenticationController.requireLogin(),
MetaController.broadcastMetadataForDoc
)
privateApiRouter.post(
--- /var/www/sharelatex/web/app/src/Features/Contacts/ContactRouter.js 2020-09-14 20:21:52.243779000 +0000
+++ /var/www/sharelatex/web/app/src/Features/Contacts/ContactRouter.js 2020-09-14 20:13:08.000000000 +0000
@@ -5,6 +5,8 @@
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
+const Settings = require('settings-sharelatex')
+
const AuthenticationController = require('../Authentication/AuthenticationController')
const ContactController = require('./ContactController')
@@ -12,7 +14,7 @@
apply(webRouter, apiRouter) {
return webRouter.get(
'/user/contacts',
- AuthenticationController.requireLogin(),
+ Settings.allowAnonymousReadAndWriteSharing ? (req, res, next) => { next() } : AuthenticationController.requireLogin(),
ContactController.getContacts
)
}


@@ -0,0 +1,12 @@
--- /var/www/sharelatex/web/app/views/layout/footer.pug
+++ /var/www/sharelatex/web/app/views/layout/footer.pug
@@ -32,7 +32,7 @@ footer.site-footer
if item.url
a(href=item.url, class=item.class) !{translate(item.text)}
else
- | !{translate(item.text)}
+ | !{item.text}
ul.col-md-3.text-right


@@ -0,0 +1,13 @@
FROM sharelatex/sharelatex:2.5.0
# Patch #826: Fixes log path for contacts service to be picked up by logrotate
COPY contacts-run.patch /etc/service/contacts-sharelatex
RUN cd /etc/service/contacts-sharelatex && patch < contacts-run.patch
# Patch #826: delete old logs for the contacts service
COPY delete-old-logs.patch /etc/my_init.d
RUN cd /etc/my_init.d && patch < delete-old-logs.patch \
&& chmod +x /etc/my_init.d/10_delete_old_logs.sh
# Patch #827: fix logrotate file permissions
RUN chmod 644 /etc/logrotate.d/sharelatex


@@ -0,0 +1,8 @@
--- a/run
+++ b/run
@@ -7,4 +7,4 @@ if [ "$DEBUG_NODE" == "true" ]; then
NODE_PARAMS="--inspect=0.0.0.0:30360"
fi
-exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/contacts/app.js >> /var/log/sharelatex/contacts 2>&1
+exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/contacts/app.js >> /var/log/sharelatex/contacts.log 2>&1


@@ -0,0 +1,10 @@
--- /dev/null
+++ b/10_delete_old_logs.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+set -e
+
+# Up to version 2.5.0 the logs of the contacts service were written into a
+# file that was not picked up by logrotate.
+# The service is stable and we can safely discard any logs.
+rm -vf /var/log/sharelatex/contacts


@@ -0,0 +1,29 @@
const Settings = require('settings-sharelatex')
const mongojs = require('mongojs')
const db = mongojs(Settings.mongo.url, ['tokens'])
// eslint-disable-next-line import/no-extraneous-dependencies
const async = require('async')
exports.migrate = (client, done) => {
console.log(`>> Updating 'data.email' to lower case in tokens`)
db.tokens.find({}, { 'data.email': 1 }, (err, tokens) => {
if (err) {
return done(err)
}
async.eachSeries(
tokens,
(token, callback) => {
db.tokens.update(
{ _id: token._id },
{ $set: { 'data.email': token.data.email.toLowerCase() } },
callback
)
},
done
)
})
}
exports.rollback = (client, done) => done()


@@ -0,0 +1,8 @@
FROM sharelatex/sharelatex:2.5.1
# Patch: fixes registration token creation
COPY create-token-lowercase-email.patch ${baseDir}
RUN cd ${baseDir} && patch -p0 < create-token-lowercase-email.patch
# Migration for tokens with invalid email addresses
ADD 12_update_token_email.js /var/www/sharelatex/migrations/12_update_token_email.js


@@ -0,0 +1,11 @@
--- /var/www/sharelatex/web/app/src/Features/User/UserRegistrationHandler.js
+++ /var/www/sharelatex/web/app/src/Features/User/UserRegistrationHandler.js
@@ -122,7 +122,7 @@ const UserRegistrationHandler = {
const ONE_WEEK = 7 * 24 * 60 * 60 // seconds
OneTimeTokenHandler.getNewToken(
'password',
- { user_id: user._id.toString(), email },
+ { user_id: user._id.toString(), email: user.email },
{ expiresIn: ONE_WEEK },
(err, token) => {
if (err != null) {


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:2.6.0-RC1
# Patch: fixes project restore inserting a bad projectId into deletedFiles
COPY document-deleter-object-id.patch ${baseDir}
RUN cd ${baseDir} && patch -p0 < document-deleter-object-id.patch


@@ -0,0 +1,10 @@
--- /var/www/sharelatex/web/app/src/Features/Project/ProjectDeleter.js
+++ /var/www/sharelatex/web/app/src/Features/Project/ProjectDeleter.js
@@ -278,6 +278,7 @@ async function deleteProject(projectId, options = {}) {
}
async function undeleteProject(projectId, options = {}) {
+ projectId = ObjectId(projectId)
let deletedProject = await DeletedProject.findOne({
'deleterData.deletedProjectId': projectId
}).exec()


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:2.6.1
# Patch: fixes overleaf.com onboarding email being sent in CE/SP
COPY onboarding-email.patch ${baseDir}
RUN cd ${baseDir} && patch -p0 < onboarding-email.patch


@@ -0,0 +1,25 @@
--- /var/www/sharelatex/web/app/src/Features/User/UserCreator.js
+++ /var/www/sharelatex/web/app/src/Features/User/UserCreator.js
@@ -85,13 +85,15 @@ async function createNewUser(attributes, options = {}) {
}
Analytics.recordEvent(user._id, 'user-registered')
- try {
- await UserOnboardingEmailQueueManager.scheduleOnboardingEmail(user)
- } catch (error) {
- logger.error(
- `Failed to schedule sending of onboarding email for user '${user._id}'`,
- error
- )
+ if(Features.hasFeature('saas')) {
+ try {
+ await UserOnboardingEmailQueueManager.scheduleOnboardingEmail(user)
+ } catch (error) {
+ logger.error(
+ `Failed to schedule sending of onboarding email for user '${user._id}'`,
+ error
+ )
+ }
}
return user


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:2.7.0
# Patch: removes the unprotected /disconnectAllUsers endpoint
COPY remove-disconnect-endpoint.patch .
RUN patch -p0 < remove-disconnect-endpoint.patch


@@ -0,0 +1,14 @@
--- /var/www/sharelatex/web/app/src/router.js
+++ /var/www/sharelatex/web/app/src/router.js
@@ -995,11 +995,6 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) {
AdminController.unregisterServiceWorker
)
- privateApiRouter.post(
- '/disconnectAllUsers',
- AdminController.disconnectAllUsers
- )
-
privateApiRouter.get('/perfTest', (req, res) => res.send('hello'))
publicApiRouter.get('/status', (req, res) => {


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.0.0
# Patch: removes the unprotected /disconnectAllUsers endpoint
COPY remove-disconnect-endpoint.patch .
RUN patch -p0 < remove-disconnect-endpoint.patch


@@ -0,0 +1,14 @@
--- /var/www/sharelatex/web/app/src/router.js
+++ /var/www/sharelatex/web/app/src/router.js
@@ -995,11 +995,6 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) {
AdminController.unregisterServiceWorker
)
- privateApiRouter.post(
- '/disconnectAllUsers',
- AdminController.disconnectAllUsers
- )
-
privateApiRouter.get('/perfTest', (req, res) => res.send('hello'))
publicApiRouter.get('/status', (req, res) => {


@@ -0,0 +1,11 @@
FROM sharelatex/sharelatex:3.1.0
# Patch: fixes Sharelatex History navigation
# https://github.com/overleaf/overleaf/issues/1035
COPY fix-history-navigation.patch .
RUN patch -p0 < fix-history-navigation.patch
# Rebuild client
# --------------
RUN node genScript compile | bash


@@ -0,0 +1,16 @@
--- services/web/frontend/js/ide/history/controllers/HistoryListController.js
+++ services/web/frontend/js/ide/history/controllers/HistoryListController.js
@@ -62,7 +62,12 @@ App.controller('HistoryListController', function ($scope, $modal, ide) {
return (() => {
const result = []
for (const update of Array.from($scope.history.updates)) {
- let inSelection
+
+ // replacing this declaration with `let` introduces a bug in history point selection:
+ // https://github.com/overleaf/overleaf/issues/1035
+ // eslint-disable-next-line no-var
+ var inSelection
+
if (update.selectedTo) {
inSelection = true
beforeSelection = false


@@ -0,0 +1,6 @@
FROM sharelatex/sharelatex:3.2.0
# Patch: fixes the broken source editor
# https://github.com/overleaf/overleaf/issues/1043
COPY disable-codemirror.patch .
RUN patch -p0 < disable-codemirror.patch


@@ -0,0 +1,15 @@
--- services/web/app/src/Features/Project/ProjectController.js
+++ services/web/app/src/Features/Project/ProjectController.js
@@ -1134,11 +1134,7 @@ const ProjectController = {
detachRole = req.params.detachRole
}
- const showNewSourceEditorOption =
- (newSourceEditorAssignment &&
- newSourceEditorAssignment.variant === 'codemirror') ||
- user.betaProgram ||
- shouldDisplayFeature('new_source_editor', false) // also allow override via ?new_source_editor=true
+ const showNewSourceEditorOption = false // disabled in CE/SP (Hotfix 3.2.1)
const showSymbolPalette =
!Features.hasFeature('saas') ||


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.2.1
# Fixes compilation errors in embedded compiles
# https://github.com/overleaf/overleaf/issues/1044
ENV PATH="${PATH}:/usr/local/texlive/2022/bin/x86_64-linux"


@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.3.0
# Patch: add migration for convert_archived_state script
COPY pr_10442.patch .
RUN patch -p0 < pr_10442.patch


@@ -0,0 +1,132 @@
--- services/web/scripts/convert_archived_state.js
+++ services/web/scripts/convert_archived_state.js
@@ -6,62 +6,77 @@
const { promiseMapWithLimit } = require('../app/src/util/promises')
// $ node scripts/convert_archived_state.js FIRST,SECOND
-const STAGE = process.argv.pop()
-async function main() {
- if (STAGE.includes('FIRST')) {
- await batchedUpdate(
- 'projects',
- { archived: false },
- {
- $set: { archived: [] },
- }
- )
+async function main(STAGE) {
+ for (const FIELD of ['archived', 'trashed']) {
+ if (STAGE.includes('FIRST')) {
+ await batchedUpdate(
+ 'projects',
+ { [FIELD]: false },
+ {
+ $set: { [FIELD]: [] },
+ }
+ )
- console.error('Done, with first part')
- }
+ console.error('Done, with first part for field:', FIELD)
+ }
- if (STAGE.includes('SECOND')) {
- await batchedUpdate('projects', { archived: true }, performUpdate, {
- _id: 1,
- owner_ref: 1,
- collaberator_refs: 1,
- readOnly_refs: 1,
- tokenAccessReadAndWrite_refs: 1,
- tokenAccessReadOnly_refs: 1,
- })
+ if (STAGE.includes('SECOND')) {
+ await batchedUpdate(
+ 'projects',
+ { [FIELD]: true },
+ async function performUpdate(collection, nextBatch) {
+ await promiseMapWithLimit(
+ WRITE_CONCURRENCY,
+ nextBatch,
+ async project => {
+ try {
+ await upgradeFieldToArray({ collection, project, FIELD })
+ } catch (err) {
+ console.error(project._id, err)
+ throw err
+ }
+ }
+ )
+ },
+ {
+ _id: 1,
+ owner_ref: 1,
+ collaberator_refs: 1,
+ readOnly_refs: 1,
+ tokenAccessReadAndWrite_refs: 1,
+ tokenAccessReadOnly_refs: 1,
+ }
+ )
- console.error('Done, with second part')
+ console.error('Done, with second part for field:', FIELD)
+ }
}
}
-main()
- .then(() => {
- process.exit(0)
- })
- .catch(error => {
- console.error({ error })
- process.exit(1)
- })
-
-async function performUpdate(collection, nextBatch) {
- await promiseMapWithLimit(WRITE_CONCURRENCY, nextBatch, project =>
- setArchived(collection, project)
- )
+module.exports = main
+
+if (require.main === module) {
+ main(process.argv.pop())
+ .then(() => {
+ process.exit(0)
+ })
+ .catch(error => {
+ console.error({ error })
+ process.exit(1)
+ })
}
-async function setArchived(collection, project) {
- const archived = calculateArchivedArray(project)
-
+async function upgradeFieldToArray({ collection, project, FIELD }) {
return collection.updateOne(
{ _id: project._id },
{
- $set: { archived },
+ $set: { [FIELD]: getAllUserIds(project) },
}
)
}
-function calculateArchivedArray(project) {
+function getAllUserIds(project) {
return _.unionWith(
[project.owner_ref],
project.collaberator_refs,
--- /dev/null
+++ services/web/migrations/20221111111111_ce_sp_convert_archived_state.js
@@ -0,0 +1,9 @@
+const runScript = require('../scripts/convert_archived_state')
+
+exports.tags = ['server-ce', 'server-pro']
+
+exports.migrate = async () => {
+ await runScript('FIRST,SECOND')
+}
+
+exports.rollback = async () => {}
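
For reference, the net effect of the two stages on a project document, with illustrative field values (not real data):

// before FIRST:  { archived: false, trashed: false }
// after FIRST:   { archived: [], trashed: [] }
// before SECOND: { archived: true, owner_ref: 'u1', collaberator_refs: ['u2'] }
// after SECOND:  { archived: ['u1', 'u2'], ... } // union of all user id fields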

View File

@@ -0,0 +1,6 @@
FROM sharelatex/sharelatex:3.5.0
# Patch: fix German locales
COPY fix_de_locales.patch .
RUN patch -p0 < fix_de_locales.patch
RUN node genScript compile | bash

View File

@@ -0,0 +1,10 @@
--- services/web/locales/de.json
+++ services/web/locales/de.json
@@ -348,7 +348,6 @@
"edit_dictionary_empty": "Dein benutzerdefiniertes Wörterbuch ist leer.",
"edit_dictionary_remove": "Aus Wörterbuch entfernen",
"editing": "Bearbeitung",
- "editor_and_pdf": "Editor &amp; PDF",
"editor_disconected_click_to_reconnect": "Editor wurde getrennt",
"editor_only_hide_pdf": "Nur Editor <0>(PDF ausblenden)</0>",
"editor_resources": "Editor-Literatur",

View File

@@ -0,0 +1,9 @@
FROM sharelatex/sharelatex:3.5.9
# Patch: clear invites and invite tokens through the websocket
COPY pr_13427.patch .
RUN patch -p0 < pr_13427.patch
# Patch: https://github.com/Automattic/mongoose/commit/f1efabf350522257364aa5c2cb36e441cf08f1a2
COPY mongoose_proto.patch .
RUN patch -p0 < mongoose_proto.patch

View File

@@ -0,0 +1,12 @@
--- node_modules/mongoose/lib/document.js
+++ node_modules/mongoose/lib/document.js
@@ -689,6 +689,10 @@ function init(self, obj, doc, opts, prefix) {
function _init(index) {
i = keys[index];
+ // avoid prototype pollution
+ if (i === '__proto__' || i === 'constructor') {
+ return;
+ }
path = prefix + i;
schema = self.$__schema.path(path);
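
The guard exists because assigning through a '__proto__' key mutates the prototype chain instead of adding an own property. A minimal illustration, unrelated to mongoose internals:

const doc = {}
doc['__proto__'] = { polluted: true }
console.log(doc.polluted) // true, inherited via the prototype chain
console.log(Object.prototype.hasOwnProperty.call(doc, 'polluted')) // false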

View File

@@ -0,0 +1,92 @@
--- services/web/app/src/Features/Editor/EditorHttpController.js
+++ services/web/app/src/Features/Editor/EditorHttpController.js
@@ -73,6 +73,7 @@ async function joinProject(req, res, next) {
if (isRestrictedUser) {
project.owner = { _id: project.owner._id }
project.members = []
+ project.invites = []
}
// Only show the 'renamed or deleted' message once
if (project.deletedByExternalDataSource) {
--- services/web/app/src/Features/Project/ProjectEditorHandler.js
+++ services/web/app/src/Features/Project/ProjectEditorHandler.js
@@ -48,19 +48,13 @@
deletedDocsFromDocstore
),
members: [],
- invites,
+ invites: this.buildInvitesView(invites),
imageName:
project.imageName != null
? Path.basename(project.imageName)
: undefined,
}
- if (result.invites == null) {
- result.invites = []
- }
- result.invites.forEach(invite => {
- delete invite.token
- })
;({ owner, ownerFeatures, members } =
this.buildOwnerAndMembersViews(members))
result.owner = owner
@@ -99,7 +93,7 @@
let owner = null
let ownerFeatures = null
const filteredMembers = []
- for (const member of Array.from(members || [])) {
+ for (const member of members || []) {
if (member.privilegeLevel === 'owner') {
ownerFeatures = member.user.features
owner = this.buildUserModelView(member.user, 'owner')
@@ -128,24 +122,15 @@
},
buildFolderModelView(folder) {
- let file
const fileRefs = _.filter(folder.fileRefs || [], file => file != null)
return {
_id: folder._id,
name: folder.name,
- folders: Array.from(folder.folders || []).map(childFolder =>
+ folders: (folder.folders || []).map(childFolder =>
this.buildFolderModelView(childFolder)
),
- fileRefs: (() => {
- const result = []
- for (file of Array.from(fileRefs)) {
- result.push(this.buildFileModelView(file))
- }
- return result
- })(),
- docs: Array.from(folder.docs || []).map(doc =>
- this.buildDocModelView(doc)
- ),
+ fileRefs: fileRefs.map(file => this.buildFileModelView(file)),
+ docs: (folder.docs || []).map(doc => this.buildDocModelView(doc)),
}
},
@@ -164,4 +149,21 @@
name: doc.name,
}
},
+
+ buildInvitesView(invites) {
+ if (invites == null) {
+ return []
+ }
+ return invites.map(invite =>
+ _.pick(invite, [
+ '_id',
+ 'createdAt',
+ 'email',
+ 'expires',
+ 'privileges',
+ 'projectId',
+ 'sendingUserId',
+ ])
+ )
+ },
}
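
The new buildInvitesView relies on lodash's _.pick to whitelist fields, so the invite token can never reach the editor payload. In isolation (illustrative invite object):

const _ = require('lodash')
const invite = { _id: 1, email: 'a@example.com', token: 'secret' }
console.log(_.pick(invite, ['_id', 'email'])) // => { _id: 1, email: 'a@example.com' }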

View File

@@ -0,0 +1,8 @@
FROM sharelatex/sharelatex:3.5.10
# Patch: Drop the old history collections and increase mongo query timeout
ADD clean_sl_history_data.js /overleaf/services/web/scripts/history/clean_sl_history_data.js
# Patch: convert large deleted docs to files
COPY pr_14200.patch .
RUN patch -p0 < pr_14200.patch

View File

@@ -0,0 +1,70 @@
// Increase default mongo query timeout from 1min to 1h
process.env.MONGO_SOCKET_TIMEOUT = process.env.MONGO_SOCKET_TIMEOUT || '3600000'
const { waitForDb, db } = require('../../app/src/infrastructure/mongodb')
async function main() {
await checkAllProjectsAreMigrated()
await setAllowDowngradeToFalse()
await deleteHistoryCollections()
console.log('Legacy history data cleaned up successfully')
process.exit(0)
}
async function checkAllProjectsAreMigrated() {
console.log('checking all projects are migrated to Full Project History')
const count = await db.projects.countDocuments({
'overleaf.history.display': { $ne: true },
})
if (count === 0) {
console.log('All projects are migrated to Full Project History')
} else {
console.error(
`There are ${count} projects that are not migrated to Full Project History,` +
` please complete the migration before running this script again.`
)
process.exit(1)
}
}
async function setAllowDowngradeToFalse() {
console.log('unsetting `allowDowngrade` flag in all projects')
await db.projects.updateMany(
{
'overleaf.history.id': { $exists: true },
'overleaf.history.allowDowngrade': true,
},
{ $unset: { 'overleaf.history.allowDowngrade': 1 } }
)
console.log('unsetting `allowDowngrade` flag in all projects - Done')
}
async function deleteHistoryCollections() {
await gracefullyDropCollection(db.docHistory)
await gracefullyDropCollection(db.docHistoryIndex)
await gracefullyDropCollection(db.projectHistoryMetaData)
}
async function gracefullyDropCollection(collection) {
const collectionName = collection.collectionName
console.log(`removing \`${collectionName}\` data`)
try {
await collection.drop()
} catch (err) {
if (err.code === 26) {
// collection already deleted
console.log(`removing \`${collectionName}\` data - Already removed`)
} else {
throw err
}
}
console.log(`removing \`${collectionName}\` data - Done`)
}
waitForDb()
.then(main)
.catch(err => {
console.error(err)
process.exit(1)
})
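
Error code 26 is MongoDB's NamespaceNotFound, raised when dropping a collection that does not exist. The same guard with the raw driver would look roughly like this (sketch, assuming a connected `db` handle):

try {
  await db.collection('docHistory').drop()
} catch (err) {
  if (err.code !== 26) throw err // 26 = NamespaceNotFound: already dropped
}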

View File

@@ -0,0 +1,95 @@
--- services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js
+++ services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js
@@ -1,6 +1,9 @@
+const _ = require('lodash')
+const fs = require('fs')
const { ReadPreference, ObjectId } = require('mongodb')
const { db } = require('../../../../app/src/infrastructure/mongodb')
const Settings = require('@overleaf/settings')
+const logger = require('@overleaf/logger')
const ProjectHistoryHandler = require('../../../../app/src/Features/Project/ProjectHistoryHandler')
const HistoryManager = require('../../../../app/src/Features/History/HistoryManager')
@@ -8,6 +11,8 @@ const ProjectHistoryController = require('./ProjectHistoryController')
const ProjectEntityHandler = require('../../../../app/src/Features/Project/ProjectEntityHandler')
const ProjectEntityUpdateHandler = require('../../../../app/src/Features/Project/ProjectEntityUpdateHandler')
const DocumentUpdaterHandler = require('../../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler')
+const { Doc } = require('../../../../app/src/models/Doc')
+const FileWriter = require('../../../../app/src/infrastructure/FileWriter')
// Timestamp of when 'Enable history for SL in background' release
const ID_WHEN_FULL_PROJECT_HISTORY_ENABLED =
@@ -340,9 +345,33 @@ async function anyDocHistoryIndexExists(project) {
)
}
+async function convertDeletedDocToFile(projectId, docId, userId, source, doc) {
+ // write the doc to a temporary file and upload to filestore
+ const tmpFilePath = await FileWriter.promises.writeLinesToDisk(
+ projectId,
+ doc.lines
+ )
+ await ProjectEntityUpdateHandler.promises.upsertFileWithPath(
+ projectId,
+ `/_deleted/${docId}/${doc.name}`,
+ tmpFilePath,
+ null,
+ userId,
+ source
+ )
+ // hard delete the original doc, otherwise it will get picked up again
+ // by readDeletedDocs in ProjectHistoryController and the final
+ // resync of the history will fail.
+ await db.docs.deleteOne({ _id: docId })
+ await db.docOps.deleteOne({ doc_id: docId })
+ // clean up the temporary file
+ await fs.promises.unlink(tmpFilePath)
+}
+
async function convertLargeDocsToFile(projectId, userId) {
- const docs = await ProjectEntityHandler.promises.getAllDocs(projectId)
let convertedDocCount = 0
+ const docs = await ProjectEntityHandler.promises.getAllDocs(projectId)
+ // Convert large docs to files
for (const doc of Object.values(docs)) {
const sizeBound = JSON.stringify(doc.lines)
if (docIsTooLarge(sizeBound, doc.lines, Settings.max_doc_length)) {
@@ -355,6 +384,39 @@ async function convertLargeDocsToFile(projectId, userId) {
convertedDocCount++
}
}
+ // Convert deleted docs to files; these cannot be converted by
+ // ProjectEntityUpdateHandler, so we do it manually
+ const docsCursor = Doc.find({
+ project_id: ObjectId(projectId),
+ })
+ .lean()
+ .cursor()
+ for await (const doc of docsCursor) {
+ // check whether the doc is present in the filetree instead of
+ // relying on the deletedAt property
+ const docExistsInFiletree = _.find(docs, existingDoc =>
+ existingDoc._id.equals(doc._id)
+ )
+ if (docExistsInFiletree || doc.inS3) {
+ continue
+ }
+ const sizeBound = JSON.stringify(doc.lines)
+ if (docIsTooLarge(sizeBound, doc.lines, Settings.max_doc_length)) {
+ const docId = doc._id.toString()
+ if (!_.isEmpty(doc.ranges)) {
+ throw new Error(`found too large deleted doc with ranges: ${docId}`)
+ }
+ logger.warn({ projectId, docId }, 'converting large deleted doc')
+ await convertDeletedDocToFile(
+ projectId,
+ doc._id,
+ userId,
+ 'history-migration',
+ doc
+ )
+ convertedDocCount++
+ }
+ }
return convertedDocCount
}

View File

@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.5.11
# Patch: fix matching version when rewinding history
COPY fix-matching-version-error.patch .
RUN patch -p0 < fix-matching-version-error.patch

View File

@@ -0,0 +1,22 @@
--- services/track-changes/app/js/ZipManager.js
+++ services/track-changes/app/js/ZipManager.js
@@ -95,6 +95,19 @@ async function rewindDoc(projectId, docId, zipfile) {
continue
}
+ if (previousUpdate && update.v >= previousUpdate.v) {
+ logger.warn(
+ {
+ projectId,
+ docId,
+ previousUpdateVersion: previousUpdate.v,
+ updateVersion: update.v,
+ },
+ 'adjusting version for update with matching version'
+ )
+ update.v = previousUpdate.v - 1
+ }
+
const updatePath = `${id}/updates/${update.v}`
try {

View File

@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.5.12
# Patch: fix soft history retry in cron job
COPY history_soft_retry.patch .
RUN patch -p0 < history_soft_retry.patch

View File

@@ -0,0 +1,8 @@
--- cron/project-history-retry-soft.sh
+++ cron/project-history-retry-soft.sh
@@ -8,4 +8,4 @@ echo "-----------------------------------"
PROJECT_HISTORY_URL='http://localhost:3054'
-curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=hard&timeout=3600000&limit=10000"
+curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=soft&timeout=3600000&limit=10000"

View File

@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.5.1
# Patch: improvements to history migration script
COPY migrate_history_fixes.patch .
RUN patch -p0 < migrate_history_fixes.patch

View File

@@ -0,0 +1,92 @@
--- services/track-changes/app/js/DiffGenerator.js
+++ services/track-changes/app/js/DiffGenerator.js
@@ -63,6 +63,7 @@ module.exports = DiffGenerator = {
if (p > max_p) {
logger.warn({ max_p, p }, 'truncating position to content length')
p = max_p
+ op.p = p // fix out of range offsets to avoid invalid history exports in ZipManager
}
const textToBeRemoved = content.slice(p, p + op.i.length)
@@ -74,6 +75,9 @@ module.exports = DiffGenerator = {
return content.slice(0, p) + content.slice(p + op.i.length)
} else if (op.d != null) {
+ if (op.p > content.length) {
+ op.p = content.length // fix out of range offsets to avoid invalid history exports in ZipManager
+ }
return content.slice(0, op.p) + op.d + content.slice(op.p)
} else {
return content
--- services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js
+++ services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js
@@ -107,6 +107,15 @@ async function upgradeProject(project, options) {
if (!upgradeFn) {
return { error: 'unsupported history type' }
}
+ if (options.forceClean) {
+ try {
+ const projectId = project._id
+ // delete any existing history stored in the mongo backend
+ await HistoryManager.promises.deleteProject(projectId, projectId)
+ } catch (err) {
+ // failed to delete existing history, but we can try to continue
+ }
+ }
const result = await upgradeFn(project, options)
result.historyType = historyType
return result
--- services/web/scripts/history/migrate_history.js
+++ services/web/scripts/history/migrate_history.js
@@ -2,6 +2,25 @@
process.env.MONGO_SOCKET_TIMEOUT =
parseInt(process.env.MONGO_SOCKET_TIMEOUT, 10) || 3600000
+const fs = require('fs')
+
+if (fs.existsSync('/etc/container_environment.json')) {
+ try {
+ const envData = JSON.parse(
+ fs.readFileSync('/etc/container_environment.json', 'utf8')
+ )
+ for (const [key, value] of Object.entries(envData)) {
+ process.env[key] = value
+ }
+ } catch (err) {
+ console.error(
+ 'cannot read /etc/container_environment.json, the script needs to be run as root',
+ err
+ )
+ process.exit(1)
+ }
+}
+
const VERSION = '0.9.0-cli'
const {
countProjects,
@@ -11,7 +30,6 @@ const {
} = require('../../modules/history-migration/app/src/HistoryUpgradeHelper')
const { waitForDb } = require('../../app/src/infrastructure/mongodb')
const minimist = require('minimist')
-const fs = require('fs')
const util = require('util')
const pLimit = require('p-limit')
const logger = require('@overleaf/logger')
@@ -34,6 +52,7 @@ const argv = minimist(process.argv.slice(2), {
'use-query-hint',
'retry-failed',
'archive-on-failure',
+ 'force-clean',
],
string: ['output', 'user-id'],
alias: {
@@ -168,6 +187,7 @@ async function migrateProjects(projectsToMigrate) {
convertLargeDocsToFile: argv['convert-large-docs-to-file'],
userId: argv['user-id'],
reason: VERSION,
+ forceClean: argv['force-clean'],
}
async function _migrateProject(project) {
if (INTERRUPT) {
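
The DiffGenerator hunks at the top of this patch clamp out-of-range offsets in place, so the operation later serialized into the history export stays within bounds. The idea in isolation (illustrative, not the real module):

function reinsertDeletedText(content, op) {
  if (op.p > content.length) {
    op.p = content.length // clamp the offset recorded in the export
  }
  return content.slice(0, op.p) + op.d + content.slice(op.p)
}
const op = { p: 99, d: 'X' }
reinsertDeletedText('abc', op)
console.log(op.p) // => 3, a valid offset for the exported operation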

View File

@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.5.2
# Patch: remove stats collection from history migration script
COPY remove_stats_collection.patch .
RUN patch -p0 < remove_stats_collection.patch

View File

@@ -0,0 +1,16 @@
--- services/web/scripts/history/migrate_history.js
+++ services/web/scripts/history/migrate_history.js
@@ -110,14 +110,6 @@ async function findProjectsToMigrate() {
process.exit(1)
}
- // Find the total number of history records for the projects we need to migrate
- let docHistoryCount = 0
- for await (const project of projectsToMigrate) {
- const count = await countDocHistory({ project_id: project._id })
- docHistoryCount += count
- }
-
- console.log('Total history records to migrate:', docHistoryCount)
return projectsToMigrate
}

View File

@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.5.3
# Patch: run primary email check in SaaS only
COPY primary_email_check_saas.patch .
RUN patch -p0 < primary_email_check_saas.patch

View File

@@ -0,0 +1,10 @@
--- services/web/app/src/Features/Project/ProjectController.js
+++ services/web/app/src/Features/Project/ProjectController.js
@@ -535,6 +535,7 @@ const ProjectController = {
if (
user &&
+ Features.hasFeature('saas') &&
UserPrimaryEmailCheckHandler.requiresPrimaryEmailCheck(user)
) {
return res.redirect('/user/emails/primary-email-check')

View File

@@ -0,0 +1,7 @@
FROM sharelatex/sharelatex:3.5.4
# Patch: fix shutdown sequence: flush document-updater before history services.
RUN cd /etc/my_init.pre_shutdown.d \
&& mv 02_flush_document_updater 01_flush_document_updater \
&& mv 01_flush_project_history 02_flush_project_history \
&& mv 01_flush_track_changes 02_flush_track_changes

View File

@@ -0,0 +1,8 @@
FROM sharelatex/sharelatex:3.5.5
# Patch: support trustProxyIps in Overleaf Community Edition/Server Pro
COPY trusted_proxy_ips.patch .
RUN patch -p0 --directory=/etc/sharelatex < trusted_proxy_ips.patch
# Patch: add script to cleanup legacy history data
ADD clean_sl_history_data.js /overleaf/services/web/scripts/history/clean_sl_history_data.js

View File

@@ -0,0 +1,60 @@
const { waitForDb, db } = require('../../app/src/infrastructure/mongodb')
async function main() {
await checkAllProjectsAreMigrated()
await setAllowDowngradeToFalse()
await deleteHistoryCollections()
console.log('Legacy history data cleaned up successfully')
process.exit(0)
}
async function checkAllProjectsAreMigrated() {
console.log('checking all projects are migrated to Full Project History')
const count = await db.projects.countDocuments({
'overleaf.history.display': { $ne: true },
})
if (count === 0) {
console.log('All projects are migrated to Full Project History')
} else {
console.error(
`There are ${count} projects that are not migrated to Full Project History,` +
` please complete the migration before running this script again.`
)
process.exit(1)
}
}
async function setAllowDowngradeToFalse() {
console.log('unsetting `allowDowngrade` flag in all projects')
await db.projects.updateMany(
{
'overleaf.history.id': { $exists: true },
'overleaf.history.allowDowngrade': true,
},
{ $unset: { 'overleaf.history.allowDowngrade': 1 } }
)
console.log('unsetting `allowDowngrade` flag in all projects - Done')
}
async function deleteHistoryCollections() {
console.log('removing `docHistory` data')
await db.docHistory.deleteMany({})
console.log('removing `docHistory` data - Done')
console.log('removing `docHistoryIndex` data')
await db.docHistoryIndex.deleteMany({})
console.log('removing `docHistoryIndex` data - Done')
console.log('removing `projectHistoryMetaData` data')
await db.projectHistoryMetaData.deleteMany({})
console.log('removing `projectHistoryMetaData` data - Done')
}
waitForDb()
.then(main)
.catch(err => {
console.error(err)
process.exit(1)
})

View File

@@ -0,0 +1,10 @@
--- settings.js
+++ settings.js
@@ -245,6 +245,7 @@ const settings = {
// address and http/https protocol information.
behindProxy: process.env.SHARELATEX_BEHIND_PROXY || false,
+ trustedProxyIps: process.env.SHARELATEX_TRUSTED_PROXY_IPS,
i18n: {
subdomainLang: {
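
A comma-separated trusted-proxy list is typically handed to Express's standard trust proxy setting. A minimal sketch assuming that wiring (the env variable name comes from the patch; how the setting is consumed downstream is an assumption):

const express = require('express')
const app = express()
const trustedProxyIps = process.env.SHARELATEX_TRUSTED_PROXY_IPS // e.g. '10.0.0.1,10.0.0.2'
if (trustedProxyIps) {
  app.set('trust proxy', trustedProxyIps.split(',')) // req.ip then honours X-Forwarded-For
}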

View File

@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.5.6
# Patch: clean up history id on `migrate_history.js --force-clean`
COPY force_clean_fix.patch .
RUN patch -p0 < force_clean_fix.patch

View File

@@ -0,0 +1,40 @@
--- services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js
+++ services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js
@@ -115,6 +115,11 @@ async function upgradeProject(project, options) {
const projectId = project._id
// delete any existing history stored in the mongo backend
await HistoryManager.promises.deleteProject(projectId, projectId)
+ // unset overleaf.history.id to prevent the migration script from failing on checks
+ await db.projects.updateOne(
+ { _id: projectId },
+ { $unset: { 'overleaf.history.id': '' } }
+ )
} catch (err) {
// failed to delete existing history, but we can try to continue
}
--- services/web/scripts/history/migrate_history.js
+++ services/web/scripts/history/migrate_history.js
@@ -147,7 +147,7 @@ async function migrateProjects(projectsToMigrate) {
}
// send log output for each migration to a file
const output = fs.createWriteStream(argv.output, { flags: 'a' })
- console.log(`Writing log output to ${argv.output}`)
+ console.log(`Writing log output to ${process.cwd()}/${argv.output}`)
const logger = new console.Console({ stdout: output })
function logJson(obj) {
logger.log(JSON.stringify(obj))
@@ -253,8 +253,12 @@ async function main() {
console.log('Projects migrated: ', projectsMigrated)
console.log('Projects failed: ', projectsFailed)
if (projectsFailed > 0) {
- console.log(`Log output written to ${argv.output}`)
- console.log('Please check the log for errors.')
+ console.log('------------------------------------------------------')
+ console.log(`Log output written to ${process.cwd()}/${argv.output}`)
+ console.log(
+ 'Please check the log for errors. Attach the content of the file when contacting support.'
+ )
+ console.log('------------------------------------------------------')
}
if (INTERRUPT) {
console.log('Migration interrupted, please run again to continue.')

View File

@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:3.5.7
# Patch: fixes anonymous edits breaking history
COPY pr_13574.patch .
RUN patch -p0 < pr_13574.patch

View File

@@ -0,0 +1,22 @@
--- services/project-history/app/js/UpdateTranslator.js
+++ services/project-history/app/js/UpdateTranslator.js
@@ -73,9 +73,18 @@ function _convertToChange(projectId, updateWithBlob) {
throw error
}
+ let v2Authors
+ if (update.meta.user_id === 'anonymous-user') {
+ // history-v1 uses null to represent an anonymous author
+ v2Authors = [null]
+ } else {
+ // user_id is missing on resync operations that update the contents of a doc
+ v2Authors = _.compact([update.meta.user_id])
+ }
+
const rawChange = {
operations,
- v2Authors: _.compact([update.meta.user_id]),
+ v2Authors,
timestamp: new Date(update.meta.ts).toISOString(),
projectVersion,
v2DocVersions: Object.keys(v2DocVersions).length ? v2DocVersions : null,
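
The branch matters because _.compact only removes falsy entries; the string 'anonymous-user' would survive and be sent to history-v1 as a bogus author id. Illustrative values:

const _ = require('lodash')
console.log(_.compact([undefined]))        // => [] (resync ops without user_id)
console.log(_.compact(['anonymous-user'])) // => ['anonymous-user'] (the old bug)
// history-v1 expects [null] for anonymous authors, hence the explicit branch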

View File

@@ -0,0 +1,10 @@
FROM sharelatex/sharelatex:3.5.8
# Node update
RUN curl -sSL https://deb.nodesource.com/setup_16.x | bash - \
&& apt-get install -y nodejs
# Patch: fetch access tokens via endpoint
COPY pr_13485.patch .
RUN patch -p0 < pr_13485.patch
RUN node genScript compile | bash

View File

@@ -0,0 +1,389 @@
--- services/web/app/src/Features/Collaborators/CollaboratorsController.js
+++ services/web/app/src/Features/Collaborators/CollaboratorsController.js
@@ -11,6 +11,7 @@ const Errors = require('../Errors/Errors')
const logger = require('@overleaf/logger')
const { expressify } = require('../../util/promises')
const { hasAdminAccess } = require('../Helpers/AdminAuthorizationHelper')
+const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler')
module.exports = {
removeUserFromProject: expressify(removeUserFromProject),
@@ -18,6 +19,7 @@ module.exports = {
getAllMembers: expressify(getAllMembers),
setCollaboratorInfo: expressify(setCollaboratorInfo),
transferOwnership: expressify(transferOwnership),
+ getShareTokens: expressify(getShareTokens),
}
async function removeUserFromProject(req, res, next) {
@@ -114,3 +116,37 @@ async function _removeUserIdFromProject(projectId, userId) {
)
await TagsHandler.promises.removeProjectFromAllTags(userId, projectId)
}
+
+async function getShareTokens(req, res) {
+ const projectId = req.params.Project_id
+ const userId = SessionManager.getLoggedInUserId(req.session)
+
+ let tokens
+ if (userId) {
+ tokens = await CollaboratorsGetter.promises.getPublicShareTokens(
+ ObjectId(userId),
+ ObjectId(projectId)
+ )
+ } else {
+ // anonymous access, the token is already available in the session
+ const readOnly = TokenAccessHandler.getRequestToken(req, projectId)
+ tokens = { readOnly }
+ }
+ if (!tokens) {
+ return res.sendStatus(403)
+ }
+
+ if (tokens.readOnly || tokens.readAndWrite) {
+ logger.info(
+ {
+ projectId,
+ userId: userId || 'anonymous',
+ ip: req.ip,
+ tokens: Object.keys(tokens),
+ },
+ 'project tokens accessed'
+ )
+ }
+
+ res.json(tokens)
+}
--- services/web/app/src/Features/Collaborators/CollaboratorsGetter.js
+++ services/web/app/src/Features/Collaborators/CollaboratorsGetter.js
@@ -25,6 +25,7 @@ module.exports = {
getInvitedCollaboratorCount: callbackify(getInvitedCollaboratorCount),
getProjectsUserIsMemberOf: callbackify(getProjectsUserIsMemberOf),
isUserInvitedMemberOfProject: callbackify(isUserInvitedMemberOfProject),
+ getPublicShareTokens: callbackify(getPublicShareTokens),
userIsTokenMember: callbackify(userIsTokenMember),
getAllInvitedMembers: callbackify(getAllInvitedMembers),
promises: {
@@ -37,6 +38,7 @@ module.exports = {
getInvitedCollaboratorCount,
getProjectsUserIsMemberOf,
isUserInvitedMemberOfProject,
+ getPublicShareTokens,
userIsTokenMember,
getAllInvitedMembers,
},
@@ -133,6 +135,40 @@ async function isUserInvitedMemberOfProject(userId, projectId) {
return false
}
+async function getPublicShareTokens(userId, projectId) {
+ const memberInfo = await Project.findOne(
+ {
+ _id: projectId,
+ },
+ {
+ isOwner: { $eq: ['$owner_ref', userId] },
+ hasTokenReadOnlyAccess: {
+ $and: [
+ { $in: [userId, '$tokenAccessReadOnly_refs'] },
+ { $eq: ['$publicAccesLevel', PublicAccessLevels.TOKEN_BASED] },
+ ],
+ },
+ tokens: 1,
+ }
+ )
+ .lean()
+ .exec()
+
+ if (!memberInfo) {
+ return null
+ }
+
+ if (memberInfo.isOwner) {
+ return memberInfo.tokens
+ } else if (memberInfo.hasTokenReadOnlyAccess) {
+ return {
+ readOnly: memberInfo.tokens.readOnly,
+ }
+ } else {
+ return {}
+ }
+}
+
async function getProjectsUserIsMemberOf(userId, fields) {
const limit = pLimit(2)
const [readAndWrite, readOnly, tokenReadAndWrite, tokenReadOnly] =
--- services/web/app/src/Features/Collaborators/CollaboratorsRouter.js
+++ services/web/app/src/Features/Collaborators/CollaboratorsRouter.js
@@ -22,6 +22,10 @@ const rateLimiters = {
points: 200,
duration: 60 * 10,
}),
+ getProjectTokens: new RateLimiter('get-project-tokens', {
+ points: 200,
+ duration: 60 * 10,
+ }),
}
module.exports = {
@@ -139,5 +143,12 @@ module.exports = {
CollaboratorsInviteController.acceptInvite,
AnalyticsRegistrationSourceMiddleware.clearSource()
)
+
+ webRouter.get(
+ '/project/:Project_id/tokens',
+ RateLimiterMiddleware.rateLimit(rateLimiters.getProjectTokens),
+ AuthorizationMiddleware.ensureUserCanReadProject,
+ CollaboratorsController.getShareTokens
+ )
},
}
--- services/web/app/src/Features/Editor/EditorController.js
+++ services/web/app/src/Features/Editor/EditorController.js
@@ -581,20 +581,7 @@ const EditorController = {
{ newAccessLevel }
)
if (newAccessLevel === PublicAccessLevels.TOKEN_BASED) {
- ProjectDetailsHandler.ensureTokensArePresent(
- projectId,
- function (err, tokens) {
- if (err) {
- return callback(err)
- }
- EditorRealTimeController.emitToRoom(
- projectId,
- 'project:tokens:changed',
- { tokens }
- )
- callback()
- }
- )
+ ProjectDetailsHandler.ensureTokensArePresent(projectId, callback)
} else {
callback()
}
--- services/web/app/src/Features/Editor/EditorHttpController.js
+++ services/web/app/src/Features/Editor/EditorHttpController.js
@@ -67,8 +67,6 @@ async function joinProject(req, res, next) {
if (!project) {
return res.sendStatus(403)
}
- // Hide access tokens if this is not the project owner
- TokenAccessHandler.protectTokens(project, privilegeLevel)
// Hide sensitive data if the user is restricted
if (isRestrictedUser) {
project.owner = { _id: project.owner._id }
--- services/web/app/src/Features/Project/ProjectController.js
+++ services/web/app/src/Features/Project/ProjectController.js
@@ -343,7 +343,7 @@ const ProjectController = {
const userId = SessionManager.getLoggedInUserId(req.session)
ProjectGetter.findAllUsersProjects(
userId,
- 'name lastUpdated publicAccesLevel archived trashed owner_ref tokens',
+ 'name lastUpdated publicAccesLevel archived trashed owner_ref',
(err, projects) => {
if (err != null) {
return next(err)
@@ -1072,7 +1072,6 @@ const ProjectController = {
// If a project is simultaneously trashed and archived, we will consider it archived but not trashed.
const trashed = ProjectHelper.isTrashed(project, userId) && !archived
- TokenAccessHandler.protectTokens(project, accessLevel)
const model = {
id: project._id,
name: project.name,
--- services/web/app/src/Features/Project/ProjectDetailsHandler.js
+++ services/web/app/src/Features/Project/ProjectDetailsHandler.js
@@ -207,14 +207,13 @@ async function ensureTokensArePresent(projectId) {
project.tokens.readOnly != null &&
project.tokens.readAndWrite != null
) {
- return project.tokens
+ return
}
await _generateTokens(project)
await Project.updateOne(
{ _id: projectId },
{ $set: { tokens: project.tokens } }
).exec()
- return project.tokens
}
async function clearTokens(projectId) {
--- services/web/app/src/Features/Project/ProjectEditorHandler.js
+++ services/web/app/src/Features/Project/ProjectEditorHandler.js
@@ -49,7 +49,6 @@ module.exports = ProjectEditorHandler = {
),
members: [],
invites,
- tokens: project.tokens,
imageName:
project.imageName != null
? Path.basename(project.imageName)
--- services/web/app/src/Features/TokenAccess/TokenAccessHandler.js
+++ services/web/app/src/Features/TokenAccess/TokenAccessHandler.js
@@ -246,22 +246,6 @@ const TokenAccessHandler = {
})
},
- protectTokens(project, privilegeLevel) {
- if (!project || !project.tokens) {
- return
- }
- if (privilegeLevel === PrivilegeLevels.OWNER) {
- return
- }
- if (privilegeLevel !== PrivilegeLevels.READ_AND_WRITE) {
- project.tokens.readAndWrite = ''
- project.tokens.readAndWritePrefix = ''
- }
- if (privilegeLevel !== PrivilegeLevels.READ_ONLY) {
- project.tokens.readOnly = ''
- }
- },
-
getV1DocPublishedInfo(token, callback) {
// default to allowing access
if (!Settings.apis.v1 || !Settings.apis.v1.url) {
@@ -304,7 +288,6 @@ TokenAccessHandler.promises = promisifyAll(TokenAccessHandler, {
'_projectFindOne',
'grantSessionTokenAccess',
'getRequestToken',
- 'protectTokens',
],
multiResult: {
validateTokenForAnonymousAccess: ['isValidReadAndWrite', 'isValidReadOnly'],
--- services/web/frontend/js/features/share-project-modal/components/link-sharing.js
+++ services/web/frontend/js/features/share-project-modal/components/link-sharing.js
@@ -1,4 +1,4 @@
-import { useCallback, useState } from 'react'
+import { useCallback, useState, useEffect } from 'react'
import PropTypes from 'prop-types'
import { Button, Col, Row } from 'react-bootstrap'
import { Trans } from 'react-i18next'
@@ -10,6 +10,8 @@ import CopyLink from '../../../shared/components/copy-link'
import { useProjectContext } from '../../../shared/context/project-context'
import * as eventTracking from '../../../infrastructure/event-tracking'
import { useUserContext } from '../../../shared/context/user-context'
+import { getJSON } from '../../../infrastructure/fetch-json'
+import useAbortController from '../../../shared/hooks/use-abort-controller'
export default function LinkSharing({ canAddCollaborators }) {
const [inflight, setInflight] = useState(false)
@@ -27,8 +29,7 @@ export default function LinkSharing({ canAddCollaborators }) {
)
.then(() => {
// NOTE: not calling `updateProject` here as it receives data via
- // project:publicAccessLevel:changed and project:tokens:changed
- // over the websocket connection
+ // project:publicAccessLevel:changed over the websocket connection
// TODO: eventTracking.sendMB('project-make-token-based') when newPublicAccessLevel is 'tokenBased'
})
.finally(() => {
@@ -106,7 +107,17 @@ PrivateSharing.propTypes = {
}
function TokenBasedSharing({ setAccessLevel, inflight, canAddCollaborators }) {
- const { tokens } = useProjectContext()
+ const { _id: projectId } = useProjectContext()
+
+ const [tokens, setTokens] = useState(null)
+
+ const { signal } = useAbortController()
+
+ useEffect(() => {
+ getJSON(`/project/${projectId}/tokens`, { signal })
+ .then(data => setTokens(data))
+ .catch(error => console.error(error))
+ }, [projectId, signal])
return (
<Row className="public-access-level">
@@ -194,7 +205,17 @@ LegacySharing.propTypes = {
}
export function ReadOnlyTokenLink() {
- const { tokens } = useProjectContext()
+ const { _id: projectId } = useProjectContext()
+
+ const [tokens, setTokens] = useState(null)
+
+ const { signal } = useAbortController()
+
+ useEffect(() => {
+ getJSON(`/project/${projectId}/tokens`, { signal })
+ .then(data => setTokens(data))
+ .catch(error => console.error(error))
+ }, [projectId, signal])
return (
<Row className="public-access-level">
--- services/web/frontend/js/features/share-project-modal/controllers/react-share-project-modal-controller.js
+++ services/web/frontend/js/features/share-project-modal/controllers/react-share-project-modal-controller.js
@@ -31,16 +31,6 @@ export default App.controller(
})
}
- /* tokens */
-
- ide.socket.on('project:tokens:changed', data => {
- if (data.tokens != null) {
- $scope.$applyAsync(() => {
- $scope.project.tokens = data.tokens
- })
- }
- })
-
ide.socket.on('project:membership:changed', data => {
if (data.members) {
listProjectMembers($scope.project._id)
--- services/web/frontend/js/shared/context/mock/mock-ide.js
+++ services/web/frontend/js/shared/context/mock/mock-ide.js
@@ -27,10 +27,6 @@ export const getMockIde = () => {
zotero: false,
},
publicAccessLevel: '',
- tokens: {
- readOnly: '',
- readAndWrite: '',
- },
owner: {
_id: '',
email: '',
--- services/web/frontend/js/shared/context/project-context.js
+++ services/web/frontend/js/shared/context/project-context.js
@@ -28,10 +28,6 @@ export const projectShape = {
versioning: PropTypes.bool,
}),
publicAccessLevel: PropTypes.string,
- tokens: PropTypes.shape({
- readOnly: PropTypes.string,
- readAndWrite: PropTypes.string,
- }),
owner: PropTypes.shape({
_id: PropTypes.string.isRequired,
email: PropTypes.string.isRequired,
@@ -81,7 +77,6 @@ export function ProjectProvider({ children }) {
invites,
features,
publicAccesLevel: publicAccessLevel,
- tokens,
owner,
} = project || projectFallback
@@ -94,7 +89,6 @@ export function ProjectProvider({ children }) {
invites,
features,
publicAccessLevel,
- tokens,
owner,
}
}, [
@@ -105,7 +99,6 @@ export function ProjectProvider({ children }) {
invites,
features,
publicAccessLevel,
- tokens,
owner,
])
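
The getPublicShareTokens query above leans on MongoDB's support for aggregation expressions inside find() projections (MongoDB 4.4+), computing isOwner and hasTokenReadOnlyAccess server-side in a single round trip. Stripped to its core (sketch with the raw driver; projectId and userId are assumed ObjectId values):

db.collection('projects').findOne(
  { _id: projectId },
  {
    projection: {
      isOwner: { $eq: ['$owner_ref', userId] }, // evaluated per document
      tokens: 1,
    },
  }
)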

View File

@@ -0,0 +1,9 @@
FROM sharelatex/sharelatex:4.0.0
# Patch: Block access to metrics endpoint
COPY pr_13229_ce_only.patch .
RUN patch -p0 --directory=/ < pr_13229_ce_only.patch
# Patch: Remove documentation link from editor outline
COPY outline_doc_icon.patch .
RUN patch -p0 < outline_doc_icon.patch

View File

@@ -0,0 +1,9 @@
--- services/web/app/views/project/editor/file-tree-react.pug
+++ services/web/app/views/project/editor/file-tree-react.pug
@@ -38,5 +38,3 @@ aside.editor-sidebar.full-size
highlighted-line="highlightedLine"
show="show"
)
-
- documentation-button
\ No newline at end of file

View File

@@ -0,0 +1,14 @@
--- etc/nginx/sites-enabled/sharelatex.conf
+++ etc/nginx/sites-enabled/sharelatex.conf
@@ -4,6 +4,11 @@ server {
root /overleaf/services/web/public/;
+ # block external access to prometheus /metrics
+ location /metrics {
+ internal;
+ }
+
location / {
proxy_pass http://127.0.0.1:3000;
proxy_http_version 1.1;

View File

@@ -0,0 +1,7 @@
# 4.0.1 was tagged as 4.0.2 in dockerhub to keep parity with Server Pro
FROM sharelatex/sharelatex:4.0.1
# Patch: fixes anonymous edits breaking history
COPY pr_13574.patch .
RUN patch -p0 < pr_13574.patch

View File

@@ -0,0 +1,22 @@
--- services/project-history/app/js/UpdateTranslator.js
+++ services/project-history/app/js/UpdateTranslator.js
@@ -73,9 +73,18 @@ function _convertToChange(projectId, updateWithBlob) {
throw error
}
+ let v2Authors
+ if (update.meta.user_id === 'anonymous-user') {
+ // history-v1 uses null to represent an anonymous author
+ v2Authors = [null]
+ } else {
+ // user_id is missing on resync operations that update the contents of a doc
+ v2Authors = _.compact([update.meta.user_id])
+ }
+
const rawChange = {
operations,
- v2Authors: _.compact([update.meta.user_id]),
+ v2Authors,
timestamp: new Date(update.meta.ts).toISOString(),
projectVersion,
v2DocVersions: Object.keys(v2DocVersions).length ? v2DocVersions : null,

View File

@@ -0,0 +1,10 @@
FROM sharelatex/sharelatex:4.0.3
# Node update
RUN curl -sSL https://deb.nodesource.com/setup_16.x | bash - \
&& apt-get install -y nodejs
# Patch: fetch access tokens via endpoint
COPY pr_13485.patch .
RUN patch -p0 < pr_13485.patch
RUN node genScript compile | bash

View File

@@ -0,0 +1,389 @@
--- services/web/app/src/Features/Collaborators/CollaboratorsController.js
+++ services/web/app/src/Features/Collaborators/CollaboratorsController.js
@@ -11,6 +11,7 @@ const Errors = require('../Errors/Errors')
const logger = require('@overleaf/logger')
const { expressify } = require('../../util/promises')
const { hasAdminAccess } = require('../Helpers/AdminAuthorizationHelper')
+const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler')
module.exports = {
removeUserFromProject: expressify(removeUserFromProject),
@@ -18,6 +19,7 @@ module.exports = {
getAllMembers: expressify(getAllMembers),
setCollaboratorInfo: expressify(setCollaboratorInfo),
transferOwnership: expressify(transferOwnership),
+ getShareTokens: expressify(getShareTokens),
}
async function removeUserFromProject(req, res, next) {
@@ -114,3 +116,37 @@ async function _removeUserIdFromProject(projectId, userId) {
)
await TagsHandler.promises.removeProjectFromAllTags(userId, projectId)
}
+
+async function getShareTokens(req, res) {
+ const projectId = req.params.Project_id
+ const userId = SessionManager.getLoggedInUserId(req.session)
+
+ let tokens
+ if (userId) {
+ tokens = await CollaboratorsGetter.promises.getPublicShareTokens(
+ ObjectId(userId),
+ ObjectId(projectId)
+ )
+ } else {
+ // anonymous access, the token is already available in the session
+ const readOnly = TokenAccessHandler.getRequestToken(req, projectId)
+ tokens = { readOnly }
+ }
+ if (!tokens) {
+ return res.sendStatus(403)
+ }
+
+ if (tokens.readOnly || tokens.readAndWrite) {
+ logger.info(
+ {
+ projectId,
+ userId: userId || 'anonymous',
+ ip: req.ip,
+ tokens: Object.keys(tokens),
+ },
+ 'project tokens accessed'
+ )
+ }
+
+ res.json(tokens)
+}
--- services/web/app/src/Features/Collaborators/CollaboratorsGetter.js
+++ services/web/app/src/Features/Collaborators/CollaboratorsGetter.js
@@ -25,6 +25,7 @@ module.exports = {
getInvitedCollaboratorCount: callbackify(getInvitedCollaboratorCount),
getProjectsUserIsMemberOf: callbackify(getProjectsUserIsMemberOf),
isUserInvitedMemberOfProject: callbackify(isUserInvitedMemberOfProject),
+ getPublicShareTokens: callbackify(getPublicShareTokens),
userIsTokenMember: callbackify(userIsTokenMember),
getAllInvitedMembers: callbackify(getAllInvitedMembers),
promises: {
@@ -37,6 +38,7 @@ module.exports = {
getInvitedCollaboratorCount,
getProjectsUserIsMemberOf,
isUserInvitedMemberOfProject,
+ getPublicShareTokens,
userIsTokenMember,
getAllInvitedMembers,
},
@@ -133,6 +135,40 @@ async function isUserInvitedMemberOfProject(userId, projectId) {
return false
}
+async function getPublicShareTokens(userId, projectId) {
+ const memberInfo = await Project.findOne(
+ {
+ _id: projectId,
+ },
+ {
+ isOwner: { $eq: ['$owner_ref', userId] },
+ hasTokenReadOnlyAccess: {
+ $and: [
+ { $in: [userId, '$tokenAccessReadOnly_refs'] },
+ { $eq: ['$publicAccesLevel', PublicAccessLevels.TOKEN_BASED] },
+ ],
+ },
+ tokens: 1,
+ }
+ )
+ .lean()
+ .exec()
+
+ if (!memberInfo) {
+ return null
+ }
+
+ if (memberInfo.isOwner) {
+ return memberInfo.tokens
+ } else if (memberInfo.hasTokenReadOnlyAccess) {
+ return {
+ readOnly: memberInfo.tokens.readOnly,
+ }
+ } else {
+ return {}
+ }
+}
+
async function getProjectsUserIsMemberOf(userId, fields) {
const limit = pLimit(2)
const [readAndWrite, readOnly, tokenReadAndWrite, tokenReadOnly] =
--- services/web/app/src/Features/Collaborators/CollaboratorsRouter.js
+++ services/web/app/src/Features/Collaborators/CollaboratorsRouter.js
@@ -22,6 +22,10 @@ const rateLimiters = {
points: 200,
duration: 60 * 10,
}),
+ getProjectTokens: new RateLimiter('get-project-tokens', {
+ points: 200,
+ duration: 60 * 10,
+ }),
}
module.exports = {
@@ -139,5 +143,12 @@ module.exports = {
CollaboratorsInviteController.acceptInvite,
AnalyticsRegistrationSourceMiddleware.clearSource()
)
+
+ webRouter.get(
+ '/project/:Project_id/tokens',
+ RateLimiterMiddleware.rateLimit(rateLimiters.getProjectTokens),
+ AuthorizationMiddleware.ensureUserCanReadProject,
+ CollaboratorsController.getShareTokens
+ )
},
}
--- services/web/app/src/Features/Editor/EditorController.js
+++ services/web/app/src/Features/Editor/EditorController.js
@@ -581,20 +581,7 @@ const EditorController = {
{ newAccessLevel }
)
if (newAccessLevel === PublicAccessLevels.TOKEN_BASED) {
- ProjectDetailsHandler.ensureTokensArePresent(
- projectId,
- function (err, tokens) {
- if (err) {
- return callback(err)
- }
- EditorRealTimeController.emitToRoom(
- projectId,
- 'project:tokens:changed',
- { tokens }
- )
- callback()
- }
- )
+ ProjectDetailsHandler.ensureTokensArePresent(projectId, callback)
} else {
callback()
}
--- services/web/app/src/Features/Editor/EditorHttpController.js
+++ services/web/app/src/Features/Editor/EditorHttpController.js
@@ -67,8 +67,6 @@ async function joinProject(req, res, next) {
if (!project) {
return res.sendStatus(403)
}
- // Hide access tokens if this is not the project owner
- TokenAccessHandler.protectTokens(project, privilegeLevel)
// Hide sensitive data if the user is restricted
if (isRestrictedUser) {
project.owner = { _id: project.owner._id }
--- services/web/app/src/Features/Project/ProjectController.js
+++ services/web/app/src/Features/Project/ProjectController.js
@@ -343,7 +343,7 @@ const ProjectController = {
const userId = SessionManager.getLoggedInUserId(req.session)
ProjectGetter.findAllUsersProjects(
userId,
- 'name lastUpdated publicAccesLevel archived trashed owner_ref tokens',
+ 'name lastUpdated publicAccesLevel archived trashed owner_ref',
(err, projects) => {
if (err != null) {
return next(err)
@@ -1072,7 +1072,6 @@ const ProjectController = {
// If a project is simultaneously trashed and archived, we will consider it archived but not trashed.
const trashed = ProjectHelper.isTrashed(project, userId) && !archived
- TokenAccessHandler.protectTokens(project, accessLevel)
const model = {
id: project._id,
name: project.name,
--- services/web/app/src/Features/Project/ProjectDetailsHandler.js
+++ services/web/app/src/Features/Project/ProjectDetailsHandler.js
@@ -207,14 +207,13 @@ async function ensureTokensArePresent(projectId) {
project.tokens.readOnly != null &&
project.tokens.readAndWrite != null
) {
- return project.tokens
+ return
}
await _generateTokens(project)
await Project.updateOne(
{ _id: projectId },
{ $set: { tokens: project.tokens } }
).exec()
- return project.tokens
}
async function clearTokens(projectId) {
--- services/web/app/src/Features/Project/ProjectEditorHandler.js
+++ services/web/app/src/Features/Project/ProjectEditorHandler.js
@@ -49,7 +49,6 @@ module.exports = ProjectEditorHandler = {
),
members: [],
invites,
- tokens: project.tokens,
imageName:
project.imageName != null
? Path.basename(project.imageName)
--- services/web/app/src/Features/TokenAccess/TokenAccessHandler.js
+++ services/web/app/src/Features/TokenAccess/TokenAccessHandler.js
@@ -246,22 +246,6 @@ const TokenAccessHandler = {
})
},
- protectTokens(project, privilegeLevel) {
- if (!project || !project.tokens) {
- return
- }
- if (privilegeLevel === PrivilegeLevels.OWNER) {
- return
- }
- if (privilegeLevel !== PrivilegeLevels.READ_AND_WRITE) {
- project.tokens.readAndWrite = ''
- project.tokens.readAndWritePrefix = ''
- }
- if (privilegeLevel !== PrivilegeLevels.READ_ONLY) {
- project.tokens.readOnly = ''
- }
- },
-
getV1DocPublishedInfo(token, callback) {
// default to allowing access
if (!Settings.apis.v1 || !Settings.apis.v1.url) {
@@ -304,7 +288,6 @@ TokenAccessHandler.promises = promisifyAll(TokenAccessHandler, {
'_projectFindOne',
'grantSessionTokenAccess',
'getRequestToken',
- 'protectTokens',
],
multiResult: {
validateTokenForAnonymousAccess: ['isValidReadAndWrite', 'isValidReadOnly'],
--- services/web/frontend/js/features/share-project-modal/components/link-sharing.js
+++ services/web/frontend/js/features/share-project-modal/components/link-sharing.js
@@ -1,4 +1,4 @@
-import { useCallback, useState } from 'react'
+import { useCallback, useState, useEffect } from 'react'
import PropTypes from 'prop-types'
import { Button, Col, Row } from 'react-bootstrap'
import { Trans } from 'react-i18next'
@@ -10,6 +10,8 @@ import CopyLink from '../../../shared/components/copy-link'
import { useProjectContext } from '../../../shared/context/project-context'
import * as eventTracking from '../../../infrastructure/event-tracking'
import { useUserContext } from '../../../shared/context/user-context'
+import { getJSON } from '../../../infrastructure/fetch-json'
+import useAbortController from '../../../shared/hooks/use-abort-controller'
export default function LinkSharing({ canAddCollaborators }) {
const [inflight, setInflight] = useState(false)
@@ -27,8 +29,7 @@ export default function LinkSharing({ canAddCollaborators }) {
)
.then(() => {
// NOTE: not calling `updateProject` here as it receives data via
- // project:publicAccessLevel:changed and project:tokens:changed
- // over the websocket connection
+ // project:publicAccessLevel:changed over the websocket connection
// TODO: eventTracking.sendMB('project-make-token-based') when newPublicAccessLevel is 'tokenBased'
})
.finally(() => {
@@ -106,7 +107,17 @@ PrivateSharing.propTypes = {
}
function TokenBasedSharing({ setAccessLevel, inflight, canAddCollaborators }) {
- const { tokens } = useProjectContext()
+ const { _id: projectId } = useProjectContext()
+
+ const [tokens, setTokens] = useState(null)
+
+ const { signal } = useAbortController()
+
+ useEffect(() => {
+ getJSON(`/project/${projectId}/tokens`, { signal })
+ .then(data => setTokens(data))
+ .catch(error => console.error(error))
+ }, [projectId, signal])
return (
<Row className="public-access-level">
@@ -194,7 +205,17 @@ LegacySharing.propTypes = {
}
export function ReadOnlyTokenLink() {
- const { tokens } = useProjectContext()
+ const { _id: projectId } = useProjectContext()
+
+ const [tokens, setTokens] = useState(null)
+
+ const { signal } = useAbortController()
+
+ useEffect(() => {
+ getJSON(`/project/${projectId}/tokens`, { signal })
+ .then(data => setTokens(data))
+ .catch(error => console.error(error))
+ }, [projectId, signal])
return (
<Row className="public-access-level">
--- services/web/frontend/js/features/share-project-modal/controllers/react-share-project-modal-controller.js
+++ services/web/frontend/js/features/share-project-modal/controllers/react-share-project-modal-controller.js
@@ -31,16 +31,6 @@ export default App.controller(
})
}
- /* tokens */
-
- ide.socket.on('project:tokens:changed', data => {
- if (data.tokens != null) {
- $scope.$applyAsync(() => {
- $scope.project.tokens = data.tokens
- })
- }
- })
-
ide.socket.on('project:membership:changed', data => {
if (data.members) {
listProjectMembers($scope.project._id)
--- services/web/frontend/js/shared/context/mock/mock-ide.js
+++ services/web/frontend/js/shared/context/mock/mock-ide.js
@@ -27,10 +27,6 @@ export const getMockIde = () => {
zotero: false,
},
publicAccessLevel: '',
- tokens: {
- readOnly: '',
- readAndWrite: '',
- },
owner: {
_id: '',
email: '',
--- services/web/frontend/js/shared/context/project-context.js
+++ services/web/frontend/js/shared/context/project-context.js
@@ -28,10 +28,6 @@ export const projectShape = {
versioning: PropTypes.bool,
}),
publicAccessLevel: PropTypes.string,
- tokens: PropTypes.shape({
- readOnly: PropTypes.string,
- readAndWrite: PropTypes.string,
- }),
owner: PropTypes.shape({
_id: PropTypes.string.isRequired,
email: PropTypes.string.isRequired,
@@ -81,7 +77,6 @@ export function ProjectProvider({ children }) {
invites,
features,
publicAccesLevel: publicAccessLevel,
- tokens,
owner,
} = project || projectFallback
@@ -94,7 +89,6 @@ export function ProjectProvider({ children }) {
invites,
features,
publicAccessLevel,
- tokens,
owner,
}
}, [
@@ -105,7 +99,6 @@ export function ProjectProvider({ children }) {
invites,
features,
publicAccessLevel,
- tokens,
owner,
])

View File

@@ -0,0 +1,13 @@
FROM sharelatex/sharelatex:4.0.4
# Patch: clear invites and invite tokens through the websocket
COPY pr_13427.patch .
RUN patch -p0 < pr_13427.patch
# Patch: https://github.com/Automattic/mongoose/commit/f1efabf350522257364aa5c2cb36e441cf08f1a2
COPY mongoose_proto.patch .
RUN patch -p0 < mongoose_proto.patch
# Patch: Allow digits in PDF filenames
COPY pr_13122.patch .
RUN patch -p0 < pr_13122.patch

Some files were not shown because too many files have changed in this diff.