first commit

5 services/web/.eastrc Normal file
@@ -0,0 +1,5 @@
{
  "adapter": "./migrations/lib/adapter.mjs",
  "migrationNumberFormat": "dateTime",
  "migrationExtension": "mjs"
}
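
The adapter path and dateTime numbering above mean migrations live as timestamped .mjs modules under migrations/. A minimal sketch of such a file, assuming the conventional east shape of paired forward/backward steps (the export names, tags convention, and client argument are assumptions, not part of this commit):

// migrations/20240101120000_example.mjs (hypothetical name in the dateTime format)
export const tags = ['saas'] // assumed tagging convention

export const migrate = async client => {
  // apply the forward schema change here
}

export const rollback = async client => {
  // undo the change here
}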

12 services/web/.eslintignore Normal file
@@ -0,0 +1,12 @@
# NOTE: changing paths may require updating them in the Makefile too.
data/
scripts/translations/.cache/
node_modules
frontend/js/vendor
modules/**/frontend/js/vendor
/public/
frontend/js/features/source-editor/lezer-latex/latex.mjs
frontend/js/features/source-editor/lezer-latex/latex.terms.mjs
frontend/js/features/source-editor/lezer-bibtex/bibtex.mjs
frontend/js/features/source-editor/lezer-bibtex/bibtex.terms.mjs
frontend/js/features/source-editor/hunspell/wasm/hunspell.mjs

495 services/web/.eslintrc.js Normal file
@@ -0,0 +1,495 @@
module.exports = {
  root: true,
  parser: '@typescript-eslint/parser',
  extends: [
    'eslint:recommended',
    'plugin:@typescript-eslint/recommended',
    'standard',
    'prettier',
  ],
  plugins: ['@overleaf'],
  env: {
    es2020: true,
  },
  settings: {
    // Tell eslint-plugin-react to detect which version of React we are using
    react: {
      version: 'detect',
    },
  },
  rules: {
    'no-constant-binary-expression': 'error',

    // do not allow importing of implicit dependencies.
    'import/no-extraneous-dependencies': 'error',

    '@overleaf/prefer-kebab-url': 'error',

    // disable some TypeScript rules
    '@typescript-eslint/no-var-requires': 'off',
    '@typescript-eslint/no-unused-vars': 'off',
    '@typescript-eslint/no-empty-function': 'off',
    '@typescript-eslint/no-explicit-any': 'off',
    '@typescript-eslint/no-this-alias': 'off',
    '@typescript-eslint/no-non-null-assertion': 'off',
    '@typescript-eslint/ban-ts-comment': 'off',

    'no-use-before-define': 'off',
    '@typescript-eslint/no-use-before-define': [
      'error',
      { functions: false, classes: false, variables: false },
    ],
    'react-hooks/exhaustive-deps': [
      'warn',
      {
        additionalHooks: '(useCommandProvider)',
      },
    ],
  },
  overrides: [
    // NOTE: changing paths may require updating them in the Makefile too.
    {
      // Node
      files: [
        '**/app/src/**/*.{js,mjs}',
        'app.{js,mjs}',
        'i18next-scanner.config.js',
        'scripts/**/*.{js,mjs}',
        'webpack.config*.js',
      ],
      env: {
        node: true,
      },
    },
    {
      // Test specific rules
      files: ['**/test/**/*.*'],
      plugins: ['mocha', 'chai-expect', 'chai-friendly'],
      env: {
        mocha: true,
      },
      rules: {
        // mocha-specific rules
        'mocha/handle-done-callback': 'error',
        'mocha/no-exclusive-tests': 'error',
        'mocha/no-global-tests': 'error',
        'mocha/no-identical-title': 'error',
        'mocha/no-nested-tests': 'error',
        'mocha/no-pending-tests': 'error',
        'mocha/no-skipped-tests': 'error',
        'mocha/no-mocha-arrows': 'error',

        // Swap the no-unused-expressions rule with a more chai-friendly one
        'no-unused-expressions': 'off',
        'chai-friendly/no-unused-expressions': 'error',

        // chai-specific rules
        'chai-expect/missing-assertion': 'error',
        'chai-expect/terminating-properties': 'error',

        // prefer-arrow-callback applies to all callbacks, not just ones in mocha tests.
        // we don't enforce this at the top-level - just in tests to manage `this` scope
        // based on mocha's context mechanism
        'mocha/prefer-arrow-callback': 'error',

        '@typescript-eslint/no-unused-expressions': 'off',
      },
    },
    {
      // ES specific rules
      files: [
        '**/app/src/**/*.mjs',
        'modules/*/index.mjs',
        'app.mjs',
        'scripts/**/*.mjs',
        'migrations/**/*.mjs',
      ],
      excludedFiles: [
        // migration template file
        'migrations/lib/template.mjs',
      ],
      parserOptions: {
        sourceType: 'module',
      },
      plugins: ['unicorn'],
      rules: {
        'import/no-unresolved': [
          'error',
          {
            // eslint-plugin-import does not support exports directive in package.json
            // https://github.com/import-js/eslint-plugin-import/issues/1810
            ignore: ['^p-queue$'],
          },
        ],
        'import/extensions': [
          'error',
          'ignorePackages',
          {
            js: 'always',
            mjs: 'always',
          },
        ],
        'unicorn/prefer-module': 'error',
        'unicorn/prefer-node-protocol': 'error',
      },
    },
    {
      // Backend specific rules
      files: ['**/app/src/**/*.{js,mjs}', 'app.{js,mjs}'],
      parserOptions: {
        tsconfigRootDir: __dirname,
        project: './tsconfig.backend.json',
      },
      rules: {
        // do not allow importing of implicit dependencies.
        'import/no-extraneous-dependencies': [
          'error',
          {
            // do not allow importing of devDependencies.
            devDependencies: false,
          },
        ],
        'no-restricted-syntax': [
          'error',
          // do not allow node-fetch in backend code
          {
            selector:
              "CallExpression[callee.name='require'] > .arguments[value='node-fetch']",
            message:
              'Requiring node-fetch is not allowed in production services, please use fetch-utils.',
          },
          // mongoose populate must set fields to populate
          {
            selector:
              "CallExpression[callee.property.name='populate'][arguments.length<2]",
            message:
              "Populate without a second argument returns the whole document. Use populate('field',['prop1','prop2']) instead",
          },
          // Require `new` when constructing ObjectId (For mongo + mongoose upgrade)
          {
            selector:
              "CallExpression[callee.name='ObjectId'], CallExpression[callee.property.name='ObjectId']",
            message:
              'Construct ObjectId with `new ObjectId()` instead of `ObjectId()`',
          },
          // Require `new` when mapping a list of ids to a list of ObjectId (For mongo + mongoose upgrade)
          {
            selector:
              "CallExpression[callee.property.name='map'] Identifier[name='ObjectId']:first-child, CallExpression[callee.property.name='map'] MemberExpression[property.name='ObjectId']:first-child",
            message:
              "Don't map ObjectId directly. Use `id => new ObjectId(id)` instead",
          },
          // Catch incorrect usage of `await db.collection.find()`
          {
            selector:
              "AwaitExpression > CallExpression > MemberExpression[property.name='find'][object.object.name='db']",
            message:
              'Mongo find returns a cursor not a promise, use `for await (const result of cursor)` or `.toArray()` instead.',
          },
        ],
        '@typescript-eslint/no-floating-promises': [
          'error',
          { checkThenables: true },
        ],
      },
    },
    {
      // Backend scripts specific rules
      files: ['**/scripts/**/*.js'],
      rules: {
        'no-restricted-syntax': [
          'error',
          // Require `new` when constructing ObjectId (For mongo + mongoose upgrade)
          {
            selector:
              "CallExpression[callee.name='ObjectId'], CallExpression[callee.property.name='ObjectId']",
            message:
              'Construct ObjectId with `new ObjectId()` instead of `ObjectId()`',
          },
          // Require `new` when mapping a list of ids to a list of ObjectId (For mongo + mongoose upgrade)
          {
            selector:
              "CallExpression[callee.property.name='map'] Identifier[name='ObjectId']:first-child, CallExpression[callee.property.name='map'] MemberExpression[property.name='ObjectId']:first-child",
            message:
              "Don't map ObjectId directly. Use `id => new ObjectId(id)` instead",
          },
          // Catch incorrect usage of `await db.collection.find()`
          {
            selector:
              "AwaitExpression > CallExpression > MemberExpression[property.name='find'][object.object.name='db']",
            message:
              'Mongo find returns a cursor not a promise, use `for await (const result of cursor)` or `.toArray()` instead.',
          },
        ],
      },
    },
    {
      // Cypress specific rules
      files: [
        'cypress/**/*.{js,jsx,ts,tsx}',
        '**/test/frontend/**/*.spec.{js,jsx,ts,tsx}',
      ],
      extends: ['plugin:cypress/recommended'],
    },
    {
      // Frontend test specific rules
      files: ['**/frontend/**/*.test.{js,jsx,ts,tsx}'],
      plugins: ['testing-library'],
      extends: ['plugin:testing-library/react'],
      rules: {
        'testing-library/no-await-sync-events': 'off',
        'testing-library/no-await-sync-queries': 'off',
        'testing-library/no-container': 'off',
        'testing-library/no-node-access': 'off',
        'testing-library/no-render-in-lifecycle': 'off',
        'testing-library/no-wait-for-multiple-assertions': 'off',
        'testing-library/no-wait-for-side-effects': 'off',
        'testing-library/prefer-query-by-disappearance': 'off',
        'testing-library/prefer-screen-queries': 'off',
        'testing-library/render-result-naming-convention': 'off',
      },
    },
    {
      // Frontend specific rules
      files: [
        '**/frontend/js/**/*.{js,jsx,ts,tsx}',
        '**/frontend/stories/**/*.{js,jsx,ts,tsx}',
        '**/*.stories.{js,jsx,ts,tsx}',
        '**/test/frontend/**/*.{js,jsx,ts,tsx}',
        '**/test/frontend/components/**/*.spec.{js,jsx,ts,tsx}',
      ],
      env: {
        browser: true,
      },
      parserOptions: {
        sourceType: 'module',
      },
      plugins: ['jsx-a11y'],
      extends: [
        'plugin:react/recommended',
        'plugin:react-hooks/recommended',
        'plugin:jsx-a11y/recommended',
        'standard-jsx',
        'prettier',
      ],
      globals: {
        __webpack_public_path__: true,
        $: true,
        ga: true,
      },
      rules: {
        // TODO: remove once https://github.com/standard/eslint-config-standard-react/issues/68 (support eslint@8) is fixed.
        // START: inline standard-react rules
        // "react/jsx-no-bind": ["error", {
        //   "allowArrowFunctions": true,
        //   "allowBind": false,
        //   "ignoreRefs": true
        // },],
        'react/no-did-update-set-state': 'error',
        'react/no-unused-prop-types': 'error',
        'react/prop-types': 'error',
        // "react/react-in-jsx-scope": "error",
        // END: inline standard-react rules

        'react/no-unknown-property': [
          'error',
          {
            ignore: ['dnd-container', 'dropdown-toggle'],
          },
        ],

        'react/jsx-no-target-blank': [
          'error',
          {
            allowReferrer: true,
          },
        ],
        // Prevent usage of legacy string refs
        'react/no-string-refs': 'error',

        // Prevent curly braces around strings (as they're unnecessary)
        'react/jsx-curly-brace-presence': [
          'error',
          {
            props: 'never',
            children: 'never',
          },
        ],

        // Don't import React for JSX; the JSX runtime is added by a Babel plugin
        'react/react-in-jsx-scope': 'off',
        'react/jsx-uses-react': 'off',

        // Allow functions as JSX props
        'react/jsx-no-bind': 'off', // TODO: fix occurrences and re-enable this

        // Fix conflict between prettier & standard by overriding to prefer
        // double quotes
        'jsx-quotes': ['error', 'prefer-double'],

        // Override weird behaviour of jsx-a11y label-has-for (says labels must be
        // nested *and* have for/id attributes)
        'jsx-a11y/label-has-for': [
          'error',
          {
            required: {
              some: ['nesting', 'id'],
            },
          },
        ],

        // Require .jsx or .tsx file extension when using JSX
        'react/jsx-filename-extension': [
          'error',
          {
            extensions: ['.jsx', '.tsx'],
          },
        ],
        'no-restricted-syntax': [
          'error',
          // prohibit direct calls to methods of window.localStorage
          {
            selector:
              "CallExpression[callee.object.object.name='window'][callee.object.property.name='localStorage']",
            message:
              'Modify location via customLocalStorage instead of calling window.localStorage methods directly',
          },
        ],
      },
    },
    {
      // Sorting for Meta
      files: ['frontend/js/utils/meta.ts'],
      rules: {
        '@typescript-eslint/member-ordering': [
          'error',
          { interfaces: { order: 'alphabetically' } },
        ],
      },
    },
    {
      // React component specific rules
      //
      files: [
        '**/frontend/js/**/components/**/*.{js,jsx,ts,tsx}',
        '**/frontend/js/**/hooks/**/*.{js,jsx,ts,tsx}',
      ],
      rules: {
        '@overleaf/no-unnecessary-trans': 'error',
        '@overleaf/should-unescape-trans': 'error',

        // https://astexplorer.net/
        'no-restricted-syntax': [
          'error',
          // prohibit direct calls to methods of window.location
          {
            selector:
              "CallExpression[callee.object.object.name='window'][callee.object.property.name='location']",
            message:
              'Modify location via useLocation instead of calling window.location methods directly',
          },
          // prohibit assignment to window.location
          {
            selector:
              "AssignmentExpression[left.object.name='window'][left.property.name='location']",
            message:
              'Modify location via useLocation instead of calling window.location methods directly',
          },
          // prohibit assignment to window.location.href
          {
            selector:
              "AssignmentExpression[left.object.object.name='window'][left.object.property.name='location'][left.property.name='href']",
            message:
              'Modify location via useLocation instead of calling window.location methods directly',
          },
          // prohibit using lookbehinds due to incidents with Safari simply crashing when the script is parsed
          {
            selector: 'Literal[regex.pattern=/\\(\\?<[!=]/]',
            message: 'Lookbehind is not supported in older Safari versions.',
          },
          // prohibit direct calls to methods of window.localStorage
          // NOTE: this rule is also defined for all frontend files, but those rules are overridden by the React component-specific config
          {
            selector:
              "CallExpression[callee.object.object.name='window'][callee.object.property.name='localStorage']",
            message:
              'Modify location via customLocalStorage instead of calling window.localStorage methods directly',
          },
        ],
      },
    },
    // React + TypeScript-specific rules
    {
      files: ['**/*.tsx'],
      rules: {
        'react/prop-types': 'off',
        'no-undef': 'off',
      },
    },
    // TypeScript-specific rules
    {
      files: ['**/*.ts'],
      rules: {
        'no-undef': 'off',
      },
    },
    // JavaScript-specific rules
    {
      files: ['**/*.js'],
      rules: {
        '@typescript-eslint/no-require-imports': 'off',
      },
    },
    {
      files: ['scripts/ukamf/*.js'],
      rules: {
        // Do not allow importing of any dependencies unless specified in either
        // - web/package.json
        // - web/scripts/ukamf/package.json
        'import/no-extraneous-dependencies': [
          'error',
          { packageDir: ['.', 'scripts/ukamf'] },
        ],
      },
    },
    {
      files: ['scripts/learn/checkSanitize/*.js'],
      rules: {
        // The checkSanitize script is used in the dev-env only.
        'import/no-extraneous-dependencies': [
          'error',
          {
            devDependencies: true,
            packageDir: ['.', '../../'],
          },
        ],
      },
    },
    {
      files: [
        // Backend: Use @overleaf/logger
        // Docs: https://manual.dev-overleaf.com/development/code/logging/#structured-logging
        '**/app/**/*.{js,cjs,mjs}',
        'app.{js,mjs}',
        'modules/*/*.{js,mjs}',
        // Frontend: Prefer debugConsole over bare console
        // Docs: https://manual.dev-overleaf.com/development/code/logging/#frontend
        '**/frontend/**/*.{js,jsx,ts,tsx}',
        // Tests
        '**/test/**/*.{js,cjs,mjs,jsx,ts,tsx}',
      ],
      excludedFiles: [
        // Allow console logs in scripts
        '**/scripts/**/*.js',
        // Allow console logs in stories
        '**/stories/**/*.{js,jsx,ts,tsx}',
        // Workers do not have access to the search params for enabling ?debug=true.
        // self.location.url is the URL of the worker script.
        '*.worker.{js,ts}',
      ],
      rules: {
        'no-console': 'error',
      },
    },
  ],
}
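
To make the backend no-restricted-syntax selectors above concrete, here is an illustrative snippet (not part of the commit) showing what they flag and what passes; the rule messages themselves name the preferred forms:

const { ObjectId } = require('mongodb')

async function examples(db, ids, id) {
  const bad1 = ObjectId(id) // flagged: construct with `new ObjectId()` instead
  const ok1 = new ObjectId(id)

  const bad2 = ids.map(ObjectId) // flagged: don't map ObjectId directly
  const ok2 = ids.map(id => new ObjectId(id))

  const bad3 = await db.projects.find({}) // flagged: find() returns a cursor, not a promise
  const ok3 = await db.projects.find({}).toArray()
}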

103 services/web/.gitignore vendored Normal file
@@ -0,0 +1,103 @@
# Compiled source #
###################
*.com
*.class
*.dll
*.exe
*.o
*.so

# Packages #
############
# it's better to unpack these files and commit the raw source
# git has its own built in compression methods
*.7z
*.dmg
*.gz
*.iso
*.jar
*.rar
*.tar
*.zip

# Logs and databases #
######################
*.log
*.sql
*.sqlite

# OS generated files #
######################
.DS_Store?
ehthumbs.db
Icon?
Thumbs.db

# allow "icons"
![Ii]cons

node_modules/*
data/*
coverage

cookies.txt
requestQueueWorker.js
TpdsWorker.js
BackgroundJobsWorker.js
UserAndProjectPopulator.coffee

public/manifest.json

public/js
public/minjs
public/stylesheets
public/fonts
public/images

Gemfile.lock

*.swp
.DS_Store

docker-shared.yml

config/*.coffee
!config/settings.defaults.coffee
!config/settings.webpack.coffee
config/*.js
!config/settings.defaults.js
!config/settings.webpack.js
!config/settings.overrides.saas.js
!config/settings.overrides.server-pro.js

modules/**/Makefile

# Precompiled pug files
**/app/views/**/*.js

# Sentry secrets file (injected by CI)
.sentryclirc

# via dev-environment
.npmrc

# Intellij
.idea
.run

# Cypress
cypress/screenshots/
cypress/videos/
cypress/downloads/
cypress/results/

# Ace themes for conversion
frontend/js/features/source-editor/themes/ace/

# Compiled parser files
frontend/js/features/source-editor/lezer-latex/latex.mjs
frontend/js/features/source-editor/lezer-latex/latex.terms.mjs
frontend/js/features/source-editor/lezer-bibtex/bibtex.mjs
frontend/js/features/source-editor/lezer-bibtex/bibtex.terms.mjs

!**/fixtures/**/*.log

1 services/web/.nvmrc Normal file
@@ -0,0 +1 @@
20.18.2

14 services/web/.prettierignore Normal file
@@ -0,0 +1,14 @@
# NOTE: changing paths may require updating them in the Makefile too.
data/
scripts/translations/.cache/
node_modules
frontend/js/vendor
modules/**/frontend/js/vendor
public/js
public/minjs
frontend/stylesheets/components/nvd3.less
frontend/js/features/source-editor/lezer-latex/latex.mjs
frontend/js/features/source-editor/lezer-latex/latex.terms.mjs
frontend/js/features/source-editor/lezer-bibtex/bibtex.mjs
frontend/js/features/source-editor/lezer-bibtex/bibtex.terms.mjs
frontend/js/features/source-editor/hunspell/wasm/hunspell.mjs

9 services/web/.prettierrc Normal file
@@ -0,0 +1,9 @@
{
  "arrowParens": "avoid",
  "jsxSingleQuote": false,
  "semi": false,
  "singleQuote": true,
  "trailingComma": "es5",
  "tabWidth": 2,
  "useTabs": false
}
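
For illustration (not part of the commit), code formatted under these options comes out with no semicolons, single quotes, two-space indents, ES5 trailing commas, and no parentheses around single arrow-function parameters:

const greet = name => {
  const parts = ['Hello', name]
  return parts.join(', ')
}

const langs = {
  en: 'English',
  de: 'German', // trailing comma kept in multi-line literals ("es5")
}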

129 services/web/.storybook/main.ts Normal file
@@ -0,0 +1,129 @@
import type { StorybookConfig } from '@storybook/react-webpack5'
import path from 'node:path'
import MiniCssExtractPlugin from 'mini-css-extract-plugin'

const rootDir = path.resolve(__dirname, '..')

// NOTE: must be set before webpack config is imported
process.env.OVERLEAF_CONFIG = path.join(rootDir, 'config/settings.webpack.js')

function getAbsolutePath(value: string): any {
  return path.dirname(require.resolve(path.join(value, 'package.json')))
}

const config: StorybookConfig = {
  core: {
    disableTelemetry: true,
  },
  staticDirs: [path.join(rootDir, 'public')],
  stories: [
    path.join(rootDir, 'frontend/stories/**/*.stories.{js,jsx,ts,tsx}'),
    path.join(rootDir, 'modules/**/stories/**/*.stories.{js,jsx,ts,tsx}'),
  ],
  addons: [
    getAbsolutePath('@storybook/addon-links'),
    getAbsolutePath('@storybook/addon-essentials'),
    getAbsolutePath('@storybook/addon-interactions'),
    getAbsolutePath('@storybook/addon-a11y'),
    getAbsolutePath('@storybook/addon-webpack5-compiler-babel'),
    {
      name: getAbsolutePath('@storybook/addon-styling-webpack'),
      options: {
        rules: [
          {
            test: /\.css$/,
            use: [
              { loader: MiniCssExtractPlugin.loader },
              { loader: 'css-loader' },
            ],
          },
          {
            test: /\.less$/,
            use: [
              { loader: MiniCssExtractPlugin.loader },
              { loader: 'css-loader' },
              { loader: 'less-loader' },
            ],
          },
          {
            // Pass Sass files through sass-loader/css-loader/mini-css-extract-
            // plugin (note: run in reverse order)
            test: /\.s[ac]ss$/,
            use: [
              // Allows the CSS to be extracted to a separate .css file
              { loader: MiniCssExtractPlugin.loader },
              // Resolves any CSS dependencies (e.g. url())
              { loader: 'css-loader' },
              // Resolve relative paths sensibly in SASS
              { loader: 'resolve-url-loader' },
              {
                // Runs autoprefixer on CSS via postcss
                loader: 'postcss-loader',
                options: {
                  postcssOptions: {
                    plugins: ['autoprefixer'],
                  },
                },
              },
              // Compiles Sass to CSS
              {
                loader: 'sass-loader',
                options: { sourceMap: true }, // sourceMap: true is required for resolve-url-loader
              },
            ],
          },
        ],
        plugins: [new MiniCssExtractPlugin()],
      },
    },
  ],
  framework: {
    name: getAbsolutePath('@storybook/react-webpack5'),
    options: {},
  },
  docs: {
    autodocs: 'tag',
  },
  babel: (options: Record<string, any>) => {
    return {
      ...options,
      plugins: [
        // ensure that TSX files are transformed before other plugins run
        ['@babel/plugin-transform-typescript', { isTSX: true }],
        ...(options.plugins ?? []),
      ],
    }
  },
  webpackFinal: storybookConfig => {
    return {
      ...storybookConfig,
      resolve: {
        ...storybookConfig.resolve,
        fallback: {
          ...storybookConfig.resolve?.fallback,
          fs: false,
          os: false,
          module: false,
          tty: require.resolve('tty-browserify'),
        },
        extensions: ['.js', '.jsx', '.mjs', '.ts', '.tsx', '.json'],
        alias: {
          ...storybookConfig.resolve?.alias,
          // custom prefixes for import paths
          '@': path.join(rootDir, 'frontend/js/'),
        },
      },
      module: {
        ...storybookConfig.module,
        rules: (storybookConfig.module?.rules ?? []).concat({
          test: /\.wasm$/,
          type: 'asset/resource',
          generator: {
            filename: 'js/[name]-[contenthash][ext]',
          },
        }),
      },
    }
  },
}
export default config
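
A minimal story file that the stories globs above would pick up might look like this sketch; ExampleButton and its path are invented for illustration, and the '@' import prefix resolves to frontend/js/ via the alias defined in webpackFinal:

// frontend/stories/example-button.stories.tsx (hypothetical)
import type { Meta, StoryObj } from '@storybook/react'
import ExampleButton from '@/shared/components/example-button' // hypothetical component

const meta: Meta<typeof ExampleButton> = {
  title: 'Shared / Example Button',
  component: ExampleButton,
}

export default meta

export const Primary: StoryObj<typeof ExampleButton> = {
  args: { children: 'Click me' },
}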

3 services/web/.storybook/manager.css Normal file
@@ -0,0 +1,3 @@
.sidebar-container a[title='Overleaf'] {
  max-width: 100px;
}

15 services/web/.storybook/manager.ts Normal file
@@ -0,0 +1,15 @@
import { addons } from '@storybook/manager-api'
import { create } from '@storybook/theming/create'

import './manager.css'

import brandImage from '../public/img/ol-brand/overleaf.svg'

const theme = create({
  base: 'light',
  brandTitle: 'Overleaf',
  brandUrl: 'https://www.overleaf.com',
  brandImage,
})

addons.setConfig({ theme })

173 services/web/.storybook/preview.tsx Normal file
@@ -0,0 +1,173 @@
import type { Preview } from '@storybook/react'

// Storybook does not (currently) support async loading of "stories". Therefore
// the strategy in frontend/js/i18n.ts does not work (because we cannot wait on
// the promise to resolve).
// Therefore we have to use the synchronous method for configuring
// react-i18next. Because of this, we can only hard-code a single language.
import i18n from 'i18next'
import { initReactI18next } from 'react-i18next'
// @ts-ignore
import en from '../../../services/web/locales/en.json'

function resetMeta() {
  window.metaAttributesCache = new Map()
  window.metaAttributesCache.set('ol-i18n', { currentLangCode: 'en' })
  window.metaAttributesCache.set('ol-ExposedSettings', {
    adminEmail: 'placeholder@example.com',
    appName: 'Overleaf',
    cookieDomain: '.overleaf.stories',
    dropboxAppName: 'Overleaf-Stories',
    emailConfirmationDisabled: false,
    enableSubscriptions: true,
    hasAffiliationsFeature: false,
    hasLinkUrlFeature: true,
    hasLinkedProjectFileFeature: true,
    hasLinkedProjectOutputFileFeature: true,
    hasSamlFeature: true,
    ieeeBrandId: 15,
    isOverleaf: true,
    labsEnabled: true,
    maxEntitiesPerProject: 10,
    maxUploadSize: 5 * 1024 * 1024,
    recaptchaDisabled: {
      invite: true,
      login: true,
      passwordReset: true,
      register: true,
      addEmail: true,
    },
    sentryAllowedOriginRegex: '',
    siteUrl: 'http://localhost',
    templateLinks: [],
    textExtensions: [
      'tex',
      'latex',
      'sty',
      'cls',
      'bst',
      'bib',
      'bibtex',
      'txt',
      'tikz',
      'mtx',
      'rtex',
      'md',
      'asy',
      'lbx',
      'bbx',
      'cbx',
      'm',
      'lco',
      'dtx',
      'ins',
      'ist',
      'def',
      'clo',
      'ldf',
      'rmd',
      'lua',
      'gv',
      'mf',
      'lhs',
      'mk',
      'xmpdata',
      'cfg',
      'rnw',
      'ltx',
      'inc',
    ],
    editableFilenames: ['latexmkrc', '.latexmkrc', 'makefile', 'gnumakefile'],
    validRootDocExtensions: ['tex', 'Rtex', 'ltx', 'Rnw'],
    fileIgnorePattern:
      '**/{{__MACOSX,.git,.texpadtmp,.R}{,/**},.!(latexmkrc),*.{dvi,aux,log,toc,out,pdfsync,synctex,synctex(busy),fdb_latexmk,fls,nlo,ind,glo,gls,glg,bbl,blg,doc,docx,gz,swp}}',
    projectUploadTimeout: 12000,
  })
}

i18n.use(initReactI18next).init({
  lng: 'en',

  // still using the v3 plural suffixes
  compatibilityJSON: 'v3',

  resources: {
    en: { translation: en },
  },

  react: {
    useSuspense: false,
    transSupportBasicHtmlNodes: false,
  },

  interpolation: {
    prefix: '__',
    suffix: '__',
    unescapeSuffix: 'HTML',
    skipOnVariables: true,
    escapeValue: false,
    defaultVariables: {
      appName: 'Overleaf',
    },
  },
})

const preview: Preview = {
  parameters: {
    // Automatically mark prop-types like onClick, onToggle, etc as Storybook
    // "actions", so that they are logged in the Actions pane at the bottom of the
    // viewer
    actions: { argTypesRegex: '^on.*' },
    docs: {
      // render stories in iframes, to isolate modals
      inlineStories: false,
    },
  },
  globalTypes: {
    theme: {
      name: 'Theme',
      description: 'Editor theme',
      defaultValue: 'main-',
      toolbar: {
        icon: 'circlehollow',
        items: [
          { value: 'main-', title: 'Default' },
          { value: 'main-light-', title: 'Light' },
        ],
      },
    },
  },
  loaders: [
    async () => {
      return {
        mainStyle: await import(
          // @ts-ignore
          `!!to-string-loader!css-loader!resolve-url-loader!sass-loader!../../../services/web/frontend/stylesheets/bootstrap-5/main-style.scss`
        ),
      }
    },
  ],
  decorators: [
    (Story, context) => {
      const { mainStyle } = context.loaded

      resetMeta()

      return (
        <div
          data-theme={
            context.globals.theme === 'main-light-' ? 'light' : 'default'
          }
        >
          {mainStyle && <style>{mainStyle.default}</style>}
          <Story {...context} />
        </div>
      )
    },
  ],
}

export default preview

// Populate meta for top-level access in modules on import
resetMeta()
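
Because resetMeta() runs both at import time and in the decorator before each story, components can read the seeded values synchronously. An illustrative read (not part of the commit):

const settings = window.metaAttributesCache.get('ol-ExposedSettings')
settings.appName // 'Overleaf'
settings.maxUploadSize // 5 * 1024 * 1024 bytes, i.e. 5 MB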

34 services/web/.storybook/utils/with-split-tests.tsx Normal file
@@ -0,0 +1,34 @@
import type { Meta } from '@storybook/react'
import _ from 'lodash'
import { SplitTestContext } from '../../frontend/js/shared/context/split-test-context'

export const splitTestsArgTypes = {
  // to be able to use this utility, you need to add the argTypes for each split test in this object
  // Check the original implementation for an example: https://github.com/overleaf/internal/pull/17809
}

export const withSplitTests = (
  story: Meta,
  splitTests: (keyof typeof splitTestsArgTypes)[] = []
): Meta => {
  return {
    ...story,
    argTypes: { ...story.argTypes, ..._.pick(splitTestsArgTypes, splitTests) },
    decorators: [
      (Story, { args }) => {
        const splitTestVariants = _.pick(args, splitTests)
        const value = { splitTestVariants, splitTestInfo: {} }
        return (
          <SplitTestContext.Provider value={value}>
            <Story />
          </SplitTestContext.Provider>
        )
      },
      ...(story.decorators
        ? Array.isArray(story.decorators)
          ? story.decorators
          : [story.decorators]
        : []),
    ],
  }
}
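
Usage sketch for a story file (the split test name and component are hypothetical; the test would first need an argTypes entry in splitTestsArgTypes above):

import type { Meta } from '@storybook/react'
import { withSplitTests } from '../../.storybook/utils/with-split-tests'
import MyWidget from '@/shared/components/my-widget' // hypothetical component

const meta: Meta<typeof MyWidget> = { component: MyWidget }

// Each listed test becomes a story arg; its value is injected into
// SplitTestContext as the active variant for that test.
export default withSplitTests(meta, ['example-split-test'])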

10 services/web/.stylelintrc.json Normal file
@@ -0,0 +1,10 @@
{
  "extends": ["stylelint-config-standard-scss"],
  "rules": {
    "function-url-quotes": null,
    "no-descending-specificity": null,
    "scss/at-extend-no-missing-placeholder": null,
    "scss/operator-no-newline-after": null,
    "property-no-vendor-prefix": [true, { "ignoreProperties": ["mask-image"] }]
  }
}

7 services/web/.vscode/settings.json vendored Normal file
@@ -0,0 +1,7 @@
{
  "files.exclude": {
    "node_modules": true,
    "data": true
  },
  "cSpell.words": ["docstore", "Tpds"]
}

83 services/web/Dockerfile Normal file
@@ -0,0 +1,83 @@
# the base image is suitable for running web with /overleaf/services/web bind
# mounted
FROM node:20.18.2 AS base

WORKDIR /overleaf/services/web

# Google Cloud Storage needs a writable $HOME/.config for resumable uploads
# (see https://googleapis.dev/nodejs/storage/latest/File.html#createWriteStream)
RUN mkdir /home/node/.config && chown node:node /home/node/.config

RUN mkdir -p /overleaf/services/web/data/dumpFolder \
    && mkdir -p /overleaf/services/web/data/logs \
    && mkdir -p /overleaf/services/web/data/pdf \
    && mkdir -p /overleaf/services/web/data/uploads \
    && mkdir -p /overleaf/services/web/data/zippedProjects \
    && mkdir -p /overleaf/services/web/data/projectHistories \
    && chmod -R 0755 /overleaf/services/web/data \
    && chown -R node:node /overleaf/services/web/data


# the deps image is used for caching npm ci
FROM base AS deps-prod

COPY package.json package-lock.json /overleaf/
COPY services/web/package.json /overleaf/services/web/
COPY libraries/ /overleaf/libraries/
COPY patches/ /overleaf/patches/

RUN cd /overleaf && NODE_ENV=production npm ci --quiet

FROM deps-prod AS deps

ENV CYPRESS_INSTALL_BINARY=0

COPY tsconfig.backend.json /overleaf/
RUN cd /overleaf && npm install


# the dev image is suitable for running tests
FROM deps AS dev

ARG SENTRY_RELEASE
ENV SENTRY_RELEASE=$SENTRY_RELEASE
COPY services/web /overleaf/services/web

# Build the latex parser
RUN cd /overleaf/services/web && npm run 'lezer-latex:generate'

USER node


# the webpack image has deps+src+webpack artifacts
FROM dev AS webpack
USER root
RUN OVERLEAF_CONFIG=/overleaf/services/web/config/settings.webpack.js nice npm run webpack:production


# intermediate image for removing source maps ahead of copying into final production image
FROM webpack AS webpack-no-sourcemaps
RUN nice find /overleaf/services/web/public -name '*.js.map' -delete


# copy source code and precompile pug templates
FROM deps-prod AS pug
COPY services/web /overleaf/services/web
# Omit Server Pro/CE specific scripts from SaaS image
RUN rm /overleaf/services/web/modules/server-ce-scripts -rf
RUN OVERLEAF_CONFIG=/overleaf/services/web/config/settings.overrides.saas.js nice npm run precompile-pug


# the web image with only production dependencies but no webpack production build, for development
FROM pug AS app-only
USER node
CMD ["node", "--expose-gc", "app.mjs"]


# the final production image, with webpack production build but without source maps
FROM pug AS app
ARG SENTRY_RELEASE
ENV SENTRY_RELEASE=$SENTRY_RELEASE
COPY --from=webpack-no-sourcemaps /overleaf/services/web/public /overleaf/services/web/public
USER node
CMD ["node", "--expose-gc", "app.mjs"]

6 services/web/Dockerfile.frontend Normal file
@@ -0,0 +1,6 @@
FROM node:20.18.2

# Install Google Chrome
RUN wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add -
RUN sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list'
RUN apt-get update && apt-get install -y google-chrome-stable

11 services/web/Dockerfile.frontend.ci Normal file
@@ -0,0 +1,11 @@
ARG PROJECT_NAME
ARG BRANCH_NAME
ARG BUILD_NUMBER

FROM ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER

USER root

RUN wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - && \
    echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list && \
    apt-get update && apt-get install -y google-chrome-stable

661 services/web/LICENSE Normal file
@@ -0,0 +1,661 @@
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007

Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

Preamble

The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.

The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.

When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.

Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.

A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.

The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.

An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.

The precise terms and conditions for copying, distribution and
modification follow.

TERMS AND CONDITIONS

0. Definitions.

"This License" refers to version 3 of the GNU Affero General Public License.

"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.

"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.

To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.

A "covered work" means either the unmodified Program or a work based
on the Program.

To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.

To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.

An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.

1. Source Code.

The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.

A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.

The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.

The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.

The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.

The Corresponding Source for a work in source code form is that
same work.

2. Basic Permissions.

All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.

You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.

Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.

3. Protecting Users' Legal Rights From Anti-Circumvention Law.

No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.

When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.

4. Conveying Verbatim Copies.

You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.

You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.

5. Conveying Modified Source Versions.

You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:

a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.

b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".

c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.

d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.

A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.

6. Conveying Non-Source Forms.

You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:

a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.

b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.

c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.

d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.

e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.

A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.

A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.

"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.

If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).

The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.

Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.

7. Additional Terms.

"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.

When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.

Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:

a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or

b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or

c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or

d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or

e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or

f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.

All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.

If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.

Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.

8. Termination.

You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).

However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.

Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Remote Network Interaction; Use with the GNU General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, if you modify the
|
||||
Program, your modified version must prominently offer all users
|
||||
interacting with it remotely through a computer network (if your version
|
||||
supports such interaction) an opportunity to receive the Corresponding
|
||||
Source of your version by providing access to the Corresponding Source
|
||||
from a network server at no charge, through some standard or customary
|
||||
means of facilitating copying of software. This Corresponding Source
|
||||
shall include the Corresponding Source for any work covered by version 3
|
||||
of the GNU General Public License that is incorporated pursuant to the
|
||||
following paragraph.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the work with which it is combined will remain governed by version
|
||||
3 of the GNU General Public License.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU Affero General Public License from time to time. Such new versions
|
||||
will be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU Affero General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU Affero General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU Affero General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU Affero General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU Affero General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Affero General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If your software can interact with users remotely through a computer
|
||||
network, you should also make sure that it provides a way for users to
|
||||
get its source. For example, if your program is a web application, its
|
||||
interface could display a "Source" link that leads users to an archive
|
||||
of the code. There are many ways you could offer source, and different
|
||||
solutions will be better for different programs; see section 13 for the
|
||||
specific requirements.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU AGPL, see
|
||||
<http://www.gnu.org/licenses/>.
|
||||
571
services/web/Makefile
Normal file
571
services/web/Makefile
Normal file
@@ -0,0 +1,571 @@
DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml

BUILD_NUMBER ?= local
BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
PROJECT_NAME = web
BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]')
PWD = $(shell pwd)

export OVERLEAF_CONFIG ?= /overleaf/services/web/test/acceptance/config/settings.test.saas.js
export BASE_CONFIG ?= ${OVERLEAF_CONFIG}

CFG_SAAS=/overleaf/services/web/test/acceptance/config/settings.test.saas.js
CFG_SERVER_CE=/overleaf/services/web/test/acceptance/config/settings.test.server-ce.js
CFG_SERVER_PRO=/overleaf/services/web/test/acceptance/config/settings.test.server-pro.js

DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
	BRANCH_NAME=$(BRANCH_NAME) \
	PROJECT_NAME=$(PROJECT_NAME) \
	MOCHA_GREP=${MOCHA_GREP} \
	docker compose ${DOCKER_COMPOSE_FLAGS}

MODULE_DIRS := $(shell find modules -mindepth 1 -maxdepth 1 -type d -not -name '.git' )
MODULE_MAKEFILES := $(MODULE_DIRS:=/Makefile)
MODULE_NAME=$(shell basename $(MODULE))

$(MODULE_MAKEFILES): Makefile.module
	cp Makefile.module $@ || diff Makefile.module $@
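
# (Sync note: `cp` refreshes each module's Makefile from the template; if the
# copy fails, the `diff` fallback surfaces any drift from Makefile.module
# instead of failing silently.)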

#
# Clean
#

clean:
	-$(DOCKER_COMPOSE) down --rmi local
	-COMPOSE_PROJECT_NAME=unit_test_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local
	-COMPOSE_PROJECT_NAME=unit_test_all_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local
	-COMPOSE_PROJECT_NAME=unit_test_parallel_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local
	-COMPOSE_PROJECT_NAME=unit_test_parallel_make_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local
	-COMPOSE_PROJECT_NAME=acceptance_test_saas_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local
	-COMPOSE_PROJECT_NAME=acceptance_test_server_ce_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local
	-COMPOSE_PROJECT_NAME=acceptance_test_server_pro_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local
	-COMPOSE_PROJECT_NAME=acceptance_modules_merged_saas_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local
	-COMPOSE_PROJECT_NAME=acceptance_modules_merged_saas_1_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local
	-COMPOSE_PROJECT_NAME=acceptance_modules_merged_saas_2_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local
	-COMPOSE_PROJECT_NAME=acceptance_modules_merged_saas_3_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local
	-COMPOSE_PROJECT_NAME=acceptance_modules_merged_saas_4_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local
	-COMPOSE_PROJECT_NAME=acceptance_modules_merged_server_ce_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local
	-COMPOSE_PROJECT_NAME=acceptance_modules_merged_server_pro_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local
	-COMPOSE_PROJECT_NAME=test_frontend_ct_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local
	-COMPOSE_PROJECT_NAME=tar_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local

clean_ci:
	$(DOCKER_COMPOSE) down -v -t 0
	docker container list | grep 'days ago' | cut -d ' ' -f 1 - | xargs -r docker container stop
	docker image prune -af --filter "until=48h"
	docker network prune -f

#
# Tests
#

test: test_unit test_acceptance test_frontend test_frontend_ct

test_module: test_unit_module test_acceptance_module

#
# Unit tests
#

test_unit: test_unit_all
test_unit_all: export COMPOSE_PROJECT_NAME=unit_test_all_$(BUILD_DIR_NAME)
test_unit_all:
	$(DOCKER_COMPOSE) run --rm test_unit npm run test:unit:all
	$(DOCKER_COMPOSE) down -v -t 0

test_unit_all_silent: export COMPOSE_PROJECT_NAME=unit_test_all_$(BUILD_DIR_NAME)
test_unit_all_silent:
	$(DOCKER_COMPOSE) run --rm test_unit npm run test:unit:all:silent
	$(DOCKER_COMPOSE) down -v -t 0

test_unit_app: export COMPOSE_PROJECT_NAME=unit_test_$(BUILD_DIR_NAME)
test_unit_app:
	$(DOCKER_COMPOSE) run --name unit_test_$(BUILD_DIR_NAME) --rm test_unit
	$(DOCKER_COMPOSE) down -v -t 0

TEST_SUITES = $(sort $(filter-out \
	$(wildcard test/unit/src/helpers/*), \
	$(wildcard test/unit/src/*/*)))

MOCHA_CMD_LINE = \
	mocha \
	--exit \
	--file test/unit/bootstrap.js \
	--grep=${MOCHA_GREP} \
	--reporter spec \
	--timeout 25000 \

.PHONY: $(TEST_SUITES)
$(TEST_SUITES):
	$(MOCHA_CMD_LINE) $@

J ?= 1
test_unit_app_parallel_gnu_make: $(TEST_SUITES)
test_unit_app_parallel_gnu_make_docker: export COMPOSE_PROJECT_NAME = \
	unit_test_parallel_make_$(BUILD_DIR_NAME)
test_unit_app_parallel_gnu_make_docker:
	$(DOCKER_COMPOSE) run --rm test_unit \
		make test_unit_app_parallel_gnu_make --output-sync -j $(J)
	$(DOCKER_COMPOSE) down -v -t 0
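
# Usage sketch: every directory under test/unit/src doubles as a phony make
# target that runs mocha on that suite, so GNU make can fan the suites out
# across processes, e.g.
#   make test_unit_app_parallel_gnu_make -j4 --output-sync
# or, inside docker:
#   make test_unit_app_parallel_gnu_make_docker J=4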

TEST_UNIT_MODULES = $(MODULE_DIRS:=/test_unit)
$(TEST_UNIT_MODULES): %/test_unit: %/Makefile
test_unit_modules: $(TEST_UNIT_MODULES)

test_unit_module:
	$(MAKE) modules/$(MODULE_NAME)/test_unit


#
# Frontend tests
#

test_frontend:
	COMPOSE_PROJECT_NAME=frontend_test_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) run --rm test_frontend
	COMPOSE_PROJECT_NAME=frontend_test_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down -v -t 0

#
# Frontend component tests in Cypress
#
# Local development: use $ make test_frontend_ct
#
TEST_FRONTEND_CT_VARIANTS = \
	test_frontend_ct \
	test_frontend_ct_core_other \
	test_frontend_ct_core_features \
	test_frontend_ct_modules \
	test_frontend_ct_editor_other \
	test_frontend_ct_editor_visual \

# Note: The below cypress targets are for CI only
build_test_frontend_ct:
	docker run --rm --volume /dev/shm:/dev/shm --user root $(IMAGE_CI) bash -ec 'tar -cC / overleaf | tar -xC /dev/shm'

test_frontend_ct_core_other: export CYPRESS_RESULTS=./cypress/results/core
test_frontend_ct_core_other: export CYPRESS_SPEC_PATTERN=./test/frontend/**/*.spec.{js,jsx,ts,tsx}
test_frontend_ct_core_other: export CYPRESS_EXCLUDE_SPEC_PATTERN=./test/frontend/features/**/*.spec.{js,jsx,ts,tsx}

test_frontend_ct_core_features: export CYPRESS_RESULTS=./cypress/results/core
test_frontend_ct_core_features: export CYPRESS_SPEC_PATTERN=./test/frontend/features/**/*.spec.{js,jsx,ts,tsx}
test_frontend_ct_core_features: export CYPRESS_EXCLUDE_SPEC_PATTERN=./test/frontend/features/source-editor/**/*.spec.{js,jsx,ts,tsx}

test_frontend_ct_modules: export CYPRESS_RESULTS=./cypress/results/modules
test_frontend_ct_modules: export CYPRESS_SPEC_PATTERN=./modules/**/test/frontend/**/*.spec.{js,jsx,ts,tsx}

test_frontend_ct_editor_other: export CYPRESS_RESULTS=./cypress/results/editor_other
test_frontend_ct_editor_other: export CYPRESS_SPEC_PATTERN=./test/frontend/features/source-editor/**/*.spec.{js,jsx,ts,tsx}
test_frontend_ct_editor_other: export CYPRESS_EXCLUDE_SPEC_PATTERN=./test/frontend/features/source-editor/components/codemirror-editor-visual*.spec.{js,jsx,ts,tsx}

test_frontend_ct_editor_visual: export CYPRESS_RESULTS=./cypress/results/editor_visual
test_frontend_ct_editor_visual: export CYPRESS_SPEC_PATTERN=./test/frontend/features/source-editor/components/codemirror-editor-visual*.spec.{js,jsx,ts,tsx}

$(TEST_FRONTEND_CT_VARIANTS):
	COMPOSE_PROJECT_NAME=$@_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) run --rm test_frontend_ct
	COMPOSE_PROJECT_NAME=$@_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down -v -t 0

#
# Acceptance tests
#

test_acceptance: test_acceptance_app test_acceptance_modules
test_acceptance_saas: test_acceptance_app_saas test_acceptance_modules_merged_saas
test_acceptance_server_ce: test_acceptance_app_server_ce test_acceptance_modules_merged_server_ce
test_acceptance_server_pro: test_acceptance_app_server_pro test_acceptance_modules_merged_server_pro

TEST_ACCEPTANCE_APP := \
	test_acceptance_app_saas \
	test_acceptance_app_server_ce \
	test_acceptance_app_server_pro \

test_acceptance_app: $(TEST_ACCEPTANCE_APP)
test_acceptance_app_saas: export COMPOSE_PROJECT_NAME=acceptance_test_saas_$(BUILD_DIR_NAME)
test_acceptance_app_saas: export OVERLEAF_CONFIG=$(CFG_SAAS)
test_acceptance_app_server_ce: export COMPOSE_PROJECT_NAME=acceptance_test_server_ce_$(BUILD_DIR_NAME)
test_acceptance_app_server_ce: export OVERLEAF_CONFIG=$(CFG_SERVER_CE)
test_acceptance_app_server_pro: export COMPOSE_PROJECT_NAME=acceptance_test_server_pro_$(BUILD_DIR_NAME)
test_acceptance_app_server_pro: export OVERLEAF_CONFIG=$(CFG_SERVER_PRO)

$(TEST_ACCEPTANCE_APP):
	$(DOCKER_COMPOSE) run --rm test_acceptance
	$(DOCKER_COMPOSE) down -v -t 0

# We are using _make magic_ for turning these file-targets into calls to
# sub-Makefiles in the individual modules.
# These sub-Makefiles need to be kept in sync with the template, hence we
# add a dependency on each module's Makefile and cross-link that to the
# template at the very top of this file.
# Example: `web$ make modules/server-ce-scripts/test_acceptance_server_ce`
# Description: Run the acceptance tests of the server-ce-scripts module in an
#   Overleaf Community Edition Environment.
# Break down:
#  Target: modules/server-ce-scripts/test_acceptance_server_ce
#   -> depends on modules/server-ce-scripts/Makefile
#   -> add environment variable BASE_CONFIG=$(CFG_SERVER_CE)
#      -> BASE_CONFIG=/overleaf/services/web/test/acceptance/config/settings.test.server-ce.js
#   -> automatic target: `make -C server-ce-scripts test_acceptance_server_ce`
#      -> automatic target: run `make test_acceptance_server_ce` in module
#  Target: modules/server-ce-scripts/Makefile
#   -> depends on Makefile.module
#   -> automatic target: copies the file when changed
TEST_ACCEPTANCE_MODULES = $(MODULE_DIRS:=/test_acceptance)
$(TEST_ACCEPTANCE_MODULES): %/test_acceptance: %/Makefile
$(TEST_ACCEPTANCE_MODULES): modules/%/test_acceptance:
	$(MAKE) test_acceptance_module MODULE_NAME=$*

TEST_ACCEPTANCE_MODULES_SAAS = $(MODULE_DIRS:=/test_acceptance_saas)
$(TEST_ACCEPTANCE_MODULES_SAAS): %/test_acceptance_saas: %/Makefile
$(TEST_ACCEPTANCE_MODULES_SAAS): export BASE_CONFIG = $(CFG_SAAS)

# This line adds the `/test_acceptance_server_ce` suffix to all items in $(MODULE_DIRS).
TEST_ACCEPTANCE_MODULES_SERVER_CE = $(MODULE_DIRS:=/test_acceptance_server_ce)
# This line adds a dependency on the module's Makefile.
$(TEST_ACCEPTANCE_MODULES_SERVER_CE): %/test_acceptance_server_ce: %/Makefile
# This line adds the environment variable BASE_CONFIG=$(CFG_SERVER_CE) to all
# invocations of `web$ make modules/foo/test_acceptance_server_ce`.
$(TEST_ACCEPTANCE_MODULES_SERVER_CE): export BASE_CONFIG = $(CFG_SERVER_CE)

TEST_ACCEPTANCE_MODULES_SERVER_PRO = $(MODULE_DIRS:=/test_acceptance_server_pro)
$(TEST_ACCEPTANCE_MODULES_SERVER_PRO): %/test_acceptance_server_pro: %/Makefile
$(TEST_ACCEPTANCE_MODULES_SERVER_PRO): export BASE_CONFIG = $(CFG_SERVER_PRO)

CLEAN_TEST_ACCEPTANCE_MODULES = $(MODULE_DIRS:=/clean_test_acceptance)
$(CLEAN_TEST_ACCEPTANCE_MODULES): %/clean_test_acceptance: %/Makefile
clean_test_acceptance_modules: $(CLEAN_TEST_ACCEPTANCE_MODULES)
clean_ci: clean_test_acceptance_modules

test_acceptance_module_noop:
	@echo
	@echo Module '$(MODULE_NAME)' does not run in ${LABEL}.
	@echo

TEST_ACCEPTANCE_MODULE_MAYBE_IN := \
	test_acceptance_module_maybe_in_saas \
	test_acceptance_module_maybe_in_server_ce \
	test_acceptance_module_maybe_in_server_pro \

test_acceptance_module: $(TEST_ACCEPTANCE_MODULE_MAYBE_IN)
test_acceptance_module_maybe_in_saas: export BASE_CONFIG=$(CFG_SAAS)
test_acceptance_module_maybe_in_server_ce: export BASE_CONFIG=$(CFG_SERVER_CE)
test_acceptance_module_maybe_in_server_pro: export BASE_CONFIG=$(CFG_SERVER_PRO)

# We need to figure out whether the module is loaded in a given environment.
# This information is stored in the (base-)settings.
# We get the full list of modules and check for a matching module entry.
# Either grep finds and emits the module, or it exits with code 1, which
# we handle with a fallback to a noop make target.
# Run the node command in a docker compose container which provides the needed
# npm dependencies (from disk in dev-env or from the CI image in CI).
# Pick the test_unit service, which is very light-weight -- the test_acceptance
# service would start mongo/redis.
$(TEST_ACCEPTANCE_MODULE_MAYBE_IN): test_acceptance_module_maybe_in_%:
	$(MAKE) $(shell \
		OVERLEAF_CONFIG=$(BASE_CONFIG) \
		$(DOCKER_COMPOSE) run --rm test_unit \
		node test/acceptance/getModuleTargets test_acceptance_$* \
		| grep -e /$(MODULE_NAME)/ || echo test_acceptance_module_noop LABEL=$* \
	)
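
# Illustration (hypothetical module names): getModuleTargets prints one
# target per line, e.g.
#   modules/foo/test_acceptance_saas
#   modules/bar/test_acceptance_saas
# `grep -e /foo/` keeps the matching line, so make runs that module's tests;
# for a module that is not listed, grep exits 1 and the `echo` fallback makes
# make run test_acceptance_module_noop instead.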

# See the docs for test_acceptance_server_ce for how this works.
test_acceptance_module_saas: export BASE_CONFIG = $(CFG_SAAS)
test_acceptance_module_saas:
	$(MAKE) modules/$(MODULE_NAME)/test_acceptance_saas

test_acceptance_module_server_ce: export BASE_CONFIG = $(CFG_SERVER_CE)
test_acceptance_module_server_ce:
	$(MAKE) modules/$(MODULE_NAME)/test_acceptance_server_ce

test_acceptance_module_server_pro: export BASE_CONFIG = $(CFG_SERVER_PRO)
test_acceptance_module_server_pro:
	$(MAKE) modules/$(MODULE_NAME)/test_acceptance_server_pro

# See the docs for test_acceptance_server_ce for how this works.
TEST_ACCEPTANCE_MODULES_MERGED_INNER = $(MODULE_DIRS:=/test_acceptance_merged_inner)
$(TEST_ACCEPTANCE_MODULES_MERGED_INNER): %/test_acceptance_merged_inner: %/Makefile
test_acceptance_modules_merged_inner:
	$(MAKE) $(shell \
		OVERLEAF_CONFIG=$(BASE_CONFIG) \
		node test/acceptance/getModuleTargets test_acceptance_merged_inner \
	)

# inner loop for running saas tests in parallel
no_more_targets:

# If we ever have more than 40 modules, we need to add _5 targets to all the places and have it START at 41.
test_acceptance_modules_merged_inner_1: export START=1
test_acceptance_modules_merged_inner_2: export START=11
test_acceptance_modules_merged_inner_3: export START=21
test_acceptance_modules_merged_inner_4: export START=31
TEST_ACCEPTANCE_MODULES_MERGED_INNER_SPLIT = \
	test_acceptance_modules_merged_inner_1 \
	test_acceptance_modules_merged_inner_2 \
	test_acceptance_modules_merged_inner_3 \
	test_acceptance_modules_merged_inner_4 \

# The node script prints one module per line.
# Using tail and head we drop the entries before line $(START) and keep the next 10.
# Finally we check with grep for any targets in a batch and print a fallback if none were found.
$(TEST_ACCEPTANCE_MODULES_MERGED_INNER_SPLIT):
	$(MAKE) $(shell \
		OVERLEAF_CONFIG=$(BASE_CONFIG) \
		node test/acceptance/getModuleTargets test_acceptance_merged_inner \
		| tail -n+$(START) | head -n 10 \
		| grep -e . || echo no_more_targets \
	)
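
# Illustration: with, say, 25 modules, START=11 selects entries 11-20 and
# START=21 selects 21-25; START=31 selects nothing, so grep exits non-zero
# and the no_more_targets fallback keeps that batch a harmless no-op.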

# See the docs for test_acceptance_server_ce for how this works.
test_acceptance_modules_merged_saas: export COMPOSE_PROJECT_NAME = \
	acceptance_test_modules_merged_saas_$(BUILD_DIR_NAME)
test_acceptance_modules_merged_saas: export BASE_CONFIG = $(CFG_SAAS)

test_acceptance_modules_merged_server_ce: export COMPOSE_PROJECT_NAME = \
	acceptance_test_modules_merged_server_ce_$(BUILD_DIR_NAME)
test_acceptance_modules_merged_server_ce: export BASE_CONFIG = $(CFG_SERVER_CE)

test_acceptance_modules_merged_server_pro: export COMPOSE_PROJECT_NAME = \
	acceptance_test_modules_merged_server_pro_$(BUILD_DIR_NAME)
test_acceptance_modules_merged_server_pro: export BASE_CONFIG = $(CFG_SERVER_PRO)

# All these variants run the same command.
# Each target has a different set of environment variables defined above.
TEST_ACCEPTANCE_MODULES_MERGED_VARIANTS = \
	test_acceptance_modules_merged_saas \
	test_acceptance_modules_merged_server_ce \
	test_acceptance_modules_merged_server_pro \

$(TEST_ACCEPTANCE_MODULES_MERGED_VARIANTS):
	$(DOCKER_COMPOSE) run --rm test_acceptance make test_acceptance_modules_merged_inner
	$(DOCKER_COMPOSE) down -v -t 0

# outer loop for running saas tests in parallel
TEST_ACCEPTANCE_MODULES_MERGED_SPLIT_SAAS = \
	test_acceptance_modules_merged_saas_1 \
	test_acceptance_modules_merged_saas_2 \
	test_acceptance_modules_merged_saas_3 \
	test_acceptance_modules_merged_saas_4 \

test_acceptance_modules_merged_saas_1: export COMPOSE_PROJECT_NAME = \
	acceptance_test_modules_merged_saas_1_$(BUILD_DIR_NAME)
test_acceptance_modules_merged_saas_2: export COMPOSE_PROJECT_NAME = \
	acceptance_test_modules_merged_saas_2_$(BUILD_DIR_NAME)
test_acceptance_modules_merged_saas_3: export COMPOSE_PROJECT_NAME = \
	acceptance_test_modules_merged_saas_3_$(BUILD_DIR_NAME)
test_acceptance_modules_merged_saas_4: export COMPOSE_PROJECT_NAME = \
	acceptance_test_modules_merged_saas_4_$(BUILD_DIR_NAME)
$(TEST_ACCEPTANCE_MODULES_MERGED_SPLIT_SAAS): export BASE_CONFIG = $(CFG_SAAS)

$(TEST_ACCEPTANCE_MODULES_MERGED_SPLIT_SAAS): test_acceptance_modules_merged_saas_%:
	$(DOCKER_COMPOSE) run --rm test_acceptance make test_acceptance_modules_merged_inner_$*
	$(DOCKER_COMPOSE) down -v -t 0

test_acceptance_modules: $(TEST_ACCEPTANCE_MODULES_MERGED_VARIANTS)

#
# CI tests
#

ci:
	MOCHA_ARGS="--reporter tap" \
	$(MAKE) test

#
# Lint & format
#
ORG_PATH = /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
RUN_LINT_FORMAT ?= \
	docker run --rm ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)

NODE_MODULES_PATH := ${PATH}:${PWD}/node_modules/.bin:/overleaf/services/web/node_modules/.bin
WITH_NODE_MODULES_PATH = \
	format_backend \
	format_frontend \
	format_misc \
	format_styles \
	format_test_app_unit \
	format_test_app_rest \
	format_test_modules \
	$(TEST_SUITES) \

$(WITH_NODE_MODULES_PATH): export PATH=$(NODE_MODULES_PATH)

lint: lint_eslint
lint_eslint:
	npm run lint

lint: lint_stylelint
lint_stylelint:
	npm run lint:styles

lint: lint_pug
lint_pug:
	bin/lint_pug_templates

lint: lint_locales
lint_locales:
	bin/lint_locales

lint: check_extracted_translations
check_extracted_translations:
	bin/check_extracted_translations

sort_locales:
	node scripts/translations/sort.js

cleanup_unused_locales:
	node scripts/translations/cleanupUnusedLocales.js

lint: lint_flag_res_send_usage
lint_flag_res_send_usage:
	bin/lint_flag_res_send_usage

lint: lint_overleafModuleImports
lint_overleafModuleImports:
	node scripts/check_overleafModuleImports.mjs

lint: typecheck_frontend
typecheck_frontend:
	npm run --silent type-check

lint: typecheck_backend
typecheck_backend:
	npm run --silent type-check:backend

lint_in_docker:
	$(RUN_LINT_FORMAT) make lint -j2 --output-sync

format: format_js
format_js:
	npm run --silent format

format: format_styles
format_styles:
	npm run --silent format:styles

format_fix:
	npm run --silent format:fix

format_styles_fix:
	npm run --silent format:styles:fix

format_in_docker:
	$(RUN_LINT_FORMAT) make format -j2 --output-sync

SHELLCHECK_OPTS = \
	--shell=bash \
	--external-sources
SHELLCHECK_COLOR := $(if $(CI),--color=never,--color)
SHELLCHECK_FILES := { git ls-files "*.sh" -z; git grep -Plz "\A\#\!.*bash"; } | sort -zu

shellcheck:
	@$(SHELLCHECK_FILES) | xargs -0 -r docker run --rm -v $(PWD):/mnt -w /mnt \
		koalaman/shellcheck:stable $(SHELLCHECK_OPTS) $(SHELLCHECK_COLOR)

shellcheck_fix:
	@$(SHELLCHECK_FILES) | while IFS= read -r -d '' file; do \
		diff=$$(docker run --rm -v $(PWD):/mnt -w /mnt koalaman/shellcheck:stable $(SHELLCHECK_OPTS) --format=diff "$$file" 2>/dev/null); \
		if [ -n "$$diff" ] && ! echo "$$diff" | patch -p1 >/dev/null 2>&1; then echo "\033[31m$$file\033[0m"; \
		elif [ -n "$$diff" ]; then echo "$$file"; \
		else echo "\033[2m$$file\033[0m"; fi \
	done
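
# Note: shellcheck_fix applies shellcheck's own `--format=diff` suggestions
# with `patch`: files whose fixes could not be applied print in red, patched
# files print plainly, and files with no suggested fixes print dimmed.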

#
# Build & publish
#

IMAGE_CI ?= ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
IMAGE_REPO ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME)
IMAGE_REPO_BRANCH ?= $(IMAGE_REPO):$(BRANCH_NAME)
IMAGE_REPO_MAIN ?= $(IMAGE_REPO):main
IMAGE_REPO_FINAL ?= $(IMAGE_REPO_BRANCH)-$(BUILD_NUMBER)

export SENTRY_RELEASE ?= ${COMMIT_SHA}

build_deps:
	docker build --pull \
		--build-arg BUILDKIT_INLINE_CACHE=1 \
		--cache-from $(IMAGE_REPO_BRANCH)-deps \
		--cache-from $(IMAGE_REPO_MAIN)-deps \
		--tag $(IMAGE_REPO_BRANCH)-deps \
		--target deps \
		--file Dockerfile \
		../..

build_dev:
	docker build \
		--build-arg SENTRY_RELEASE \
		--tag $(IMAGE_CI) \
		--tag $(IMAGE_CI)-dev \
		--target dev \
		--file Dockerfile \
		../..

build_webpack:
	$(MAKE) build_webpack_once \
	|| $(MAKE) build_webpack_once
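
# Presumably a retry to absorb transient webpack build failures; the second
# attempt is cheap because build_webpack_once reuses the --cache-from layers.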

build_webpack_once:
	docker build \
		--build-arg SENTRY_RELEASE \
		--cache-from $(IMAGE_CI)-dev \
		--cache-from $(IMAGE_CI)-webpack \
		--tag $(IMAGE_CI)-webpack \
		--target webpack \
		--file Dockerfile \
		../..

build_pug:
	docker build \
		--build-arg SENTRY_RELEASE \
		--cache-from $(IMAGE_CI)-dev \
		--tag $(IMAGE_CI)-pug \
		--target pug \
		--file Dockerfile \
		../..

build:
	docker build \
		--build-arg SENTRY_RELEASE \
		--cache-from $(IMAGE_CI)-webpack \
		--cache-from $(IMAGE_CI)-pug \
		--cache-from $(IMAGE_REPO_FINAL) \
		--tag $(IMAGE_REPO_FINAL) \
		--target app \
		--file Dockerfile \
		../..

publish:
	docker push $(IMAGE_REPO_FINAL)

tar:
	COMPOSE_PROJECT_NAME=tar_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) run --rm tar
	COMPOSE_PROJECT_NAME=tar_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down -v -t 0

build_storybook:
	npm run lezer-latex:generate
	npm run build-storybook
	if [ -n "$(BRANCH_NAME)" ]; then \
		echo "Renaming storybook-static -> $(BRANCH_NAME)."; \
		d=$$(dirname "$(BRANCH_NAME)"); \
		mkdir -p "storybook-output/$$d"; \
		mv storybook-static "storybook-output/$$d/$$(basename "$(BRANCH_NAME)")/"; \
	fi

MODULE_TARGETS = \
	$(TEST_ACCEPTANCE_MODULES_SAAS) \
	$(TEST_ACCEPTANCE_MODULES_SERVER_CE) \
	$(TEST_ACCEPTANCE_MODULES_SERVER_PRO) \
	$(TEST_ACCEPTANCE_MODULES_MERGED_INNER) \
	$(CLEAN_TEST_ACCEPTANCE_MODULES) \
	$(TEST_UNIT_MODULES) \

$(MODULE_TARGETS):
	$(MAKE) -C $(dir $@) $(notdir $@) BUILD_DIR_NAME=$(BUILD_DIR_NAME)

.PHONY: \
	$(MODULE_TARGETS) \
	compile_modules compile_modules_full clean_ci \
	test test_module test_unit test_unit_app \
	test_unit_modules test_unit_module test_frontend \
	test_acceptance test_acceptance_app test_acceptance_modules \
	test_acceptance_module ci format format_fix lint \
	shellcheck shellcheck_fix \
	build publish tar
66
services/web/Makefile.module
Normal file
66
services/web/Makefile.module
Normal file
@@ -0,0 +1,66 @@
BUILD_DIR_NAME ?= web
MODULE_NAME := $(notdir $(shell pwd))
MODULE_DIR := modules/$(MODULE_NAME)
PROJECT_NAME = web

export OVERLEAF_CONFIG = /overleaf/services/web/$(MODULE_DIR)/test/acceptance/config/settings.test.js
export BASE_CONFIG ?= /overleaf/services/web/test/acceptance/config/settings.test.saas.js

CFG_SAAS=/overleaf/services/web/test/acceptance/config/settings.test.saas.js
CFG_SERVER_CE=/overleaf/services/web/test/acceptance/config/settings.test.server-ce.js
CFG_SERVER_PRO=/overleaf/services/web/test/acceptance/config/settings.test.server-pro.js

DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml
DOCKER_COMPOSE := cd ../../ && \
	MODULE_DIR=$(MODULE_DIR) \
	BUILD_NUMBER=$(BUILD_NUMBER) \
	BRANCH_NAME=$(BRANCH_NAME) \
	PROJECT_NAME=$(PROJECT_NAME) \
	MOCHA_GREP=${MOCHA_GREP} \
	docker compose ${DOCKER_COMPOSE_FLAGS}

ifeq (,$(wildcard test/unit))
test_unit:

else
test_unit: export COMPOSE_PROJECT_NAME=unit_test_$(BUILD_DIR_NAME)_$(MODULE_NAME)
test_unit:
	${DOCKER_COMPOSE} run --rm test_unit npm -q run test:unit:run_dir -- ${MOCHA_ARGS} $(MODULE_DIR)/test/unit/src
	${DOCKER_COMPOSE} down

endif
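
# Note: the wildcard guard above turns test_unit into a no-op for modules
# without a test/unit directory, so `make -C modules/<name> test_unit`
# always succeeds.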

ALL_TEST_ACCEPTANCE_VARIANTS := \
	test_acceptance \
	test_acceptance_saas \
	test_acceptance_server_ce \
	test_acceptance_server_pro \

ifeq (,$(wildcard test/acceptance))
$(ALL_TEST_ACCEPTANCE_VARIANTS) test_acceptance_merged_inner:
	@echo
	@echo Module $(MODULE_NAME) does not have acceptance tests.
	@echo

clean_test_acceptance:

else
test_acceptance_saas: export BASE_CONFIG = $(CFG_SAAS)
test_acceptance_server_ce: export BASE_CONFIG = $(CFG_SERVER_CE)
test_acceptance_server_pro: export BASE_CONFIG = $(CFG_SERVER_PRO)

$(ALL_TEST_ACCEPTANCE_VARIANTS): export COMPOSE_PROJECT_NAME=acceptance_test_$(BUILD_DIR_NAME)_$(MODULE_NAME)
$(ALL_TEST_ACCEPTANCE_VARIANTS):
	$(MAKE) --no-print-directory clean_test_acceptance
	${DOCKER_COMPOSE} run --rm test_acceptance npm -q run test:acceptance:run_dir -- ${MOCHA_ARGS} $(MODULE_DIR)/test/acceptance/src
	$(MAKE) --no-print-directory clean_test_acceptance

test_acceptance_merged_inner:
	cd ../../ && \
	npm -q run test:acceptance:run_dir -- ${MOCHA_ARGS} $(MODULE_DIR)/test/acceptance/src

clean_test_acceptance: export COMPOSE_PROJECT_NAME=acceptance_test_$(BUILD_DIR_NAME)_$(MODULE_NAME)
clean_test_acceptance:
	${DOCKER_COMPOSE} down -v -t 0

endif
130
services/web/README.md
Normal file
130
services/web/README.md
Normal file
@@ -0,0 +1,130 @@
overleaf/web
==============

overleaf/web is the front-end web service of the open-source web-based collaborative LaTeX editor,
[Overleaf](https://www.overleaf.com).
It serves all the HTML pages, CSS and JavaScript to the client. overleaf/web also contains
much of the logic for creating and editing projects, and for account management.


The rest of the Overleaf stack, along with information about contributing, can be found in the
[overleaf/overleaf](https://github.com/overleaf/overleaf) repository.

### Running the app

The app runs natively using npm and Node on the local system:

```
$ npm install
$ npm run start
```
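
Note that the app expects its supporting services, in particular MongoDB, to be reachable (see the connection handling in `app.mjs`); without a database connection, startup fails with `Cannot connect to mongo. Exiting.`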

### Running Tests

To run all tests run:
```
make test
```

To run both unit and acceptance tests for a module run:
```
make test_module MODULE=saas-authentication
```

### Unit Tests

The test suites run in Docker.

Unit tests can be run in the `test_unit` container defined in `docker-compose.tests.yml`.

The Makefile contains a shortcut to run these:

```
make test_unit
```

During development it is often useful to run only a subset of tests, which can be configured with arguments to the mocha CLI:

```
make test_unit MOCHA_GREP='AuthorizationManager'
```
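
`MOCHA_GREP` is passed through to mocha's `--grep` option (see `MOCHA_CMD_LINE` in the Makefile), so any mocha-compatible pattern works, such as a describe-block name.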

To run only the unit tests for a single module do:
```
make test_unit_module MODULE=saas-authentication
```

Module tests can also use a MOCHA_GREP argument:
```
make test_unit_module MODULE=saas-authentication MOCHA_GREP=SSO
```

### Acceptance Tests

Acceptance tests are run against a live service, which runs in the `acceptance_test` container defined in `docker-compose.tests.yml`.

To run the tests out-of-the-box, the Makefile defines:

```
make test_acceptance
```

However, during development it is often useful to leave the service running for rapid iteration on the acceptance tests. This can be done with:

```
make test_acceptance_app_start_service
make test_acceptance_app_run # Run as many times as needed during development
make test_acceptance_app_stop_service
```

`make test_acceptance` just runs these three commands in sequence and then runs `make test_acceptance_modules`, which performs the tests for each module in the `modules` directory. (Note that there is not currently an equivalent to the `-start` / `-run` x _n_ / `-stop` series for modules.)

During development it is often useful to run only a subset of tests, which can be configured with arguments to the mocha CLI:

```
make test_acceptance_run MOCHA_GREP='AuthorizationManager'
```

To run only the acceptance tests for a single module do:
```
make test_acceptance_module MODULE=saas-authentication
```

Module tests can also use a MOCHA_GREP argument:
```
make test_acceptance_module MODULE=saas-authentication MOCHA_GREP=SSO
```

Routes
------

Run `bin/routes` to print out all routes in the project.


License and Credits
-------------------

This project is licensed under the [AGPLv3 license](http://www.gnu.org/licenses/agpl-3.0.html).

### Stylesheets

Overleaf is based on [Bootstrap](http://getbootstrap.com/), which is licensed under the
[MIT license](http://opensource.org/licenses/MIT).
All modifications (`*.less` files in `public/stylesheets`) are also licensed
under the MIT license.

### Artwork

#### Silk icon set 1.3

We gratefully acknowledge [Mark James](http://www.famfamfam.com/lab/icons/silk/) for
releasing his Silk icon set under the Creative Commons Attribution 2.5 license. Some
of these icons are used within Overleaf inside the `public/img/silk` and
`public/brand/icons` directories.

#### IconShock icons

We gratefully acknowledge [IconShock](http://www.iconshock.com) for use of the icons
in the `public/img/iconshock` directory, found via
[findicons.com](http://findicons.com/icon/498089/height?id=526085#).
110
services/web/app.mjs
Normal file
110
services/web/app.mjs
Normal file
@@ -0,0 +1,110 @@
// Metrics must be initialized before importing anything else
import '@overleaf/metrics/initialize.js'

import Modules from './app/src/infrastructure/Modules.js'
import metrics from '@overleaf/metrics'
import Settings from '@overleaf/settings'
import logger from '@overleaf/logger'
import PlansLocator from './app/src/Features/Subscription/PlansLocator.js'
import HistoryManager from './app/src/Features/History/HistoryManager.js'
import SiteAdminHandler from './app/src/infrastructure/SiteAdminHandler.js'
import http from 'node:http'
import https from 'node:https'
import * as Serializers from './app/src/infrastructure/LoggerSerializers.js'
import Server from './app/src/infrastructure/Server.mjs'
import QueueWorkers from './app/src/infrastructure/QueueWorkers.js'
import mongodb from './app/src/infrastructure/mongodb.js'
import mongoose from './app/src/infrastructure/Mongoose.js'
import { triggerGracefulShutdown } from './app/src/infrastructure/GracefulShutdown.js'
import FileWriter from './app/src/infrastructure/FileWriter.js'
import { fileURLToPath } from 'node:url'
import Features from './app/src/infrastructure/Features.js'

logger.initialize(process.env.METRICS_APP_NAME || 'web')
logger.logger.serializers.user = Serializers.user
logger.logger.serializers.docs = Serializers.docs
logger.logger.serializers.files = Serializers.files
logger.logger.serializers.project = Serializers.project
http.globalAgent.keepAlive = false
http.globalAgent.maxSockets = Settings.limits.httpGlobalAgentMaxSockets
https.globalAgent.keepAlive = false
https.globalAgent.maxSockets = Settings.limits.httpsGlobalAgentMaxSockets

metrics.memory.monitor(logger)
metrics.leaked_sockets.monitor(logger)
metrics.open_sockets.monitor()
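
// When catchErrors is enabled, swap Node's default crash-on-error behaviour
// for logging-only handlers: the default listeners are removed first, so an
// uncaught exception or unhandled rejection is recorded without taking the
// whole web process down.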
if (Settings.catchErrors) {
  process.removeAllListeners('uncaughtException')
  process.removeAllListeners('unhandledRejection')
  process
    .on('uncaughtException', error =>
      logger.error({ err: error }, 'uncaughtException')
    )
    .on('unhandledRejection', (reason, p) => {
      logger.error({ err: reason }, 'unhandledRejection at Promise', p)
    })
}

// Create ./data/dumpFolder if needed
FileWriter.ensureDumpFolderExists()

if (
  !Features.hasFeature('project-history-blobs') &&
  !Features.hasFeature('filestore')
) {
  throw new Error(
    'invalid config: must enable either project-history-blobs (Settings.enableProjectHistoryBlobs=true) or enable filestore (Settings.disableFilestore=false)'
  )
}

const port = Settings.port || Settings.internal.web.port || 3000
const host = Settings.internal.web.host || '127.0.0.1'
if (process.argv[1] === fileURLToPath(import.meta.url)) {
  // Called directly
  // We want to make sure that we provided a password through the environment.
  if (!process.env.WEB_API_USER || !process.env.WEB_API_PASSWORD) {
    throw new Error('No API user and password provided')
  }

  PlansLocator.ensurePlansAreSetupCorrectly()

  Promise.all([
    mongodb.connectionPromise,
    mongoose.connectionPromise,
    HistoryManager.promises.loadGlobalBlobs(),
  ])
    .then(async () => {
      Server.server.listen(port, host, function () {
        logger.debug(`web starting up, listening on ${host}:${port}`)
        logger.debug(`${http.globalAgent.maxSockets} sockets enabled`)
        // wait until the process is ready before monitoring the event loop
        metrics.event_loop.monitor(logger)
      })
      QueueWorkers.start()
      await Modules.start()
    })
    .catch(err => {
      logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
      process.exit(1)
    })
}

// initialise site admin tasks
Promise.all([
  mongodb.connectionPromise,
  mongoose.connectionPromise,
  HistoryManager.promises.loadGlobalBlobs(),
])
  .then(() => SiteAdminHandler.initialise())
  .catch(err => {
    logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
    process.exit(1)
  })

// handle SIGTERM for graceful shutdown in kubernetes
process.on('SIGTERM', function (signal) {
  triggerGracefulShutdown(Server.server, signal)
})

export default Server.server
@@ -0,0 +1,97 @@
const mappings = new Map([
  ['salesforce_id', generateSubscriptionToSalesforceMapping],
  ['v1_id', generateSubscriptionToV1Mapping],
  ['recurlySubscription_id', generateSubscriptionToRecurlyMapping],
])

/**
 * @typedef {(import('./types.d.ts').AccountMapping)} AccountMapping
 */

/**
 *
 * @param {Object} subscription
 * @param {Object} updatedSubscription
 * @return {Array<AccountMapping>}
 */
function extractAccountMappingsFromSubscription(
  subscription,
  updatedSubscription
) {
  const accountMappings = []
  mappings.forEach((generateMapping, param) => {
    if (updatedSubscription[param] || updatedSubscription[param] === '') {
      if (subscription[param] !== updatedSubscription[param]) {
        accountMappings.push(
          generateMapping(subscription.id, updatedSubscription[param])
        )
      }
    }
  })
  return accountMappings
}
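
// Example (hypothetical values): changing a subscription's salesforce_id
// from '' to '001ABC' yields a single mapping:
//   extractAccountMappingsFromSubscription(
//     { id: 'sub-1', salesforce_id: '' },
//     { salesforce_id: '001ABC' }
//   )
//   // -> [{ source: 'salesforce', sourceEntity: 'account',
//   //       sourceEntityId: '001ABC', target: 'v2',
//   //       targetEntity: 'subscription', targetEntityId: 'sub-1',
//   //       createdAt: <current ISO timestamp> }]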

function generateV1Mapping(v1Id, salesforceId, createdAt) {
  return {
    source: 'salesforce',
    sourceEntity: 'account',
    sourceEntityId: salesforceId,
    target: 'v1',
    targetEntity: 'university',
    targetEntityId: v1Id,
    createdAt,
  }
}

function generateSubscriptionToV1Mapping(subscriptionId, v1Id) {
  return {
    source: 'v1',
    sourceEntity: 'university',
    sourceEntityId: v1Id,
    target: 'v2',
    targetEntity: 'subscription',
    targetEntityId: subscriptionId,
    createdAt: new Date().toISOString(),
  }
}

function generateSubscriptionToSalesforceMapping(subscriptionId, salesforceId) {
  return {
    source: 'salesforce',
    sourceEntity: 'account',
    sourceEntityId: salesforceId,
    target: 'v2',
    targetEntity: 'subscription',
    targetEntityId: subscriptionId,
    createdAt: new Date().toISOString(),
  }
}

/**
 *
 * @param {string} subscriptionId
 * @param {string} recurlyId
 * @param {string} [createdAt] - Should be an ISO date
 * @return {AccountMapping}
 */
function generateSubscriptionToRecurlyMapping(
  subscriptionId,
  recurlyId,
  createdAt = new Date().toISOString()
) {
  return {
    source: 'recurly',
    sourceEntity: 'subscription',
    sourceEntityId: recurlyId,
    target: 'v2',
    targetEntity: 'subscription',
    targetEntityId: subscriptionId,
    createdAt,
  }
}

module.exports = {
  extractAccountMappingsFromSubscription,
  generateV1Mapping,
  generateSubscriptionToRecurlyMapping,
}
@@ -0,0 +1,65 @@
import metrics from '@overleaf/metrics'
import AnalyticsManager from './AnalyticsManager.js'
import SessionManager from '../Authentication/SessionManager.js'
import GeoIpLookup from '../../infrastructure/GeoIpLookup.js'
import Features from '../../infrastructure/Features.js'
import { expressify } from '@overleaf/promise-utils'
import AccountMappingHelper from './AccountMappingHelper.js'

async function registerSalesforceMapping(req, res, next) {
  if (!Features.hasFeature('analytics')) {
    return res.sendStatus(202)
  }
  const { createdAt, salesforceId, v1Id } = req.body
  AnalyticsManager.registerAccountMapping(
    AccountMappingHelper.generateV1Mapping(v1Id, salesforceId, createdAt)
  )
  res.sendStatus(202)
}
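
// Geo-IP lookup failures below are swallowed: they are only counted in
// metrics and the editing-session update is skipped for that request.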
async function updateEditingSession(req, res, next) {
  if (!Features.hasFeature('analytics')) {
    return res.sendStatus(202)
  }
  const userId = SessionManager.getLoggedInUserId(req.session)
  const { projectId } = req.params
  const segmentation = req.body.segmentation || {}
  let countryCode = null

  if (userId) {
    try {
      const geoDetails = await GeoIpLookup.promises.getDetails(req.ip)
      if (geoDetails && geoDetails.country_code) {
        countryCode = geoDetails.country_code
      }
      AnalyticsManager.updateEditingSession(
        userId,
        projectId,
        countryCode,
        segmentation
      )
    } catch (error) {
      metrics.inc('analytics_geo_ip_lookup_errors')
    }
  }
  res.sendStatus(202)
}

function recordEvent(req, res, next) {
  if (!Features.hasFeature('analytics')) {
    return res.sendStatus(202)
  }
  delete req.body._csrf
  AnalyticsManager.recordEventForSession(
    req.session,
    req.params.event,
    req.body
  )
  res.sendStatus(202)
}

export default {
  registerSalesforceMapping: expressify(registerSalesforceMapping),
  updateEditingSession: expressify(updateEditingSession),
  recordEvent,
}
404
services/web/app/src/Features/Analytics/AnalyticsManager.js
Normal file
@@ -0,0 +1,404 @@
const SessionManager = require('../Authentication/SessionManager')
const UserAnalyticsIdCache = require('./UserAnalyticsIdCache')
const Settings = require('@overleaf/settings')
const Metrics = require('../../infrastructure/Metrics')
const Queues = require('../../infrastructure/Queues')
const crypto = require('crypto')
const _ = require('lodash')
const { expressify } = require('@overleaf/promise-utils')
const logger = require('@overleaf/logger')

const analyticsEventsQueue = Queues.getQueue('analytics-events')
const analyticsEditingSessionsQueue = Queues.getQueue(
  'analytics-editing-sessions'
)
const analyticsUserPropertiesQueue = Queues.getQueue(
  'analytics-user-properties'
)
const analyticsAccountMappingQueue = Queues.getQueue(
  'analytics-account-mapping'
)

const ONE_MINUTE_MS = 60 * 1000
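
// Loose UUID shape check (8-4-4-4-12 word characters), used to drop malformed
// analyticsIds before scheduling an identify job.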
const UUID_REGEXP = /^[\w]{8}(-[\w]{4}){3}-[\w]{12}$/

function identifyUser(userId, analyticsId, isNewUser) {
  if (!userId || !analyticsId || !analyticsId.toString().match(UUID_REGEXP)) {
    return
  }
  if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) {
    return
  }
  Metrics.analyticsQueue.inc({ status: 'adding', event_type: 'identify' })
  Queues.createScheduledJob(
    'analytics-events',
    {
      name: 'identify',
      data: { userId, analyticsId, isNewUser, createdAt: new Date() },
    },
    ONE_MINUTE_MS
  )
    .then(() => {
      Metrics.analyticsQueue.inc({ status: 'added', event_type: 'identify' })
    })
    .catch(() => {
      Metrics.analyticsQueue.inc({ status: 'error', event_type: 'identify' })
    })
}

async function recordEventForUser(userId, event, segmentation) {
  if (!userId) {
    return
  }
  if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) {
    return
  }
  const analyticsId = await UserAnalyticsIdCache.get(userId)
  if (analyticsId) {
    _recordEvent({ analyticsId, userId, event, segmentation, isLoggedIn: true })
  }
}

function recordEventForUserInBackground(userId, event, segmentation) {
  recordEventForUser(userId, event, segmentation).catch(err => {
    logger.warn(
      { err, userId, event, segmentation },
      'failed to record event for user'
    )
  })
}

function recordEventForSession(session, event, segmentation) {
  const { analyticsId, userId } = getIdsFromSession(session)
  if (!analyticsId) {
    return
  }
  if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) {
    return
  }
  _recordEvent({
    analyticsId,
    userId,
    event,
    segmentation,
    isLoggedIn: !!userId,
    createdAt: new Date(),
  })
}

async function setUserPropertyForUser(userId, propertyName, propertyValue) {
  if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) {
    return
  }

  _checkPropertyValue(propertyValue)

  const analyticsId = await UserAnalyticsIdCache.get(userId)
  if (analyticsId) {
    await _setUserProperty({ analyticsId, propertyName, propertyValue })
  }
}

function setUserPropertyForUserInBackground(userId, property, value) {
  setUserPropertyForUser(userId, property, value).catch(err => {
    logger.warn(
      { err, userId, property, value },
      'failed to set user property for user'
    )
  })
}

async function setUserPropertyForAnalyticsId(
  analyticsId,
  propertyName,
  propertyValue
) {
  if (_isAnalyticsDisabled()) {
    return
  }

  _checkPropertyValue(propertyValue)

  await _setUserProperty({ analyticsId, propertyName, propertyValue })
}

async function setUserPropertyForSession(session, propertyName, propertyValue) {
  const { analyticsId, userId } = getIdsFromSession(session)
  if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) {
    return
  }

  _checkPropertyValue(propertyValue)

  if (analyticsId) {
    await _setUserProperty({ analyticsId, propertyName, propertyValue })
  }
}

function setUserPropertyForSessionInBackground(session, property, value) {
  setUserPropertyForSession(session, property, value).catch(err => {
    const { analyticsId, userId } = getIdsFromSession(session)
    logger.warn(
      { err, analyticsId, userId, property, value },
      'failed to set user property for session'
    )
  })
}

/**
 * @typedef {(import('./types').AccountMapping)} AccountMapping
 */

/**
 * Register mapping between two accounts.
 *
 * @param {AccountMapping} payload - The event payload to send to Analytics
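 *
 * @example
 * // illustrative usage: queue a mapping built by AccountMappingHelper
 * registerAccountMapping(
 *   AccountMappingHelper.generateSubscriptionToRecurlyMapping(subscriptionId, recurlyId)
 * )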
 */
function registerAccountMapping({
  source,
  sourceEntity,
  sourceEntityId,
  target,
  targetEntity,
  targetEntityId,
  createdAt,
}) {
  Metrics.analyticsQueue.inc({
    status: 'adding',
    event_type: 'account-mapping',
  })

  analyticsAccountMappingQueue
    .add('account-mapping', {
      source,
      sourceEntity,
      sourceEntityId,
      target,
      targetEntity,
      targetEntityId,
      createdAt: createdAt ?? new Date(),
    })
    .then(() => {
      Metrics.analyticsQueue.inc({
        status: 'added',
        event_type: 'account-mapping',
      })
    })
    .catch(() => {
      Metrics.analyticsQueue.inc({
        status: 'error',
        event_type: 'account-mapping',
      })
    })
}

function updateEditingSession(userId, projectId, countryCode, segmentation) {
  if (!userId) {
    return
  }
  if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) {
    return
  }
  if (!_isSegmentationValid(segmentation)) {
    logger.info(
      { userId, projectId, segmentation },
      'rejecting analytics editing session due to bad segmentation'
    )
    return
  }
  Metrics.analyticsQueue.inc({
    status: 'adding',
    event_type: 'editing-session',
  })
  analyticsEditingSessionsQueue
    .add('editing-session', {
      userId,
      projectId,
      countryCode,
      segmentation,
      createdAt: new Date(),
    })
    .then(() => {
      Metrics.analyticsQueue.inc({
        status: 'added',
        event_type: 'editing-session',
      })
    })
    .catch(() => {
      Metrics.analyticsQueue.inc({
        status: 'error',
        event_type: 'editing-session',
      })
    })
}
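
// Shared queueing path for all analytics events: validates the event name and
// segmentation keys, then enqueues the event with an optional delay.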
function _recordEvent(
  { analyticsId, userId, event, segmentation, isLoggedIn },
  { delay } = {}
) {
  if (!_isAttributeValid(event)) {
    logger.info(
      { analyticsId, event, segmentation },
      'rejecting analytics event due to bad event name'
    )
    return
  }
  if (!_isSegmentationValid(segmentation)) {
    logger.info(
      { analyticsId, event, segmentation },
      'rejecting analytics event due to bad segmentation'
    )
    return
  }
  logger.debug(
    {
      analyticsId,
      userId,
      event,
      segmentation,
      isLoggedIn: !!userId,
      createdAt: new Date(),
    },
    'queueing analytics event'
  )
  Metrics.analyticsQueue.inc({ status: 'adding', event_type: 'event' })
  analyticsEventsQueue
    .add(
      'event',
      {
        analyticsId,
        userId,
        event,
        segmentation,
        isLoggedIn,
        createdAt: new Date(),
      },
      { delay }
    )
    .then(() => {
      Metrics.analyticsQueue.inc({ status: 'added', event_type: 'event' })
    })
    .catch(() => {
      Metrics.analyticsQueue.inc({ status: 'error', event_type: 'event' })
    })
}

async function _setUserProperty({ analyticsId, propertyName, propertyValue }) {
  if (!_isAttributeValid(propertyName)) {
    logger.info(
      { analyticsId, propertyName, propertyValue },
      'rejecting analytics user property due to bad name'
    )
    return
  }
  if (!_isAttributeValueValid(propertyValue)) {
    logger.info(
      { analyticsId, propertyName, propertyValue },
      'rejecting analytics user property due to bad value'
    )
    return
  }
  Metrics.analyticsQueue.inc({
    status: 'adding',
    event_type: 'user-property',
  })
  await analyticsUserPropertiesQueue
    .add('user-property', {
      analyticsId,
      propertyName,
      propertyValue,
      createdAt: new Date(),
    })
    .then(() => {
      Metrics.analyticsQueue.inc({
        status: 'added',
        event_type: 'user-property',
      })
    })
    .catch(() => {
      Metrics.analyticsQueue.inc({
        status: 'error',
        event_type: 'user-property',
      })
    })
}

function _isSmokeTestUser(userId) {
  const smokeTestUserId = Settings.smokeTest && Settings.smokeTest.userId
  return (
    smokeTestUserId != null &&
    userId != null &&
    userId.toString() === smokeTestUserId
  )
}

function _isAnalyticsDisabled() {
  return !(Settings.analytics && Settings.analytics.enabled)
}

function _checkPropertyValue(propertyValue) {
  if (propertyValue === undefined) {
    throw new Error(
      'propertyValue cannot be undefined, use null to unset a property'
    )
  }
}

function _isAttributeValid(attribute) {
  return !attribute || /^[a-zA-Z0-9-_.:;,/]+$/.test(attribute)
}

function _isAttributeValueValid(attributeValue) {
  return _isAttributeValid(attributeValue) || attributeValue instanceof Date
}

function _isSegmentationValid(segmentation) {
  if (segmentation) {
    for (const key of Object.keys(segmentation)) {
      if (!_isAttributeValid(key)) {
        return false
      }
    }
  }

  return true
}

function getIdsFromSession(session) {
  const analyticsId = _.get(session, ['analyticsId'])
  const userId = SessionManager.getLoggedInUserId(session)
  return { analyticsId, userId }
}

async function analyticsIdMiddleware(req, res, next) {
  const session = req.session
  const sessionUser = SessionManager.getSessionUser(session)

  if (sessionUser) {
    session.analyticsId = await UserAnalyticsIdCache.get(sessionUser._id)
  } else if (!session.analyticsId) {
    // generate an `analyticsId` if needed
    session.analyticsId = crypto.randomUUID()
  }

  res.locals.getSessionAnalyticsId = () => session.analyticsId

  next()
}

module.exports = {
  identifyUser,
  recordEventForSession,
  recordEventForUser,
  recordEventForUserInBackground,
  setUserPropertyForUser,
  setUserPropertyForUserInBackground,
  setUserPropertyForSession,
  setUserPropertyForSessionInBackground,
  setUserPropertyForAnalyticsId,
  updateEditingSession,
  getIdsFromSession,
  registerAccountMapping,
  analyticsIdMiddleware: expressify(analyticsIdMiddleware),
}
28
services/web/app/src/Features/Analytics/AnalyticsProxy.mjs
Normal file
@@ -0,0 +1,28 @@
import settings from '@overleaf/settings'
import Errors from '../Errors/Errors.js'
import httpProxy from 'express-http-proxy'

export default {
  call(basePath) {
    if (!settings.apis.analytics) {
      return (req, res, next) =>
        next(
          new Errors.ServiceNotConfiguredError(
            'Analytics service not configured'
          )
        )
    }

    return httpProxy(settings.apis.analytics.url, {
      proxyReqPathResolver(req) {
        // req.url is the part of the path that comes after the mount point in
        // app.use()
        return `${basePath}${req.url}`
      },
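      // Drop all inbound headers (cookies, auth) instead of forwarding them to
      // the analytics service.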
      proxyReqOptDecorator(proxyReqOpts, srcReq) {
        proxyReqOpts.headers = {} // unset all headers
        return proxyReqOpts
      },
    })
  },
}
@@ -0,0 +1,96 @@
const AnalyticsManager = require('./AnalyticsManager')
const RequestHelper = require('./RequestHelper')

function clearSource(session) {
  if (session) {
    delete session.required_login_from_product_medium
    delete session.required_login_from_product_source
  }
}

function setInbound(session, url, query, referrer) {
  const inboundSession = {
    referrer: RequestHelper.parseReferrer(referrer, url),
    utm: RequestHelper.parseUtm(query),
  }

  if (inboundSession.referrer || inboundSession.utm) {
    session.inbound = inboundSession
  }
}

function clearInbound(session) {
  if (session) {
    delete session.inbound
  }
}

function addUserProperties(userId, session) {
  if (!session) {
    return
  }

  if (session.required_login_from_product_medium) {
    AnalyticsManager.setUserPropertyForUserInBackground(
      userId,
      `registered-from-product-medium`,
      session.required_login_from_product_medium
    )
    if (session.required_login_from_product_source) {
      AnalyticsManager.setUserPropertyForUserInBackground(
        userId,
        `registered-from-product-source`,
        session.required_login_from_product_source
      )
    }
  } else if (session.referal_id) {
    AnalyticsManager.setUserPropertyForUserInBackground(
      userId,
      `registered-from-bonus-scheme`,
      true
    )
    AnalyticsManager.setUserPropertyForUserInBackground(
      userId,
      `registered-from-product-medium`,
      'bonus-scheme'
    )
  }
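
  // The referrer medium is stored capitalized (e.g. 'Search'); UTM keys become
  // kebab-case property names such as 'registered-from-utm-source'.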
  if (session.inbound) {
    if (session.inbound.referrer && session.inbound.referrer.medium) {
      AnalyticsManager.setUserPropertyForUserInBackground(
        userId,
        `registered-from-referrer-medium`,
        `${session.inbound.referrer.medium
          .charAt(0)
          .toUpperCase()}${session.inbound.referrer.medium.slice(1)}`
      )
      if (session.inbound.referrer.source) {
        AnalyticsManager.setUserPropertyForUserInBackground(
          userId,
          `registered-from-referrer-source`,
          session.inbound.referrer.source
        )
      }
    }

    if (session.inbound.utm) {
      for (const utmKey of RequestHelper.UTM_KEYS) {
        if (session.inbound.utm[utmKey]) {
          AnalyticsManager.setUserPropertyForUserInBackground(
            userId,
            `registered-from-${utmKey.replace('_', '-')}`,
            session.inbound.utm[utmKey]
          )
        }
      }
    }
  }
}

module.exports = {
  clearSource,
  setInbound,
  clearInbound,
  addUserProperties,
}
@@ -0,0 +1,58 @@
const logger = require('@overleaf/logger')
const OError = require('@overleaf/o-error')
const AnalyticsRegistrationSourceHelper = require('./AnalyticsRegistrationSourceHelper')
const SessionManager = require('../../Features/Authentication/SessionManager')

function setSource(medium, source) {
  return function (req, res, next) {
    if (req.session) {
      req.session.required_login_from_product_medium = medium
      if (source) {
        req.session.required_login_from_product_source = source
      }
    }
    next()
  }
}

function clearSource() {
  return function (req, res, next) {
    AnalyticsRegistrationSourceHelper.clearSource(req.session)
    next()
  }
}

function setInbound() {
  return function setInbound(req, res, next) {
    if (req.session.inbound) {
      return next() // don't overwrite referrer
    }

    if (SessionManager.isUserLoggedIn(req.session)) {
      return next() // don't store referrer if user is already logged in
    }

    const referrer = req.header('referrer')
    try {
      AnalyticsRegistrationSourceHelper.setInbound(
        req.session,
        req.url,
        req.query,
        referrer
      )
    } catch (error) {
      // log errors and fail silently
      OError.tag(error, 'failed to parse inbound referrer', {
        referrer,
      })
      logger.warn({ error }, error.message)
    }
    next()
  }
}

module.exports = {
  setSource,
  clearSource,
  setInbound,
}
51
services/web/app/src/Features/Analytics/AnalyticsRouter.mjs
Normal file
@@ -0,0 +1,51 @@
import AuthenticationController from './../Authentication/AuthenticationController.js'
import AnalyticsController from './AnalyticsController.mjs'
import AnalyticsProxy from './AnalyticsProxy.mjs'
import { RateLimiter } from '../../infrastructure/RateLimiter.js'
import RateLimiterMiddleware from '../Security/RateLimiterMiddleware.js'

const rateLimiters = {
  recordEvent: new RateLimiter('analytics-record-event', {
    points: 200,
    duration: 60,
  }),
  updateEditingSession: new RateLimiter('analytics-update-editing-session', {
    points: 20,
    duration: 60,
  }),
  uniExternalCollabProxy: new RateLimiter(
    'analytics-uni-external-collab-proxy',
    { points: 20, duration: 60 }
  ),
}

export default {
  apply(webRouter, privateApiRouter, publicApiRouter) {
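    // Event names are restricted to [a-z0-9-_] by the route pattern itself,
    // mirroring the attribute validation in AnalyticsManager.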
    webRouter.post(
      '/event/:event([a-z0-9-_]+)',
      RateLimiterMiddleware.rateLimit(rateLimiters.recordEvent),
      AnalyticsController.recordEvent
    )

    webRouter.put(
      '/editingSession/:projectId',
      RateLimiterMiddleware.rateLimit(rateLimiters.updateEditingSession, {
        params: ['projectId'],
      }),
      AnalyticsController.updateEditingSession
    )

    publicApiRouter.use(
      '/analytics/uniExternalCollaboration',
      AuthenticationController.requirePrivateApiAuth(),
      RateLimiterMiddleware.rateLimit(rateLimiters.uniExternalCollabProxy),
      AnalyticsProxy.call('/uniExternalCollaboration')
    )

    publicApiRouter.post(
      '/analytics/register-v-1-salesforce-mapping',
      AuthenticationController.requirePrivateApiAuth(),
      AnalyticsController.registerSalesforceMapping
    )
  },
}
@@ -0,0 +1,58 @@
import _ from 'lodash'
import RequestHelper from './RequestHelper.js'
import AnalyticsManager from './AnalyticsManager.js'
import querystring from 'node:querystring'
import { URL } from 'node:url'
import Settings from '@overleaf/settings'
import OError from '@overleaf/o-error'
import logger from '@overleaf/logger'

function recordUTMTags() {
  return function (req, res, next) {
    const query = req.query

    try {
      const utmValues = RequestHelper.parseUtm(query)

      if (utmValues) {
        const path = new URL(req.url, Settings.siteUrl).pathname

        AnalyticsManager.recordEventForSession(req.session, 'page-view', {
          path,
          ...utmValues,
        })
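
        // Condense the UTM tags into a single
        // 'source;medium;campaign;content-or-term' string stored as one
        // user property.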
        const propertyValue = `${utmValues.utm_source || 'N/A'};${
          utmValues.utm_medium || 'N/A'
        };${utmValues.utm_campaign || 'N/A'};${
          utmValues.utm_content || utmValues.utm_term || 'N/A'
        }`
        AnalyticsManager.setUserPropertyForSessionInBackground(
          req.session,
          'utm-tags',
          propertyValue
        )

        // redirect to URL without UTM query params
        const queryWithoutUtm = _.omit(query, RequestHelper.UTM_KEYS)
        const queryString =
          Object.keys(queryWithoutUtm).length > 0
            ? '?' + querystring.stringify(queryWithoutUtm)
            : ''
        return res.redirect(path + queryString)
      }
    } catch (error) {
      // log errors and fail silently
      OError.tag(error, 'failed to track UTM tags', {
        query,
      })
      logger.warn({ error }, error.message)
    }

    next()
  }
}

export default {
  recordUTMTags,
}
56
services/web/app/src/Features/Analytics/RequestHelper.js
Normal file
@@ -0,0 +1,56 @@
const RefererParser = require('referer-parser')
const { URL } = require('url')

const UTM_KEYS = [
  'utm_campaign',
  'utm_source',
  'utm_term',
  'utm_content',
  'utm_medium',
  'utm_count',
]

function parseUtm(query) {
  const utmValues = {}
  for (const utmKey of UTM_KEYS) {
    if (query[utmKey]) {
      utmValues[utmKey] = query[utmKey]
    }
  }
  return Object.keys(utmValues).length > 0 ? utmValues : null
}
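
// Classifies the referrer with referer-parser; anything it cannot classify
// ('unknown') that still has a hostname is downgraded to a generic 'link'
// medium with the hostname as the source.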
function parseReferrer(referrer, url) {
  if (!referrer) {
    return {
      medium: 'direct',
    }
  }

  const parsedReferrer = new RefererParser(referrer, url)

  const referrerValues = {
    medium: parsedReferrer.medium,
    source: parsedReferrer.referer || 'other',
  }

  if (referrerValues.medium === 'unknown') {
    try {
      const referrerHostname = new URL(referrer).hostname
      if (referrerHostname) {
        referrerValues.medium = 'link'
        referrerValues.source = referrerHostname
      }
    } catch (error) {
      // ignore referrer parsing errors
    }
  }

  return referrerValues
}

module.exports = {
  UTM_KEYS,
  parseUtm,
  parseReferrer,
}
@@ -0,0 +1,31 @@
const UserGetter = require('../User/UserGetter')
const { CacheLoader } = require('cache-flow')
const { callbackify } = require('util')
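
// Caches each user's analyticsId for 60 seconds (up to 10k entries); accounts
// that predate the analyticsId field fall back to their Mongo _id.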
class UserAnalyticsIdCache extends CacheLoader {
  constructor() {
    super('user-analytics-id', {
      expirationTime: 60,
      maxSize: 10000,
    })
  }

  async load(userId) {
    const user = await UserGetter.promises.getUser(userId, { analyticsId: 1 })
    if (user) {
      return user.analyticsId || user._id.toString()
    }
  }

  keyToString(userId) {
    if (userId) {
      return userId.toString()
    }
  }
}

const userAnalyticsIdCache = new UserAnalyticsIdCache()
userAnalyticsIdCache.callbacks = {
  get: callbackify(userAnalyticsIdCache.get).bind(userAnalyticsIdCache),
}
module.exports = userAnalyticsIdCache
9
services/web/app/src/Features/Analytics/types.d.ts
vendored
Normal file
@@ -0,0 +1,9 @@
export type AccountMapping = {
  source: string
  sourceEntity: string
  sourceEntityId: string
  target: string
  targetEntity: string
  targetEntityId: string
  createdAt: string
}
@@ -0,0 +1,671 @@
const AuthenticationManager = require('./AuthenticationManager')
const SessionManager = require('./SessionManager')
const OError = require('@overleaf/o-error')
const LoginRateLimiter = require('../Security/LoginRateLimiter')
const UserUpdater = require('../User/UserUpdater')
const Metrics = require('@overleaf/metrics')
const logger = require('@overleaf/logger')
const querystring = require('querystring')
const Settings = require('@overleaf/settings')
const basicAuth = require('basic-auth')
const tsscmp = require('tsscmp')
const UserHandler = require('../User/UserHandler')
const UserSessionsManager = require('../User/UserSessionsManager')
const Analytics = require('../Analytics/AnalyticsManager')
const passport = require('passport')
const NotificationsBuilder = require('../Notifications/NotificationsBuilder')
const UrlHelper = require('../Helpers/UrlHelper')
const AsyncFormHelper = require('../Helpers/AsyncFormHelper')
const _ = require('lodash')
const UserAuditLogHandler = require('../User/UserAuditLogHandler')
const AnalyticsRegistrationSourceHelper = require('../Analytics/AnalyticsRegistrationSourceHelper')
const {
  acceptsJson,
} = require('../../infrastructure/RequestContentTypeDetection')
const { hasAdminAccess } = require('../Helpers/AdminAuthorizationHelper')
const Modules = require('../../infrastructure/Modules')
const { expressify, promisify } = require('@overleaf/promise-utils')
const { handleAuthenticateErrors } = require('./AuthenticationErrors')
const EmailHelper = require('../Helpers/EmailHelper')

function send401WithChallenge(res) {
  res.setHeader('WWW-Authenticate', 'OverleafLogin')
  res.sendStatus(401)
}
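
// tsscmp compares strings in constant time, so response timing doesn't leak
// how much of a guessed password matched.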
function checkCredentials(userDetailsMap, user, password) {
  const expectedPassword = userDetailsMap.get(user)
  const userExists = userDetailsMap.has(user) && expectedPassword // user exists with a non-null password
  const isValid = userExists && tsscmp(expectedPassword, password)
  if (!isValid) {
    logger.err({ user }, 'invalid login details')
  }
  Metrics.inc('security.http-auth.check-credentials', 1, {
    path: userExists ? 'known-user' : 'unknown-user',
    status: isValid ? 'pass' : 'fail',
  })
  return isValid
}

function reduceStaffAccess(staffAccess) {
  const reducedStaffAccess = {}
  for (const field in staffAccess) {
    if (staffAccess[field]) {
      reducedStaffAccess[field] = true
    }
  }
  return reducedStaffAccess
}

function userHasStaffAccess(user) {
  return user.staffAccess && Object.values(user.staffAccess).includes(true)
}

// TODO: Finish making these methods async
const AuthenticationController = {
  serializeUser(user, callback) {
    if (!user._id || !user.email) {
      const err = new Error('serializeUser called with non-user object')
      logger.warn({ user }, err.message)
      return callback(err)
    }
    const lightUser = {
      _id: user._id,
      first_name: user.first_name,
      last_name: user.last_name,
      email: user.email,
      referal_id: user.referal_id,
      session_created: new Date().toISOString(),
      ip_address: user._login_req_ip,
      must_reconfirm: user.must_reconfirm,
      v1_id: user.overleaf != null ? user.overleaf.id : undefined,
      analyticsId: user.analyticsId || user._id,
      alphaProgram: user.alphaProgram || undefined, // only store if set
      betaProgram: user.betaProgram || undefined, // only store if set
    }
    if (user.isAdmin) {
      lightUser.isAdmin = true
    }
    if (userHasStaffAccess(user)) {
      lightUser.staffAccess = reduceStaffAccess(user.staffAccess)
    }

    callback(null, lightUser)
  },

  deserializeUser(user, cb) {
    cb(null, user)
  },

  passportLogin(req, res, next) {
    // This function is middleware which wraps the passport.authenticate middleware,
    // so we can send back our custom `{message: {text: "", type: ""}}` responses on failure,
    // and send a `{redir: ""}` response on success
    passport.authenticate(
      'local',
      { keepSessionInfo: true },
      async function (err, user, info) {
        if (err) {
          return next(err)
        }
        if (user) {
          // `user` is either a user object or false
          AuthenticationController.setAuditInfo(req, {
            method: 'Password login',
          })

          try {
            // We could investigate whether this can be done together with 'preFinishLogin' instead of being its own hook
            await Modules.promises.hooks.fire(
              'saasLogin',
              { email: user.email },
              req
            )
            await AuthenticationController.promises.finishLogin(user, req, res)
          } catch (err) {
            return next(err)
          }
        } else {
          if (info.redir != null) {
            return res.json({ redir: info.redir })
          } else {
            res.status(info.status || 200)
            delete info.status
            const body = { message: info }
            const { errorReason } = info
            if (errorReason) {
              body.errorReason = errorReason
              delete info.errorReason
            }
            return res.json(body)
          }
        }
      }
    )(req, res, next)
  },

  async _finishLoginAsync(user, req, res) {
    if (user === false) {
      return AsyncFormHelper.redirect(req, res, '/login')
    } // OAuth2 'state' mismatch

    if (user.suspended) {
      return AsyncFormHelper.redirect(req, res, '/account-suspended')
    }

    if (Settings.adminOnlyLogin && !hasAdminAccess(user)) {
      return res.status(403).json({
        message: { type: 'error', text: 'Admin only panel' },
      })
    }

    const auditInfo = AuthenticationController.getAuditInfo(req)

    const anonymousAnalyticsId = req.session.analyticsId
    const isNewUser = req.session.justRegistered || false

    const results = await Modules.promises.hooks.fire(
      'preFinishLogin',
      req,
      res,
      user
    )

    if (results.some(result => result && result.doNotFinish)) {
      return
    }

    if (user.must_reconfirm) {
      return AuthenticationController._redirectToReconfirmPage(req, res, user)
    }

    const redir =
      AuthenticationController.getRedirectFromSession(req) || '/project'

    _loginAsyncHandlers(req, user, anonymousAnalyticsId, isNewUser)
    const userId = user._id

    await UserAuditLogHandler.promises.addEntry(
      userId,
      'login',
      userId,
      req.ip,
      auditInfo
    )

    await _afterLoginSessionSetupAsync(req, user)

    AuthenticationController._clearRedirectFromSession(req)
    AnalyticsRegistrationSourceHelper.clearSource(req.session)
    AnalyticsRegistrationSourceHelper.clearInbound(req.session)
    AsyncFormHelper.redirect(req, res, redir)
  },

  finishLogin(user, req, res, next) {
    AuthenticationController._finishLoginAsync(user, req, res).catch(err =>
      next(err)
    )
  },

  async doPassportLogin(req, username, password, done) {
    let user, info
    try {
      ;({ user, info } = await AuthenticationController._doPassportLogin(
        req,
        username,
        password
      ))
    } catch (error) {
      return done(error)
    }
    return done(undefined, user, info)
  },

  /**
   *
   * @param req
   * @param username
   * @param password
   * @returns {Promise<{ user: any, info: any}>}
   */
  async _doPassportLogin(req, username, password) {
    const email = EmailHelper.parseEmail(username)
    if (!email) {
      Metrics.inc('login_failure_reason', 1, { status: 'invalid_email' })
      return {
        user: null,
        info: {
          status: 400,
          type: 'error',
          text: req.i18n.translate('email_address_is_invalid'),
        },
      }
    }
    AuthenticationController.setAuditInfo(req, { method: 'Password login' })

    const { fromKnownDevice } = AuthenticationController.getAuditInfo(req)
    const auditLog = {
      ipAddress: req.ip,
      info: { method: 'Password login', fromKnownDevice },
    }

    let user, isPasswordReused
    try {
      ;({ user, isPasswordReused } =
        await AuthenticationManager.promises.authenticate(
          { email },
          password,
          auditLog,
          {
            enforceHIBPCheck: !fromKnownDevice,
          }
        ))
    } catch (error) {
      return {
        user: false,
        info: handleAuthenticateErrors(error, req),
      }
    }

    if (user && AuthenticationController.captchaRequiredForLogin(req, user)) {
      Metrics.inc('login_failure_reason', 1, { status: 'captcha_missing' })
      return {
        user: false,
        info: {
          text: req.i18n.translate('cannot_verify_user_not_robot'),
          type: 'error',
          errorReason: 'cannot_verify_user_not_robot',
          status: 400,
        },
      }
    } else if (user) {
      if (
        isPasswordReused &&
        AuthenticationController.getRedirectFromSession(req) == null
      ) {
        AuthenticationController.setRedirectInSession(
          req,
          '/compromised-password'
        )
      }

      // async actions
      return { user, info: undefined }
    } else {
      Metrics.inc('login_failure_reason', 1, { status: 'password_invalid' })
      AuthenticationController._recordFailedLogin()
      logger.debug({ email }, 'failed log in')
      return {
        user: false,
        info: {
          type: 'error',
          key: 'invalid-password-retry-or-reset',
          status: 401,
        },
      }
    }
  },
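
  // 'skipped' means the client never completed a captcha; one is only required
  // while the account is inside the post-failed-login security window.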
  captchaRequiredForLogin(req, user) {
    switch (AuthenticationController.getAuditInfo(req).captcha) {
      case 'trusted':
      case 'disabled':
        return false
      case 'solved':
        return false
      case 'skipped': {
        let required = false
        if (user.lastFailedLogin) {
          const requireCaptchaUntil =
            user.lastFailedLogin.getTime() +
            Settings.elevateAccountSecurityAfterFailedLogin
          required = requireCaptchaUntil >= Date.now()
        }
        Metrics.inc('force_captcha_on_login', 1, {
          status: required ? 'yes' : 'no',
        })
        return required
      }
      default:
        throw new Error('captcha middleware missing in handler chain')
    }
  },

  ipMatchCheck(req, user) {
    if (req.ip !== user.lastLoginIp) {
      NotificationsBuilder.ipMatcherAffiliation(user._id).create(
        req.ip,
        () => {}
      )
    }
    return UserUpdater.updateUser(
      user._id.toString(),
      {
        $set: { lastLoginIp: req.ip },
      },
      () => {}
    )
  },

  requireLogin() {
    const doRequest = function (req, res, next) {
      if (next == null) {
        next = function () {}
      }
      if (!SessionManager.isUserLoggedIn(req.session)) {
        if (acceptsJson(req)) return send401WithChallenge(res)
        return AuthenticationController._redirectToLoginOrRegisterPage(req, res)
      } else {
        req.user = SessionManager.getSessionUser(req.session)
        req.logger?.addFields({ userId: req.user._id })
        return next()
      }
    }

    return doRequest
  },

  /**
   * @param {string} scope
   * @return {import('express').Handler}
   */
  requireOauth(scope) {
    if (typeof scope !== 'string' || !scope) {
      throw new Error(
        "requireOauth() expects a non-empty string as 'scope' parameter"
      )
    }

    // require this here because module may not be included in some versions
    const Oauth2Server = require('../../../../modules/oauth2-server/app/src/Oauth2Server')
    const middleware = async (req, res, next) => {
      const request = new Oauth2Server.Request(req)
      const response = new Oauth2Server.Response(res)
      try {
        const token = await Oauth2Server.server.authenticate(
          request,
          response,
          { scope }
        )
        req.oauth = { access_token: token.accessToken }
        req.oauth_token = token
        req.oauth_user = token.user
        next()
      } catch (err) {
        if (
          err.code === 400 &&
          err.message === 'Invalid request: malformed authorization header'
        ) {
          err.code = 401
        }
        // send all other errors
        res
          .status(err.code)
          .json({ error: err.name, error_description: err.message })
      }
    }
    return expressify(middleware)
  },

  _globalLoginWhitelist: [],
  addEndpointToLoginWhitelist(endpoint) {
    return AuthenticationController._globalLoginWhitelist.push(endpoint)
  },

  requireGlobalLogin(req, res, next) {
    if (
      AuthenticationController._globalLoginWhitelist.includes(
        req._parsedUrl.pathname
      )
    ) {
      return next()
    }

    if (req.headers.authorization != null) {
      AuthenticationController.requirePrivateApiAuth()(req, res, next)
    } else if (SessionManager.isUserLoggedIn(req.session)) {
      next()
    } else {
      logger.debug(
        { url: req.url },
        'user trying to access endpoint not in global whitelist'
      )
      if (acceptsJson(req)) return send401WithChallenge(res)
      AuthenticationController.setRedirectInSession(req)
      res.redirect('/login')
    }
  },

  validateAdmin(req, res, next) {
    const adminDomains = Settings.adminDomains
    if (
      !adminDomains ||
      !(Array.isArray(adminDomains) && adminDomains.length)
    ) {
      return next()
    }
    const user = SessionManager.getSessionUser(req.session)
    if (!hasAdminAccess(user)) {
      return next()
    }
    const email = user.email
    if (email == null) {
      return next(
        new OError('[ValidateAdmin] Admin user without email address', {
          userId: user._id,
        })
      )
    }
    if (!adminDomains.find(domain => email.endsWith(`@${domain}`))) {
      return next(
        new OError('[ValidateAdmin] Admin user with invalid email domain', {
          email,
          userId: user._id,
        })
      )
    }
    return next()
  },

  checkCredentials,

  requireBasicAuth: function (userDetails) {
    const userDetailsMap = new Map(Object.entries(userDetails))
    return function (req, res, next) {
      const credentials = basicAuth(req)
      if (
        !credentials ||
        !checkCredentials(userDetailsMap, credentials.name, credentials.pass)
      ) {
        send401WithChallenge(res)
        Metrics.inc('security.http-auth', 1, { status: 'reject' })
      } else {
        Metrics.inc('security.http-auth', 1, { status: 'accept' })
        next()
      }
    }
  },

  requirePrivateApiAuth() {
    return AuthenticationController.requireBasicAuth(Settings.httpAuthUsers)
  },

  setAuditInfo(req, info) {
    if (!req.__authAuditInfo) {
      req.__authAuditInfo = {}
    }
    Object.assign(req.__authAuditInfo, info)
  },

  getAuditInfo(req) {
    return req.__authAuditInfo || {}
  },
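
  // Asset-like paths (socket.io, js, stylesheets, img, image files) are
  // skipped so a stray background request can't hijack the post-login redirect.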
  setRedirectInSession(req, value) {
    if (value == null) {
      value =
        Object.keys(req.query).length > 0
          ? `${req.path}?${querystring.stringify(req.query)}`
          : `${req.path}`
    }
    if (
      req.session != null &&
      !/^\/(socket.io|js|stylesheets|img)\/.*$/.test(value) &&
      !/^.*\.(png|jpeg|svg)$/.test(value)
    ) {
      const safePath = UrlHelper.getSafeRedirectPath(value)
      return (req.session.postLoginRedirect = safePath)
    }
  },

  _redirectToLoginOrRegisterPage(req, res) {
    if (
      req.query.zipUrl != null ||
      req.session.sharedProjectData ||
      req.path === '/user/subscription/new'
    ) {
      AuthenticationController._redirectToRegisterPage(req, res)
    } else {
      AuthenticationController._redirectToLoginPage(req, res)
    }
  },

  _redirectToLoginPage(req, res) {
    logger.debug(
      { url: req.url },
      'user not logged in so redirecting to login page'
    )
    AuthenticationController.setRedirectInSession(req)
    const url = `/login?${querystring.stringify(req.query)}`
    res.redirect(url)
    Metrics.inc('security.login-redirect')
  },

  _redirectToReconfirmPage(req, res, user) {
    logger.debug(
      { url: req.url },
      'user needs to reconfirm so redirecting to reconfirm page'
    )
    req.session.reconfirm_email = user != null ? user.email : undefined
    const redir = '/user/reconfirm'
    AsyncFormHelper.redirect(req, res, redir)
  },

  _redirectToRegisterPage(req, res) {
    logger.debug(
      { url: req.url },
      'user not logged in so redirecting to register page'
    )
    AuthenticationController.setRedirectInSession(req)
    const url = `/register?${querystring.stringify(req.query)}`
    res.redirect(url)
    Metrics.inc('security.login-redirect')
  },

  _recordSuccessfulLogin(userId, callback) {
    if (callback == null) {
      callback = function () {}
    }
    UserUpdater.updateUser(
      userId.toString(),
      {
        $set: { lastLoggedIn: new Date() },
        $inc: { loginCount: 1 },
      },
      function (error) {
        if (error != null) {
          callback(error)
        }
        Metrics.inc('user.login.success')
        callback()
      }
    )
  },

  _recordFailedLogin(callback) {
    Metrics.inc('user.login.failed')
    if (callback) callback()
  },

  getRedirectFromSession(req) {
    let safePath
    const value = _.get(req, ['session', 'postLoginRedirect'])
    if (value) {
      safePath = UrlHelper.getSafeRedirectPath(value)
    }
    return safePath || null
  },

  _clearRedirectFromSession(req) {
    if (req.session != null) {
      delete req.session.postLoginRedirect
    }
  },
}

function _afterLoginSessionSetup(req, user, callback) {
  req.login(user, { keepSessionInfo: true }, function (err) {
    if (err) {
      OError.tag(err, 'error from req.login', {
        user_id: user._id,
      })
      return callback(err)
    }
    delete req.session.__tmp
    delete req.session.csrfSecret
    req.session.save(function (err) {
      if (err) {
        OError.tag(err, 'error saving regenerated session after login', {
          user_id: user._id,
        })
        return callback(err)
      }
      UserSessionsManager.trackSession(user, req.sessionID, function () {})
      if (!req.deviceHistory) {
        // Captcha disabled or SSO-based login.
        return callback()
      }
      req.deviceHistory.add(user.email)
      req.deviceHistory
        .serialize(req.res)
        .catch(err => {
          logger.err({ err }, 'cannot serialize deviceHistory')
        })
        .finally(() => callback())
    })
  })
}

const _afterLoginSessionSetupAsync = promisify(_afterLoginSessionSetup)

function _loginAsyncHandlers(req, user, anonymousAnalyticsId, isNewUser) {
  UserHandler.populateTeamInvites(user, err => {
    if (err != null) {
      logger.warn({ err }, 'error setting up login data')
    }
  })
  LoginRateLimiter.recordSuccessfulLogin(user.email, () => {})
  AuthenticationController._recordSuccessfulLogin(user._id, () => {})
  AuthenticationController.ipMatchCheck(req, user)
  Analytics.recordEventForUserInBackground(user._id, 'user-logged-in', {
    source: req.session.saml
      ? 'saml'
      : req.user_info?.auth_provider || 'email-password',
  })
  Analytics.identifyUser(user._id, anonymousAnalyticsId, isNewUser)

  logger.debug(
    { email: user.email, userId: user._id.toString() },
    'successful log in'
  )

  req.session.justLoggedIn = true
  // capture the request ip for use when creating the session
  return (user._login_req_ip = req.ip)
}

AuthenticationController.promises = {
  finishLogin: AuthenticationController._finishLoginAsync,
}

module.exports = AuthenticationController
@@ -0,0 +1,58 @@
const Metrics = require('@overleaf/metrics')
const OError = require('@overleaf/o-error')
const Settings = require('@overleaf/settings')
const Errors = require('../Errors/Errors')

class InvalidEmailError extends Errors.BackwardCompatibleError {}
class InvalidPasswordError extends Errors.BackwardCompatibleError {}
class ParallelLoginError extends Errors.BackwardCompatibleError {}
class PasswordMustBeDifferentError extends Errors.BackwardCompatibleError {}
class PasswordReusedError extends Errors.BackwardCompatibleError {}

function handleAuthenticateErrors(error, req) {
  if (error.message === 'password is too long') {
    Metrics.inc('login_failure_reason', 1, {
      status: 'password_is_too_long',
    })
    return {
      status: 422,
      type: 'error',
      key: 'password-too-long',
      text: req.i18n.translate('password_too_long_please_reset'),
    }
  }
  if (error instanceof ParallelLoginError) {
    Metrics.inc('login_failure_reason', 1, { status: 'parallel_login' })
    return { status: 429 }
  }
  if (error instanceof PasswordReusedError) {
    Metrics.inc('login_failure_reason', 1, {
      status: 'password_compromised',
    })
    const text = `${req.i18n
      .translate('password_compromised_try_again_or_use_known_device_or_reset')
      .replace('<0>', '')
      .replace('</0>', ' (https://haveibeenpwned.com/passwords)')
      .replace('<1>', '')
      .replace('</1>', ` (${Settings.siteUrl}/user/password/reset)`)}.`
    return {
      status: 400,
      type: 'error',
      key: 'password-compromised',
      text,
    }
  }
  Metrics.inc('login_failure_reason', 1, {
    status: error instanceof OError ? error.name : 'error',
  })
  throw error
}

module.exports = {
  InvalidEmailError,
  InvalidPasswordError,
  ParallelLoginError,
  PasswordMustBeDifferentError,
  PasswordReusedError,
  handleAuthenticateErrors,
}
@@ -0,0 +1,477 @@
const Settings = require('@overleaf/settings')
const { User } = require('../../models/User')
const { db, ObjectId } = require('../../infrastructure/mongodb')
const bcrypt = require('bcrypt')
const EmailHelper = require('../Helpers/EmailHelper')
const {
  InvalidEmailError,
  InvalidPasswordError,
  ParallelLoginError,
  PasswordMustBeDifferentError,
  PasswordReusedError,
} = require('./AuthenticationErrors')
const {
  callbackify,
  callbackifyMultiResult,
} = require('@overleaf/promise-utils')
const HaveIBeenPwned = require('./HaveIBeenPwned')
const UserAuditLogHandler = require('../User/UserAuditLogHandler')
const logger = require('@overleaf/logger')
const DiffHelper = require('../Helpers/DiffHelper')
const Metrics = require('@overleaf/metrics')

const BCRYPT_ROUNDS = Settings.security.bcryptRounds || 12
const BCRYPT_MINOR_VERSION = Settings.security.bcryptMinorVersion || 'a'
const MAX_SIMILARITY = 0.7
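
// Guard for the password-vs-email similarity check: a password that is vastly
// longer than the compared value cannot be meaningfully similar, so callers
// can skip the comparison when this returns true.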
function _exceedsMaximumLengthRatio(password, maxSimilarity, value) {
  const passwordLength = password.length
  const lengthBoundSimilarity = (maxSimilarity / 2) * passwordLength
  const valueLength = value.length
  return (
    passwordLength >= 10 * valueLength && valueLength < lengthBoundSimilarity
  )
}

const _checkWriteResult = function (result) {
  // for MongoDB
  return !!(result && result.modifiedCount === 1)
}

function _validatePasswordNotTooLong(password) {
  // bcrypt has a hard limit of 72 characters.
  if (password.length > 72) {
    return new InvalidPasswordError({
      message: 'password is too long',
      info: { code: 'too_long' },
    })
  }
  return null
}

function _metricsForSuccessfulPasswordMatch(password) {
  const validationResult = AuthenticationManager.validatePassword(password)
  const status =
    validationResult === null ? 'success' : validationResult?.info?.code
  Metrics.inc('check-password', { status })
  return null
}

const AuthenticationManager = {
  async _checkUserPassword(query, password) {
    // Using Mongoose for legacy reasons here. The returned User instance
    // gets serialized into the session and there may be subtle differences
    // between the user returned by Mongoose vs mongodb (such as default values)
    const user = await User.findOne(query).exec()

    if (!user || !user.hashedPassword) {
      return { user: null, match: null }
    }

    let rounds = 0
    try {
      rounds = bcrypt.getRounds(user.hashedPassword)
    } catch (err) {
      let prefix, suffix, length
      if (typeof user.hashedPassword === 'string') {
        length = user.hashedPassword.length
        if (user.hashedPassword.length > 50) {
          // A full bcrypt hash is 60 characters long.
          prefix = user.hashedPassword.slice(0, '$2a$12$x'.length)
          suffix = user.hashedPassword.slice(-4)
        } else if (user.hashedPassword.length > 20) {
          prefix = user.hashedPassword.slice(0, 4)
          suffix = user.hashedPassword.slice(-4)
        } else {
          prefix = user.hashedPassword.slice(0, 4)
        }
      }
      logger.warn(
        {
          err,
          userId: user._id,
          hashedPassword: {
            type: typeof user.hashedPassword,
            length,
            prefix,
            suffix,
          },
        },
        'unexpected user.hashedPassword value'
      )
    }
    Metrics.inc('bcrypt', 1, {
      method: 'compare',
      path: rounds,
    })

    const match = await bcrypt.compare(password, user.hashedPassword)

    if (match) {
      _metricsForSuccessfulPasswordMatch(password)
    }

    return { user, match }
  },
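
  // loginEpoch acts as an optimistic lock: every attempt increments it, so a
  // concurrent login that races this one fails the conditional update below
  // and surfaces as a ParallelLoginError.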
  async authenticate(query, password, auditLog, { enforceHIBPCheck = true }) {
    const { user, match } = await AuthenticationManager._checkUserPassword(
      query,
      password
    )

    if (!user) {
      return { user: null }
    }

    const update = { $inc: { loginEpoch: 1 } }
    if (!match) {
      update.$set = { lastFailedLogin: new Date() }
    }

    const result = await User.updateOne(
      { _id: user._id, loginEpoch: user.loginEpoch },
      update,
      {}
    ).exec()

    if (result.modifiedCount !== 1) {
      throw new ParallelLoginError()
    }

    if (!match) {
      if (!auditLog) {
        return { user: null }
      } else {
        try {
          await UserAuditLogHandler.promises.addEntry(
            user._id,
            'failed-password-match',
            user._id,
            auditLog.ipAddress,
            auditLog.info
          )
        } catch (err) {
          logger.error(
            { userId: user._id, err, info: auditLog.info },
            'Error while adding AuditLog entry for failed-password-match'
          )
        }
        return { user: null }
      }
    }
    await AuthenticationManager.checkRounds(user, user.hashedPassword, password)

    let isPasswordReused
    try {
      isPasswordReused =
        await HaveIBeenPwned.promises.checkPasswordForReuse(password)
    } catch (err) {
      logger.err({ err }, 'cannot check password for re-use')
    }

    if (isPasswordReused && enforceHIBPCheck) {
      throw new PasswordReusedError()
    }

    return { user, isPasswordReused }
  },

  validateEmail(email) {
    const parsed = EmailHelper.parseEmail(email)
    if (!parsed) {
      return new InvalidEmailError({ message: 'email not valid' })
    }
    return null
  },

  // validates a password based on a similar set of rules previously used by `passfield.js` on the frontend
  // note that `passfield.js` enforced more rules than this, but these are the most commonly set.
  // returns null on success, or an error object.
  validatePassword(password, email) {
    if (password == null) {
      return new InvalidPasswordError({
        message: 'password not set',
        info: { code: 'not_set' },
      })
    }

    Metrics.inc('try-validate-password')

    let allowAnyChars, min, max
    if (Settings.passwordStrengthOptions) {
      allowAnyChars = Settings.passwordStrengthOptions.allowAnyChars === true
      if (Settings.passwordStrengthOptions.length) {
        min = Settings.passwordStrengthOptions.length.min
        max = Settings.passwordStrengthOptions.length.max
      }
    }
    allowAnyChars = !!allowAnyChars
    min = min || 8
    max = max || 72

    // we don't support passwords > 72 characters in length, because bcrypt truncates them
    if (max > 72) {
      max = 72
    }

    if (password.length < min) {
      return new InvalidPasswordError({
        message: 'password is too short',
        info: { code: 'too_short' },
      })
    }
    if (password.length > max) {
      return new InvalidPasswordError({
        message: 'password is too long',
        info: { code: 'too_long' },
      })
    }
    const passwordLengthError = _validatePasswordNotTooLong(password)
    if (passwordLengthError) {
      return passwordLengthError
    }
    if (
      !allowAnyChars &&
      !AuthenticationManager._passwordCharactersAreValid(password)
    ) {
      return new InvalidPasswordError({
        message: 'password contains an invalid character',
        info: { code: 'invalid_character' },
      })
    }
    if (typeof email === 'string' && email !== '') {
      const startOfEmail = email.split('@')[0]
      if (
        password.includes(email) ||
        password.includes(startOfEmail) ||
        email.includes(password)
      ) {
        return new InvalidPasswordError({
          message: 'password contains part of email address',
          info: { code: 'contains_email' },
        })
      }
      try {
        const passwordTooSimilarError =
          AuthenticationManager._validatePasswordNotTooSimilar(password, email)
        if (passwordTooSimilarError) {
          Metrics.inc('password-too-similar-to-email')
          return new InvalidPasswordError({
            message: 'password is too similar to email address',
            info: { code: 'too_similar' },
          })
        }
      } catch (error) {
        logger.error(
          { error },
          'error while checking password similarity to email'
        )
      }
      // TODO: remove this check once the password-too-similar checks are active?
    }
    return null
  },

  async setUserPassword(user, password) {
    return await AuthenticationManager.setUserPasswordInV2(user, password)
  },

  async checkRounds(user, hashedPassword, password) {
    // Temporarily disable this function, TODO: re-enable this
    if (Settings.security.disableBcryptRoundsUpgrades) {
      Metrics.inc('bcrypt_check_rounds', 1, { status: 'disabled' })
      return
    }
    // check current number of rounds and rehash if necessary
    const currentRounds = bcrypt.getRounds(hashedPassword)
    if (currentRounds < BCRYPT_ROUNDS) {
      Metrics.inc('bcrypt_check_rounds', 1, { status: 'upgrade' })
      return await AuthenticationManager._setUserPasswordInMongo(user, password)
    } else {
      Metrics.inc('bcrypt_check_rounds', 1, { status: 'success' })
    }
},
|
||||
|
||||
async hashPassword(password) {
|
||||
// Double-check the size to avoid truncating in bcrypt.
|
||||
const error = _validatePasswordNotTooLong(password)
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
|
||||
const salt = await bcrypt.genSalt(BCRYPT_ROUNDS, BCRYPT_MINOR_VERSION)
|
||||
|
||||
Metrics.inc('bcrypt', 1, {
|
||||
method: 'hash',
|
||||
path: BCRYPT_ROUNDS,
|
||||
})
|
||||
return await bcrypt.hash(password, salt)
|
||||
},
|
||||
|
||||
async setUserPasswordInV2(user, password) {
|
||||
if (!user || !user.email || !user._id) {
|
||||
throw new Error('invalid user object')
|
||||
}
|
||||
const validationError = this.validatePassword(password, user.email)
|
||||
if (validationError) {
|
||||
throw validationError
|
||||
}
|
||||
// check if we can log in with this password. In which case we should reject it,
|
||||
// because it is the same as the existing password.
|
||||
const { match } = await AuthenticationManager._checkUserPassword(
|
||||
{ _id: user._id },
|
||||
password
|
||||
)
|
||||
|
||||
if (match) {
|
||||
throw new PasswordMustBeDifferentError()
|
||||
}
|
||||
|
||||
let isPasswordReused
|
||||
try {
|
||||
isPasswordReused =
|
||||
await HaveIBeenPwned.promises.checkPasswordForReuse(password)
|
||||
} catch (error) {
|
||||
logger.err({ error }, 'cannot check password for re-use')
|
||||
}
|
||||
|
||||
if (isPasswordReused) {
|
||||
throw new PasswordReusedError()
|
||||
}
|
||||
|
||||
// password is strong enough or the validation with the service did not happen
|
||||
return await this._setUserPasswordInMongo(user, password)
|
||||
},
|
||||
|
||||
async _setUserPasswordInMongo(user, password) {
|
||||
const hash = await this.hashPassword(password)
|
||||
const result = await db.users.updateOne(
|
||||
{ _id: new ObjectId(user._id.toString()) },
|
||||
{
|
||||
$set: {
|
||||
hashedPassword: hash,
|
||||
},
|
||||
$unset: {
|
||||
password: true,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
return _checkWriteResult(result)
|
||||
},
|
||||
|
||||
_passwordCharactersAreValid(password) {
|
||||
let digits, letters, lettersUp, symbols
|
||||
if (
|
||||
Settings.passwordStrengthOptions &&
|
||||
Settings.passwordStrengthOptions.chars
|
||||
) {
|
||||
digits = Settings.passwordStrengthOptions.chars.digits
|
||||
letters = Settings.passwordStrengthOptions.chars.letters
|
||||
lettersUp = Settings.passwordStrengthOptions.chars.letters_up
|
||||
symbols = Settings.passwordStrengthOptions.chars.symbols
|
||||
}
|
||||
digits = digits || '1234567890'
|
||||
letters = letters || 'abcdefghijklmnopqrstuvwxyz'
|
||||
lettersUp = lettersUp || 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
|
||||
symbols = symbols || '@#$%^&*()-_=+[]{};:<>/?!£€.,'
|
||||
|
||||
for (let charIndex = 0; charIndex <= password.length - 1; charIndex++) {
|
||||
if (
|
||||
digits.indexOf(password[charIndex]) === -1 &&
|
||||
letters.indexOf(password[charIndex]) === -1 &&
|
||||
lettersUp.indexOf(password[charIndex]) === -1 &&
|
||||
symbols.indexOf(password[charIndex]) === -1
|
||||
) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
},
|
||||
|
||||
/**
|
||||
* Check if the password is similar to (parts of) the email address.
|
||||
* For now, this merely sends a metric when the password and
|
||||
* email address are deemed to be too similar to each other.
|
||||
* Later we will reject passwords that fail this check.
|
||||
*
|
||||
* This logic was borrowed from the django project:
|
||||
* https://github.com/django/django/blob/fa3afc5d86f1f040922cca2029d6a34301597a70/django/contrib/auth/password_validation.py#L159-L214
|
||||
*/
|
||||
_validatePasswordNotTooSimilar(password, email) {
|
||||
password = password.toLowerCase()
|
||||
email = email.toLowerCase()
|
||||
const stringsToCheck = [email]
|
||||
.concat(email.split(/\W+/))
|
||||
.concat(email.split(/@/))
|
||||
for (const emailPart of stringsToCheck) {
|
||||
if (!_exceedsMaximumLengthRatio(password, MAX_SIMILARITY, emailPart)) {
|
||||
const similarity = DiffHelper.stringSimilarity(password, emailPart)
|
||||
if (similarity > MAX_SIMILARITY) {
|
||||
return new Error('password is too similar to email')
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
getMessageForInvalidPasswordError(error, req) {
|
||||
const errorCode = error?.info?.code
|
||||
const message = {
|
||||
type: 'error',
|
||||
}
|
||||
switch (errorCode) {
|
||||
case 'not_set':
|
||||
message.key = 'password-not-set'
|
||||
message.text = req.i18n.translate('invalid_password_not_set')
|
||||
break
|
||||
case 'invalid_character':
|
||||
message.key = 'password-invalid-character'
|
||||
message.text = req.i18n.translate('invalid_password_invalid_character')
|
||||
break
|
||||
case 'contains_email':
|
||||
message.key = 'password-contains-email'
|
||||
message.text = req.i18n.translate('invalid_password_contains_email')
|
||||
break
|
||||
case 'too_similar':
|
||||
message.key = 'password-too-similar'
|
||||
message.text = req.i18n.translate('invalid_password_too_similar')
|
||||
break
|
||||
case 'too_short':
|
||||
message.key = 'password-too-short'
|
||||
message.text = req.i18n.translate('invalid_password_too_short', {
|
||||
minLength: Settings.passwordStrengthOptions?.length?.min || 8,
|
||||
})
|
||||
break
|
||||
case 'too_long':
|
||||
message.key = 'password-too-long'
|
||||
message.text = req.i18n.translate('invalid_password_too_long', {
|
||||
maxLength: Settings.passwordStrengthOptions?.length?.max || 72,
|
||||
})
|
||||
break
|
||||
default:
|
||||
logger.error({ err: error }, 'Unknown password validation error code')
|
||||
message.text = req.i18n.translate('invalid_password')
|
||||
break
|
||||
}
|
||||
return message
|
||||
},
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
_validatePasswordNotTooSimilar:
|
||||
AuthenticationManager._validatePasswordNotTooSimilar, // Private function exported for tests
|
||||
validateEmail: AuthenticationManager.validateEmail,
|
||||
validatePassword: AuthenticationManager.validatePassword,
|
||||
getMessageForInvalidPasswordError:
|
||||
AuthenticationManager.getMessageForInvalidPasswordError,
|
||||
authenticate: callbackifyMultiResult(AuthenticationManager.authenticate, [
|
||||
'user',
|
||||
'isPasswordReused',
|
||||
]),
|
||||
setUserPassword: callbackify(AuthenticationManager.setUserPassword),
|
||||
checkRounds: callbackify(AuthenticationManager.checkRounds),
|
||||
hashPassword: callbackify(AuthenticationManager.hashPassword),
|
||||
setUserPasswordInV2: callbackify(AuthenticationManager.setUserPasswordInV2),
|
||||
promises: AuthenticationManager,
|
||||
}
|
||||
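
A minimal usage sketch of the dual callback/promise surface exported above (not part of the commit; the require path and the calling context are assumptions):

// Hypothetical caller; `validatePassword` is synchronous and returns
// null or an InvalidPasswordError, while the promise API lives on `promises`.
const AuthenticationManager = require('./AuthenticationManager')

async function changePassword(user, newPassword) {
  const validationError = AuthenticationManager.validatePassword(
    newPassword,
    user.email
  )
  if (validationError) throw validationError
  // rejects with PasswordMustBeDifferentError or PasswordReusedError
  await AuthenticationManager.promises.setUserPassword(user, newPassword)
}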
127
services/web/app/src/Features/Authentication/HaveIBeenPwned.js
Normal file
@@ -0,0 +1,127 @@
/*
  This module is operating on raw user passwords. Be very defensive.
  Pay special attention when passing the password or even a hash/prefix around.
  We need to ensure that no parts of it get logged or returned on either the
  happy path or via an error (message or attributes).
*/

const { callbackify } = require('util')
const { fetchString } = require('@overleaf/fetch-utils')
const crypto = require('crypto')
const Settings = require('@overleaf/settings')
const Metrics = require('@overleaf/metrics')
const logger = require('@overleaf/logger')

const HEX_CHARS_UPPER = '1234567890ABCDEF'
const API_ERROR = new Error('cannot contact HaveIBeenPwned api')
const INVALID_PREFIX = new Error(
  'This is not a valid hex prefix. Rejecting to pass it to HaveIBeenPwned'
)
const INVALID_RESPONSE = new Error('cannot consume HaveIBeenPwned api response')
const INVALID_SCORE = new Error(
  'non integer score returned by HaveIBeenPwned api'
)
const CODED_ERROR_MESSAGES = [
  API_ERROR,
  INVALID_PREFIX,
  INVALID_RESPONSE,
  INVALID_SCORE,
].map(err => err.message)

async function getScoresForPrefix(prefix) {
  if (
    typeof prefix !== 'string' ||
    prefix.length !== 5 ||
    Array.from(prefix).some(c => !HEX_CHARS_UPPER.includes(c))
  ) {
    // Make sure we do not pass arbitrary objects to the api.
    throw INVALID_PREFIX
  }
  try {
    return await fetchString(
      `${Settings.apis.haveIBeenPwned.url}/range/${prefix}`,
      {
        headers: {
          'User-Agent': 'www.overleaf.com',
          // Docs: https://haveibeenpwned.com/API/v3#PwnedPasswordsPadding
          'Add-Padding': true,
        },
        signal: AbortSignal.timeout(Settings.apis.haveIBeenPwned.timeout),
      }
    )
  } catch (_errorWithPotentialReferenceToPrefix) {
    // NOTE: Do not leak request details by passing the original error up.
    throw API_ERROR
  }
}

async function isPasswordReused(password) {
  const sha1 = crypto
    .createHash('sha1')
    .update(password)
    .digest('hex')
    .toUpperCase()
  const prefix = sha1.slice(0, 5)
  const body = await getScoresForPrefix(prefix)

  let score = 0
  try {
    for (const line of body.split('\r\n')) {
      const [candidate, scoreRaw] = line.split(':')
      if (prefix + candidate === sha1) {
        score = parseInt(scoreRaw)
        break
      }
    }
  } catch (_errorWithPotentialReferenceToHash) {
    // NOTE: Do not leak password details by logging the original error.
    throw INVALID_RESPONSE
  }

  if (Number.isNaN(score)) {
    // NOTE: Do not leak password details by logging the score.
    throw INVALID_SCORE
  }
  return score > 0
}

async function checkPasswordForReuse(password) {
  if (!Settings.apis.haveIBeenPwned.enabled) {
    return
  }

  try {
    const isReused = await isPasswordReused(password)

    Metrics.inc('password_re_use', {
      status: isReused ? 're-used' : 'unique',
    })

    return isReused
  } catch (err) {
    let error = err
    // Make sure we do not leak any password details.
    if (!CODED_ERROR_MESSAGES.includes(err.message)) {
      error = new Error('hidden message')
    }
    error = new Error(error.message)

    Metrics.inc('password_re_use', { status: 'failure' })

    throw error
  }
}

function checkPasswordForReuseInBackground(password) {
  checkPasswordForReuse(password).catch(error => {
    logger.err({ error }, 'cannot check password for re-use')
  })
}

module.exports = {
  checkPasswordForReuse: callbackify(checkPasswordForReuse),
  checkPasswordForReuseInBackground,
  promises: {
    checkPasswordForReuse,
  },
}
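
A short illustration (not part of the commit) of the k-anonymity flow the module above implements: only the first five hex characters of the SHA-1 ever leave the process, and the breach match happens locally against the returned suffixes.

const crypto = require('crypto')

// hash locally, uppercase hex, exactly as isPasswordReused() does above
const sha1 = crypto
  .createHash('sha1')
  .update('some candidate password')
  .digest('hex')
  .toUpperCase()
const prefix = sha1.slice(0, 5) // the only data sent to the /range API
// each response line is '<35-char suffix>:<count>'; the password has been
// breached when prefix + suffix === sha1 with count > 0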
@@ -0,0 +1,46 @@
const _ = require('lodash')

const SessionManager = {
  getSessionUser(session) {
    const sessionUser = _.get(session, ['user'])
    const sessionPassportUser = _.get(session, ['passport', 'user'])
    return sessionUser || sessionPassportUser || null
  },

  setInSessionUser(session, props) {
    const sessionUser = SessionManager.getSessionUser(session)
    if (!sessionUser) {
      return
    }
    for (const key in props) {
      const value = props[key]
      sessionUser[key] = value
    }
    return null
  },

  isUserLoggedIn(session) {
    const userId = SessionManager.getLoggedInUserId(session)
    return ![null, undefined, false].includes(userId)
  },

  getLoggedInUserId(session) {
    const user = SessionManager.getSessionUser(session)
    if (user) {
      return user._id
    } else {
      return null
    }
  },

  getLoggedInUserV1Id(session) {
    const user = SessionManager.getSessionUser(session)
    if (user != null && user.v1_id != null) {
      return user.v1_id
    } else {
      return null
    }
  },
}

module.exports = SessionManager
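
A sketch of how a route guard might lean on the helpers above; the middleware and redirect target are assumptions for illustration:

const SessionManager = require('./SessionManager')

// hypothetical Express middleware built on the session helpers above
function requireLogin(req, res, next) {
  if (!SessionManager.isUserLoggedIn(req.session)) {
    return res.redirect('/login')
  }
  next()
}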
@@ -0,0 +1,315 @@
const { callbackify } = require('util')
const { ObjectId } = require('mongodb-legacy')
const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter')
const CollaboratorsHandler = require('../Collaborators/CollaboratorsHandler')
const ProjectGetter = require('../Project/ProjectGetter')
const { User } = require('../../models/User')
const PrivilegeLevels = require('./PrivilegeLevels')
const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler')
const PublicAccessLevels = require('./PublicAccessLevels')
const Errors = require('../Errors/Errors')
const { hasAdminAccess } = require('../Helpers/AdminAuthorizationHelper')
const Settings = require('@overleaf/settings')
const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')

function isRestrictedUser(
  userId,
  privilegeLevel,
  isTokenMember,
  isInvitedMember
) {
  if (privilegeLevel === PrivilegeLevels.NONE) {
    return true
  }
  return (
    privilegeLevel === PrivilegeLevels.READ_ONLY &&
    (isTokenMember || !userId) &&
    !isInvitedMember
  )
}

async function isRestrictedUserForProject(userId, projectId, token) {
  const privilegeLevel = await getPrivilegeLevelForProject(
    userId,
    projectId,
    token
  )
  const isTokenMember = await CollaboratorsHandler.promises.userIsTokenMember(
    userId,
    projectId
  )
  const isInvitedMember =
    await CollaboratorsGetter.promises.isUserInvitedMemberOfProject(
      userId,
      projectId
    )
  return isRestrictedUser(
    userId,
    privilegeLevel,
    isTokenMember,
    isInvitedMember
  )
}

async function getPublicAccessLevel(projectId) {
  if (!ObjectId.isValid(projectId)) {
    throw new Error('invalid project id')
  }

  // Note, the Project property in the DB is `publicAccesLevel`, without the second `s`
  const project = await ProjectGetter.promises.getProject(projectId, {
    publicAccesLevel: 1,
  })
  if (!project) {
    throw new Errors.NotFoundError(`no project found with id ${projectId}`)
  }
  return project.publicAccesLevel
}

/**
 * Get the privilege level that the user has for the project.
 *
 * @param userId - The id of the user that wants to access the project.
 * @param projectId - The id of the project to be accessed.
 * @param {string} token
 * @param {Object} opts
 * @param {boolean} opts.ignoreSiteAdmin - Do not consider whether the user is
 *     a site admin.
 * @param {boolean} opts.ignorePublicAccess - Do not consider the project is
 *     publicly accessible.
 *
 * @returns {string|boolean} The privilege level. One of "owner",
 *     "readAndWrite", "review", "readOnly" or false.
 */
async function getPrivilegeLevelForProject(
  userId,
  projectId,
  token,
  opts = {}
) {
  if (userId) {
    return getPrivilegeLevelForProjectWithUser(userId, projectId, opts)
  } else {
    return getPrivilegeLevelForProjectWithoutUser(projectId, token, opts)
  }
}

// User is present, get their privilege level from the database
async function getPrivilegeLevelForProjectWithUser(
  userId,
  projectId,
  opts = {}
) {
  if (!opts.ignoreSiteAdmin) {
    if (await isUserSiteAdmin(userId)) {
      return PrivilegeLevels.OWNER
    }
  }

  const privilegeLevel =
    await CollaboratorsGetter.promises.getMemberIdPrivilegeLevel(
      userId,
      projectId
    )
  if (privilegeLevel && privilegeLevel !== PrivilegeLevels.NONE) {
    // The user has direct access
    return privilegeLevel
  }

  if (!opts.ignorePublicAccess) {
    // Legacy public-access system
    // User is present (not anonymous), but does not have direct access
    const publicAccessLevel = await getPublicAccessLevel(projectId)
    if (publicAccessLevel === PublicAccessLevels.READ_ONLY) {
      return PrivilegeLevels.READ_ONLY
    }
    if (publicAccessLevel === PublicAccessLevels.READ_AND_WRITE) {
      return PrivilegeLevels.READ_AND_WRITE
    }
  }

  return PrivilegeLevels.NONE
}

// User is anonymous, try token-based access
async function getPrivilegeLevelForProjectWithoutUser(
  projectId,
  token,
  opts = {}
) {
  const publicAccessLevel = await getPublicAccessLevel(projectId)
  if (!opts.ignorePublicAccess) {
    if (publicAccessLevel === PublicAccessLevels.READ_ONLY) {
      // Legacy public read-only access for anonymous user
      return PrivilegeLevels.READ_ONLY
    }
    if (publicAccessLevel === PublicAccessLevels.READ_AND_WRITE) {
      // Legacy public read-write access for anonymous user
      return PrivilegeLevels.READ_AND_WRITE
    }
  }
  if (publicAccessLevel === PublicAccessLevels.TOKEN_BASED) {
    return getPrivilegeLevelForProjectWithToken(projectId, token)
  }

  // Deny anonymous user access
  return PrivilegeLevels.NONE
}

async function getPrivilegeLevelForProjectWithToken(projectId, token) {
  // Anonymous users can have read-only access to token-based projects,
  // while read-and-write access requires the user to be logged in,
  // unless the `enableAnonymousReadAndWriteSharing` setting is enabled
  const { isValidReadAndWrite, isValidReadOnly } =
    await TokenAccessHandler.promises.validateTokenForAnonymousAccess(
      projectId,
      token
    )
  if (isValidReadOnly) {
    // Grant anonymous user read-only access
    return PrivilegeLevels.READ_ONLY
  }
  if (isValidReadAndWrite) {
    // Grant anonymous user read-and-write access
    return PrivilegeLevels.READ_AND_WRITE
  }
  // Deny anonymous access
  return PrivilegeLevels.NONE
}

async function canUserReadProject(userId, projectId, token) {
  const privilegeLevel = await getPrivilegeLevelForProject(
    userId,
    projectId,
    token
  )
  return [
    PrivilegeLevels.OWNER,
    PrivilegeLevels.READ_AND_WRITE,
    PrivilegeLevels.READ_ONLY,
    PrivilegeLevels.REVIEW,
  ].includes(privilegeLevel)
}

async function canUserWriteProjectContent(userId, projectId, token) {
  const privilegeLevel = await getPrivilegeLevelForProject(
    userId,
    projectId,
    token
  )
  return [PrivilegeLevels.OWNER, PrivilegeLevels.READ_AND_WRITE].includes(
    privilegeLevel
  )
}

async function canUserWriteOrReviewProjectContent(userId, projectId, token) {
  const privilegeLevel = await getPrivilegeLevelForProject(
    userId,
    projectId,
    token
  )
  return (
    privilegeLevel === PrivilegeLevels.OWNER ||
    privilegeLevel === PrivilegeLevels.READ_AND_WRITE ||
    privilegeLevel === PrivilegeLevels.REVIEW
  )
}

async function canUserWriteProjectSettings(userId, projectId, token) {
  const privilegeLevel = await getPrivilegeLevelForProject(
    userId,
    projectId,
    token,
    { ignorePublicAccess: true }
  )
  return [PrivilegeLevels.OWNER, PrivilegeLevels.READ_AND_WRITE].includes(
    privilegeLevel
  )
}

async function canUserRenameProject(userId, projectId, token) {
  const privilegeLevel = await getPrivilegeLevelForProject(
    userId,
    projectId,
    token
  )
  return privilegeLevel === PrivilegeLevels.OWNER
}

async function canUserAdminProject(userId, projectId, token) {
  const privilegeLevel = await getPrivilegeLevelForProject(
    userId,
    projectId,
    token
  )
  return privilegeLevel === PrivilegeLevels.OWNER
}

async function isUserSiteAdmin(userId) {
  if (!userId) {
    return false
  }
  if (!Settings.adminPrivilegeAvailable) return false
  const user = await User.findOne({ _id: userId }, { isAdmin: 1 }).exec()
  return hasAdminAccess(user)
}

async function canUserDeleteOrResolveThread(
  userId,
  projectId,
  docId,
  threadId,
  token
) {
  const privilegeLevel = await getPrivilegeLevelForProject(
    userId,
    projectId,
    token,
    { ignorePublicAccess: true }
  )
  if (
    privilegeLevel === PrivilegeLevels.OWNER ||
    privilegeLevel === PrivilegeLevels.READ_AND_WRITE
  ) {
    return true
  }

  if (privilegeLevel !== PrivilegeLevels.REVIEW) {
    return false
  }

  const comment = await DocumentUpdaterHandler.promises.getComment(
    projectId,
    docId,
    threadId
  )
  return comment.metadata.user_id === userId
}

module.exports = {
  canUserReadProject: callbackify(canUserReadProject),
  canUserWriteProjectContent: callbackify(canUserWriteProjectContent),
  canUserWriteOrReviewProjectContent: callbackify(
    canUserWriteOrReviewProjectContent
  ),
  canUserDeleteOrResolveThread: callbackify(canUserDeleteOrResolveThread),
  canUserWriteProjectSettings: callbackify(canUserWriteProjectSettings),
  canUserRenameProject: callbackify(canUserRenameProject),
  canUserAdminProject: callbackify(canUserAdminProject),
  getPrivilegeLevelForProject: callbackify(getPrivilegeLevelForProject),
  isRestrictedUser,
  isRestrictedUserForProject: callbackify(isRestrictedUserForProject),
  isUserSiteAdmin: callbackify(isUserSiteAdmin),
  promises: {
    canUserReadProject,
    canUserWriteProjectContent,
    canUserWriteOrReviewProjectContent,
    canUserDeleteOrResolveThread,
    canUserWriteProjectSettings,
    canUserRenameProject,
    canUserAdminProject,
    getPrivilegeLevelForProject,
    isRestrictedUserForProject,
    isUserSiteAdmin,
  },
}
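
A hedged sketch of consuming the promise API above; the ids and token are placeholders, not part of the commit:

const AuthorizationManager = require('./AuthorizationManager')

async function describeAccess(userId, projectId, token) {
  // one of 'owner', 'readAndWrite', 'review', 'readOnly' or false
  const level = await AuthorizationManager.promises.getPrivilegeLevelForProject(
    userId,
    projectId,
    token
  )
  const canRead = await AuthorizationManager.promises.canUserReadProject(
    userId,
    projectId,
    token
  )
  return { level, canRead }
}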
@@ -0,0 +1,292 @@
const AuthorizationManager = require('./AuthorizationManager')
const logger = require('@overleaf/logger')
const { ObjectId } = require('mongodb-legacy')
const Errors = require('../Errors/Errors')
const HttpErrorHandler = require('../Errors/HttpErrorHandler')
const AuthenticationController = require('../Authentication/AuthenticationController')
const SessionManager = require('../Authentication/SessionManager')
const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler')
const { expressify } = require('@overleaf/promise-utils')
const {
  canRedirectToAdminDomain,
} = require('../Helpers/AdminAuthorizationHelper')
const { getSafeAdminDomainRedirect } = require('../Helpers/UrlHelper')

function _handleAdminDomainRedirect(req, res) {
  if (canRedirectToAdminDomain(SessionManager.getSessionUser(req.session))) {
    logger.warn({ req }, 'redirecting admin user to admin domain')
    res.redirect(getSafeAdminDomainRedirect(req.originalUrl))
    return true
  }
  return false
}

async function ensureUserCanReadMultipleProjects(req, res, next) {
  const projectIds = (req.query.project_ids || '').split(',')
  const userId = _getUserId(req)
  for (const projectId of projectIds) {
    const token = TokenAccessHandler.getRequestToken(req, projectId)
    const canRead = await AuthorizationManager.promises.canUserReadProject(
      userId,
      projectId,
      token
    )
    if (!canRead) {
      return _redirectToRestricted(req, res, next)
    }
  }
  next()
}

async function blockRestrictedUserFromProject(req, res, next) {
  const projectId = _getProjectId(req)
  const userId = _getUserId(req)
  const token = TokenAccessHandler.getRequestToken(req, projectId)
  const isRestrictedUser =
    await AuthorizationManager.promises.isRestrictedUserForProject(
      userId,
      projectId,
      token
    )
  if (isRestrictedUser) {
    return HttpErrorHandler.forbidden(req, res)
  }
  next()
}

async function ensureUserCanReadProject(req, res, next) {
  const projectId = _getProjectId(req)
  const userId = _getUserId(req)
  const token = TokenAccessHandler.getRequestToken(req, projectId)
  const canRead = await AuthorizationManager.promises.canUserReadProject(
    userId,
    projectId,
    token
  )
  if (canRead) {
    logger.debug({ userId, projectId }, 'allowing user read access to project')
    return next()
  }
  logger.debug({ userId, projectId }, 'denying user read access to project')
  HttpErrorHandler.forbidden(req, res)
}

async function ensureUserCanWriteProjectSettings(req, res, next) {
  const projectId = _getProjectId(req)
  const userId = _getUserId(req)
  const token = TokenAccessHandler.getRequestToken(req, projectId)

  if (req.body.name != null) {
    const canRename = await AuthorizationManager.promises.canUserRenameProject(
      userId,
      projectId,
      token
    )
    if (!canRename) {
      return HttpErrorHandler.forbidden(req, res)
    }
  }

  const otherParams = Object.keys(req.body).filter(x => x !== 'name')
  if (otherParams.length > 0) {
    const canWrite =
      await AuthorizationManager.promises.canUserWriteProjectSettings(
        userId,
        projectId,
        token
      )
    if (!canWrite) {
      return HttpErrorHandler.forbidden(req, res)
    }
  }

  next()
}

async function ensureUserCanDeleteOrResolveThread(req, res, next) {
  const projectId = _getProjectId(req)
  const docId = _getDocId(req)
  const threadId = _getThreadId(req)
  const userId = _getUserId(req)
  const token = TokenAccessHandler.getRequestToken(req, projectId)
  const canDeleteThread =
    await AuthorizationManager.promises.canUserDeleteOrResolveThread(
      userId,
      projectId,
      docId,
      threadId,
      token
    )
  if (canDeleteThread) {
    logger.debug(
      { userId, projectId },
      'allowing user to delete or resolve a comment thread'
    )
    return next()
  }

  logger.debug(
    { userId, projectId, threadId },
    'denying user to delete or resolve a comment thread'
  )
  return HttpErrorHandler.forbidden(req, res)
}

async function ensureUserCanWriteProjectContent(req, res, next) {
  const projectId = _getProjectId(req)
  const userId = _getUserId(req)
  const token = TokenAccessHandler.getRequestToken(req, projectId)
  const canWrite =
    await AuthorizationManager.promises.canUserWriteProjectContent(
      userId,
      projectId,
      token
    )
  if (canWrite) {
    logger.debug(
      { userId, projectId },
      'allowing user write access to project content'
    )
    return next()
  }
  logger.debug(
    { userId, projectId },
    'denying user write access to project content'
  )
  HttpErrorHandler.forbidden(req, res)
}

async function ensureUserCanWriteOrReviewProjectContent(req, res, next) {
  const projectId = _getProjectId(req)
  const userId = _getUserId(req)
  const token = TokenAccessHandler.getRequestToken(req, projectId)

  const canWriteOrReviewProjectContent =
    await AuthorizationManager.promises.canUserWriteOrReviewProjectContent(
      userId,
      projectId,
      token
    )
  if (canWriteOrReviewProjectContent) {
    logger.debug(
      { userId, projectId },
      'allowing user write or review access to project content'
    )
    return next()
  }

  logger.debug(
    { userId, projectId },
    'denying user write or review access to project content'
  )
  return HttpErrorHandler.forbidden(req, res)
}

async function ensureUserCanAdminProject(req, res, next) {
  const projectId = _getProjectId(req)
  const userId = _getUserId(req)
  const token = TokenAccessHandler.getRequestToken(req, projectId)
  const canAdmin = await AuthorizationManager.promises.canUserAdminProject(
    userId,
    projectId,
    token
  )
  if (canAdmin) {
    logger.debug({ userId, projectId }, 'allowing user admin access to project')
    return next()
  }
  logger.debug({ userId, projectId }, 'denying user admin access to project')
  HttpErrorHandler.forbidden(req, res)
}

async function ensureUserIsSiteAdmin(req, res, next) {
  const userId = _getUserId(req)
  if (await AuthorizationManager.promises.isUserSiteAdmin(userId)) {
    logger.debug({ userId }, 'allowing user admin access to site')
    return next()
  }
  if (_handleAdminDomainRedirect(req, res)) return
  logger.debug({ userId }, 'denying user admin access to site')
  _redirectToRestricted(req, res, next)
}

function _getProjectId(req) {
  const projectId = req.params.project_id || req.params.Project_id
  if (!projectId) {
    throw new Error('Expected project_id in request parameters')
  }
  if (!ObjectId.isValid(projectId)) {
    throw new Errors.NotFoundError(`invalid projectId: ${projectId}`)
  }
  return projectId
}

function _getDocId(req) {
  const docId = req.params.doc_id
  if (!docId) {
    throw new Error('Expected doc_id in request parameters')
  }
  if (!ObjectId.isValid(docId)) {
    throw new Errors.NotFoundError(`invalid docId: ${docId}`)
  }
  return docId
}

function _getThreadId(req) {
  const threadId = req.params.thread_id
  if (!threadId) {
    throw new Error('Expected thread_id in request parameters')
  }
  if (!ObjectId.isValid(threadId)) {
    throw new Errors.NotFoundError(`invalid threadId: ${threadId}`)
  }
  return threadId
}

function _getUserId(req) {
  return (
    SessionManager.getLoggedInUserId(req.session) ||
    (req.oauth_user && req.oauth_user._id) ||
    null
  )
}

function _redirectToRestricted(req, res, next) {
  // TODO: move this to throwing ForbiddenError
  res.redirect(`/restricted?from=${encodeURIComponent(res.locals.currentUrl)}`)
}

function restricted(req, res, next) {
  if (SessionManager.isUserLoggedIn(req.session)) {
    return res.render('user/restricted', { title: 'restricted' })
  }
  const { from } = req.query
  logger.debug({ from }, 'redirecting to login')
  if (from) {
    AuthenticationController.setRedirectInSession(req, from)
  }
  res.redirect('/login')
}

module.exports = {
  ensureUserCanReadMultipleProjects: expressify(
    ensureUserCanReadMultipleProjects
  ),
  blockRestrictedUserFromProject: expressify(blockRestrictedUserFromProject),
  ensureUserCanReadProject: expressify(ensureUserCanReadProject),
  ensureUserCanWriteProjectSettings: expressify(
    ensureUserCanWriteProjectSettings
  ),
  ensureUserCanDeleteOrResolveThread: expressify(
    ensureUserCanDeleteOrResolveThread
  ),
  ensureUserCanWriteProjectContent: expressify(
    ensureUserCanWriteProjectContent
  ),
  ensureUserCanWriteOrReviewProjectContent: expressify(
    ensureUserCanWriteOrReviewProjectContent
  ),
  ensureUserCanAdminProject: expressify(ensureUserCanAdminProject),
  ensureUserIsSiteAdmin: expressify(ensureUserIsSiteAdmin),
  restricted,
}
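
A sketch of wiring these guards into a router; the route and handler are assumptions, not part of the commit:

const express = require('express')
const AuthorizationMiddleware = require('./AuthorizationMiddleware')

const router = express.Router()
// hypothetical route: the middleware resolves :project_id, the session user
// and any token before the handler runs
router.get(
  '/project/:project_id',
  AuthorizationMiddleware.ensureUserCanReadProject,
  (req, res) => res.sendStatus(200)
)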
@@ -0,0 +1,103 @@
const { ForbiddenError, UserNotFoundError } = require('../Errors/Errors')
const {
  getUserCapabilities,
  getUserRestrictions,
  combineGroupPolicies,
  combineAllowedProperties,
} = require('./PermissionsManager')
const { assertUserPermissions } = require('./PermissionsManager').promises
const Modules = require('../../infrastructure/Modules')
const { expressify } = require('@overleaf/promise-utils')
const Features = require('../../infrastructure/Features')

/**
 * Returns middleware that adds an `assertPermission` function to the request
 * object, for checking whether the user has a specific capability.
 * @returns {Function} The middleware function that adds the `assertPermission` function to the request object.
 */
function useCapabilities() {
  const middleware = async function (req, res, next) {
    // attach the user's capabilities to the request object
    req.capabilitySet = new Set()
    // provide a function to assert that a capability is present
    req.assertPermission = capability => {
      if (!req.capabilitySet.has(capability)) {
        throw new ForbiddenError(
          `user does not have permission for ${capability}`
        )
      }
    }
    if (!req.user) {
      return next()
    }
    try {
      let results = await Modules.promises.hooks.fire(
        'getGroupPolicyForUser',
        req.user
      )
      // merge array of all results from all modules
      results = results.flat()

      if (results.length > 0) {
        // get the combined group policy applying to the user
        const groupPolicies = results.map(result => result.groupPolicy)
        const combinedGroupPolicy = combineGroupPolicies(groupPolicies)
        // attach the new capabilities to the request object
        for (const cap of getUserCapabilities(combinedGroupPolicy)) {
          req.capabilitySet.add(cap)
        }
        // also attach the user's restrictions (the capabilities they don't have)
        req.userRestrictions = getUserRestrictions(combinedGroupPolicy)

        // attach allowed properties to the request object
        const allowedProperties = combineAllowedProperties(results)
        for (const [prop, value] of Object.entries(allowedProperties)) {
          req[prop] = value
        }
      }
      next()
    } catch (error) {
      if (error instanceof UserNotFoundError) {
        // the user is logged in but doesn't exist in the database
        // this can happen if the user has just deleted their account
        return next()
      } else {
        next(error)
      }
    }
  }
  return expressify(middleware)
}

/**
 * Returns middleware that checks whether the user has permission to access a
 * resource.
 * @param {...string} requiredCapabilities - the capabilities required to access the resource.
 * @returns {Function} The middleware function that checks if the user has the required capabilities.
 */
function requirePermission(...requiredCapabilities) {
  if (
    requiredCapabilities.length === 0 ||
    requiredCapabilities.some(capability => typeof capability !== 'string')
  ) {
    throw new Error('invalid required capabilities')
  }
  const doRequest = async function (req, res, next) {
    if (!Features.hasFeature('saas')) {
      return next()
    }
    if (!req.user) {
      return next(new Error('no user'))
    }
    try {
      await assertUserPermissions(req.user, requiredCapabilities)
      next()
    } catch (error) {
      next(error)
    }
  }
  return doRequest
}

module.exports = {
  requirePermission,
  useCapabilities,
}
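
A usage sketch for the two middleware factories above; the capability name and route are assumptions:

const express = require('express')
const {
  useCapabilities,
  requirePermission,
} = require('./PermissionsMiddleware')

const app = express()
// populate req.capabilitySet / req.assertPermission on every request
app.use(useCapabilities())
// block the route unless the (hypothetical) capability survives all policies
app.post('/user/emails', requirePermission('add-secondary-email'), (req, res) =>
  res.sendStatus(204)
)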
@@ -0,0 +1,480 @@
|
||||
/**
|
||||
* This module exports functions for managing permissions and policies.
|
||||
*
|
||||
* It provides a way to:
|
||||
*
|
||||
* - Register capabilities and policies
|
||||
* - Associate policies with custom validators
|
||||
* - Apply collections of policies to a user
|
||||
* - Check whether a user has a given capability
|
||||
* - Check whether a user complies with a given policy
|
||||
*
|
||||
* Capabilities: boolean values that represent whether a user is allowed to
|
||||
* perform a certain action or not. The capabilities are represented as a Set.
|
||||
* For example, to delete their account a user would need the
|
||||
* `delete-own-account` capability. A user starts with a set of default
|
||||
* capabilities that let them do all the things they can currently do in
|
||||
* Overleaf.
|
||||
*
|
||||
* Policy: a rule which specifies which capabilities will be removed from a user
|
||||
* when the policy is applied.
|
||||
*
|
||||
* For example, a policy `userCannotDeleteOwnAccount` is represented as
|
||||
* `{'delete-own-account' : false}` meaning that the `delete-own-account`
|
||||
* capability will be removed. A policy can remove more than one capability, and
|
||||
* more than one policy could apply to a user.
|
||||
*
|
||||
* Validator: a function that takes an object with user and subscription properties
|
||||
* and returns a boolean indicating whether the user satisfies the policy or not.
|
||||
* For example, a validator for the `userCannotHaveSecondaryEmail` policy would
|
||||
* check whether the user has more than one email address.
|
||||
*
|
||||
* Group Policies: a collection of policies with a setting indicating whether
|
||||
* they are enforced or not. Used to place restrictions on managed users in a
|
||||
* group.
|
||||
*
|
||||
* For example, a group policy could be
|
||||
*
|
||||
* {
|
||||
* "userCannotDeleteOwnAccount": true, // enforced
|
||||
* "userCannotHaveSecondaryEmail": false // not enforced
|
||||
* }
|
||||
*/
|
||||
|
||||
const { callbackify } = require('util')
|
||||
const { ForbiddenError } = require('../Errors/Errors')
|
||||
const Modules = require('../../infrastructure/Modules')
|
||||
|
||||
const POLICY_TO_CAPABILITY_MAP = new Map()
|
||||
const POLICY_TO_VALIDATOR_MAP = new Map()
|
||||
const DEFAULT_PERMISSIONS = new Map()
|
||||
const ALLOWED_PROPERTIES = new Set()
|
||||
|
||||
/**
|
||||
* Throws an error if the given capability is not registered.
|
||||
*
|
||||
* @private
|
||||
* @param {string} capability - The name of the capability to check.
|
||||
* @throws {Error} If the capability is not registered.
|
||||
*/
|
||||
function ensureCapabilityExists(capability) {
|
||||
if (!DEFAULT_PERMISSIONS.has(capability)) {
|
||||
throw new Error(`unknown capability: ${capability}`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates an group policy object
|
||||
*
|
||||
* @param {Object} policies - An object containing policy names and booleans
|
||||
* as key-value entries.
|
||||
* @throws {Error} if the `policies` object contains a policy that is not
|
||||
* registered, or the policy value is not a boolean
|
||||
*/
|
||||
function validatePolicies(policies) {
|
||||
for (const [policy, value] of Object.entries(policies)) {
|
||||
if (!POLICY_TO_CAPABILITY_MAP.has(policy)) {
|
||||
throw new Error(`unknown policy: ${policy}`)
|
||||
}
|
||||
if (typeof value !== 'boolean') {
|
||||
throw new Error(`policy value must be a boolean: ${policy} = ${value}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a new capability with the given name and options.
|
||||
*
|
||||
* @param {string} name - The name of the capability to register.
|
||||
* @param {Object} options - The options for the capability.
|
||||
* @param {boolean} options.default - The default value for the capability
|
||||
* (required).
|
||||
* @throws {Error} If the default value is not a boolean or if the capability is
|
||||
* already registered.
|
||||
*/
|
||||
function registerCapability(name, options) {
|
||||
// check that the default value is a boolean
|
||||
const defaultValue = options?.default
|
||||
if (typeof defaultValue !== 'boolean') {
|
||||
throw new Error('default value must be a boolean')
|
||||
}
|
||||
if (DEFAULT_PERMISSIONS.has(name)) {
|
||||
throw new Error(`capability already registered: ${name}`)
|
||||
}
|
||||
DEFAULT_PERMISSIONS.set(name, defaultValue)
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a new policy with the given name, capabilities, and options.
|
||||
*
|
||||
* @param {string} name - The name of the policy to register.
|
||||
* @param {Object} capabilities - The capabilities for the policy.
|
||||
* @param {Object} [options] - The options for the policy.
|
||||
* @param {Function?} [options.validator] - The optional validator function for the
|
||||
* policy.
|
||||
* @throws {Error} If the policy is already registered or if a capability is not
|
||||
* a boolean or is unknown.
|
||||
*/
|
||||
function registerPolicy(name, capabilities, options = {}) {
|
||||
const { validator } = options
|
||||
// check that the only options provided are capabilities and validators
|
||||
// FIXME: maybe use a schema validator here?
|
||||
if (POLICY_TO_CAPABILITY_MAP.has(name)) {
|
||||
throw new Error(`policy already registered: ${name}`)
|
||||
}
|
||||
// check that all the entries in the capability set exist and are booleans
|
||||
for (const [capabilityName, capabilityValue] of Object.entries(
|
||||
capabilities
|
||||
)) {
|
||||
// check that the capability exists (look in the default permissions)
|
||||
if (!DEFAULT_PERMISSIONS.has(capabilityName)) {
|
||||
throw new Error(`unknown capability: ${capabilityName}`)
|
||||
}
|
||||
// check that the value is a boolean
|
||||
if (typeof capabilityValue !== 'boolean') {
|
||||
throw new Error(
|
||||
`capability value must be a boolean: ${capabilityName} = ${capabilityValue}`
|
||||
)
|
||||
}
|
||||
}
|
||||
// set the policy capabilities
|
||||
POLICY_TO_CAPABILITY_MAP.set(name, new Map(Object.entries(capabilities)))
|
||||
|
||||
// set the policy validator (if present)
|
||||
if (validator) {
|
||||
POLICY_TO_VALIDATOR_MAP.set(name, validator)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers an allowed property that can be added to the request object.
|
||||
*
|
||||
* @param {string} name - The name of the property to register.
|
||||
* @returns {void}
|
||||
*/
|
||||
function registerAllowedProperty(name) {
|
||||
ALLOWED_PROPERTIES.add(name)
|
||||
}
|
||||
|
||||
/**
|
||||
* returns the set of allowed properties that have been registered
|
||||
*
|
||||
* @returns {Set} ALLOWED_PROPERTIES
|
||||
*/
|
||||
function getAllowedProperties() {
|
||||
return ALLOWED_PROPERTIES
|
||||
}
|
||||
/**
|
||||
* Returns an array of policy names that are enforced based on the provided
|
||||
* group policy object.
|
||||
*
|
||||
* @private
|
||||
* @param {Object} groupPolicy - The group policy object to check.
|
||||
* @returns {Array} An array of policy names that are enforced.
|
||||
*/
|
||||
function getEnforcedPolicyNames(groupPolicy = {}) {
|
||||
if (!groupPolicy) {
|
||||
return []
|
||||
}
|
||||
return Object.keys(
|
||||
typeof groupPolicy.toObject === 'function'
|
||||
? groupPolicy.toObject()
|
||||
: groupPolicy
|
||||
).filter(
|
||||
policyName =>
|
||||
!['__v', '_id'].includes(policyName) && groupPolicy[policyName] !== false
|
||||
) // filter out the policies that are not enforced
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the value of the specified capability for the given policy.
|
||||
*
|
||||
* @private
|
||||
* @param {string} policyName - The name of the policy to retrieve the
|
||||
* capability value from.
|
||||
* @param {string} capability - The name of the capability to retrieve the value
|
||||
* for.
|
||||
* @returns {boolean | undefined} The value of the capability for the policy, or
|
||||
* undefined if the policy or capability is not found.
|
||||
*/
|
||||
function getCapabilityValueFromPolicy(policyName, capability) {
|
||||
return POLICY_TO_CAPABILITY_MAP.get(policyName)?.get(capability)
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the default value for the specified capability.
|
||||
*
|
||||
* @private
|
||||
* @param {string} capability - The name of the capability to retrieve the
|
||||
* default value for.
|
||||
* @returns {boolean | undefined} The default value for the capability, or
|
||||
* undefined if the capability is not found.
|
||||
*/
|
||||
function getDefaultPermission(capability) {
|
||||
return DEFAULT_PERMISSIONS.get(capability)
|
||||
}
|
||||
|
||||
function getValidatorFromPolicy(policyName) {
|
||||
return POLICY_TO_VALIDATOR_MAP.get(policyName)
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a set of default capabilities based on the DEFAULT_PERMISSIONS map.
|
||||
*
|
||||
* @private
|
||||
* @returns {Set} A set of default capabilities.
|
||||
*/
|
||||
function getDefaultCapabilities() {
|
||||
const defaultCapabilities = new Set()
|
||||
for (const [
|
||||
capabilityName,
|
||||
capabilityValue,
|
||||
] of DEFAULT_PERMISSIONS.entries()) {
|
||||
if (capabilityValue === true) {
|
||||
defaultCapabilities.add(capabilityName)
|
||||
}
|
||||
}
|
||||
return defaultCapabilities
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies a given policy to a set of capabilities, to remove those capabilities
|
||||
* which are not allowed by the policy.
|
||||
*
|
||||
* @private
|
||||
* @param {Set} capabilitySet - The set of capabilities to apply the policy to.
|
||||
* @param {string} policyName - The name of the policy to apply.
|
||||
* @throws {Error} If the policy is unknown.
|
||||
*/
|
||||
function applyPolicy(capabilitySet, policyName) {
|
||||
const policyCapabilities = POLICY_TO_CAPABILITY_MAP.get(policyName)
|
||||
if (!policyCapabilities) {
|
||||
throw new Error(`unknown policy: ${policyName}`)
|
||||
}
|
||||
for (const [
|
||||
capabilityName,
|
||||
capabilityValue,
|
||||
] of policyCapabilities.entries()) {
|
||||
if (capabilityValue !== true) {
|
||||
capabilitySet.delete(capabilityName)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a set of capabilities that a user has based on their group policy.
|
||||
*
|
||||
* @param {Object} groupPolicy - The group policy object to check.
|
||||
* @returns {Set} A set of capabilities that the user has, based on their group
|
||||
* policy.
|
||||
* @throws {Error} If the policy is unknown.
|
||||
*/
|
||||
function getUserCapabilities(groupPolicy) {
|
||||
const userCapabilities = getDefaultCapabilities()
|
||||
const enforcedPolicyNames = getEnforcedPolicyNames(groupPolicy)
|
||||
for (const enforcedPolicyName of enforcedPolicyNames) {
|
||||
applyPolicy(userCapabilities, enforcedPolicyName)
|
||||
}
|
||||
return userCapabilities
|
||||
}
|
||||
|
||||
/**
|
||||
* Combines an array of group policies into a single policy object.
|
||||
*
|
||||
* @param {Array} groupPolicies - An array of group policies.
|
||||
* @returns {Object} - The combined group policy object.
|
||||
*/
|
||||
function combineGroupPolicies(groupPolicies) {
|
||||
const combinedGroupPolicy = {}
|
||||
for (const groupPolicy of groupPolicies) {
|
||||
const enforcedPolicyNames = getEnforcedPolicyNames(groupPolicy)
|
||||
for (const enforcedPolicyName of enforcedPolicyNames) {
|
||||
combinedGroupPolicy[enforcedPolicyName] = true
|
||||
}
|
||||
}
|
||||
return combinedGroupPolicy
|
||||
}
|
||||
|
||||
/**
|
||||
* Combines the allowed properties from an array of property objects.
|
||||
*
|
||||
* @param {Array<Object>} propertyObjects - An array of property objects.
|
||||
* @returns {Object} - An object containing the combined allowed properties.
|
||||
*/
|
||||
function combineAllowedProperties(propertyObjects) {
|
||||
const userProperties = {}
|
||||
for (const properties of propertyObjects) {
|
||||
for (const [key, value] of Object.entries(properties)) {
|
||||
if (ALLOWED_PROPERTIES.has(key)) {
|
||||
userProperties[key] ??= value
|
||||
}
|
||||
}
|
||||
}
|
||||
return userProperties
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a set of capabilities that a user does not have based on their group policy.
|
||||
*
|
||||
* @param {Object} groupPolicy - The group policy object to check.
|
||||
* @returns {Set} A set of capabilities that the user does not have, based on their group
|
||||
* policy.
|
||||
* @throws {Error} If the policy is unknown.
|
||||
*/
|
||||
function getUserRestrictions(groupPolicy) {
|
||||
const userCapabilities = getUserCapabilities(groupPolicy)
|
||||
const userRestrictions = getDefaultCapabilities()
|
||||
for (const capability of userCapabilities) {
|
||||
userRestrictions.delete(capability)
|
||||
}
|
||||
return userRestrictions
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a user has permission for a given capability based on their group
|
||||
* policy.
|
||||
*
|
||||
* @param {Object} groupPolicy - The group policy object for the user.
|
||||
* @param {string} capability - The name of the capability to check permission
|
||||
* for.
|
||||
* @returns {boolean} True if the user has permission for the capability, false
|
||||
* otherwise.
|
||||
* @throws {Error} If the capability does not exist.
|
||||
*/
|
||||
function hasPermission(groupPolicy, capability) {
  ensureCapabilityExists(capability)
  // look through all the entries in the group policy and see if any of them apply to the capability
  const results = getEnforcedPolicyNames(groupPolicy).map(userPolicyName =>
    getCapabilityValueFromPolicy(userPolicyName, capability)
  )
  // if there are no results, or none of the policies apply, return the default permission
  if (results.length === 0 || results.every(result => result === undefined)) {
    return getDefaultPermission(capability)
  }
  // only allow the permission if all the results are true, otherwise deny it
  return results.every(result => result === true)
}

/**
 * Asynchronously checks which policies a user complies with using the
 * applicable validators. Each validator is an async function that takes an object
 * with user, groupPolicy, and subscription properties and returns a boolean.
 *
 * @param {Object} options - The options object.
 * @param {Object} options.user - The user object to check.
 * @param {Object} options.groupPolicy - The group policy object to check.
 * @param {Object} options.subscription - The subscription object for the group policy.
 * @returns {Promise<Map>} A promise that resolves with a Map object containing
 * the validation status for each enforced policy. The keys of the Map are the
 * enforced policy names, and the values are booleans indicating whether the
 * user complies with the policy.
 */
async function getUserValidationStatus({ user, groupPolicy, subscription }) {
  // find all the enforced policies for the user
  const enforcedPolicyNames = getEnforcedPolicyNames(groupPolicy)
  // for each enforced policy, we have a list of capabilities with expected values
  // some of those capabilities have validators
  // we need to run the validators and check the results to see if the user complies with the policy
  const userValidationStatus = new Map()
  for (const enforcedPolicyName of enforcedPolicyNames) {
    const validator = getValidatorFromPolicy(enforcedPolicyName)
    if (validator) {
      userValidationStatus.set(
        enforcedPolicyName,
        await validator({ user, subscription })
      )
    }
  }
  return userValidationStatus
}

/**
 * Asserts that a user has permission for a given set of capabilities,
 * as set out in both their current group subscription and any institutions they are affiliated with,
 * throwing a ForbiddenError if they do not.
 *
 * @param {Object} user - The user object to retrieve the group policy for.
 * Only the user's _id is required
 * @param {Array} capabilities - The list of the capabilities to check permission for.
 * @returns {Promise<void>}
 * @throws {Error} If the user does not have permission
 */
async function assertUserPermissions(user, requiredCapabilities) {
  const hasAllPermissions = await checkUserPermissions(
    user,
    requiredCapabilities
  )
  if (!hasAllPermissions) {
    throw new ForbiddenError(
      `user does not have one or more permissions within ${requiredCapabilities}`
    )
  }
}

/**
 * Checks if a user has permission for a given set of capabilities,
 * as set out in both their current group subscription and any institutions they are affiliated with.
 *
 * @param {Object} user - The user object to retrieve the group policy for.
 * Only the user's _id is required
 * @param {Array} capabilities - The list of the capabilities to check permission for.
 * @returns {Promise<Boolean>} - true if the user has all permissions, false if not
 */
async function checkUserPermissions(user, requiredCapabilities) {
  let results = await Modules.promises.hooks.fire('getGroupPolicyForUser', user)
  results = results.flat()
  if (!results?.length) return true

  // get the combined group policy applying to the user
  const groupPolicies = results.map(result => result.groupPolicy)
  const combinedGroupPolicy = combineGroupPolicies(groupPolicies)
  for (const requiredCapability of requiredCapabilities) {
    // deny as soon as any required capability is missing
    if (!hasPermission(combinedGroupPolicy, requiredCapability)) {
      return false
    }
  }
  return true
}

/**
 * Checks if all collaborators of a given project have the specified capability, including the owner.
 *
 * @async
 * @function checkUserListPermissions
 * @param {Object[]} userList - An array of all users to check permissions for
 * @param {Array} capabilities - The list of the capabilities to check permission for.
 * @returns {Promise<boolean>} - A promise that resolves to `true` if all collaborators have the specified capability, otherwise `false`.
 */
async function checkUserListPermissions(userList, capabilities) {
  for (const user of userList) {
    // mimic a user object with only an id, since we need it to fetch permissions
    const allowed = await checkUserPermissions(user, capabilities)
    if (!allowed) {
      return false
    }
  }
  return true
}

module.exports = {
  validatePolicies,
  registerCapability,
  registerPolicy,
  registerAllowedProperty,
  combineGroupPolicies,
  combineAllowedProperties,
  getAllowedProperties,
  hasPermission,
  getUserCapabilities,
  getUserRestrictions,
  getUserValidationStatus: callbackify(getUserValidationStatus),
  checkCollaboratorsPermission: callbackify(checkUserListPermissions),
  checkUserPermissions: callbackify(checkUserPermissions),
  promises: {
    assertUserPermissions,
    getUserValidationStatus,
    checkUserListPermissions,
    checkUserPermissions,
  },
}
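A minimal usage sketch of the all-policies-must-agree semantics above. The require path and the 'chat' capability name are assumptions for illustration, not taken from this commit:

const PermissionsManager = require('./PermissionsManager') // assumed module name

async function ensureUserCanChat(user) {
  // Resolves when every enforced policy allows the capability (or none of
  // them mention it and the default permission applies); otherwise throws
  // the ForbiddenError raised by assertUserPermissions.
  await PermissionsManager.promises.assertUserPermissions(user, ['chat'])
}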
@@ -0,0 +1,9 @@
const PrivilegeLevels = {
  NONE: false,
  READ_ONLY: 'readOnly',
  READ_AND_WRITE: 'readAndWrite',
  REVIEW: 'review',
  OWNER: 'owner',
}

module.exports = PrivilegeLevels
@@ -0,0 +1,17 @@
/**
 * Note:
 * It used to be that `project.publicAccessLevel` could be set to `private`,
 * `readOnly` or `readAndWrite`, the latter of which made the project publicly
 * accessible.
 *
 * This system was replaced with "link sharing", thereafter the valid values are
 * `private` or `tokenBased`. While it is no longer possible to set
 * `publicAccessLevel` to the legacy values, there are projects in the system
 * that already have those values set.
 */
module.exports = {
  READ_ONLY: 'readOnly', // LEGACY
  READ_AND_WRITE: 'readAndWrite', // LEGACY
  PRIVATE: 'private',
  TOKEN_BASED: 'tokenBased',
}
5
services/web/app/src/Features/Authorization/Sources.js
Normal file
@@ -0,0 +1,5 @@
module.exports = {
  INVITE: 'invite',
  TOKEN: 'token',
  OWNER: 'owner',
}
@@ -0,0 +1,57 @@
import BetaProgramHandler from './BetaProgramHandler.mjs'
import OError from '@overleaf/o-error'
import UserGetter from '../User/UserGetter.js'
import logger from '@overleaf/logger'
import SessionManager from '../Authentication/SessionManager.js'
import SplitTestSessionHandler from '../SplitTests/SplitTestSessionHandler.js'
import { expressify } from '@overleaf/promise-utils'

async function optIn(req, res) {
  const userId = SessionManager.getLoggedInUserId(req.session)
  await BetaProgramHandler.promises.optIn(userId)
  try {
    await SplitTestSessionHandler.promises.sessionMaintenance(req, null)
  } catch (error) {
    logger.error(
      { err: error },
      'Failed to perform session maintenance after beta program opt in'
    )
  }
  res.redirect('/beta/participate')
}

async function optOut(req, res) {
  const userId = SessionManager.getLoggedInUserId(req.session)
  await BetaProgramHandler.promises.optOut(userId)
  try {
    await SplitTestSessionHandler.promises.sessionMaintenance(req, null)
  } catch (error) {
    logger.error(
      { err: error },
      'Failed to perform session maintenance after beta program opt out'
    )
  }
  res.redirect('/beta/participate')
}

async function optInPage(req, res) {
  const userId = SessionManager.getLoggedInUserId(req.session)
  let user
  try {
    user = await UserGetter.promises.getUser(userId, { betaProgram: 1 })
  } catch (error) {
    throw OError.tag(error, 'error fetching user', {
      userId,
    })
  }
  res.render('beta_program/opt_in', {
    title: 'sharelatex_beta_program',
    user,
  })
}

export default {
  optIn: expressify(optIn),
  optOut: expressify(optOut),
  optInPage: expressify(optInPage),
}
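The controllers above wrap every async handler in expressify. A rough sketch of what that wrapper does, assuming @overleaf/promise-utils follows the conventional implementation (not verified against this commit):

// expressify(fn) adapts an async (req, res, next) handler to Express: a
// rejected promise is passed to next(), reaching the error middleware
// instead of becoming an unhandled rejection.
function expressify(fn) {
  return (req, res, next) => fn(req, res, next).catch(next)
}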
@@ -0,0 +1,35 @@
import { callbackify } from 'node:util'
import metrics from '@overleaf/metrics'
import UserUpdater from '../User/UserUpdater.js'
import AnalyticsManager from '../Analytics/AnalyticsManager.js'

async function optIn(userId) {
  await UserUpdater.promises.updateUser(userId, { $set: { betaProgram: true } })
  metrics.inc('beta-program.opt-in')
  AnalyticsManager.setUserPropertyForUserInBackground(
    userId,
    'beta-program',
    true
  )
}

async function optOut(userId) {
  await UserUpdater.promises.updateUser(userId, {
    $set: { betaProgram: false },
  })
  metrics.inc('beta-program.opt-out')
  AnalyticsManager.setUserPropertyForUserInBackground(
    userId,
    'beta-program',
    false
  )
}

export default {
  optIn: callbackify(optIn),
  optOut: callbackify(optOut),
  promises: {
    optIn,
    optOut,
  },
}
@@ -0,0 +1,86 @@
const OError = require('@overleaf/o-error')
const { URL } = require('url')
const settings = require('@overleaf/settings')
const logger = require('@overleaf/logger')
const V1Api = require('../V1/V1Api')
const sanitizeHtml = require('sanitize-html')
const { promisify } = require('@overleaf/promise-utils')

module.exports = {
  getBrandVariationById,
  promises: {
    getBrandVariationById: promisify(getBrandVariationById),
  },
}

function getBrandVariationById(brandVariationId, callback) {
  if (brandVariationId == null || brandVariationId === '') {
    return callback(new Error('Branding variation id not provided'))
  }
  logger.debug({ brandVariationId }, 'fetching brand variation details from v1')
  V1Api.request(
    {
      uri: `/api/v2/brand_variations/${brandVariationId}`,
    },
    function (error, response, brandVariationDetails) {
      if (error != null) {
        OError.tag(error, 'error getting brand variation details', {
          brandVariationId,
        })
        return callback(error)
      }
      formatBrandVariationDetails(brandVariationDetails)
      sanitizeBrandVariationDetails(brandVariationDetails)
      callback(null, brandVariationDetails)
    }
  )
}

function formatBrandVariationDetails(details) {
  if (details.export_url != null) {
    details.export_url = setV1AsHostIfRelativeURL(details.export_url)
  }
  if (details.home_url != null) {
    details.home_url = setV1AsHostIfRelativeURL(details.home_url)
  }
  if (details.logo_url != null) {
    details.logo_url = setV1AsHostIfRelativeURL(details.logo_url)
  }
  if (details.journal_guidelines_url != null) {
    details.journal_guidelines_url = setV1AsHostIfRelativeURL(
      details.journal_guidelines_url
    )
  }
  if (details.journal_cover_url != null) {
    details.journal_cover_url = setV1AsHostIfRelativeURL(
      details.journal_cover_url
    )
  }
  if (details.submission_confirmation_page_logo_url != null) {
    details.submission_confirmation_page_logo_url = setV1AsHostIfRelativeURL(
      details.submission_confirmation_page_logo_url
    )
  }
  if (details.publish_menu_icon != null) {
    details.publish_menu_icon = setV1AsHostIfRelativeURL(
      details.publish_menu_icon
    )
  }
}

function sanitizeBrandVariationDetails(details) {
  if (details.submit_button_html) {
    details.submit_button_html = sanitizeHtml(
      details.submit_button_html,
      settings.modules.sanitize.options
    )
  }
}

function setV1AsHostIfRelativeURL(urlString) {
  // The second argument is the base URL to resolve against if the first argument is not absolute.
  // As it only applies if the first argument is not absolute, we can use it to transform relative URLs into
  // absolute ones using v1 as the host. If the URL is absolute (e.g. a filepicker one), then the base
  // argument is just ignored
  return new URL(urlString, settings.apis.v1.publicUrl).href
}
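A quick illustration of the WHATWG URL behaviour that setV1AsHostIfRelativeURL relies on (the hosts here are made up):

const { URL } = require('url')

// Relative input: resolved against the base (the v1 host in production).
new URL('/logos/acme.png', 'https://v1.example.com').href
// => 'https://v1.example.com/logos/acme.png'

// Absolute input: the base argument is ignored.
new URL('https://cdn.example.com/x.png', 'https://v1.example.com').href
// => 'https://cdn.example.com/x.png'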
119
services/web/app/src/Features/Captcha/CaptchaMiddleware.js
Normal file
@@ -0,0 +1,119 @@
const { fetchJson } = require('@overleaf/fetch-utils')
const logger = require('@overleaf/logger')
const Settings = require('@overleaf/settings')
const Metrics = require('@overleaf/metrics')
const OError = require('@overleaf/o-error')
const DeviceHistory = require('./DeviceHistory')
const AuthenticationController = require('../Authentication/AuthenticationController')
const { expressify } = require('@overleaf/promise-utils')
const EmailsHelper = require('../Helpers/EmailHelper')

function respondInvalidCaptcha(req, res) {
  res.status(400).json({
    errorReason: 'cannot_verify_user_not_robot',
    message: {
      text: req.i18n.translate('cannot_verify_user_not_robot'),
    },
  })
}

async function initializeDeviceHistory(req) {
  req.deviceHistory = new DeviceHistory()
  try {
    await req.deviceHistory.parse(req)
  } catch (err) {
    logger.err({ err }, 'cannot parse deviceHistory')
  }
}

async function canSkipCaptcha(req, res) {
  const trustedUser =
    req.body?.email && Settings.recaptcha.trustedUsers.includes(req.body.email)
  if (trustedUser) {
    return res.json(true)
  }
  await initializeDeviceHistory(req)
  const canSkip = req.deviceHistory.has(req.body?.email)
  Metrics.inc('captcha_pre_flight', 1, {
    status: canSkip ? 'skipped' : 'missing',
  })
  res.json(canSkip)
}

function validateCaptcha(action) {
  return expressify(async function (req, res, next) {
    const email = EmailsHelper.parseEmail(req.body?.email)
    const trustedUser =
      email &&
      (Settings.recaptcha.trustedUsers.includes(email) ||
        Settings.recaptcha.trustedUsersRegex?.test(email))
    if (!Settings.recaptcha?.siteKey || Settings.recaptcha.disabled[action]) {
      if (action === 'login') {
        AuthenticationController.setAuditInfo(req, { captcha: 'disabled' })
      }
      Metrics.inc('captcha', 1, { path: action, status: 'disabled' })
      return next()
    }
    if (trustedUser) {
      if (action === 'login') {
        AuthenticationController.setAuditInfo(req, { captcha: 'trusted' })
      }
      Metrics.inc('captcha', 1, { path: action, status: 'trusted' })
      return next()
    }
    const reCaptchaResponse = req.body['g-recaptcha-response']
    if (action === 'login') {
      await initializeDeviceHistory(req)
      const fromKnownDevice = req.deviceHistory.has(email)
      AuthenticationController.setAuditInfo(req, { fromKnownDevice })
      if (!reCaptchaResponse && fromKnownDevice) {
        // The user has previously logged in from this device, which required
        // solving a captcha or keeping the device history alive.
        // We can skip checking the (missing) captcha response.
        AuthenticationController.setAuditInfo(req, { captcha: 'skipped' })
        Metrics.inc('captcha', 1, { path: action, status: 'skipped' })
        return next()
      }
    }
    if (!reCaptchaResponse) {
      Metrics.inc('captcha', 1, { path: action, status: 'missing' })
      return respondInvalidCaptcha(req, res)
    }

    let body
    try {
      body = await fetchJson(Settings.recaptcha.endpoint, {
        method: 'POST',
        body: new URLSearchParams([
          ['secret', Settings.recaptcha.secretKey],
          ['response', reCaptchaResponse],
        ]),
      })
    } catch (err) {
      Metrics.inc('captcha', 1, { path: action, status: 'error' })
      throw OError.tag(err, 'failed recaptcha siteverify request', {
        body: err.body,
      })
    }

    if (!body.success) {
      logger.warn(
        { statusCode: 200, body },
        'failed recaptcha siteverify request'
      )
      Metrics.inc('captcha', 1, { path: action, status: 'failed' })
      return respondInvalidCaptcha(req, res)
    }
    Metrics.inc('captcha', 1, { path: action, status: 'solved' })
    if (action === 'login') {
      AuthenticationController.setAuditInfo(req, { captcha: 'solved' })
    }
    next()
  })
}

module.exports = {
  respondInvalidCaptcha,
  validateCaptcha,
  canSkipCaptcha: expressify(canSkipCaptcha),
}
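A sketch of how the middleware factory would typically be mounted; the route and downstream handler are assumptions for illustration, not part of this commit:

const CaptchaMiddleware = require('./CaptchaMiddleware')

// validateCaptcha is parameterised by an action name so that metrics and the
// per-action kill switch (Settings.recaptcha.disabled[action]) can tell
// login apart from other flows.
app.post(
  '/login',
  CaptchaMiddleware.validateCaptcha('login'),
  loginHandler // hypothetical downstream handler
)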
103
services/web/app/src/Features/Captcha/DeviceHistory.js
Normal file
@@ -0,0 +1,103 @@
const crypto = require('crypto')
const jose = require('jose')
const Metrics = require('@overleaf/metrics')
const Settings = require('@overleaf/settings')

const COOKIE_NAME = Settings.deviceHistory.cookieName
const ENTRY_EXPIRY = Settings.deviceHistory.entryExpiry
const MAX_ENTRIES = Settings.deviceHistory.maxEntries

let SECRET
if (Settings.deviceHistory.secret) {
  SECRET = crypto.createSecretKey(
    Buffer.from(Settings.deviceHistory.secret, 'hex')
  )
}
const CONTENT_ENCRYPTION_ALGORITHM = 'A256GCM'
const KEY_MANAGEMENT_ALGORITHM = 'A256GCMKW'
const ENCRYPTION_HEADER = {
  alg: KEY_MANAGEMENT_ALGORITHM,
  enc: CONTENT_ENCRYPTION_ALGORITHM,
}
const DECRYPTION_OPTIONS = {
  contentEncryptionAlgorithms: [CONTENT_ENCRYPTION_ALGORITHM],
  keyManagementAlgorithms: [KEY_MANAGEMENT_ALGORITHM],
}

const ENCODER = new TextEncoder()
const DECODER = new TextDecoder()

class DeviceHistory {
  constructor() {
    this.entries = []
  }

  has(email) {
    return this.entries.some(entry => entry.e === email)
  }

  add(email) {
    // Entries are sorted by age, starting from oldest (idx 0) to newest.
    // When parsing/serializing we are looking at the last n=MAX_ENTRIES entries
    // from the list and discard any other stale entries.
    this.entries = this.entries.filter(entry => entry.e !== email)
    this.entries.push({ e: email, t: Date.now() })
  }

  async serialize(res) {
    let v = ''
    if (this.entries.length > 0 && SECRET) {
      v = await new jose.CompactEncrypt(
        ENCODER.encode(JSON.stringify(this.entries.slice(-MAX_ENTRIES)))
      )
        .setProtectedHeader(ENCRYPTION_HEADER)
        .encrypt(SECRET)
    }

    const options = {
      domain: Settings.cookieDomain,
      maxAge: ENTRY_EXPIRY,
      secure: Settings.secureCookie,
      sameSite: Settings.sameSiteCookie,
      httpOnly: true,
      path: '/login',
    }
    if (v) {
      res.cookie(COOKIE_NAME, v, options)
    } else {
      options.maxAge = -1
      res.clearCookie(COOKIE_NAME, options)
    }
  }

  async parse(req) {
    const blob = req.cookies[COOKIE_NAME]
    if (!blob || !SECRET) {
      Metrics.inc('device_history', 1, { status: 'missing' })
      return
    }
    try {
      const { plaintext } = await jose.compactDecrypt(
        blob,
        SECRET,
        DECRYPTION_OPTIONS
      )
      const minTimestamp = Date.now() - ENTRY_EXPIRY
      this.entries = JSON.parse(DECODER.decode(plaintext))
        .slice(-MAX_ENTRIES)
        .filter(entry => entry.t > minTimestamp)
    } catch (err) {
      Metrics.inc('device_history', 1, { status: 'failure' })
      throw err
    }
    if (this.entries.length === MAX_ENTRIES) {
      // Track hitting the limit, we might need to increase the limit.
      Metrics.inc('device_history_at_limit')
    }
    // Collect quantiles of the size
    Metrics.summary('device_history_size', this.entries.length)
    Metrics.inc('device_history', 1, { status: 'success' })
  }
}

module.exports = DeviceHistory
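The cookie is a compact JWE: the entries are JSON-encoded, encrypted with AES-256-GCM, and the content key is wrapped with A256GCMKW, so the browser can neither read nor forge its own history. A standalone round-trip sketch against the jose API, using a locally generated key in place of Settings.deviceHistory.secret:

const crypto = require('crypto')
const jose = require('jose')

async function demo() {
  const secret = crypto.createSecretKey(crypto.randomBytes(32))
  const entries = [{ e: 'user@example.com', t: Date.now() }]

  // Encrypt, as serialize() does.
  const blob = await new jose.CompactEncrypt(
    new TextEncoder().encode(JSON.stringify(entries))
  )
    .setProtectedHeader({ alg: 'A256GCMKW', enc: 'A256GCM' })
    .encrypt(secret)

  // Decrypt, as parse() does; a tampered blob throws here.
  const { plaintext } = await jose.compactDecrypt(blob, secret)
  console.log(JSON.parse(new TextDecoder().decode(plaintext)))
}

demo()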
166
services/web/app/src/Features/Chat/ChatApiHandler.js
Normal file
@@ -0,0 +1,166 @@
// @ts-check

const { fetchJson, fetchNothing } = require('@overleaf/fetch-utils')
const settings = require('@overleaf/settings')
const { callbackify } = require('util')

async function getThreads(projectId) {
  return await fetchJson(chatApiUrl(`/project/${projectId}/threads`))
}

async function destroyProject(projectId) {
  await fetchNothing(chatApiUrl(`/project/${projectId}`), { method: 'DELETE' })
}

async function sendGlobalMessage(projectId, userId, content) {
  const message = await fetchJson(
    chatApiUrl(`/project/${projectId}/messages`),
    {
      method: 'POST',
      json: { user_id: userId, content },
    }
  )
  return message
}

async function getGlobalMessages(projectId, limit, before) {
  const url = chatApiUrl(`/project/${projectId}/messages`)
  if (limit != null) {
    url.searchParams.set('limit', limit)
  }
  if (before != null) {
    url.searchParams.set('before', before)
  }

  return await fetchJson(url)
}

async function sendComment(projectId, threadId, userId, content) {
  const comment = await fetchJson(
    chatApiUrl(`/project/${projectId}/thread/${threadId}/messages`),
    {
      method: 'POST',
      json: { user_id: userId, content },
    }
  )
  return comment
}

async function resolveThread(projectId, threadId, userId) {
  await fetchNothing(
    chatApiUrl(`/project/${projectId}/thread/${threadId}/resolve`),
    {
      method: 'POST',
      json: { user_id: userId },
    }
  )
}

async function reopenThread(projectId, threadId) {
  await fetchNothing(
    chatApiUrl(`/project/${projectId}/thread/${threadId}/reopen`),
    { method: 'POST' }
  )
}

async function deleteThread(projectId, threadId) {
  await fetchNothing(chatApiUrl(`/project/${projectId}/thread/${threadId}`), {
    method: 'DELETE',
  })
}

async function editMessage(projectId, threadId, messageId, userId, content) {
  await fetchNothing(
    chatApiUrl(
      `/project/${projectId}/thread/${threadId}/messages/${messageId}/edit`
    ),
    {
      method: 'POST',
      json: { content, userId },
    }
  )
}

async function deleteMessage(projectId, threadId, messageId) {
  await fetchNothing(
    chatApiUrl(
      `/project/${projectId}/thread/${threadId}/messages/${messageId}`
    ),
    { method: 'DELETE' }
  )
}

async function deleteUserMessage(projectId, threadId, userId, messageId) {
  await fetchNothing(
    chatApiUrl(
      `/project/${projectId}/thread/${threadId}/user/${userId}/messages/${messageId}`
    ),
    { method: 'DELETE' }
  )
}

async function getResolvedThreadIds(projectId) {
  const body = await fetchJson(
    chatApiUrl(`/project/${projectId}/resolved-thread-ids`)
  )
  return body.resolvedThreadIds
}

async function duplicateCommentThreads(projectId, threads) {
  return await fetchJson(
    chatApiUrl(`/project/${projectId}/duplicate-comment-threads`),
    {
      method: 'POST',
      json: {
        threads,
      },
    }
  )
}

async function generateThreadData(projectId, threads) {
  return await fetchJson(
    chatApiUrl(`/project/${projectId}/generate-thread-data`),
    {
      method: 'POST',
      json: { threads },
    }
  )
}

function chatApiUrl(path) {
  return new URL(path, settings.apis.chat.internal_url)
}

module.exports = {
  getThreads: callbackify(getThreads),
  destroyProject: callbackify(destroyProject),
  sendGlobalMessage: callbackify(sendGlobalMessage),
  getGlobalMessages: callbackify(getGlobalMessages),
  sendComment: callbackify(sendComment),
  resolveThread: callbackify(resolveThread),
  reopenThread: callbackify(reopenThread),
  deleteThread: callbackify(deleteThread),
  editMessage: callbackify(editMessage),
  deleteMessage: callbackify(deleteMessage),
  deleteUserMessage: callbackify(deleteUserMessage),
  getResolvedThreadIds: callbackify(getResolvedThreadIds),
  duplicateCommentThreads: callbackify(duplicateCommentThreads),
  generateThreadData: callbackify(generateThreadData),
  promises: {
    getThreads,
    destroyProject,
    sendGlobalMessage,
    getGlobalMessages,
    sendComment,
    resolveThread,
    reopenThread,
    deleteThread,
    editMessage,
    deleteMessage,
    deleteUserMessage,
    getResolvedThreadIds,
    duplicateCommentThreads,
    generateThreadData,
  },
}
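The dual export is the pattern used throughout this commit: legacy call sites keep node-style callbacks while new code awaits the promises namespace. A small usage sketch (the ids are placeholders):

const ChatApiHandler = require('./ChatApiHandler')

async function postGreeting(projectId, userId) {
  // promise style
  return await ChatApiHandler.promises.sendGlobalMessage(
    projectId,
    userId,
    'hello'
  )
}

// equivalent callback style, produced by callbackify:
// ChatApiHandler.sendGlobalMessage(projectId, userId, 'hello', (err, msg) => { ... })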
84
services/web/app/src/Features/Chat/ChatController.js
Normal file
@@ -0,0 +1,84 @@
/* eslint-disable
    n/handle-callback-err,
    max-len,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ChatController
const ChatApiHandler = require('./ChatApiHandler')
const EditorRealTimeController = require('../Editor/EditorRealTimeController')
const SessionManager = require('../Authentication/SessionManager')
const UserInfoManager = require('../User/UserInfoManager')
const UserInfoController = require('../User/UserInfoController')
const ChatManager = require('./ChatManager')
const logger = require('@overleaf/logger')

module.exports = ChatController = {
  sendMessage(req, res, next) {
    const { project_id: projectId } = req.params
    const { content, client_id: clientId } = req.body
    const userId = SessionManager.getLoggedInUserId(req.session)
    if (userId == null) {
      const err = new Error('no logged-in user')
      return next(err)
    }
    return ChatApiHandler.sendGlobalMessage(
      projectId,
      userId,
      content,
      function (err, message) {
        if (err != null) {
          return next(err)
        }
        return UserInfoManager.getPersonalInfo(
          message.user_id,
          function (err, user) {
            if (err != null) {
              return next(err)
            }
            message.user = UserInfoController.formatPersonalInfo(user)
            message.clientId = clientId
            EditorRealTimeController.emitToRoom(
              projectId,
              'new-chat-message',
              message
            )
            return res.sendStatus(204)
          }
        )
      }
    )
  },

  getMessages(req, res, next) {
    const { project_id: projectId } = req.params
    const { query } = req
    return ChatApiHandler.getGlobalMessages(
      projectId,
      query.limit,
      query.before,
      function (err, messages) {
        if (err != null) {
          return next(err)
        }
        return ChatManager.injectUserInfoIntoThreads(
          { global: { messages } },
          function (err) {
            if (err != null) {
              return next(err)
            }
            return res.json(messages)
          }
        )
      }
    )
  },
}
61
services/web/app/src/Features/Chat/ChatManager.js
Normal file
@@ -0,0 +1,61 @@
const async = require('async')
const UserInfoManager = require('../User/UserInfoManager')
const UserInfoController = require('../User/UserInfoController')
const { promisify } = require('@overleaf/promise-utils')

function injectUserInfoIntoThreads(threads, callback) {
  // There will be a lot of repetition of user_ids, so first build a list
  // of unique ones to perform db look ups on, then use these to populate the
  // user fields
  let message, thread, threadId, userId
  if (callback == null) {
    callback = function () {}
  }
  const userIds = {}
  for (threadId in threads) {
    thread = threads[threadId]
    if (thread.resolved) {
      userIds[thread.resolved_by_user_id] = true
    }
    for (message of Array.from(thread.messages)) {
      userIds[message.user_id] = true
    }
  }

  const jobs = []
  const users = {}
  for (userId in userIds) {
    ;(userId =>
      jobs.push(cb =>
        UserInfoManager.getPersonalInfo(userId, function (error, user) {
          if (error != null) return cb(error)
          user = UserInfoController.formatPersonalInfo(user)
          users[userId] = user
          cb()
        })
      ))(userId)
  }

  return async.series(jobs, function (error) {
    if (error != null) {
      return callback(error)
    }
    for (threadId in threads) {
      thread = threads[threadId]
      if (thread.resolved) {
        thread.resolved_by_user = users[thread.resolved_by_user_id]
      }
      for (message of Array.from(thread.messages)) {
        message.user = users[message.user_id]
      }
    }
    return callback(null, threads)
  })
}

module.exports = {
  injectUserInfoIntoThreads,
  promises: {
    injectUserInfoIntoThreads: promisify(injectUserInfoIntoThreads),
  },
}
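The IIFE around userId above is decaffeinate residue that pins each id for its deferred job. For comparison only, a modern sketch of the same dedupe-then-populate step, assuming a promisified UserInfoManager (and running lookups in parallel rather than serially via async.series):

async function lookupUniqueUsers(threads) {
  // Collect each distinct user_id once, then resolve them all.
  const userIds = new Set()
  for (const thread of Object.values(threads)) {
    if (thread.resolved) userIds.add(thread.resolved_by_user_id)
    for (const message of thread.messages) userIds.add(message.user_id)
  }
  const users = {}
  await Promise.all(
    [...userIds].map(async id => {
      users[id] = await UserInfoManager.promises.getPersonalInfo(id)
    })
  )
  return users
}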
@@ -0,0 +1,202 @@
import OError from '@overleaf/o-error'
import HttpErrorHandler from '../../Features/Errors/HttpErrorHandler.js'
import mongodb from 'mongodb-legacy'
import CollaboratorsHandler from './CollaboratorsHandler.js'
import CollaboratorsGetter from './CollaboratorsGetter.js'
import OwnershipTransferHandler from './OwnershipTransferHandler.js'
import SessionManager from '../Authentication/SessionManager.js'
import EditorRealTimeController from '../Editor/EditorRealTimeController.js'
import TagsHandler from '../Tags/TagsHandler.js'
import Errors from '../Errors/Errors.js'
import logger from '@overleaf/logger'
import { expressify } from '@overleaf/promise-utils'
import { hasAdminAccess } from '../Helpers/AdminAuthorizationHelper.js'
import TokenAccessHandler from '../TokenAccess/TokenAccessHandler.js'
import ProjectAuditLogHandler from '../Project/ProjectAuditLogHandler.js'
import LimitationsManager from '../Subscription/LimitationsManager.js'

const ObjectId = mongodb.ObjectId

export default {
  removeUserFromProject: expressify(removeUserFromProject),
  removeSelfFromProject: expressify(removeSelfFromProject),
  getAllMembers: expressify(getAllMembers),
  setCollaboratorInfo: expressify(setCollaboratorInfo),
  transferOwnership: expressify(transferOwnership),
  getShareTokens: expressify(getShareTokens),
}

async function removeUserFromProject(req, res, next) {
  const projectId = req.params.Project_id
  const userId = req.params.user_id
  const sessionUserId = SessionManager.getLoggedInUserId(req.session)
  await _removeUserIdFromProject(projectId, userId)
  EditorRealTimeController.emitToRoom(projectId, 'project:membership:changed', {
    members: true,
  })

  ProjectAuditLogHandler.addEntryInBackground(
    projectId,
    'remove-collaborator',
    sessionUserId,
    req.ip,
    { userId }
  )

  res.sendStatus(204)
}

async function removeSelfFromProject(req, res, next) {
  const projectId = req.params.Project_id
  const userId = SessionManager.getLoggedInUserId(req.session)
  await _removeUserIdFromProject(projectId, userId)

  ProjectAuditLogHandler.addEntryInBackground(
    projectId,
    'leave-project',
    userId,
    req.ip
  )

  res.sendStatus(204)
}

async function getAllMembers(req, res, next) {
  const projectId = req.params.Project_id
  logger.debug({ projectId }, 'getting all active members for project')
  let members
  try {
    members = await CollaboratorsGetter.promises.getAllInvitedMembers(projectId)
  } catch (err) {
    throw OError.tag(err, 'error getting members for project', { projectId })
  }
  res.json({ members })
}

async function setCollaboratorInfo(req, res, next) {
  try {
    const projectId = req.params.Project_id
    const userId = req.params.user_id
    const { privilegeLevel } = req.body

    const allowed =
      await LimitationsManager.promises.canChangeCollaboratorPrivilegeLevel(
        projectId,
        userId,
        privilegeLevel
      )
    if (!allowed) {
      return HttpErrorHandler.forbidden(
        req,
        res,
        'edit collaborator limit reached'
      )
    }

    await CollaboratorsHandler.promises.setCollaboratorPrivilegeLevel(
      projectId,
      userId,
      privilegeLevel
    )
    EditorRealTimeController.emitToRoom(
      projectId,
      'project:collaboratorAccessLevel:changed',
      { userId }
    )
    res.sendStatus(204)
  } catch (err) {
    if (err instanceof Errors.NotFoundError) {
      HttpErrorHandler.notFound(req, res)
    } else {
      next(err)
    }
  }
}

async function transferOwnership(req, res, next) {
  const sessionUser = SessionManager.getSessionUser(req.session)
  const projectId = req.params.Project_id
  const toUserId = req.body.user_id
  try {
    await OwnershipTransferHandler.promises.transferOwnership(
      projectId,
      toUserId,
      {
        allowTransferToNonCollaborators: hasAdminAccess(sessionUser),
        sessionUserId: new ObjectId(sessionUser._id),
        ipAddress: req.ip,
      }
    )
    res.sendStatus(204)
  } catch (err) {
    if (err instanceof Errors.ProjectNotFoundError) {
      HttpErrorHandler.notFound(req, res, `project not found: ${projectId}`)
    } else if (err instanceof Errors.UserNotFoundError) {
      HttpErrorHandler.notFound(req, res, `user not found: ${toUserId}`)
    } else if (err instanceof Errors.UserNotCollaboratorError) {
      HttpErrorHandler.forbidden(
        req,
        res,
        `user ${toUserId} should be a collaborator in project ${projectId} prior to ownership transfer`
      )
    } else {
      next(err)
    }
  }
}

async function _removeUserIdFromProject(projectId, userId) {
  await CollaboratorsHandler.promises.removeUserFromProject(projectId, userId)
  EditorRealTimeController.emitToRoom(
    projectId,
    'userRemovedFromProject',
    userId
  )
  await TagsHandler.promises.removeProjectFromAllTags(userId, projectId)
}

async function getShareTokens(req, res) {
  const projectId = req.params.Project_id
  const userId = SessionManager.getLoggedInUserId(req.session)

  let tokens
  if (userId) {
    tokens = await CollaboratorsGetter.promises.getPublicShareTokens(
      new ObjectId(userId),
      new ObjectId(projectId)
    )
  } else {
    // anonymous access, the token is already available in the session
    const readOnly = TokenAccessHandler.getRequestToken(req, projectId)
    tokens = { readOnly }
  }
  if (!tokens) {
    return res.sendStatus(403)
  }

  if (tokens.readOnly || tokens.readAndWrite) {
    logger.info(
      {
        projectId,
        userId: userId || 'anonymous',
        ip: req.ip,
        tokens: Object.keys(tokens),
      },
      'project tokens accessed'
    )
  }

  if (tokens.readOnly) {
    tokens.readOnlyHashPrefix = TokenAccessHandler.createTokenHashPrefix(
      tokens.readOnly
    )
  }

  if (tokens.readAndWrite) {
    tokens.readAndWriteHashPrefix = TokenAccessHandler.createTokenHashPrefix(
      tokens.readAndWrite
    )
  }

  res.json(tokens)
}
@@ -0,0 +1,37 @@
import { callbackify } from 'node:util'
import { Project } from '../../models/Project.js'
import EmailHandler from '../Email/EmailHandler.js'
import Settings from '@overleaf/settings'

const CollaboratorsEmailHandler = {
  _buildInviteUrl(project, invite) {
    return `${Settings.siteUrl}/project/${project._id}/invite/token/${invite.token}`
  },

  async notifyUserOfProjectInvite(projectId, email, invite, sendingUser) {
    // eslint-disable-next-line no-restricted-syntax
    const project = await Project.findOne({ _id: projectId })
      .select('name owner_ref')
      .populate('owner_ref')
      .exec()
    const emailOptions = {
      to: email,
      replyTo: project.owner_ref.email,
      project: {
        name: project.name,
      },
      inviteUrl: CollaboratorsEmailHandler._buildInviteUrl(project, invite),
      owner: project.owner_ref,
      sendingUser_id: sendingUser._id,
    }
    await EmailHandler.promises.sendEmail('projectInvite', emailOptions)
  },
}

export default {
  promises: CollaboratorsEmailHandler,
  notifyUserOfProjectInvite: callbackify(
    CollaboratorsEmailHandler.notifyUserOfProjectInvite
  ),
  _buildInviteUrl: CollaboratorsEmailHandler._buildInviteUrl,
}
@@ -0,0 +1,418 @@
const { callbackify } = require('util')
const pLimit = require('p-limit')
const { ObjectId } = require('mongodb-legacy')
const OError = require('@overleaf/o-error')
const { Project } = require('../../models/Project')
const UserGetter = require('../User/UserGetter')
const ProjectGetter = require('../Project/ProjectGetter')
const PublicAccessLevels = require('../Authorization/PublicAccessLevels')
const Errors = require('../Errors/Errors')
const ProjectEditorHandler = require('../Project/ProjectEditorHandler')
const Sources = require('../Authorization/Sources')
const PrivilegeLevels = require('../Authorization/PrivilegeLevels')

module.exports = {
  getMemberIdsWithPrivilegeLevels: callbackify(getMemberIdsWithPrivilegeLevels),
  getMemberIds: callbackify(getMemberIds),
  getInvitedMemberIds: callbackify(getInvitedMemberIds),
  getInvitedMembersWithPrivilegeLevels: callbackify(
    getInvitedMembersWithPrivilegeLevels
  ),
  getInvitedMembersWithPrivilegeLevelsFromFields: callbackify(
    getInvitedMembersWithPrivilegeLevelsFromFields
  ),
  getMemberIdPrivilegeLevel: callbackify(getMemberIdPrivilegeLevel),
  getProjectsUserIsMemberOf: callbackify(getProjectsUserIsMemberOf),
  dangerouslyGetAllProjectsUserIsMemberOf: callbackify(
    dangerouslyGetAllProjectsUserIsMemberOf
  ),
  isUserInvitedMemberOfProject: callbackify(isUserInvitedMemberOfProject),
  getPublicShareTokens: callbackify(getPublicShareTokens),
  userIsTokenMember: callbackify(userIsTokenMember),
  getAllInvitedMembers: callbackify(getAllInvitedMembers),
  promises: {
    getMemberIdsWithPrivilegeLevels,
    getMemberIds,
    getInvitedMemberIds,
    getInvitedMembersWithPrivilegeLevels,
    getInvitedMembersWithPrivilegeLevelsFromFields,
    getMemberIdPrivilegeLevel,
    getInvitedEditCollaboratorCount,
    getInvitedPendingEditorCount,
    getProjectsUserIsMemberOf,
    dangerouslyGetAllProjectsUserIsMemberOf,
    isUserInvitedMemberOfProject,
    isUserInvitedReadWriteMemberOfProject,
    getPublicShareTokens,
    userIsTokenMember,
    userIsReadWriteTokenMember,
    getAllInvitedMembers,
  },
}

async function getMemberIdsWithPrivilegeLevels(projectId) {
  const project = await ProjectGetter.promises.getProject(projectId, {
    owner_ref: 1,
    collaberator_refs: 1,
    readOnly_refs: 1,
    tokenAccessReadOnly_refs: 1,
    tokenAccessReadAndWrite_refs: 1,
    publicAccesLevel: 1,
    pendingEditor_refs: 1,
    reviewer_refs: 1,
    pendingReviewer_refs: 1,
  })
  if (!project) {
    throw new Errors.NotFoundError(`no project found with id ${projectId}`)
  }
  const memberIds = _getMemberIdsWithPrivilegeLevelsFromFields(
    project.owner_ref,
    project.collaberator_refs,
    project.readOnly_refs,
    project.tokenAccessReadAndWrite_refs,
    project.tokenAccessReadOnly_refs,
    project.publicAccesLevel,
    project.pendingEditor_refs,
    project.reviewer_refs,
    project.pendingReviewer_refs
  )
  return memberIds
}

async function getMemberIds(projectId) {
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  return members.map(m => m.id)
}

async function getInvitedMemberIds(projectId) {
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  return members.filter(m => m.source !== Sources.TOKEN).map(m => m.id)
}

async function getInvitedMembersWithPrivilegeLevels(projectId) {
  let members = await getMemberIdsWithPrivilegeLevels(projectId)
  members = members.filter(m => m.source !== Sources.TOKEN)
  return _loadMembers(members)
}

async function getInvitedMembersWithPrivilegeLevelsFromFields(
  ownerId,
  collaboratorIds,
  readOnlyIds,
  reviewerIds
) {
  const members = _getMemberIdsWithPrivilegeLevelsFromFields(
    ownerId,
    collaboratorIds,
    readOnlyIds,
    [],
    [],
    null,
    [],
    reviewerIds,
    []
  )
  return _loadMembers(members)
}

async function getMemberIdPrivilegeLevel(userId, projectId) {
  // In future if the schema changes and getting all member ids is more expensive (multiple documents)
  // then optimise this.
  if (userId == null) {
    return PrivilegeLevels.NONE
  }
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  for (const member of members) {
    if (member.id === userId.toString()) {
      return member.privilegeLevel
    }
  }
  return PrivilegeLevels.NONE
}

async function getInvitedEditCollaboratorCount(projectId) {
  // Counts invited members with editor or reviewer roles
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  return members.filter(
    m =>
      m.source === Sources.INVITE &&
      (m.privilegeLevel === PrivilegeLevels.READ_AND_WRITE ||
        m.privilegeLevel === PrivilegeLevels.REVIEW)
  ).length
}

async function getInvitedPendingEditorCount(projectId) {
  // Only counts invited members that are readonly pending editors or pending
  // reviewers
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  return members.filter(
    m =>
      m.source === Sources.INVITE &&
      m.privilegeLevel === PrivilegeLevels.READ_ONLY &&
      (m.pendingEditor || m.pendingReviewer)
  ).length
}

async function isUserInvitedMemberOfProject(userId, projectId) {
  if (!userId) {
    return false
  }
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  for (const member of members) {
    if (
      member.id.toString() === userId.toString() &&
      member.source !== Sources.TOKEN
    ) {
      return true
    }
  }
  return false
}

async function isUserInvitedReadWriteMemberOfProject(userId, projectId) {
  if (!userId) {
    return false
  }
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  for (const member of members) {
    if (
      member.id.toString() === userId.toString() &&
      member.source !== Sources.TOKEN &&
      member.privilegeLevel === PrivilegeLevels.READ_AND_WRITE
    ) {
      return true
    }
  }
  return false
}

async function getPublicShareTokens(userId, projectId) {
  const memberInfo = await Project.findOne(
    {
      _id: projectId,
    },
    {
      isOwner: { $eq: ['$owner_ref', userId] },
      hasTokenReadOnlyAccess: {
        $and: [
          { $in: [userId, '$tokenAccessReadOnly_refs'] },
          { $eq: ['$publicAccesLevel', PublicAccessLevels.TOKEN_BASED] },
        ],
      },
      tokens: 1,
    }
  )
    .lean()
    .exec()

  if (!memberInfo) {
    return null
  }

  if (memberInfo.isOwner) {
    return memberInfo.tokens
  } else if (memberInfo.hasTokenReadOnlyAccess) {
    return {
      readOnly: memberInfo.tokens.readOnly,
    }
  } else {
    return {}
  }
}
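Note how getPublicShareTokens computes isOwner and hasTokenReadOnlyAccess server-side, using aggregation expressions ($eq, $and, $in) inside the findOne projection, so a single round trip returns both flags plus the tokens. The resulting document has roughly this shape (values illustrative):

// {
//   _id: ObjectId('...'),
//   isOwner: false,
//   hasTokenReadOnlyAccess: true,
//   tokens: { readOnly: 'ro-...', readAndWrite: 'rw-...' },
// }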
// This function returns all the projects that a user currently has access to,
// excluding projects where the user is listed in the token access fields when
// token access has been disabled.
async function getProjectsUserIsMemberOf(userId, fields) {
  const limit = pLimit(2)
  const [readAndWrite, review, readOnly, tokenReadAndWrite, tokenReadOnly] =
    await Promise.all([
      limit(() => Project.find({ collaberator_refs: userId }, fields).exec()),
      limit(() => Project.find({ reviewer_refs: userId }, fields).exec()),
      limit(() => Project.find({ readOnly_refs: userId }, fields).exec()),
      limit(() =>
        Project.find(
          {
            tokenAccessReadAndWrite_refs: userId,
            publicAccesLevel: PublicAccessLevels.TOKEN_BASED,
          },
          fields
        ).exec()
      ),
      limit(() =>
        Project.find(
          {
            tokenAccessReadOnly_refs: userId,
            publicAccesLevel: PublicAccessLevels.TOKEN_BASED,
          },
          fields
        ).exec()
      ),
    ])
  return { readAndWrite, review, readOnly, tokenReadAndWrite, tokenReadOnly }
}
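pLimit(2) schedules all five queries up front via Promise.all but lets only two run at a time, bounding concurrent load on Mongo. A standalone sketch of the pattern (the delay is just for demonstration):

const pLimit = require('p-limit')

const limit = pLimit(2)
const delay = ms => new Promise(resolve => setTimeout(resolve, ms))

async function demo() {
  // Five tasks, at most two in flight at any moment.
  const results = await Promise.all(
    [1, 2, 3, 4, 5].map(n =>
      limit(async () => {
        await delay(100)
        return n * n
      })
    )
  )
  console.log(results) // [1, 4, 9, 16, 25]
}

demo()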
// This function returns all the projects that a user is a member of, regardless of
// the current state of the project, so it includes those projects where token access
// has been disabled.
async function dangerouslyGetAllProjectsUserIsMemberOf(userId, fields) {
  const readAndWrite = await Project.find(
    { collaberator_refs: userId },
    fields
  ).exec()
  const readOnly = await Project.find({ readOnly_refs: userId }, fields).exec()
  const tokenReadAndWrite = await Project.find(
    { tokenAccessReadAndWrite_refs: userId },
    fields
  ).exec()
  const tokenReadOnly = await Project.find(
    { tokenAccessReadOnly_refs: userId },
    fields
  ).exec()
  return { readAndWrite, readOnly, tokenReadAndWrite, tokenReadOnly }
}

async function getAllInvitedMembers(projectId) {
  try {
    const rawMembers = await getInvitedMembersWithPrivilegeLevels(projectId)
    const { members } =
      ProjectEditorHandler.buildOwnerAndMembersViews(rawMembers)
    return members
  } catch (err) {
    throw OError.tag(err, 'error getting members for project', { projectId })
  }
}

async function userIsTokenMember(userId, projectId) {
  userId = new ObjectId(userId.toString())
  projectId = new ObjectId(projectId.toString())
  const project = await Project.findOne(
    {
      _id: projectId,
      $or: [
        { tokenAccessReadOnly_refs: userId },
        { tokenAccessReadAndWrite_refs: userId },
      ],
    },
    {
      _id: 1,
    }
  ).exec()
  return project != null
}

async function userIsReadWriteTokenMember(userId, projectId) {
  userId = new ObjectId(userId.toString())
  projectId = new ObjectId(projectId.toString())
  const project = await Project.findOne(
    {
      _id: projectId,
      tokenAccessReadAndWrite_refs: userId,
    },
    {
      _id: 1,
    }
  ).exec()
  return project != null
}

function _getMemberIdsWithPrivilegeLevelsFromFields(
  ownerId,
  collaboratorIds,
  readOnlyIds,
  tokenAccessIds,
  tokenAccessReadOnlyIds,
  publicAccessLevel,
  pendingEditorIds,
  reviewerIds,
  pendingReviewerIds
) {
  const members = []
  members.push({
    id: ownerId.toString(),
    privilegeLevel: PrivilegeLevels.OWNER,
    source: Sources.OWNER,
  })

  for (const memberId of collaboratorIds || []) {
    members.push({
      id: memberId.toString(),
      privilegeLevel: PrivilegeLevels.READ_AND_WRITE,
      source: Sources.INVITE,
    })
  }

  for (const memberId of readOnlyIds || []) {
    const record = {
      id: memberId.toString(),
      privilegeLevel: PrivilegeLevels.READ_ONLY,
      source: Sources.INVITE,
    }

    if (pendingEditorIds?.some(pe => memberId.equals(pe))) {
      record.pendingEditor = true
    } else if (pendingReviewerIds?.some(pr => memberId.equals(pr))) {
      record.pendingReviewer = true
    }
    members.push(record)
  }

  if (publicAccessLevel === PublicAccessLevels.TOKEN_BASED) {
    for (const memberId of tokenAccessIds || []) {
      members.push({
        id: memberId.toString(),
        privilegeLevel: PrivilegeLevels.READ_AND_WRITE,
        source: Sources.TOKEN,
      })
    }
    for (const memberId of tokenAccessReadOnlyIds || []) {
      members.push({
        id: memberId.toString(),
        privilegeLevel: PrivilegeLevels.READ_ONLY,
        source: Sources.TOKEN,
      })
    }
  }

  for (const memberId of reviewerIds || []) {
    members.push({
      id: memberId.toString(),
      privilegeLevel: PrivilegeLevels.REVIEW,
      source: Sources.INVITE,
    })
  }
  return members
}

async function _loadMembers(members) {
  const limit = pLimit(3)
  const results = await Promise.all(
    members.map(member =>
      limit(async () => {
        const user = await UserGetter.promises.getUser(member.id, {
          _id: 1,
          email: 1,
          features: 1,
          first_name: 1,
          last_name: 1,
          signUpDate: 1,
        })
        if (user != null) {
          const record = {
            user,
            privilegeLevel: member.privilegeLevel,
          }
          if (member.pendingEditor) {
            record.pendingEditor = true
          } else if (member.pendingReviewer) {
            record.pendingReviewer = true
          }
          return record
        } else {
          return null
        }
      })
    )
  )
  return results.filter(r => r != null)
}
@@ -0,0 +1,468 @@
const { callbackify } = require('util')
const OError = require('@overleaf/o-error')
const { Project } = require('../../models/Project')
const ProjectGetter = require('../Project/ProjectGetter')
const ProjectHelper = require('../Project/ProjectHelper')
const logger = require('@overleaf/logger')
const ContactManager = require('../Contacts/ContactManager')
const PrivilegeLevels = require('../Authorization/PrivilegeLevels')
const TpdsProjectFlusher = require('../ThirdPartyDataStore/TpdsProjectFlusher')
const CollaboratorsGetter = require('./CollaboratorsGetter')
const Errors = require('../Errors/Errors')
const TpdsUpdateSender = require('../ThirdPartyDataStore/TpdsUpdateSender')
const EditorRealTimeController = require('../Editor/EditorRealTimeController')

module.exports = {
  userIsTokenMember: callbackify(userIsTokenMember),
  removeUserFromProject: callbackify(removeUserFromProject),
  removeUserFromAllProjects: callbackify(removeUserFromAllProjects),
  addUserIdToProject: callbackify(addUserIdToProject),
  transferProjects: callbackify(transferProjects),
  promises: {
    userIsTokenMember,
    removeUserFromProject,
    removeUserFromAllProjects,
    addUserIdToProject,
    transferProjects,
    setCollaboratorPrivilegeLevel,
    convertTrackChangesToExplicitFormat,
  },
}
// Forces null pendingReviewer_refs, readOnly_refs, and reviewer_refs to
// be empty arrays to avoid errors during $pull ops
// See https://github.com/overleaf/internal/issues/24610
async function fixNullCollaboratorRefs(projectId) {
  // Temporary cleanup for the case where pendingReviewer_refs is null
  await Project.updateOne(
    { _id: projectId, pendingReviewer_refs: { $type: 'null' } },
    { $set: { pendingReviewer_refs: [] } }
  ).exec()

  // Temporary cleanup for the case where readOnly_refs is null
  await Project.updateOne(
    { _id: projectId, readOnly_refs: { $type: 'null' } },
    { $set: { readOnly_refs: [] } }
  ).exec()

  // Temporary cleanup for the case where reviewer_refs is null
  await Project.updateOne(
    { _id: projectId, reviewer_refs: { $type: 'null' } },
    { $set: { reviewer_refs: [] } }
  ).exec()
}

async function removeUserFromProject(projectId, userId) {
  try {
    const project = await Project.findOne({ _id: projectId }).exec()

    await fixNullCollaboratorRefs(projectId)

    // Deal with the old type of boolean value for archived
    // In order to clear it
    if (typeof project.archived === 'boolean') {
      let archived = ProjectHelper.calculateArchivedArray(
        project,
        userId,
        'ARCHIVE'
      )

      archived = archived.filter(id => id.toString() !== userId.toString())

      await Project.updateOne(
        { _id: projectId },
        {
          $set: { archived },
          $pull: {
            collaberator_refs: userId,
            reviewer_refs: userId,
            readOnly_refs: userId,
            pendingEditor_refs: userId,
            pendingReviewer_refs: userId,
            tokenAccessReadOnly_refs: userId,
            tokenAccessReadAndWrite_refs: userId,
            trashed: userId,
          },
        }
      )
    } else {
      await Project.updateOne(
        { _id: projectId },
        {
          $pull: {
            collaberator_refs: userId,
            readOnly_refs: userId,
            reviewer_refs: userId,
            pendingEditor_refs: userId,
            pendingReviewer_refs: userId,
            tokenAccessReadOnly_refs: userId,
            tokenAccessReadAndWrite_refs: userId,
            archived: userId,
            trashed: userId,
          },
        }
      )
    }
  } catch (err) {
    throw OError.tag(err, 'problem removing user from project collaborators', {
      projectId,
      userId,
    })
  }
}

async function removeUserFromAllProjects(userId) {
  const { readAndWrite, readOnly, tokenReadAndWrite, tokenReadOnly } =
    await CollaboratorsGetter.promises.dangerouslyGetAllProjectsUserIsMemberOf(
      userId,
      {
        _id: 1,
      }
    )
  const allProjects = readAndWrite
    .concat(readOnly)
    .concat(tokenReadAndWrite)
    .concat(tokenReadOnly)
  for (const project of allProjects) {
    await removeUserFromProject(project._id, userId)
  }
}

async function addUserIdToProject(
  projectId,
  addingUserId,
  userId,
  privilegeLevel,
  { pendingEditor, pendingReviewer } = {}
) {
  const project = await ProjectGetter.promises.getProject(projectId, {
    owner_ref: 1,
    name: 1,
    collaberator_refs: 1,
    readOnly_refs: 1,
    reviewer_refs: 1,
    track_changes: 1,
  })
  let level
  let existingUsers = project.collaberator_refs || []
  existingUsers = existingUsers.concat(project.readOnly_refs || [])
  existingUsers = existingUsers.map(u => u.toString())
  if (existingUsers.includes(userId.toString())) {
    return // User already in Project
  }
  if (privilegeLevel === PrivilegeLevels.READ_AND_WRITE) {
    level = { collaberator_refs: userId }
    logger.debug(
      { privileges: 'readAndWrite', userId, projectId },
      'adding user'
    )
  } else if (privilegeLevel === PrivilegeLevels.READ_ONLY) {
    level = { readOnly_refs: userId }
    if (pendingEditor) {
      level.pendingEditor_refs = userId
    } else if (pendingReviewer) {
      level.pendingReviewer_refs = userId
    }
    logger.debug(
      {
        privileges: 'readOnly',
        userId,
        projectId,
        pendingEditor,
        pendingReviewer,
      },
      'adding user'
    )
  } else if (privilegeLevel === PrivilegeLevels.REVIEW) {
    level = { reviewer_refs: userId }
    logger.debug({ privileges: 'reviewer', userId, projectId }, 'adding user')
  } else {
    throw new Error(`unknown privilegeLevel: ${privilegeLevel}`)
  }

  if (addingUserId) {
    ContactManager.addContact(addingUserId, userId, () => {})
  }

  if (privilegeLevel === PrivilegeLevels.REVIEW) {
    const trackChanges = await convertTrackChangesToExplicitFormat(
      projectId,
      project.track_changes
    )
    trackChanges[userId] = true

    await Project.updateOne(
      { _id: projectId },
      { track_changes: trackChanges, $addToSet: level }
    ).exec()

    EditorRealTimeController.emitToRoom(
      projectId,
      'toggle-track-changes',
      trackChanges
    )
  } else {
    await Project.updateOne({ _id: projectId }, { $addToSet: level }).exec()
  }

  // Ensure there is a dedicated folder for this "new" project.
  await TpdsUpdateSender.promises.createProject({
    projectId,
    projectName: project.name,
    ownerId: project.owner_ref,
    userId,
  })

  // Flush to TPDS in background to add files to collaborator's Dropbox
  TpdsProjectFlusher.promises.flushProjectToTpds(projectId).catch(err => {
    logger.error(
      { err, projectId, userId },
      'error flushing to TPDS after adding collaborator'
    )
  })
}

async function transferProjects(fromUserId, toUserId) {
  // Find all the projects this user is part of so we can flush them to TPDS
  const projects = await Project.find(
    {
      $or: [
        { owner_ref: fromUserId },
        { collaberator_refs: fromUserId },
        { readOnly_refs: fromUserId },
      ],
    },
    { _id: 1 }
  ).exec()
  const projectIds = projects.map(p => p._id)
  logger.debug({ projectIds, fromUserId, toUserId }, 'transferring projects')

  await Project.updateMany(
    { owner_ref: fromUserId },
    { $set: { owner_ref: toUserId } }
  ).exec()

  await Project.updateMany(
    { collaberator_refs: fromUserId },
    {
      $addToSet: { collaberator_refs: toUserId },
    }
  ).exec()
  await Project.updateMany(
    { collaberator_refs: fromUserId },
    {
      $pull: { collaberator_refs: fromUserId },
    }
  ).exec()

  await Project.updateMany(
    { readOnly_refs: fromUserId },
    {
      $addToSet: { readOnly_refs: toUserId },
    }
  ).exec()
  await Project.updateMany(
    { readOnly_refs: fromUserId },
    {
      $pull: { readOnly_refs: fromUserId },
    }
  ).exec()

  await Project.updateMany(
    { pendingEditor_refs: fromUserId },
    {
      $addToSet: { pendingEditor_refs: toUserId },
    }
  ).exec()
  await Project.updateMany(
    { pendingEditor_refs: fromUserId },
    {
      $pull: { pendingEditor_refs: fromUserId },
    }
  ).exec()

  await Project.updateMany(
    { pendingReviewer_refs: fromUserId },
    {
      $addToSet: { pendingReviewer_refs: toUserId },
    }
  ).exec()
  await Project.updateMany(
    { pendingReviewer_refs: fromUserId },
    {
      $pull: { pendingReviewer_refs: fromUserId },
    }
  ).exec()

  // Flush in background, no need to block on this
  _flushProjects(projectIds).catch(err => {
    logger.err(
      { err, projectIds, fromUserId, toUserId },
      'error flushing transferred projects to TPDS'
    )
  })
}
async function setCollaboratorPrivilegeLevel(
|
||||
projectId,
|
||||
userId,
|
||||
privilegeLevel,
|
||||
{ pendingEditor, pendingReviewer } = {}
|
||||
) {
|
||||
// Make sure we're only updating the project if the user is already a
|
||||
// collaborator
|
||||
const query = {
|
||||
_id: projectId,
|
||||
$or: [
|
||||
{ collaberator_refs: userId },
|
||||
{ readOnly_refs: userId },
|
||||
{ reviewer_refs: userId },
|
||||
],
|
||||
}
|
||||
let update
|
||||
|
||||
await fixNullCollaboratorRefs(projectId)
|
||||
|
||||
switch (privilegeLevel) {
|
||||
case PrivilegeLevels.READ_AND_WRITE: {
|
||||
update = {
|
||||
$pull: {
|
||||
readOnly_refs: userId,
|
||||
pendingEditor_refs: userId,
|
||||
reviewer_refs: userId,
|
||||
pendingReviewer_refs: userId,
|
||||
},
|
||||
$addToSet: { collaberator_refs: userId },
|
||||
}
|
||||
break
|
||||
}
|
||||
case PrivilegeLevels.REVIEW: {
|
||||
update = {
|
||||
$pull: {
|
||||
readOnly_refs: userId,
|
||||
pendingEditor_refs: userId,
|
||||
collaberator_refs: userId,
|
||||
pendingReviewer_refs: userId,
|
||||
},
|
||||
$addToSet: { reviewer_refs: userId },
|
||||
}
|
||||
|
||||
const project = await ProjectGetter.promises.getProject(projectId, {
|
||||
track_changes: true,
|
||||
})
|
||||
const newTrackChangesState = await convertTrackChangesToExplicitFormat(
|
||||
projectId,
|
||||
project.track_changes
|
||||
)
|
||||
if (newTrackChangesState[userId] !== true) {
|
||||
newTrackChangesState[userId] = true
|
||||
}
|
||||
if (typeof project.track_changes === 'object') {
|
||||
update.$set = { [`track_changes.${userId}`]: true }
|
||||
} else {
|
||||
update.$set = { track_changes: newTrackChangesState }
|
||||
}
|
||||
break
|
||||
}
|
||||
case PrivilegeLevels.READ_ONLY: {
|
||||
update = {
|
||||
$pull: { collaberator_refs: userId, reviewer_refs: userId },
|
||||
$addToSet: { readOnly_refs: userId },
|
||||
}
|
||||
|
||||
if (pendingEditor) {
|
||||
update.$addToSet.pendingEditor_refs = userId
|
||||
} else {
|
||||
update.$pull.pendingEditor_refs = userId
|
||||
}
|
||||
|
||||
if (pendingReviewer) {
|
||||
update.$addToSet.pendingReviewer_refs = userId
|
||||
} else {
|
||||
update.$pull.pendingReviewer_refs = userId
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
default: {
|
||||
throw new OError(`unknown privilege level: ${privilegeLevel}`)
|
||||
}
|
||||
}
|
||||
const mongoResponse = await Project.updateOne(query, update).exec()
|
||||
if (mongoResponse.matchedCount === 0) {
|
||||
throw new Errors.NotFoundError('project or collaborator not found')
|
||||
}
|
||||
|
||||
if (update.$set?.track_changes) {
|
||||
EditorRealTimeController.emitToRoom(
|
||||
projectId,
|
||||
'toggle-track-changes',
|
||||
update.$set.track_changes
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
async function userIsTokenMember(userId, projectId) {
|
||||
if (!userId) {
|
||||
return false
|
||||
}
|
||||
try {
|
||||
const project = await Project.findOne(
|
||||
{
|
||||
_id: projectId,
|
||||
$or: [
|
||||
{ tokenAccessReadOnly_refs: userId },
|
||||
{ tokenAccessReadAndWrite_refs: userId },
|
||||
],
|
||||
},
|
||||
{
|
||||
_id: 1,
|
||||
}
|
||||
)
|
||||
return project != null
|
||||
} catch (err) {
|
||||
throw OError.tag(err, 'problem while checking if user is token member', {
|
||||
userId,
|
||||
projectId,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async function _flushProjects(projectIds) {
|
||||
for (const projectId of projectIds) {
|
||||
await TpdsProjectFlusher.promises.flushProjectToTpds(projectId)
|
||||
}
|
||||
}
|
||||
|
||||
async function convertTrackChangesToExplicitFormat(
|
||||
projectId,
|
||||
trackChangesState
|
||||
) {
|
||||
if (typeof trackChangesState === 'object') {
|
||||
return { ...trackChangesState }
|
||||
}
|
||||
|
||||
if (trackChangesState === true) {
|
||||
// track changes are enabled for all
|
||||
const members =
|
||||
await CollaboratorsGetter.promises.getMemberIdsWithPrivilegeLevels(
|
||||
projectId
|
||||
)
|
||||
|
||||
const newTrackChangesState = {}
|
||||
for (const { id, privilegeLevel } of members) {
|
||||
if (
|
||||
[
|
||||
PrivilegeLevels.OWNER,
|
||||
PrivilegeLevels.READ_AND_WRITE,
|
||||
PrivilegeLevels.REVIEW,
|
||||
].includes(privilegeLevel)
|
||||
) {
|
||||
newTrackChangesState[id] = true
|
||||
}
|
||||
}
|
||||
|
||||
return newTrackChangesState
|
||||
}
|
||||
|
||||
return {}
|
||||
}
|
||||
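
// Illustrative shapes for convertTrackChangesToExplicitFormat (a sketch with
// assumed member ids, not part of the module): the legacy boolean "on for
// everyone" state collapses into an explicit per-user map that only names the
// owner, editors and reviewers; read-only members are left out.
//
//   // with members: owner o1, editor e1, read-only member r1
//   await convertTrackChangesToExplicitFormat(projectId, true)
//   // => { o1: true, e1: true }
//   await convertTrackChangesToExplicitFormat(projectId, { o1: true })
//   // => { o1: true }   (object states are shallow-copied unchanged)
//   await convertTrackChangesToExplicitFormat(projectId, false)
//   // => {}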
@@ -0,0 +1,399 @@
import ProjectGetter from '../Project/ProjectGetter.js'
import LimitationsManager from '../Subscription/LimitationsManager.js'
import UserGetter from '../User/UserGetter.js'
import CollaboratorsGetter from './CollaboratorsGetter.js'
import CollaboratorsInviteHandler from './CollaboratorsInviteHandler.mjs'
import CollaboratorsInviteGetter from './CollaboratorsInviteGetter.js'
import logger from '@overleaf/logger'
import Settings from '@overleaf/settings'
import EmailHelper from '../Helpers/EmailHelper.js'
import EditorRealTimeController from '../Editor/EditorRealTimeController.js'
import AnalyticsManager from '../Analytics/AnalyticsManager.js'
import SessionManager from '../Authentication/SessionManager.js'
import { RateLimiter } from '../../infrastructure/RateLimiter.js'
import { expressify } from '@overleaf/promise-utils'
import ProjectAuditLogHandler from '../Project/ProjectAuditLogHandler.js'
import Errors from '../Errors/Errors.js'
import AuthenticationController from '../Authentication/AuthenticationController.js'
import PrivilegeLevels from '../Authorization/PrivilegeLevels.js'

// This rate limiter allows a different number of requests depending on the
// number of collaborators a user is allowed. This is implemented by providing
// a number of points (P) and consuming c = floor(P / maxRequests) on each
// request. We'd like (maxRequests + 1) requests to trigger the rate limit, so
// one constraint that we have is that c * (maxRequests + 1) > P. This is
// achieved if P = M^2 where M is the largest value possible for maxRequests.
//
// In the present case, we allow 10 requests per collaborator per 30 minutes,
// with a maximum of 200 requests, so P = 200^2 = 40000.
const RATE_LIMIT_POINTS = 40000
const rateLimiter = new RateLimiter('invite-to-project-by-user-id', {
  points: RATE_LIMIT_POINTS,
  duration: 60 * 30,
})
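
// Worked example of the arithmetic above (illustrative, not part of the
// module): at the maximum, maxRequests = 200, so each request consumes
// floor(40000 / 200) = 200 points and the 201st request within 30 minutes
// would need 40200 > 40000 points and is rejected. For a user allowed a
// single collaborator (maxRequests = 10), each request consumes
// floor(40000 / 10) = 4000 points, so the 11th request is rejected.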

async function getAllInvites(req, res) {
  const projectId = req.params.Project_id
  logger.debug({ projectId }, 'getting all active invites for project')
  const invites =
    await CollaboratorsInviteGetter.promises.getAllInvites(projectId)
  res.json({ invites })
}

async function _checkShouldInviteEmail(email) {
  if (Settings.restrictInvitesToExistingAccounts === true) {
    logger.debug({ email }, 'checking if user exists with this email')
    const user = await UserGetter.promises.getUserByAnyEmail(email, {
      _id: 1,
    })
    const userExists = user?._id != null
    return userExists
  } else {
    return true
  }
}

async function _checkRateLimit(userId) {
  let collabLimit =
    await LimitationsManager.promises.allowedNumberOfCollaboratorsForUser(
      userId
    )

  if (collabLimit == null || collabLimit === 0) {
    collabLimit = 1
  } else if (collabLimit < 0 || collabLimit > 20) {
    collabLimit = 20
  }

  // Consume enough points to hit the rate limit at 10 * collabLimit
  const maxRequests = 10 * collabLimit
  const points = Math.floor(RATE_LIMIT_POINTS / maxRequests)
  try {
    await rateLimiter.consume(userId, points, { method: 'userId' })
  } catch (err) {
    if (err instanceof Error) {
      throw err
    } else {
      return false
    }
  }
  return true
}

async function inviteToProject(req, res) {
  const projectId = req.params.Project_id
  let { email, privileges } = req.body
  const sendingUser = SessionManager.getSessionUser(req.session)
  const sendingUserId = sendingUser._id
  req.logger.addFields({ email, sendingUserId })

  if (email === sendingUser.email) {
    logger.debug(
      { projectId, email, sendingUserId },
      'cannot invite yourself to project'
    )
    return res.json({ invite: null, error: 'cannot_invite_self' })
  }

  logger.debug({ projectId, email, sendingUserId }, 'inviting to project')

  let allowed = false
  // can always invite read-only collaborators
  if (privileges === PrivilegeLevels.READ_ONLY) {
    allowed = true
  } else {
    allowed = await LimitationsManager.promises.canAddXEditCollaborators(
      projectId,
      1
    )
  }

  if (!allowed) {
    logger.debug(
      { projectId, email, sendingUserId },
      'not allowed to invite more users to project'
    )
    return res.json({ invite: null })
  }

  email = EmailHelper.parseEmail(email, true)
  if (email == null || email === '') {
    logger.debug({ projectId, email, sendingUserId }, 'invalid email address')
    return res.status(400).json({ errorReason: 'invalid_email' })
  }

  const underRateLimit =
    await CollaboratorsInviteController._checkRateLimit(sendingUserId)
  if (!underRateLimit) {
    return res.sendStatus(429)
  }

  const shouldAllowInvite =
    await CollaboratorsInviteController._checkShouldInviteEmail(email)
  if (!shouldAllowInvite) {
    logger.debug(
      { email, projectId, sendingUserId },
      'not allowed to send an invite to this email address'
    )
    return res.json({
      invite: null,
      error: 'cannot_invite_non_user',
    })
  }

  const invite = await CollaboratorsInviteHandler.promises.inviteToProject(
    projectId,
    sendingUser,
    email,
    privileges
  )

  ProjectAuditLogHandler.addEntryInBackground(
    projectId,
    'send-invite',
    sendingUserId,
    req.ip,
    {
      inviteId: invite._id,
      privileges,
    }
  )

  logger.debug({ projectId, email, sendingUserId }, 'invite created')

  EditorRealTimeController.emitToRoom(projectId, 'project:membership:changed', {
    invites: true,
  })
  res.json({ invite })
}

async function revokeInvite(req, res) {
  const projectId = req.params.Project_id
  const inviteId = req.params.invite_id
  const user = SessionManager.getSessionUser(req.session)

  logger.debug({ projectId, inviteId }, 'revoking invite')

  const invite = await CollaboratorsInviteHandler.promises.revokeInvite(
    projectId,
    inviteId
  )

  if (invite != null) {
    ProjectAuditLogHandler.addEntryInBackground(
      projectId,
      'revoke-invite',
      user._id,
      req.ip,
      {
        inviteId: invite._id,
        privileges: invite.privileges,
      }
    )
    EditorRealTimeController.emitToRoom(
      projectId,
      'project:membership:changed',
      { invites: true }
    )
  }

  res.sendStatus(204)
}

async function generateNewInvite(req, res) {
  const projectId = req.params.Project_id
  const inviteId = req.params.invite_id
  const user = SessionManager.getSessionUser(req.session)

  logger.debug({ projectId, inviteId }, 'resending invite')
  const sendingUser = SessionManager.getSessionUser(req.session)
  const underRateLimit = await CollaboratorsInviteController._checkRateLimit(
    sendingUser._id
  )
  if (!underRateLimit) {
    return res.sendStatus(429)
  }

  const invite = await CollaboratorsInviteHandler.promises.generateNewInvite(
    projectId,
    sendingUser,
    inviteId
  )

  EditorRealTimeController.emitToRoom(projectId, 'project:membership:changed', {
    invites: true,
  })

  if (invite != null) {
    ProjectAuditLogHandler.addEntryInBackground(
      projectId,
      'resend-invite',
      user._id,
      req.ip,
      {
        inviteId: invite._id,
        privileges: invite.privileges,
      }
    )

    res.sendStatus(201)
  } else {
    res.sendStatus(404)
  }
}

async function viewInvite(req, res) {
  const projectId = req.params.Project_id
  const { token } = req.params
  const _renderInvalidPage = function () {
    res.status(404)
    logger.debug({ projectId }, 'invite not valid, rendering not-valid page')
    res.render('project/invite/not-valid', { title: 'Invalid Invite' })
  }

  // check if the user is already a member of the project
  const currentUser = SessionManager.getSessionUser(req.session)
  if (currentUser) {
    const isMember =
      await CollaboratorsGetter.promises.isUserInvitedMemberOfProject(
        currentUser._id,
        projectId
      )
    if (isMember) {
      logger.debug(
        { projectId, userId: currentUser._id },
        'user is already a member of this project, redirecting'
      )
      return res.redirect(`/project/${projectId}`)
    }
  }

  // get the invite
  const invite = await CollaboratorsInviteGetter.promises.getInviteByToken(
    projectId,
    token
  )

  // check if invite is gone, or otherwise non-existent
  if (invite == null) {
    logger.debug({ projectId }, 'no invite found for this token')
    return _renderInvalidPage()
  }

  // check the user who sent the invite exists
  const owner = await UserGetter.promises.getUser(
    { _id: invite.sendingUserId },
    { email: 1, first_name: 1, last_name: 1 }
  )
  if (owner == null) {
    logger.debug({ projectId }, 'no project owner found')
    return _renderInvalidPage()
  }

  // fetch the project name
  const project = await ProjectGetter.promises.getProject(projectId, {
    name: 1,
  })
  if (project == null) {
    logger.debug({ projectId }, 'no project found')
    return _renderInvalidPage()
  }

  if (!currentUser) {
    req.session.sharedProjectData = {
      project_name: project.name,
      user_first_name: owner.first_name,
    }
    AuthenticationController.setRedirectInSession(req)
    return res.redirect('/register')
  }

  // cleanup if set for register page
  delete req.session.sharedProjectData

  // finally render the invite
  res.render('project/invite/show', {
    invite,
    token,
    project,
    owner,
    title: 'Project Invite',
  })
}

async function acceptInvite(req, res) {
  const { Project_id: projectId, token } = req.params
  const currentUser = SessionManager.getSessionUser(req.session)
  logger.debug(
    { projectId, userId: currentUser._id },
    'got request to accept invite'
  )

  const invite = await CollaboratorsInviteGetter.promises.getInviteByToken(
    projectId,
    token
  )

  if (invite == null) {
    throw new Errors.NotFoundError('no matching invite found')
  }

  await ProjectAuditLogHandler.promises.addEntry(
    projectId,
    'accept-invite',
    currentUser._id,
    req.ip,
    {
      inviteId: invite._id,
      privileges: invite.privileges,
    }
  )

  await CollaboratorsInviteHandler.promises.acceptInvite(
    invite,
    projectId,
    currentUser
  )

  await EditorRealTimeController.emitToRoom(
    projectId,
    'project:membership:changed',
    { invites: true, members: true }
  )

  let editMode = 'edit'
  if (invite.privileges === PrivilegeLevels.REVIEW) {
    editMode = 'review'
  } else if (invite.privileges === PrivilegeLevels.READ_ONLY) {
    editMode = 'view'
  }
  AnalyticsManager.recordEventForUserInBackground(
    currentUser._id,
    'project-joined',
    {
      projectId,
      ownerId: invite.sendingUserId, // only owner can invite others
      mode: editMode,
      role: invite.privileges,
      source: 'email-invite',
    }
  )

  if (req.xhr) {
    res.sendStatus(204) // Done async via project page notification
  } else {
    res.redirect(`/project/${projectId}`)
  }
}

const CollaboratorsInviteController = {
  getAllInvites: expressify(getAllInvites),
  inviteToProject: expressify(inviteToProject),
  revokeInvite: expressify(revokeInvite),
  generateNewInvite: expressify(generateNewInvite),
  viewInvite: expressify(viewInvite),
  acceptInvite: expressify(acceptInvite),
  _checkShouldInviteEmail,
  _checkRateLimit,
}

export default CollaboratorsInviteController
@@ -0,0 +1,48 @@
const logger = require('@overleaf/logger')
const { ProjectInvite } = require('../../models/ProjectInvite')
const PrivilegeLevels = require('../Authorization/PrivilegeLevels')
const CollaboratorsInviteHelper = require('./CollaboratorsInviteHelper')

async function getAllInvites(projectId) {
  logger.debug({ projectId }, 'fetching invites for project')
  const invites = await ProjectInvite.find({ projectId })
    .select('_id email privileges')
    .exec()
  logger.debug(
    { projectId, count: invites.length },
    'found invites for project'
  )
  return invites
}

async function getEditInviteCount(projectId) {
  logger.debug({ projectId }, 'counting edit invites for project')
  const count = await ProjectInvite.countDocuments({
    projectId,
    privileges: { $ne: PrivilegeLevels.READ_ONLY },
  }).exec()
  return count
}

async function getInviteByToken(projectId, tokenString) {
  logger.debug({ projectId }, 'fetching invite by token')
  const invite = await ProjectInvite.findOne({
    projectId,
    tokenHmac: CollaboratorsInviteHelper.hashInviteToken(tokenString),
  }).exec()

  if (invite == null) {
    logger.err({ projectId }, 'no invite found')
    return null
  }

  return invite
}

module.exports = {
  promises: {
    getAllInvites,
    getEditInviteCount,
    getInviteByToken,
  },
}
@@ -0,0 +1,234 @@
import { callbackify } from 'node:util'
import { ProjectInvite } from '../../models/ProjectInvite.js'
import logger from '@overleaf/logger'
import CollaboratorsEmailHandler from './CollaboratorsEmailHandler.mjs'
import CollaboratorsHandler from './CollaboratorsHandler.js'
import CollaboratorsInviteGetter from './CollaboratorsInviteGetter.js'
import CollaboratorsInviteHelper from './CollaboratorsInviteHelper.js'
import UserGetter from '../User/UserGetter.js'
import ProjectGetter from '../Project/ProjectGetter.js'
import NotificationsBuilder from '../Notifications/NotificationsBuilder.js'
import PrivilegeLevels from '../Authorization/PrivilegeLevels.js'
import LimitationsManager from '../Subscription/LimitationsManager.js'
import ProjectAuditLogHandler from '../Project/ProjectAuditLogHandler.js'
import _ from 'lodash'

const CollaboratorsInviteHandler = {
  async _trySendInviteNotification(projectId, sendingUser, invite) {
    const { email } = invite
    const existingUser = await UserGetter.promises.getUserByAnyEmail(email, {
      _id: 1,
    })
    if (existingUser == null) {
      logger.debug({ projectId, email }, 'no existing user found, returning')
      return null
    }
    const project = await ProjectGetter.promises.getProject(projectId, {
      _id: 1,
      name: 1,
    })
    if (project == null) {
      logger.debug(
        { projectId },
        'no project found while sending notification, returning'
      )
      return null
    }
    await NotificationsBuilder.promises
      .projectInvite(invite, project, sendingUser, existingUser)
      .create()
  },

  async _tryCancelInviteNotification(inviteId) {
    return await NotificationsBuilder.promises
      .projectInvite({ _id: inviteId }, null, null, null)
      .read()
  },

  async _sendMessages(projectId, sendingUser, invite) {
    const { email } = invite
    logger.debug(
      { projectId, email, inviteId: invite._id },
      'sending notification and email for invite'
    )
    const notificationJob =
      CollaboratorsInviteHandler._trySendInviteNotification(
        projectId,
        sendingUser,
        invite
      ).catch(err => {
        logger.err(
          { err, projectId, email },
          'error sending notification for invite'
        )
      })
    CollaboratorsEmailHandler.promises
      .notifyUserOfProjectInvite(projectId, invite.email, invite, sendingUser)
      .catch(err => {
        logger.err({ err, projectId, email }, 'error sending email for invite')
      })
    await notificationJob
  },

  async inviteToProject(projectId, sendingUser, email, privileges) {
    logger.debug(
      { projectId, sendingUserId: sendingUser._id, email, privileges },
      'adding invite'
    )
    const token = CollaboratorsInviteHelper.generateToken()
    const tokenHmac = CollaboratorsInviteHelper.hashInviteToken(token)
    let invite = new ProjectInvite({
      email,
      tokenHmac,
      sendingUserId: sendingUser._id,
      projectId,
      privileges,
    })
    invite = await invite.save()
    invite = invite.toObject()

    // Send notification and email
    await CollaboratorsInviteHandler._sendMessages(projectId, sendingUser, {
      ...invite,
      token,
    })

    return _.pick(invite, ['_id', 'email', 'privileges'])
  },

  async revokeInviteForUser(projectId, targetEmails) {
    logger.debug({ projectId }, 'getting all active invites for project')
    const invites =
      await CollaboratorsInviteGetter.promises.getAllInvites(projectId)
    const matchingInvite = invites.find(invite =>
      targetEmails.some(emailData => emailData.email === invite.email)
    )
    if (matchingInvite) {
      await CollaboratorsInviteHandler.revokeInvite(
        projectId,
        matchingInvite._id
      )
    }
  },

  async revokeInvite(projectId, inviteId) {
    logger.debug({ projectId, inviteId }, 'removing invite')
    const invite = await ProjectInvite.findOneAndDelete({
      projectId,
      _id: inviteId,
    }).exec()
    CollaboratorsInviteHandler._tryCancelInviteNotification(inviteId).catch(
      err => {
        logger.err(
          { err, projectId, inviteId },
          'failed to cancel invite notification'
        )
      }
    )
    return invite
  },

  async generateNewInvite(projectId, sendingUser, inviteId) {
    logger.debug({ projectId, inviteId }, 'generating new invite email')
    const invite = await this.revokeInvite(projectId, inviteId)

    if (invite == null) {
      logger.warn(
        { projectId, inviteId },
        'no invite found, nothing to generate'
      )
      return null
    }

    return await this.inviteToProject(
      projectId,
      sendingUser,
      invite.email,
      invite.privileges
    )
  },

  async acceptInvite(invite, projectId, user) {
    const project = await ProjectGetter.promises.getProject(projectId, {
      owner_ref: 1,
    })

    let privilegeLevel = invite.privileges
    const opts = {}
    if (
      [PrivilegeLevels.READ_AND_WRITE, PrivilegeLevels.REVIEW].includes(
        invite.privileges
      )
    ) {
      const allowed =
        await LimitationsManager.promises.canAcceptEditCollaboratorInvite(
          project._id
        )
      if (!allowed) {
        privilegeLevel = PrivilegeLevels.READ_ONLY
        if (invite.privileges === PrivilegeLevels.READ_AND_WRITE) {
          opts.pendingEditor = true
        } else if (invite.privileges === PrivilegeLevels.REVIEW) {
          opts.pendingReviewer = true
        }

        logger.debug(
          { projectId, userId: user._id, privileges: invite.privileges },
          'no collaborator slots available, user added as read only (pending editor)'
        )
        await ProjectAuditLogHandler.promises.addEntry(
          projectId,
          'editor-moved-to-pending', // controller already logged accept-invite
          null,
          null,
          {
            userId: user._id.toString(),
            role:
              invite.privileges === PrivilegeLevels.REVIEW
                ? 'reviewer'
                : 'editor',
          }
        )
      }
    }

    await CollaboratorsHandler.promises.addUserIdToProject(
      projectId,
      invite.sendingUserId,
      user._id,
      privilegeLevel,
      opts
    )

    // Remove invite
    const inviteId = invite._id
    logger.debug({ projectId, inviteId }, 'removing invite')
    await ProjectInvite.deleteOne({ _id: inviteId }).exec()
    CollaboratorsInviteHandler._tryCancelInviteNotification(inviteId).catch(
      err => {
        logger.error(
          { err, projectId, inviteId },
          'failed to cancel invite notification'
        )
      }
    )
  },
}

export default {
  promises: CollaboratorsInviteHandler,
  inviteToProject: callbackify(CollaboratorsInviteHandler.inviteToProject),
  revokeInviteForUser: callbackify(
    CollaboratorsInviteHandler.revokeInviteForUser
  ),
  revokeInvite: callbackify(CollaboratorsInviteHandler.revokeInvite),
  generateNewInvite: callbackify(CollaboratorsInviteHandler.generateNewInvite),
  acceptInvite: callbackify(CollaboratorsInviteHandler.acceptInvite),
  _trySendInviteNotification: callbackify(
    CollaboratorsInviteHandler._trySendInviteNotification
  ),
  _tryCancelInviteNotification: callbackify(
    CollaboratorsInviteHandler._tryCancelInviteNotification
  ),
  _sendMessages: callbackify(CollaboratorsInviteHandler._sendMessages),
}
@@ -0,0 +1,17 @@
const Crypto = require('crypto')

function generateToken() {
  const buffer = Crypto.randomBytes(24)
  return buffer.toString('hex')
}

function hashInviteToken(token) {
  return Crypto.createHmac('sha256', 'overleaf-token-invite')
    .update(token)
    .digest('hex')
}

module.exports = {
  generateToken,
  hashInviteToken,
}
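
// A minimal usage sketch (assumed, not part of the module). The raw token
// only ever travels in the invite link sent to the invitee; the database
// stores just the HMAC, so a leaked ProjectInvite document does not reveal
// a usable token.
//
//   const { generateToken, hashInviteToken } = require('./CollaboratorsInviteHelper')
//   const token = generateToken() // 48 hex chars, embedded in the invite URL
//   const tokenHmac = hashInviteToken(token) // persisted on the ProjectInvite
//   // later, on GET /project/:id/invite/token/:token, the lookup recomputes:
//   hashInviteToken(incomingToken) === tokenHmac // true only for the link holder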
@@ -0,0 +1,175 @@
import CollaboratorsController from './CollaboratorsController.mjs'
import AuthenticationController from '../Authentication/AuthenticationController.js'
import AuthorizationMiddleware from '../Authorization/AuthorizationMiddleware.js'
import PrivilegeLevels from '../Authorization/PrivilegeLevels.js'
import CollaboratorsInviteController from './CollaboratorsInviteController.mjs'
import { RateLimiter } from '../../infrastructure/RateLimiter.js'
import RateLimiterMiddleware from '../Security/RateLimiterMiddleware.js'
import CaptchaMiddleware from '../Captcha/CaptchaMiddleware.js'
import AnalyticsRegistrationSourceMiddleware from '../Analytics/AnalyticsRegistrationSourceMiddleware.js'
import { Joi, validate } from '../../infrastructure/Validation.js'

const rateLimiters = {
  inviteToProjectByProjectId: new RateLimiter(
    'invite-to-project-by-project-id',
    { points: 100, duration: 60 * 10 }
  ),
  inviteToProjectByIp: new RateLimiter('invite-to-project-by-ip', {
    points: 100,
    duration: 60 * 10,
  }),
  resendInvite: new RateLimiter('resend-invite', {
    points: 200,
    duration: 60 * 10,
  }),
  getProjectTokens: new RateLimiter('get-project-tokens', {
    points: 200,
    duration: 60 * 10,
  }),
  viewProjectInvite: new RateLimiter('view-project-invite', {
    points: 20,
    duration: 60,
  }),
}

export default {
  apply(webRouter) {
    webRouter.post(
      '/project/:Project_id/leave',
      AuthenticationController.requireLogin(),
      CollaboratorsController.removeSelfFromProject
    )

    webRouter.put(
      '/project/:Project_id/users/:user_id',
      AuthenticationController.requireLogin(),
      validate({
        params: Joi.object({
          Project_id: Joi.objectId(),
          user_id: Joi.objectId(),
        }),
        body: Joi.object({
          privilegeLevel: Joi.string()
            .valid(
              PrivilegeLevels.READ_ONLY,
              PrivilegeLevels.READ_AND_WRITE,
              PrivilegeLevels.REVIEW
            )
            .required(),
        }),
      }),
      AuthorizationMiddleware.ensureUserCanAdminProject,
      CollaboratorsController.setCollaboratorInfo
    )

    webRouter.delete(
      '/project/:Project_id/users/:user_id',
      AuthenticationController.requireLogin(),
      AuthorizationMiddleware.ensureUserCanAdminProject,
      CollaboratorsController.removeUserFromProject
    )

    webRouter.get(
      '/project/:Project_id/members',
      AuthenticationController.requireLogin(),
      AuthorizationMiddleware.blockRestrictedUserFromProject,
      AuthorizationMiddleware.ensureUserCanReadProject,
      CollaboratorsController.getAllMembers
    )

    webRouter.post(
      '/project/:Project_id/transfer-ownership',
      AuthenticationController.requireLogin(),
      validate({
        params: Joi.object({
          Project_id: Joi.objectId(),
        }),
        body: Joi.object({
          user_id: Joi.objectId(),
        }),
      }),
      AuthorizationMiddleware.ensureUserCanAdminProject,
      CollaboratorsController.transferOwnership
    )

    // invites
    webRouter.post(
      '/project/:Project_id/invite',
      RateLimiterMiddleware.rateLimit(rateLimiters.inviteToProjectByProjectId, {
        params: ['Project_id'],
      }),
      RateLimiterMiddleware.rateLimit(rateLimiters.inviteToProjectByIp, {
        ipOnly: true,
      }),
      CaptchaMiddleware.validateCaptcha('invite'),
      AuthenticationController.requireLogin(),
      validate({
        body: Joi.object({
          email: Joi.string().required(),
          privileges: Joi.string()
            .valid(
              PrivilegeLevels.READ_ONLY,
              PrivilegeLevels.READ_AND_WRITE,
              PrivilegeLevels.REVIEW
            )
            .required(),
        }),
      }),
      AuthorizationMiddleware.ensureUserCanAdminProject,
      CollaboratorsInviteController.inviteToProject
    )

    webRouter.get(
      '/project/:Project_id/invites',
      AuthenticationController.requireLogin(),
      AuthorizationMiddleware.ensureUserCanAdminProject,
      CollaboratorsInviteController.getAllInvites
    )

    webRouter.delete(
      '/project/:Project_id/invite/:invite_id',
      AuthenticationController.requireLogin(),
      AuthorizationMiddleware.ensureUserCanAdminProject,
      CollaboratorsInviteController.revokeInvite
    )

    webRouter.post(
      '/project/:Project_id/invite/:invite_id/resend',
      RateLimiterMiddleware.rateLimit(rateLimiters.resendInvite, {
        params: ['Project_id'],
      }),
      AuthenticationController.requireLogin(),
      AuthorizationMiddleware.ensureUserCanAdminProject,
      CollaboratorsInviteController.generateNewInvite
    )

    webRouter.get(
      '/project/:Project_id/invite/token/:token',
      AnalyticsRegistrationSourceMiddleware.setSource(
        'collaboration',
        'project-invite'
      ),
      RateLimiterMiddleware.rateLimit(rateLimiters.viewProjectInvite),
      CollaboratorsInviteController.viewInvite,
      AnalyticsRegistrationSourceMiddleware.clearSource()
    )

    webRouter.post(
      '/project/:Project_id/invite/token/:token/accept',
      AnalyticsRegistrationSourceMiddleware.setSource(
        'collaboration',
        'project-invite'
      ),
      AuthenticationController.requireLogin(),
      CollaboratorsInviteController.acceptInvite,
      AnalyticsRegistrationSourceMiddleware.clearSource()
    )

    webRouter.get(
      '/project/:Project_id/tokens',
      RateLimiterMiddleware.rateLimit(rateLimiters.getProjectTokens),
      AuthorizationMiddleware.ensureUserCanReadProject,
      CollaboratorsController.getShareTokens
    )
  },
}
@@ -0,0 +1,168 @@
const logger = require('@overleaf/logger')
const { Project } = require('../../models/Project')
const ProjectGetter = require('../Project/ProjectGetter')
const UserGetter = require('../User/UserGetter')
const CollaboratorsHandler = require('./CollaboratorsHandler')
const EmailHandler = require('../Email/EmailHandler')
const Errors = require('../Errors/Errors')
const PrivilegeLevels = require('../Authorization/PrivilegeLevels')
const TpdsProjectFlusher = require('../ThirdPartyDataStore/TpdsProjectFlusher')
const ProjectAuditLogHandler = require('../Project/ProjectAuditLogHandler')
const AnalyticsManager = require('../Analytics/AnalyticsManager')

module.exports = {
  promises: { transferOwnership },
}

async function transferOwnership(projectId, newOwnerId, options = {}) {
  const {
    allowTransferToNonCollaborators,
    sessionUserId,
    skipEmails,
    ipAddress,
  } = options

  // Fetch project and user
  const [project, newOwner] = await Promise.all([
    _getProject(projectId),
    _getUser(newOwnerId),
  ])

  // Exit early if the transferee is already the project owner
  const previousOwnerId = project.owner_ref
  if (previousOwnerId.equals(newOwnerId)) {
    return
  }

  // Check that user is already a collaborator
  if (
    !allowTransferToNonCollaborators &&
    !_userIsCollaborator(newOwner, project)
  ) {
    throw new Errors.UserNotCollaboratorError({ info: { userId: newOwnerId } })
  }

  // Track the change of ownership in BigQuery.
  AnalyticsManager.recordEventForUserInBackground(
    previousOwnerId,
    'project-ownership-transfer',
    { projectId, newOwnerId }
  )

  // Transfer ownership
  await ProjectAuditLogHandler.promises.addEntry(
    projectId,
    'transfer-ownership',
    sessionUserId,
    ipAddress,
    { previousOwnerId, newOwnerId }
  )

  // Determine which permissions to give old owner based on
  // new owner's existing permissions
  const newPermissions =
    _getUserPermissions(newOwner, project) || PrivilegeLevels.READ_ONLY

  await _transferOwnership(
    projectId,
    previousOwnerId,
    newOwnerId,
    newPermissions
  )

  // Flush project to TPDS
  await TpdsProjectFlusher.promises.flushProjectToTpds(projectId)

  // Send confirmation emails
  const previousOwner = await UserGetter.promises.getUser(previousOwnerId)
  if (!skipEmails) {
    await _sendEmails(project, previousOwner, newOwner)
  }
}

async function _getProject(projectId) {
  const project = await ProjectGetter.promises.getProject(projectId, {
    owner_ref: 1,
    collaberator_refs: 1,
    readOnly_refs: 1,
    name: 1,
  })
  if (project == null) {
    throw new Errors.ProjectNotFoundError({ info: { projectId } })
  }
  return project
}

async function _getUser(userId) {
  const user = await UserGetter.promises.getUser(userId)
  if (user == null) {
    throw new Errors.UserNotFoundError({ info: { userId } })
  }
  return user
}

function _getUserPermissions(user, project) {
  const collaboratorIds = project.collaberator_refs || []
  const readOnlyIds = project.readOnly_refs || []
  if (collaboratorIds.some(collaboratorId => collaboratorId.equals(user._id))) {
    return PrivilegeLevels.READ_AND_WRITE
  } else if (
    readOnlyIds.some(collaboratorId => collaboratorId.equals(user._id))
  ) {
    return PrivilegeLevels.READ_ONLY
  }
}

function _userIsCollaborator(user, project) {
  return Boolean(_getUserPermissions(user, project))
}

async function _transferOwnership(
  projectId,
  previousOwnerId,
  newOwnerId,
  newPermissions
) {
  await CollaboratorsHandler.promises.removeUserFromProject(
    projectId,
    newOwnerId
  )
  await Project.updateOne(
    { _id: projectId },
    { $set: { owner_ref: newOwnerId } }
  ).exec()
  await CollaboratorsHandler.promises.addUserIdToProject(
    projectId,
    newOwnerId,
    previousOwnerId,
    newPermissions
  )
}

async function _sendEmails(project, previousOwner, newOwner) {
  if (previousOwner == null) {
    // The previous owner didn't exist. This is not supposed to happen, but
    // since we're changing the owner anyway, we'll just warn.
    // (previousOwner is null in this branch, so log only the project id.)
    logger.warn(
      { projectId: project._id },
      'Project owner did not exist before ownership transfer'
    )
  } else {
    // Send confirmation emails
    await Promise.all([
      EmailHandler.promises.sendEmail(
        'ownershipTransferConfirmationPreviousOwner',
        {
          to: previousOwner.email,
          project,
          newOwner,
        }
      ),
      EmailHandler.promises.sendEmail('ownershipTransferConfirmationNewOwner', {
        to: newOwner.email,
        project,
        previousOwner,
      }),
    ])
  }
}
193
services/web/app/src/Features/Compile/ClsiCacheController.js
Normal file
@@ -0,0 +1,193 @@
const { NotFoundError } = require('../Errors/Errors')
const {
  fetchStreamWithResponse,
  RequestFailedError,
  fetchJson,
} = require('@overleaf/fetch-utils')
const Path = require('path')
const { pipeline } = require('stream/promises')
const logger = require('@overleaf/logger')
const ClsiCacheManager = require('./ClsiCacheManager')
const CompileController = require('./CompileController')
const { expressify } = require('@overleaf/promise-utils')
const ClsiCacheHandler = require('./ClsiCacheHandler')
const ProjectGetter = require('../Project/ProjectGetter')

/**
 * Download a file from a specific build on the clsi-cache.
 *
 * @param req
 * @param res
 * @return {Promise<*>}
 */
async function downloadFromCache(req, res) {
  const { Project_id: projectId, buildId, filename } = req.params
  const userId = CompileController._getUserIdForCompile(req)
  const signal = AbortSignal.timeout(60 * 1000)
  let location, projectName
  try {
    ;[{ location }, { name: projectName }] = await Promise.all([
      ClsiCacheHandler.getOutputFile(
        projectId,
        userId,
        buildId,
        filename,
        signal
      ),
      ProjectGetter.promises.getProject(projectId, { name: 1 }),
    ])
  } catch (err) {
    if (err instanceof NotFoundError) {
      // res.sendStatus() sends a description of the status as body.
      // Using res.status().end() avoids sending that fake body.
      return res.status(404).end()
    } else {
      throw err
    }
  }

  const { stream, response } = await fetchStreamWithResponse(location, {
    signal,
  })
  if (req.destroyed) {
    // The client has disconnected already, avoid trying to write into the broken connection.
    return
  }

  for (const key of ['Content-Length', 'Content-Type']) {
    if (response.headers.has(key)) res.setHeader(key, response.headers.get(key))
  }
  const ext = Path.extname(filename)
  res.attachment(
    ext === '.pdf'
      ? `${CompileController._getSafeProjectName({ name: projectName })}.pdf`
      : filename
  )
  try {
    res.writeHead(response.status)
    await pipeline(stream, res)
  } catch (err) {
    const reqAborted = Boolean(req.destroyed)
    const streamingStarted = Boolean(res.headersSent)
    if (!streamingStarted) {
      if (err instanceof RequestFailedError) {
        res.sendStatus(err.response.status)
      } else {
        res.sendStatus(500)
      }
    }
    if (
      streamingStarted &&
      reqAborted &&
      err.code === 'ERR_STREAM_PREMATURE_CLOSE'
    ) {
      // Ignore noisy spurious error
      return
    }
    logger.warn(
      {
        err,
        projectId,
        location,
        filename,
        reqAborted,
        streamingStarted,
      },
      'CLSI-cache proxy error'
    )
  }
}

/**
 * Prepare a compile response from the clsi-cache.
 *
 * @param req
 * @param res
 * @return {Promise<void>}
 */
async function getLatestBuildFromCache(req, res) {
  const { Project_id: projectId } = req.params
  const userId = CompileController._getUserIdForCompile(req)
  try {
    const {
      internal: { location: metaLocation, zone },
      external: { isUpToDate, allFiles },
    } = await ClsiCacheManager.getLatestBuildFromCache(
      projectId,
      userId,
      'output.overleaf.json'
    )

    if (!isUpToDate) return res.sendStatus(410)

    const meta = await fetchJson(metaLocation, {
      signal: AbortSignal.timeout(5 * 1000),
    })

    const [, editorId, buildId] = metaLocation.match(
      /\/build\/([a-f0-9-]+?)-([a-f0-9]+-[a-f0-9]+)\//
    )

    let baseURL = `/project/${projectId}`
    if (userId) {
      baseURL += `/user/${userId}`
    }

    const { ranges, contentId, clsiServerId, compileGroup, size, options } =
      meta

    const outputFiles = allFiles
      .filter(
        path => path !== 'output.overleaf.json' && path !== 'output.tar.gz'
      )
      .map(path => {
        const f = {
          url: `${baseURL}/build/${editorId}-${buildId}/output/${path}`,
          downloadURL: `/download/project/${projectId}/build/${editorId}-${buildId}/output/cached/${path}`,
          build: buildId,
          path,
          type: path.split('.').pop(),
        }
        if (path === 'output.pdf') {
          Object.assign(f, {
            size,
            editorId,
          })
          if (clsiServerId !== 'cache') {
            // Enable PDF caching and attempt to download from VM first.
            // (clsi VMs do not have the editorId in the path on disk, omit it).
            Object.assign(f, {
              url: `${baseURL}/build/${buildId}/output/output.pdf`,
              ranges,
              contentId,
            })
          }
        }
        return f
      })
    let { pdfCachingMinChunkSize, pdfDownloadDomain } =
      await CompileController._getSplitTestOptions(req, res)
    pdfDownloadDomain += `/zone/${zone}`
    res.json({
      fromCache: true,
      status: 'success',
      outputFiles,
      compileGroup,
      clsiServerId,
      pdfDownloadDomain,
      pdfCachingMinChunkSize,
      options,
    })
  } catch (err) {
    if (err instanceof NotFoundError) {
      res.sendStatus(404)
    } else {
      throw err
    }
  }
}

module.exports = {
  downloadFromCache: expressify(downloadFromCache),
  getLatestBuildFromCache: expressify(getLatestBuildFromCache),
}
217
services/web/app/src/Features/Compile/ClsiCacheHandler.js
Normal file
@@ -0,0 +1,217 @@
const _ = require('lodash')
const {
  fetchNothing,
  fetchRedirectWithResponse,
  RequestFailedError,
} = require('@overleaf/fetch-utils')
const logger = require('@overleaf/logger')
const Settings = require('@overleaf/settings')
const OError = require('@overleaf/o-error')
const { NotFoundError, InvalidNameError } = require('../Errors/Errors')

function validateFilename(filename) {
  // Allow the known output files, plus any other .blg (bibliography log) file.
  if (
    ![
      'output.blg',
      'output.log',
      'output.pdf',
      'output.synctex.gz',
      'output.overleaf.json',
      'output.tar.gz',
    ].includes(filename) &&
    !filename.endsWith('.blg')
  ) {
    throw new InvalidNameError('bad filename')
  }
}

/**
 * Clear the cache on all clsi-cache instances.
 *
 * @param projectId
 * @param userId
 * @return {Promise<void>}
 */
async function clearCache(projectId, userId) {
  let path = `/project/${projectId}`
  if (userId) {
    path += `/user/${userId}`
  }
  path += '/output'

  await Promise.all(
    Settings.apis.clsiCache.instances.map(async ({ url, zone }) => {
      const u = new URL(url)
      u.pathname = path
      try {
        await fetchNothing(u, {
          method: 'DELETE',
          signal: AbortSignal.timeout(15_000),
        })
      } catch (err) {
        throw OError.tag(err, 'clear clsi-cache', { url, zone })
      }
    })
  )
}

/**
 * Get an output file from a specific build.
 *
 * @param projectId
 * @param userId
 * @param buildId
 * @param filename
 * @param signal
 * @return {Promise<{size: number, zone: string, location: string, lastModified: Date, allFiles: string[]}>}
 */
async function getOutputFile(
  projectId,
  userId,
  buildId,
  filename,
  signal = AbortSignal.timeout(15_000)
) {
  validateFilename(filename)
  if (!/^[a-f0-9-]+$/.test(buildId)) {
    throw new InvalidNameError('bad buildId')
  }

  let path = `/project/${projectId}`
  if (userId) {
    path += `/user/${userId}`
  }
  path += `/build/${buildId}/search/output/${filename}`
  return getRedirectWithFallback(projectId, userId, path, signal)
}

/**
 * Get an output file from the most recent build.
 *
 * @param projectId
 * @param userId
 * @param filename
 * @param signal
 * @return {Promise<{size: number, zone: string, location: string, lastModified: Date, allFiles: string[]}>}
 */
async function getLatestOutputFile(
  projectId,
  userId,
  filename,
  signal = AbortSignal.timeout(15_000)
) {
  validateFilename(filename)

  let path = `/project/${projectId}`
  if (userId) {
    path += `/user/${userId}`
  }
  path += `/latest/output/${filename}`
  return getRedirectWithFallback(projectId, userId, path, signal)
}

/**
 * Request the given path from any of the clsi-cache instances.
 *
 * Some of them might be down temporarily. Try the next one until we receive a redirect or 404.
 *
 * This function is similar to the Coordinator in the clsi-cache, notable differences:
 * - all the logic for sorting builds is in clsi-cache (re-used by clsi and web)
 * - fan-out (1 client performs lookup on many clsi-cache instances) is "central" in clsi-cache, resulting in better connection re-use
 * - we only cross the k8s cluster boundary via an internal GCLB once ($$$)
 *
 * @param projectId
 * @param userId
 * @param path
 * @param signal
 * @return {Promise<{size: number, zone: string, location: string, lastModified: Date, allFiles: string[]}>}
 */
async function getRedirectWithFallback(
  projectId,
  userId,
  path,
  signal = AbortSignal.timeout(15_000)
) {
  // Avoid hitting the same instance first all the time.
  const instances = _.shuffle(Settings.apis.clsiCache.instances)
  for (const { url, zone } of instances) {
    const u = new URL(url)
    u.pathname = path
    try {
      const {
        location,
        response: { headers },
      } = await fetchRedirectWithResponse(u, {
        signal,
      })
      // Success, return the cache entry.
      return {
        location,
        zone: headers.get('X-Zone'),
        lastModified: new Date(headers.get('X-Last-Modified')),
        size: parseInt(headers.get('X-Content-Length'), 10),
        allFiles: JSON.parse(headers.get('X-All-Files')),
      }
    } catch (err) {
      if (err instanceof RequestFailedError && err.response.status === 404) {
        break // No clsi-cache instance has cached something for this project/user.
      }
      logger.warn(
        { err, projectId, userId, url, zone },
        'getLatestOutputFile from clsi-cache failed'
      )
      // This clsi-cache instance is down, try the next backend.
    }
  }
  throw new NotFoundError('nothing cached yet')
}
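
// A minimal caller sketch (assumed, not part of the module): a 404 from the
// first responsive instance surfaces as NotFoundError, which callers treat as
// "no cache entry", while other failures fall through to the next instance.
//
//   try {
//     const { location, size } = await getLatestOutputFile(
//       projectId,
//       userId,
//       'output.pdf'
//     )
//     // stream `location` back to the client; `size` fills Content-Length
//   } catch (err) {
//     if (err instanceof NotFoundError) {
//       // nothing cached yet, fall back to a fresh compile
//     } else {
//       throw err
//     }
//   }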

/**
 * Populate the clsi-cache for the given project/user with the provided source
 *
 * This is either another project, or a template (id+version).
 *
 * @param projectId
 * @param userId
 * @param sourceProjectId
 * @param templateId
 * @param templateVersionId
 * @param lastUpdated
 * @param zone
 * @param signal
 * @return {Promise<void>}
 */
async function prepareCacheSource(
  projectId,
  userId,
  { sourceProjectId, templateId, templateVersionId, lastUpdated, zone, signal }
) {
  const url = new URL(
    `/project/${projectId}/user/${userId}/import-from`,
    Settings.apis.clsiCache.instances.find(i => i.zone === zone).url
  )
  try {
    await fetchNothing(url, {
      method: 'POST',
      json: {
        sourceProjectId,
        lastUpdated,
        templateId,
        templateVersionId,
      },
      signal,
    })
  } catch (err) {
    if (err instanceof RequestFailedError && err.response.status === 404) {
      throw new NotFoundError()
    }
    throw err
  }
}

module.exports = {
  clearCache,
  getOutputFile,
  getLatestOutputFile,
  prepareCacheSource,
}
106
services/web/app/src/Features/Compile/ClsiCacheManager.js
Normal file
@@ -0,0 +1,106 @@
const { NotFoundError } = require('../Errors/Errors')
const ClsiCacheHandler = require('./ClsiCacheHandler')
const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')
const ProjectGetter = require('../Project/ProjectGetter')
const SplitTestHandler = require('../SplitTests/SplitTestHandler')

/**
 * Get the most recent build and metadata
 *
 * Internal: internal metadata; External: fine to send to user as-is.
 *
 * @param projectId
 * @param userId
 * @param filename
 * @param signal
 * @return {Promise<{internal: {zone: string, location: string}, external: {isUpToDate: boolean, lastUpdated: Date, size: number, allFiles: string[]}}>}
 */
async function getLatestBuildFromCache(projectId, userId, filename, signal) {
  const [
    { location, lastModified: lastCompiled, zone, size, allFiles },
    lastUpdatedInRedis,
    { lastUpdated: lastUpdatedInMongo },
  ] = await Promise.all([
    ClsiCacheHandler.getLatestOutputFile(projectId, userId, filename, signal),
    DocumentUpdaterHandler.promises.getProjectLastUpdatedAt(projectId),
    ProjectGetter.promises.getProject(projectId, { lastUpdated: 1 }),
  ])

  const lastUpdated =
    lastUpdatedInRedis > lastUpdatedInMongo
      ? lastUpdatedInRedis
      : lastUpdatedInMongo
  const isUpToDate = lastCompiled >= lastUpdated

  return {
    internal: {
      location,
      zone,
    },
    external: {
      isUpToDate,
      lastUpdated,
      size,
      allFiles,
    },
  }
}
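
// Worked example of the freshness check above (illustrative timestamps, not
// from the source): the cached build is only served if nothing changed after
// it was compiled, using whichever of the Redis and Mongo timestamps is newer.
//
//   lastCompiled       = 2024-01-02T10:00:00Z
//   lastUpdatedInRedis = 2024-01-02T09:59:00Z
//   lastUpdatedInMongo = 2024-01-01T18:00:00Z
//   => lastUpdated = 09:59, isUpToDate = true (the controller serves the cache)
//   a later edit pushes lastUpdated past lastCompiled
//   => isUpToDate = false (the controller responds 410)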
|
||||
/**
|
||||
* Collect metadata and prepare the clsi-cache for the given project.
|
||||
*
|
||||
* @param projectId
|
||||
* @param userId
|
||||
* @param sourceProjectId
|
||||
* @param templateId
|
||||
* @param templateVersionId
|
||||
* @return {Promise<void>}
|
||||
*/
|
||||
async function prepareClsiCache(
|
||||
projectId,
|
||||
userId,
|
||||
{ sourceProjectId, templateId, templateVersionId }
|
||||
) {
|
||||
const { variant } = await SplitTestHandler.promises.getAssignmentForUser(
|
||||
userId,
|
||||
'copy-clsi-cache'
|
||||
)
|
||||
if (variant !== 'enabled') return
|
||||
const signal = AbortSignal.timeout(5_000)
|
||||
let lastUpdated
|
||||
let zone = 'b' // populate template data on zone b
|
||||
if (sourceProjectId) {
|
||||
try {
|
||||
;({
|
||||
internal: { zone },
|
||||
external: { lastUpdated },
|
||||
} = await getLatestBuildFromCache(
|
||||
sourceProjectId,
|
||||
userId,
|
||||
'output.tar.gz',
|
||||
signal
|
||||
))
|
||||
} catch (err) {
|
||||
if (err instanceof NotFoundError) return // nothing cached yet
|
||||
throw err
|
||||
}
|
||||
}
|
||||
try {
|
||||
await ClsiCacheHandler.prepareCacheSource(projectId, userId, {
|
||||
sourceProjectId,
|
||||
templateId,
|
||||
templateVersionId,
|
||||
zone,
|
||||
lastUpdated,
|
||||
signal,
|
||||
})
|
||||
} catch (err) {
|
||||
if (err instanceof NotFoundError) return // nothing cached yet/expired.
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getLatestBuildFromCache,
|
||||
prepareClsiCache,
|
||||
}
|
||||
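As a rough illustration of how this manager is meant to be driven, a hypothetical clone workflow (not part of this commit) could call it like this:

// Sketch: after cloning a project, seed the clsi-cache from the source project.
const ClsiCacheManager = require('./ClsiCacheManager')

async function afterProjectClone(newProjectId, userId, sourceProjectId) {
  // No-op unless the user is in the 'copy-clsi-cache' split test;
  // a NotFoundError from an empty cache is swallowed inside prepareClsiCache.
  await ClsiCacheManager.prepareClsiCache(newProjectId, userId, {
    sourceProjectId,
  })
}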
250
services/web/app/src/Features/Compile/ClsiCookieManager.js
Normal file
@@ -0,0 +1,250 @@
const { URL, URLSearchParams } = require('url')
const OError = require('@overleaf/o-error')
const Settings = require('@overleaf/settings')
const request = require('request').defaults({ timeout: 30 * 1000 })
const RedisWrapper = require('../../infrastructure/RedisWrapper')
const Cookie = require('cookie')
const logger = require('@overleaf/logger')
const Metrics = require('@overleaf/metrics')
const { promisifyAll } = require('@overleaf/promise-utils')

const clsiCookiesEnabled = (Settings.clsiCookie?.key ?? '') !== ''

const rclient = RedisWrapper.client('clsi_cookie')
let rclientSecondary
if (Settings.redis.clsi_cookie_secondary != null) {
  rclientSecondary = RedisWrapper.client('clsi_cookie_secondary')
}

module.exports = function (backendGroup) {
  const cookieManager = {
    buildKey(projectId, userId) {
      if (backendGroup != null) {
        return `clsiserver:${backendGroup}:${projectId}:${userId}`
      } else {
        return `clsiserver:${projectId}:${userId}`
      }
    },

    getServerId(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      callback
    ) {
      if (!clsiCookiesEnabled) {
        return callback()
      }
      rclient.get(this.buildKey(projectId, userId), (err, serverId) => {
        if (err) {
          return callback(err)
        }
        if (serverId == null || serverId === '') {
          this._populateServerIdViaRequest(
            projectId,
            userId,
            compileGroup,
            compileBackendClass,
            callback
          )
        } else {
          callback(null, serverId)
        }
      })
    },

    _populateServerIdViaRequest(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      callback
    ) {
      const u = new URL(`${Settings.apis.clsi.url}/project/${projectId}/status`)
      u.search = new URLSearchParams({
        compileGroup,
        compileBackendClass,
      }).toString()
      request.post(u.href, (err, res, body) => {
        if (err) {
          OError.tag(err, 'error getting initial server id for project', {
            project_id: projectId,
          })
          return callback(err)
        }
        if (!clsiCookiesEnabled) {
          return callback()
        }
        const serverId = this._parseServerIdFromResponse(res)
        this.setServerId(
          projectId,
          userId,
          compileGroup,
          compileBackendClass,
          serverId,
          null,
          function (err) {
            if (err) {
              logger.warn(
                { err, projectId },
                'error setting server id via populate request'
              )
            }
            callback(err, serverId)
          }
        )
      })
    },

    _parseServerIdFromResponse(response) {
      const cookies = Cookie.parse(response.headers['set-cookie']?.[0] || '')
      return cookies?.[Settings.clsiCookie.key]
    },

    checkIsLoadSheddingEvent(clsiserverid, compileGroup, compileBackendClass) {
      request.get(
        {
          url: `${Settings.apis.clsi.url}/instance-state`,
          qs: { clsiserverid, compileGroup, compileBackendClass },
        },
        (err, res, body) => {
          if (err) {
            Metrics.inc('clsi-lb-switch-backend', 1, {
              status: 'error',
            })
            logger.warn({ err, clsiserverid }, 'cannot probe clsi VM')
            return
          }
          const isStillRunning =
            res.statusCode === 200 && body === `${clsiserverid},UP\n`
          Metrics.inc('clsi-lb-switch-backend', 1, {
            status: isStillRunning ? 'load-shedding' : 'cycle',
          })
        }
      )
    },

    _getTTLInSeconds(clsiServerId) {
      return (clsiServerId || '').includes('-reg-')
        ? Settings.clsiCookie.ttlInSecondsRegular
        : Settings.clsiCookie.ttlInSeconds
    },

    setServerId(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      serverId,
      previous,
      callback
    ) {
      if (!clsiCookiesEnabled) {
        return callback()
      }
      if (serverId == null) {
        // We don't get a cookie back if it hasn't changed
        return rclient.expire(
          this.buildKey(projectId, userId),
          this._getTTLInSeconds(previous),
          err => callback(err)
        )
      }
      if (!previous) {
        // Initial assignment of a user+project or after clearing cache.
        Metrics.inc('clsi-lb-assign-initial-backend')
      } else {
        this.checkIsLoadSheddingEvent(
          previous,
          compileGroup,
          compileBackendClass
        )
      }
      if (rclientSecondary != null) {
        this._setServerIdInRedis(
          rclientSecondary,
          projectId,
          userId,
          serverId,
          () => {}
        )
      }
      this._setServerIdInRedis(rclient, projectId, userId, serverId, err =>
        callback(err)
      )
    },

    _setServerIdInRedis(rclient, projectId, userId, serverId, callback) {
      rclient.setex(
        this.buildKey(projectId, userId),
        this._getTTLInSeconds(serverId),
        serverId,
        callback
      )
    },

    clearServerId(projectId, userId, callback) {
      if (!clsiCookiesEnabled) {
        return callback()
      }
      rclient.del(this.buildKey(projectId, userId), err => {
        if (err) {
          // redis errors need wrapping as the instance may be shared
          return callback(
            new OError(
              'Failed to clear clsi persistence',
              { projectId, userId },
              err
            )
          )
        } else {
          return callback()
        }
      })
    },

    getCookieJar(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      callback
    ) {
      if (!clsiCookiesEnabled) {
        return callback(null, request.jar(), undefined)
      }
      this.getServerId(
        projectId,
        userId,
        compileGroup,
        compileBackendClass,
        (err, serverId) => {
          if (err != null) {
            OError.tag(err, 'error getting server id', {
              project_id: projectId,
            })
            return callback(err)
          }
          const serverCookie = request.cookie(
            `${Settings.clsiCookie.key}=${serverId}`
          )
          const jar = request.jar()
          jar.setCookie(serverCookie, Settings.apis.clsi.url)
          callback(null, jar, serverId)
        }
      )
    },
  }
  cookieManager.promises = promisifyAll(cookieManager, {
    without: [
      '_parseServerIdFromResponse',
      'checkIsLoadSheddingEvent',
      '_getTTLInSeconds',
    ],
    multiResult: {
      getCookieJar: ['jar', 'clsiServerId'],
    },
  })
  return cookieManager
}
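A minimal sketch of how the factory and its promisified API could be used; the backend group name and the compileGroup/compileBackendClass values here are illustrative assumptions:

// The module is a factory keyed by backend group; the promisified
// getCookieJar returns both the cookie jar and the sticky clsi server id.
const ClsiCookieManager = require('./ClsiCookieManager')('web') // 'web' is hypothetical

async function exampleLookup(projectId, userId) {
  const { jar, clsiServerId } = await ClsiCookieManager.promises.getCookieJar(
    projectId,
    userId,
    'standard', // compileGroup — illustrative value
    'n2d' // compileBackendClass — illustrative value
  )
  return { jar, clsiServerId }
}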
89
services/web/app/src/Features/Compile/ClsiFormatChecker.js
Normal file
@@ -0,0 +1,89 @@
/* eslint-disable
    max-len,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ClsiFormatChecker
const _ = require('lodash')
const async = require('async')
const settings = require('@overleaf/settings')
const { promisifyAll } = require('@overleaf/promise-utils')

module.exports = ClsiFormatChecker = {
  checkRecoursesForProblems(resources, callback) {
    const jobs = {
      conflictedPaths(cb) {
        return ClsiFormatChecker._checkForConflictingPaths(resources, cb)
      },

      sizeCheck(cb) {
        return ClsiFormatChecker._checkDocsAreUnderSizeLimit(resources, cb)
      },
    }

    return async.series(jobs, function (err, problems) {
      if (err != null) {
        return callback(err)
      }

      problems = _.omitBy(problems, _.isEmpty)

      if (_.isEmpty(problems)) {
        return callback()
      } else {
        return callback(null, problems)
      }
    })
  },

  _checkForConflictingPaths(resources, callback) {
    const paths = resources.map(resource => resource.path)

    const conflicts = _.filter(paths, function (path) {
      const matchingPaths = _.filter(
        paths,
        checkPath => checkPath.indexOf(path + '/') !== -1
      )

      return matchingPaths.length > 0
    })

    const conflictObjects = conflicts.map(conflict => ({ path: conflict }))

    return callback(null, conflictObjects)
  },

  _checkDocsAreUnderSizeLimit(resources, callback) {
    const sizeLimit = 1000 * 1000 * settings.compileBodySizeLimitMb

    let totalSize = 0

    let sizedResources = resources.map(function (resource) {
      const result = { path: resource.path }
      if (resource.content != null) {
        result.size = resource.content.replace(/\n/g, '').length
        result.kbSize = Math.ceil(result.size / 1000)
      } else {
        result.size = 0
      }
      totalSize += result.size
      return result
    })

    const tooLarge = totalSize > sizeLimit
    if (!tooLarge) {
      return callback()
    } else {
      sizedResources = _.sortBy(sizedResources, 'size').reverse().slice(0, 10)
      return callback(null, { resources: sizedResources, totalSize })
    }
  },
}

module.exports.promises = promisifyAll(module.exports)
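A short sketch of the checker on a deliberately conflicting resource list; the sample resources are made up, and a real call also needs `settings.compileBodySizeLimitMb` to be configured:

// Checking a resource list before it is sent to the CLSI.
const ClsiFormatChecker = require('./ClsiFormatChecker')

const resources = [
  { path: 'main.tex', content: '\\documentclass{article}...' },
  { path: 'main.tex/extra.tex', content: '' }, // conflicts with main.tex
]
ClsiFormatChecker.checkRecoursesForProblems(resources, (err, problems) => {
  if (err) throw err
  // problems is undefined when everything is fine, otherwise e.g.
  // { conflictedPaths: [{ path: 'main.tex' }] }
  console.log(problems)
})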
873
services/web/app/src/Features/Compile/ClsiManager.js
Normal file
@@ -0,0 +1,873 @@
const { callbackify } = require('util')
const { callbackifyMultiResult } = require('@overleaf/promise-utils')
const {
  fetchString,
  fetchStringWithResponse,
  fetchStream,
  RequestFailedError,
} = require('@overleaf/fetch-utils')
const Settings = require('@overleaf/settings')
const ProjectGetter = require('../Project/ProjectGetter')
const ProjectEntityHandler = require('../Project/ProjectEntityHandler')
const logger = require('@overleaf/logger')
const OError = require('@overleaf/o-error')
const { Cookie } = require('tough-cookie')
const ClsiCookieManager = require('./ClsiCookieManager')(
  Settings.apis.clsi?.backendGroupName
)
const Features = require('../../infrastructure/Features')
const NewBackendCloudClsiCookieManager = require('./ClsiCookieManager')(
  Settings.apis.clsi_new?.backendGroupName
)
const ClsiStateManager = require('./ClsiStateManager')
const _ = require('lodash')
const ClsiFormatChecker = require('./ClsiFormatChecker')
const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')
const Metrics = require('@overleaf/metrics')
const Errors = require('../Errors/Errors')
const ClsiCacheHandler = require('./ClsiCacheHandler')
const { getBlobLocation } = require('../History/HistoryManager')

const VALID_COMPILERS = ['pdflatex', 'latex', 'xelatex', 'lualatex']
const OUTPUT_FILE_TIMEOUT_MS = 60000
const CLSI_COOKIES_ENABLED = (Settings.clsiCookie?.key ?? '') !== ''

// The timeout in services/clsi/app.js is 10 minutes, so we'll be on the safe side with 12 minutes
const COMPILE_REQUEST_TIMEOUT_MS = 12 * 60 * 1000

function collectMetricsOnBlgFiles(outputFiles) {
  let topLevel = 0
  let nested = 0
  for (const outputFile of outputFiles) {
    if (outputFile.type === 'blg') {
      if (outputFile.path.includes('/')) {
        nested++
      } else {
        topLevel++
      }
    }
  }
  Metrics.count('blg_output_file', topLevel, 1, { path: 'top-level' })
  Metrics.count('blg_output_file', nested, 1, { path: 'nested' })
}

async function sendRequest(projectId, userId, options) {
  if (options == null) {
    options = {}
  }
  let result = await sendRequestOnce(projectId, userId, options)
  if (result.status === 'conflict') {
    // Try again, with a full compile
    result = await sendRequestOnce(projectId, userId, {
      ...options,
      syncType: 'full',
    })
  } else if (result.status === 'unavailable') {
    result = await sendRequestOnce(projectId, userId, {
      ...options,
      syncType: 'full',
      forceNewClsiServer: true,
    })
  }
  return result
}

async function sendRequestOnce(projectId, userId, options) {
  let req
  try {
    req = await _buildRequest(projectId, options)
  } catch (err) {
    if (err.message === 'no main file specified') {
      return {
        status: 'validation-problems',
        validationProblems: { mainFile: err.message },
      }
    } else {
      throw OError.tag(err, 'Could not build request to CLSI', {
        projectId,
        options,
      })
    }
  }
  return await _sendBuiltRequest(projectId, userId, req, options)
}

// for public API requests where there is no project id
async function sendExternalRequest(submissionId, clsiRequest, options) {
  if (options == null) {
    options = {}
  }
  return await _sendBuiltRequest(submissionId, null, clsiRequest, options)
}

async function stopCompile(projectId, userId, options) {
  if (options == null) {
    options = {}
  }
  const { compileBackendClass, compileGroup } = options
  const url = _getCompilerUrl(
    compileBackendClass,
    compileGroup,
    projectId,
    userId,
    'compile/stop'
  )
  const opts = { method: 'POST' }
  await _makeRequest(
    projectId,
    userId,
    compileGroup,
    compileBackendClass,
    url,
    opts
  )
}

async function deleteAuxFiles(projectId, userId, options, clsiserverid) {
  if (options == null) {
    options = {}
  }
  const { compileBackendClass, compileGroup } = options
  const url = _getCompilerUrl(
    compileBackendClass,
    compileGroup,
    projectId,
    userId
  )
  const opts = {
    method: 'DELETE',
  }

  try {
    await _makeRequestWithClsiServerId(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      url,
      opts,
      clsiserverid
    )
  } finally {
    // always clear the clsi-cache
    try {
      await ClsiCacheHandler.clearCache(projectId, userId)
    } catch (err) {
      logger.warn({ err, projectId, userId }, 'purge clsi-cache failed')
    }

    // always clear the project state from the docupdater, even if there
    // was a problem with the request to the clsi
    try {
      await DocumentUpdaterHandler.promises.clearProjectState(projectId)
    } finally {
      await ClsiCookieManager.promises.clearServerId(projectId, userId)
    }
  }
}
async function _sendBuiltRequest(projectId, userId, req, options) {
  if (options.forceNewClsiServer) {
    await ClsiCookieManager.promises.clearServerId(projectId, userId)
  }
  const validationProblems =
    await ClsiFormatChecker.promises.checkRecoursesForProblems(
      req.compile?.resources
    )
  if (validationProblems != null) {
    logger.debug(
      { projectId, validationProblems },
      'problems with users latex before compile was attempted'
    )
    return {
      status: 'validation-problems',
      validationProblems,
    }
  }

  const { response, clsiServerId } = await _postToClsi(
    projectId,
    userId,
    req,
    options.compileBackendClass,
    options.compileGroup
  )

  const outputFiles = _parseOutputFiles(
    projectId,
    response && response.compile && response.compile.outputFiles
  )
  collectMetricsOnBlgFiles(outputFiles)
  const compile = response?.compile || {}
  return {
    status: compile.status,
    outputFiles,
    clsiServerId,
    buildId: compile.buildId,
    stats: compile.stats,
    timings: compile.timings,
    outputUrlPrefix: compile.outputUrlPrefix,
  }
}

async function _makeRequestWithClsiServerId(
  projectId,
  userId,
  compileGroup,
  compileBackendClass,
  url,
  opts,
  clsiserverid
) {
  if (clsiserverid) {
    // ignore cookies and newBackend, go straight to the clsi node
    url.searchParams.set('compileGroup', compileGroup)
    url.searchParams.set('compileBackendClass', compileBackendClass)
    url.searchParams.set('clsiserverid', clsiserverid)

    let body
    try {
      body = await fetchString(url, opts)
    } catch (err) {
      throw OError.tag(err, 'error making request to CLSI', {
        userId,
        projectId,
      })
    }

    let json
    try {
      json = JSON.parse(body)
    } catch (err) {
      // some responses are empty. Ignore JSON parsing errors.
    }

    return { body: json }
  } else {
    return await _makeRequest(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      url,
      opts
    )
  }
}

async function _makeRequest(
  projectId,
  userId,
  compileGroup,
  compileBackendClass,
  url,
  opts
) {
  const currentBackendStartTime = new Date()
  const clsiServerId = await ClsiCookieManager.promises.getServerId(
    projectId,
    userId,
    compileGroup,
    compileBackendClass
  )
  opts.headers = {
    Accept: 'application/json',
    'Content-Type': 'application/json',
  }

  if (CLSI_COOKIES_ENABLED) {
    const cookie = new Cookie({
      key: Settings.clsiCookie.key,
      value: clsiServerId,
    })
    opts.headers.Cookie = cookie.cookieString()
  }

  const timer = new Metrics.Timer('compile.currentBackend')

  let response, body
  try {
    ;({ body, response } = await fetchStringWithResponse(url, opts))
  } catch (err) {
    throw OError.tag(err, 'error making request to CLSI', {
      projectId,
      userId,
    })
  }

  Metrics.inc(`compile.currentBackend.response.${response.status}`)

  let json
  try {
    json = JSON.parse(body)
  } catch (err) {
    // some responses are empty. Ignore JSON parsing errors
  }

  timer.done()
  let newClsiServerId
  if (CLSI_COOKIES_ENABLED) {
    newClsiServerId = _getClsiServerIdFromResponse(response)
    await ClsiCookieManager.promises.setServerId(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      newClsiServerId,
      clsiServerId
    )
  }
  const currentCompileTime = new Date() - currentBackendStartTime

  // Start new backend request in the background
  const newBackendStartTime = new Date()
  _makeNewBackendRequest(
    projectId,
    userId,
    compileGroup,
    compileBackendClass,
    url,
    opts
  )
    .then(result => {
      if (result == null) {
        return
      }
      const { response: newBackendResponse } = result
      Metrics.inc(`compile.newBackend.response.${newBackendResponse.status}`)
      const newBackendCompileTime = new Date() - newBackendStartTime
      const currentStatusCode = response.status
      const newStatusCode = newBackendResponse.status
      const statusCodeSame = newStatusCode === currentStatusCode
      const timeDifference = newBackendCompileTime - currentCompileTime
      logger.debug(
        {
          statusCodeSame,
          timeDifference,
          currentCompileTime,
          newBackendCompileTime,
          projectId,
        },
        'both clsi requests returned'
      )
    })
    .catch(err => {
      logger.warn({ err }, 'Error making request to new CLSI backend')
    })

  return {
    body: json,
    clsiServerId: newClsiServerId || clsiServerId,
  }
}

async function _makeNewBackendRequest(
  projectId,
  userId,
  compileGroup,
  compileBackendClass,
  url,
  opts
) {
  if (Settings.apis.clsi_new?.url == null) {
    return null
  }
  url = url
    .toString()
    .replace(Settings.apis.clsi.url, Settings.apis.clsi_new.url)

  const clsiServerId =
    await NewBackendCloudClsiCookieManager.promises.getServerId(
      projectId,
      userId,
      compileGroup,
      compileBackendClass
    )
  opts.headers = {
    Accept: 'application/json',
    'Content-Type': 'application/json',
  }

  if (CLSI_COOKIES_ENABLED) {
    const cookie = new Cookie({
      key: Settings.clsiCookie.key,
      value: clsiServerId,
    })
    opts.headers.Cookie = cookie.cookieString()
  }

  const timer = new Metrics.Timer('compile.newBackend')

  let response, body
  try {
    ;({ body, response } = await fetchStringWithResponse(url, opts))
  } catch (err) {
    throw OError.tag(err, 'error making request to new CLSI', {
      userId,
      projectId,
    })
  }

  let json
  try {
    json = JSON.parse(body)
  } catch (err) {
    // Some responses are empty. Ignore JSON parsing errors
  }
  timer.done()
  if (CLSI_COOKIES_ENABLED) {
    const newClsiServerId = _getClsiServerIdFromResponse(response)
    await NewBackendCloudClsiCookieManager.promises.setServerId(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      newClsiServerId,
      clsiServerId
    )
  }
  return { response, body: json }
}

function _getCompilerUrl(
  compileBackendClass,
  compileGroup,
  projectId,
  userId,
  action
) {
  const u = new URL(`/project/${projectId}`, Settings.apis.clsi.url)
  if (userId != null) {
    u.pathname += `/user/${userId}`
  }
  if (action != null) {
    u.pathname += `/${action}`
  }
  u.searchParams.set('compileBackendClass', compileBackendClass)
  u.searchParams.set('compileGroup', compileGroup)
  return u
}
async function _postToClsi(
  projectId,
  userId,
  req,
  compileBackendClass,
  compileGroup
) {
  const url = _getCompilerUrl(
    compileBackendClass,
    compileGroup,
    projectId,
    userId,
    'compile'
  )
  const opts = {
    json: req,
    method: 'POST',
    signal: AbortSignal.timeout(COMPILE_REQUEST_TIMEOUT_MS),
  }
  try {
    const { body, clsiServerId } = await _makeRequest(
      projectId,
      userId,
      compileGroup,
      compileBackendClass,
      url,
      opts
    )
    return { response: body, clsiServerId }
  } catch (err) {
    if (err instanceof RequestFailedError) {
      if (err.response.status === 413) {
        return { response: { compile: { status: 'project-too-large' } } }
      } else if (err.response.status === 409) {
        return { response: { compile: { status: 'conflict' } } }
      } else if (err.response.status === 423) {
        return { response: { compile: { status: 'compile-in-progress' } } }
      } else if (err.response.status === 503) {
        return { response: { compile: { status: 'unavailable' } } }
      } else {
        throw new OError(
          `CLSI returned non-success code: ${err.response.status}`,
          {
            projectId,
            userId,
            compileOptions: req.compile.options,
            rootResourcePath: req.compile.rootResourcePath,
            clsiResponse: err.body,
            statusCode: err.response.status,
          }
        )
      }
    } else {
      throw new OError(
        'failed to make request to CLSI',
        {
          projectId,
          userId,
          compileOptions: req.compile.options,
          rootResourcePath: req.compile.rootResourcePath,
        },
        err
      )
    }
  }
}

function _parseOutputFiles(projectId, rawOutputFiles = []) {
  const outputFiles = []
  for (const file of rawOutputFiles) {
    const f = {
      path: file.path, // the clsi is now sending this to web
      url: new URL(file.url).pathname, // the location of the file on the clsi, excluding the host part
      type: file.type,
      build: file.build,
    }
    if (file.path === 'output.pdf') {
      f.contentId = file.contentId
      f.ranges = file.ranges || []
      f.size = file.size
      f.startXRefTable = file.startXRefTable
      f.createdAt = new Date()
    }
    outputFiles.push(f)
  }
  return outputFiles
}

async function _buildRequest(projectId, options) {
  const project = await ProjectGetter.promises.getProject(projectId, {
    compiler: 1,
    rootDoc_id: 1,
    imageName: 1,
    rootFolder: 1,
    'overleaf.history.id': 1,
  })
  if (project == null) {
    throw new Errors.NotFoundError(`project does not exist: ${projectId}`)
  }
  if (!VALID_COMPILERS.includes(project.compiler)) {
    project.compiler = 'pdflatex'
  }

  if (options.incrementalCompilesEnabled || options.syncType != null) {
    // new way, either incremental or full
    const timer = new Metrics.Timer('editor.compile-getdocs-redis')
    let projectStateHash, docUpdaterDocs
    try {
      ;({ projectStateHash, docs: docUpdaterDocs } =
        await getContentFromDocUpdaterIfMatch(projectId, project, options))
    } catch (err) {
      logger.error({ err, projectId }, 'error checking project state')
      // note: we don't bail out when there's an error getting
      // incremental files from the docupdater, we just fall back
      // to a normal compile below
    }
    timer.done()
    // see if we can send an incremental update to the CLSI
    if (docUpdaterDocs != null && options.syncType !== 'full') {
      Metrics.inc('compile-from-redis')
      return _buildRequestFromDocupdater(
        projectId,
        options,
        project,
        projectStateHash,
        docUpdaterDocs
      )
    } else {
      Metrics.inc('compile-from-mongo')
      return await _buildRequestFromMongo(
        projectId,
        options,
        project,
        projectStateHash
      )
    }
  } else {
    // old way, always from mongo
    const timer = new Metrics.Timer('editor.compile-getdocs-mongo')
    const { docs, files } = await _getContentFromMongo(projectId)
    timer.done()
    return _finaliseRequest(projectId, options, project, docs, files)
  }
}

async function getContentFromDocUpdaterIfMatch(projectId, project, options) {
  const projectStateHash = ClsiStateManager.computeHash(project, options)
  const docs = await DocumentUpdaterHandler.promises.getProjectDocsIfMatch(
    projectId,
    projectStateHash
  )
  return { projectStateHash, docs }
}

async function getOutputFileStream(
  projectId,
  userId,
  options,
  clsiServerId,
  buildId,
  outputFilePath
) {
  const { compileBackendClass, compileGroup } = options
  const url = new URL(
    `${Settings.apis.clsi.url}/project/${projectId}/user/${userId}/build/${buildId}/output/${outputFilePath}`
  )
  url.searchParams.set('compileBackendClass', compileBackendClass)
  url.searchParams.set('compileGroup', compileGroup)
  url.searchParams.set('clsiserverid', clsiServerId)
  try {
    const stream = await fetchStream(url, {
      signal: AbortSignal.timeout(OUTPUT_FILE_TIMEOUT_MS),
    })
    return stream
  } catch (err) {
    throw new Errors.OutputFileFetchFailedError(
      'failed to fetch output file from CLSI',
      {
        projectId,
        userId,
        url,
        status: err.response?.status,
      }
    )
  }
}

function _buildRequestFromDocupdater(
  projectId,
  options,
  project,
  projectStateHash,
  docUpdaterDocs
) {
  const docPath = ProjectEntityHandler.getAllDocPathsFromProject(project)
  const docs = {}
  for (const doc of docUpdaterDocs || []) {
    const path = docPath[doc._id]
    docs[path] = doc
  }
  // send new docs but not files as those are already on the clsi
  options = _.clone(options)
  options.syncType = 'incremental'
  options.syncState = projectStateHash
  // create stub doc entries for any possible root docs, if not
  // present in the docupdater. This allows finaliseRequest to
  // identify the root doc.
  const possibleRootDocIds = [options.rootDoc_id, project.rootDoc_id]
  for (const rootDocId of possibleRootDocIds) {
    if (rootDocId != null && rootDocId in docPath) {
      const path = docPath[rootDocId]
      if (docs[path] == null) {
        docs[path] = { _id: rootDocId, path }
      }
    }
  }
  return _finaliseRequest(projectId, options, project, docs, [])
}

async function _buildRequestFromMongo(
  projectId,
  options,
  project,
  projectStateHash
) {
  const { docs, files } = await _getContentFromMongo(projectId)
  options = {
    ...options,
    syncType: 'full',
    syncState: projectStateHash,
  }
  return _finaliseRequest(projectId, options, project, docs, files)
}

async function _getContentFromMongo(projectId) {
  await DocumentUpdaterHandler.promises.flushProjectToMongo(projectId)
  const docs = await ProjectEntityHandler.promises.getAllDocs(projectId)
  const files = await ProjectEntityHandler.promises.getAllFiles(projectId)
  return { docs, files }
}

function _finaliseRequest(projectId, options, project, docs, files) {
  const resources = []
  let flags
  let rootResourcePath = null
  let rootResourcePathOverride = null
  let hasMainFile = false
  let numberOfDocsInProject = 0

  for (let path in docs) {
    const doc = docs[path]
    path = path.replace(/^\//, '') // Remove leading /
    numberOfDocsInProject++
    if (doc.lines != null) {
      // add doc to resources unless it is just a stub entry
      resources.push({
        path,
        content: doc.lines.join('\n'),
      })
    }
    if (
      project.rootDoc_id != null &&
      doc._id.toString() === project.rootDoc_id.toString()
    ) {
      rootResourcePath = path
    }
    if (
      options.rootDoc_id != null &&
      doc._id.toString() === options.rootDoc_id.toString()
    ) {
      rootResourcePathOverride = path
    }
    if (path === 'main.tex') {
      hasMainFile = true
    }
  }

  if (rootResourcePathOverride != null) {
    rootResourcePath = rootResourcePathOverride
  }
  if (rootResourcePath == null) {
    if (hasMainFile) {
      rootResourcePath = 'main.tex'
    } else if (numberOfDocsInProject === 1) {
      // only one file, must be the main document
      for (const path in docs) {
        // Remove leading /
        rootResourcePath = path.replace(/^\//, '')
      }
    } else {
      throw new OError('no main file specified', { projectId })
    }
  }

  const historyId = project.overleaf.history.id
  if (!historyId) {
    throw new OError('project does not have a history id', { projectId })
  }
  for (let path in files) {
    const file = files[path]
    path = path.replace(/^\//, '') // Remove leading /

    const filestoreURL = `${Settings.apis.filestore.url}/project/${project._id}/file/${file._id}`
    let url = filestoreURL
    let fallbackURL
    if (file.hash && Features.hasFeature('project-history-blobs')) {
      const { bucket, key } = getBlobLocation(historyId, file.hash)
      url = `${Settings.apis.filestore.url}/bucket/${bucket}/key/${key}`
      fallbackURL = filestoreURL
    }
    resources.push({
      path,
      url,
      fallbackURL,
      modified: file.created?.getTime(),
    })
  }

  if (options.fileLineErrors) {
    flags = ['-file-line-error']
  }

  return {
    compile: {
      options: {
        buildId: options.buildId,
        editorId: options.editorId,
        compiler: project.compiler,
        timeout: options.timeout,
        imageName: project.imageName,
        draft: Boolean(options.draft),
        stopOnFirstError: Boolean(options.stopOnFirstError),
        check: options.check,
        syncType: options.syncType,
        syncState: options.syncState,
        compileGroup: options.compileGroup,
        compileFromClsiCache: options.compileFromClsiCache,
        populateClsiCache: options.populateClsiCache,
        enablePdfCaching:
          (Settings.enablePdfCaching && options.enablePdfCaching) || false,
        pdfCachingMinChunkSize: options.pdfCachingMinChunkSize,
        flags,
        metricsMethod: options.compileGroup,
      },
      rootResourcePath,
      resources,
    },
  }
}

async function wordCount(projectId, userId, file, options, clsiserverid) {
  const { compileBackendClass, compileGroup } = options
  const req = await _buildRequest(projectId, options)
  const filename = file || req.compile.rootResourcePath
  const url = _getCompilerUrl(
    compileBackendClass,
    compileGroup,
    projectId,
    userId,
    'wordcount'
  )
  url.searchParams.set('file', filename)
  url.searchParams.set('image', req.compile.options.imageName)

  const opts = {
    method: 'GET',
  }
  const { body } = await _makeRequestWithClsiServerId(
    projectId,
    userId,
    compileGroup,
    compileBackendClass,
    url,
    opts,
    clsiserverid
  )
  return body
}

function _getClsiServerIdFromResponse(response) {
  const setCookieHeaders = response.headers.raw()['set-cookie'] ?? []
  for (const header of setCookieHeaders) {
    const cookie = Cookie.parse(header)
    if (cookie.key === Settings.clsiCookie.key) {
      return cookie.value
    }
  }
  return null
}

module.exports = {
  sendRequest: callbackifyMultiResult(sendRequest, [
    'status',
    'outputFiles',
    'clsiServerId',
    'validationProblems',
    'stats',
    'timings',
    'outputUrlPrefix',
    'buildId',
  ]),
  sendExternalRequest: callbackifyMultiResult(sendExternalRequest, [
    'status',
    'outputFiles',
    'clsiServerId',
    'validationProblems',
    'stats',
    'timings',
    'outputUrlPrefix',
  ]),
  stopCompile: callbackify(stopCompile),
  deleteAuxFiles: callbackify(deleteAuxFiles),
  getOutputFileStream: callbackify(getOutputFileStream),
  wordCount: callbackify(wordCount),
  promises: {
    sendRequest,
    sendExternalRequest,
    stopCompile,
    deleteAuxFiles,
    getOutputFileStream,
    wordCount,
  },
}
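A minimal sketch of driving a compile through the promisified API; the option values are illustrative (real limits come from elsewhere in the compile pipeline), and the 'success' status check assumes the CLSI's usual status string:

// Kick off a compile and pick out the PDF, if any.
const ClsiManager = require('./ClsiManager')

async function compileProject(projectId, userId) {
  const { status, outputFiles } = await ClsiManager.promises.sendRequest(
    projectId,
    userId,
    {
      compileGroup: 'standard', // illustrative value
      compileBackendClass: 'n2d', // illustrative value
      timeout: 60,
    }
  )
  if (status === 'success') {
    return outputFiles.find(f => f.path === 'output.pdf')
  }
  return null
}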
72
services/web/app/src/Features/Compile/ClsiStateManager.js
Normal file
@@ -0,0 +1,72 @@
/* eslint-disable
    n/handle-callback-err,
    max-len,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS205: Consider reworking code to avoid use of IIFEs
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ClsiStateManager
const Settings = require('@overleaf/settings')
const logger = require('@overleaf/logger')
const crypto = require('crypto')
const ProjectEntityHandler = require('../Project/ProjectEntityHandler')

// The "state" of a project is a hash of the relevant attributes in the
// project object; in this case we only need the rootFolder.
//
// The idea is that it will change if any doc or file is
// created/renamed/deleted, and also if the content of any file (not
// doc) changes.
//
// When the hash changes the full set of files on the CLSI will need to
// be updated. If it doesn't change then we can overwrite changed docs
// in place on the clsi, getting them from the docupdater.
//
// The docupdater is responsible for setting the key in redis, and
// unsetting it if it removes any documents from the doc updater.

const buildState = s =>
  crypto.createHash('sha1').update(s, 'utf8').digest('hex')

module.exports = ClsiStateManager = {
  computeHash(project, options) {
    const { docs, files } =
      ProjectEntityHandler.getAllEntitiesFromProject(project)
    const fileList = Array.from(files || []).map(
      f => `${f.file._id}:${f.file.rev}:${f.file.created}:${f.path}`
    )
    const docList = Array.from(docs || []).map(d => `${d.doc._id}:${d.path}`)
    const sortedEntityList = [
      ...Array.from(docList),
      ...Array.from(fileList),
    ].sort()
    // ignore the isAutoCompile options as it doesn't affect the
    // output, but include all other options e.g. draft
    const optionsList = (() => {
      const result = []
      const object = options || {}
      for (const key in object) {
        const value = object[key]
        if (!['isAutoCompile'].includes(key)) {
          result.push(`option ${key}:${value}`)
        }
      }
      return result
    })()
    const sortedOptionsList = optionsList.sort()
    const hash = buildState(
      [...Array.from(sortedEntityList), ...Array.from(sortedOptionsList)].join(
        '\n'
      )
    )
    return hash
  },
}
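A sketch of the invalidation property the hash encodes; the project shape below is a hypothetical minimum for getAllEntitiesFromProject, not taken from this commit:

// isAutoCompile is deliberately excluded from the hash, so toggling it
// does not invalidate the file set already on the CLSI.
const ClsiStateManager = require('./ClsiStateManager')

const project = { rootFolder: [{ docs: [], fileRefs: [], folders: [] }] } // assumed shape
const a = ClsiStateManager.computeHash(project, { draft: false })
const b = ClsiStateManager.computeHash(project, { draft: false, isAutoCompile: true })
console.log(a === b) // expected: true
const c = ClsiStateManager.computeHash(project, { draft: true })
console.log(a === c) // expected: false — draft does affect the output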
802
services/web/app/src/Features/Compile/CompileController.js
Normal file
@@ -0,0 +1,802 @@
|
||||
let CompileController
|
||||
const { URL, URLSearchParams } = require('url')
|
||||
const { pipeline } = require('stream/promises')
|
||||
const { Cookie } = require('tough-cookie')
|
||||
const OError = require('@overleaf/o-error')
|
||||
const Metrics = require('@overleaf/metrics')
|
||||
const ProjectGetter = require('../Project/ProjectGetter')
|
||||
const CompileManager = require('./CompileManager')
|
||||
const ClsiManager = require('./ClsiManager')
|
||||
const logger = require('@overleaf/logger')
|
||||
const Settings = require('@overleaf/settings')
|
||||
const SessionManager = require('../Authentication/SessionManager')
|
||||
const { RateLimiter } = require('../../infrastructure/RateLimiter')
|
||||
const ClsiCookieManager = require('./ClsiCookieManager')(
|
||||
Settings.apis.clsi?.backendGroupName
|
||||
)
|
||||
const Path = require('path')
|
||||
const AnalyticsManager = require('../Analytics/AnalyticsManager')
|
||||
const SplitTestHandler = require('../SplitTests/SplitTestHandler')
|
||||
const { callbackify } = require('@overleaf/promise-utils')
|
||||
const {
|
||||
fetchStreamWithResponse,
|
||||
RequestFailedError,
|
||||
} = require('@overleaf/fetch-utils')
|
||||
|
||||
const COMPILE_TIMEOUT_MS = 10 * 60 * 1000
|
||||
|
||||
const pdfDownloadRateLimiter = new RateLimiter('full-pdf-download', {
|
||||
points: 1000,
|
||||
duration: 60 * 60,
|
||||
})
|
||||
|
||||
function getOutputFilesArchiveSpecification(projectId, userId, buildId) {
|
||||
const fileName = 'output.zip'
|
||||
return {
|
||||
path: fileName,
|
||||
url: CompileController._getFileUrl(projectId, userId, buildId, fileName),
|
||||
type: 'zip',
|
||||
}
|
||||
}
|
||||
|
||||
function getImageNameForProject(projectId, callback) {
|
||||
ProjectGetter.getProject(projectId, { imageName: 1 }, (err, project) => {
|
||||
if (err) return callback(err)
|
||||
if (!project) return callback(new Error('project not found'))
|
||||
callback(null, project.imageName)
|
||||
})
|
||||
}
|
||||
|
||||
async function getPdfCachingMinChunkSize(req, res) {
|
||||
const { variant } = await SplitTestHandler.promises.getAssignment(
|
||||
req,
|
||||
res,
|
||||
'pdf-caching-min-chunk-size'
|
||||
)
|
||||
if (variant === 'default') return 1_000_000
|
||||
return parseInt(variant, 10)
|
||||
}
|
||||
|
||||
async function _getSplitTestOptions(req, res) {
|
||||
// Use the query flags from the editor request for overriding the split test.
|
||||
let query = {}
|
||||
try {
|
||||
const u = new URL(req.headers.referer || req.url, Settings.siteUrl)
|
||||
query = Object.fromEntries(u.searchParams.entries())
|
||||
} catch (e) {}
|
||||
const editorReq = { ...req, query }
|
||||
|
||||
// Lookup the clsi-cache flag in the backend.
|
||||
// We may need to turn off the feature on a short notice, without requiring
|
||||
// all users to reload their editor page to disable the feature.
|
||||
const { variant: compileFromClsiCacheVariant } =
|
||||
await SplitTestHandler.promises.getAssignment(
|
||||
editorReq,
|
||||
res,
|
||||
'compile-from-clsi-cache'
|
||||
)
|
||||
const compileFromClsiCache = compileFromClsiCacheVariant === 'enabled'
|
||||
const { variant: populateClsiCacheVariant } =
|
||||
await SplitTestHandler.promises.getAssignment(
|
||||
editorReq,
|
||||
res,
|
||||
'populate-clsi-cache'
|
||||
)
|
||||
const populateClsiCache = populateClsiCacheVariant === 'enabled'
|
||||
|
||||
const pdfDownloadDomain = Settings.pdfDownloadDomain
|
||||
|
||||
if (!req.query.enable_pdf_caching) {
|
||||
// The frontend does not want to do pdf caching.
|
||||
return {
|
||||
compileFromClsiCache,
|
||||
populateClsiCache,
|
||||
pdfDownloadDomain,
|
||||
enablePdfCaching: false,
|
||||
}
|
||||
}
|
||||
|
||||
// Double check with the latest split test assignment.
|
||||
// We may need to turn off the feature on a short notice, without requiring
|
||||
// all users to reload their editor page to disable the feature.
|
||||
const { variant } = await SplitTestHandler.promises.getAssignment(
|
||||
editorReq,
|
||||
res,
|
||||
'pdf-caching-mode'
|
||||
)
|
||||
const enablePdfCaching = variant === 'enabled'
|
||||
if (!enablePdfCaching) {
|
||||
// Skip the lookup of the chunk size when caching is not enabled.
|
||||
return {
|
||||
compileFromClsiCache,
|
||||
populateClsiCache,
|
||||
pdfDownloadDomain,
|
||||
enablePdfCaching: false,
|
||||
}
|
||||
}
|
||||
const pdfCachingMinChunkSize = await getPdfCachingMinChunkSize(editorReq, res)
|
||||
return {
|
||||
compileFromClsiCache,
|
||||
populateClsiCache,
|
||||
pdfDownloadDomain,
|
||||
enablePdfCaching,
|
||||
pdfCachingMinChunkSize,
|
||||
}
|
||||
}
|
||||
const getSplitTestOptionsCb = callbackify(_getSplitTestOptions)
|
||||
|
||||
module.exports = CompileController = {
|
||||
compile(req, res, next) {
|
||||
res.setTimeout(COMPILE_TIMEOUT_MS)
|
||||
const projectId = req.params.Project_id
|
||||
const isAutoCompile = !!req.query.auto_compile
|
||||
const fileLineErrors = !!req.query.file_line_errors
|
||||
const stopOnFirstError = !!req.body.stopOnFirstError
|
||||
const userId = SessionManager.getLoggedInUserId(req.session)
|
||||
const options = {
|
||||
isAutoCompile,
|
||||
fileLineErrors,
|
||||
stopOnFirstError,
|
||||
editorId: req.body.editorId,
|
||||
}
|
||||
|
||||
if (req.body.rootDoc_id) {
|
||||
options.rootDoc_id = req.body.rootDoc_id
|
||||
} else if (
|
||||
req.body.settingsOverride &&
|
||||
req.body.settingsOverride.rootDoc_id
|
||||
) {
|
||||
// Can be removed after deploy
|
||||
options.rootDoc_id = req.body.settingsOverride.rootDoc_id
|
||||
}
|
||||
if (req.body.compiler) {
|
||||
options.compiler = req.body.compiler
|
||||
}
|
||||
if (req.body.draft) {
|
||||
options.draft = req.body.draft
|
||||
}
|
||||
if (['validate', 'error', 'silent'].includes(req.body.check)) {
|
||||
options.check = req.body.check
|
||||
}
|
||||
if (req.body.incrementalCompilesEnabled) {
|
||||
options.incrementalCompilesEnabled = true
|
||||
}
|
||||
|
||||
getSplitTestOptionsCb(req, res, (err, splitTestOptions) => {
|
||||
if (err) return next(err)
|
||||
let {
|
||||
compileFromClsiCache,
|
||||
populateClsiCache,
|
||||
enablePdfCaching,
|
||||
pdfCachingMinChunkSize,
|
||||
pdfDownloadDomain,
|
||||
} = splitTestOptions
|
||||
options.compileFromClsiCache = compileFromClsiCache
|
||||
options.populateClsiCache = populateClsiCache
|
||||
options.enablePdfCaching = enablePdfCaching
|
||||
if (enablePdfCaching) {
|
||||
options.pdfCachingMinChunkSize = pdfCachingMinChunkSize
|
||||
}
|
||||
|
||||
CompileManager.compile(
|
||||
projectId,
|
||||
userId,
|
||||
options,
|
||||
(
|
||||
error,
|
||||
status,
|
||||
outputFiles,
|
||||
clsiServerId,
|
||||
limits,
|
||||
validationProblems,
|
||||
stats,
|
||||
timings,
|
||||
outputUrlPrefix,
|
||||
buildId
|
||||
) => {
|
||||
if (error) {
|
||||
Metrics.inc('compile-error')
|
||||
return next(error)
|
||||
}
|
||||
Metrics.inc('compile-status', 1, { status })
|
||||
if (pdfDownloadDomain && outputUrlPrefix) {
|
||||
pdfDownloadDomain += outputUrlPrefix
|
||||
}
|
||||
|
||||
if (
|
||||
limits &&
|
||||
SplitTestHandler.getPercentile(
|
||||
AnalyticsManager.getIdsFromSession(req.session).analyticsId,
|
||||
'compile-result-backend',
|
||||
'release'
|
||||
) === 1
|
||||
) {
|
||||
// For a compile request to be sent to clsi we need limits.
|
||||
// If we get here without having the limits object populated, it is
|
||||
// a reasonable assumption to make that nothing was compiled.
|
||||
// We need to know the limits in order to make use of the events.
|
||||
AnalyticsManager.recordEventForSession(
|
||||
req.session,
|
||||
'compile-result-backend',
|
||||
{
|
||||
projectId,
|
||||
ownerAnalyticsId: limits.ownerAnalyticsId,
|
||||
status,
|
||||
compileTime: timings?.compileE2E,
|
||||
timeout: limits.timeout === 60 ? 'short' : 'long',
|
||||
server: clsiServerId?.includes('-c2d-') ? 'faster' : 'normal',
|
||||
isAutoCompile,
|
||||
isInitialCompile: stats?.isInitialCompile === 1,
|
||||
restoredClsiCache: stats?.restoredClsiCache === 1,
|
||||
stopOnFirstError,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
const outputFilesArchive = buildId
|
||||
? getOutputFilesArchiveSpecification(projectId, userId, buildId)
|
||||
: null
|
||||
|
||||
res.json({
|
||||
status,
|
||||
outputFiles,
|
||||
outputFilesArchive,
|
||||
compileGroup: limits?.compileGroup,
|
||||
clsiServerId,
|
||||
validationProblems,
|
||||
stats,
|
||||
timings,
|
||||
outputUrlPrefix,
|
||||
pdfDownloadDomain,
|
||||
pdfCachingMinChunkSize,
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
},
|
||||
|
||||
stopCompile(req, res, next) {
|
||||
const projectId = req.params.Project_id
|
||||
const userId = SessionManager.getLoggedInUserId(req.session)
|
||||
CompileManager.stopCompile(projectId, userId, function (error) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
res.sendStatus(200)
|
||||
})
|
||||
},
|
||||
|
||||
// Used for submissions through the public API
|
||||
compileSubmission(req, res, next) {
|
||||
res.setTimeout(COMPILE_TIMEOUT_MS)
|
||||
const submissionId = req.params.submission_id
|
||||
const options = {}
|
||||
if (req.body?.rootResourcePath != null) {
|
||||
options.rootResourcePath = req.body.rootResourcePath
|
||||
}
|
||||
if (req.body?.compiler) {
|
||||
options.compiler = req.body.compiler
|
||||
}
|
||||
if (req.body?.draft) {
|
||||
options.draft = req.body.draft
|
||||
}
|
||||
if (['validate', 'error', 'silent'].includes(req.body?.check)) {
|
||||
options.check = req.body.check
|
||||
}
|
||||
options.compileGroup =
|
||||
req.body?.compileGroup || Settings.defaultFeatures.compileGroup
|
||||
options.compileBackendClass = Settings.apis.clsi.submissionBackendClass
|
||||
options.timeout =
|
||||
req.body?.timeout || Settings.defaultFeatures.compileTimeout
|
||||
ClsiManager.sendExternalRequest(
|
||||
submissionId,
|
||||
req.body,
|
||||
options,
|
||||
function (error, status, outputFiles, clsiServerId, validationProblems) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
res.json({
|
||||
status,
|
||||
outputFiles,
|
||||
clsiServerId,
|
||||
validationProblems,
|
||||
})
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
_getSplitTestOptions,
|
||||
|
||||
_getUserIdForCompile(req) {
|
||||
if (!Settings.disablePerUserCompiles) {
|
||||
return SessionManager.getLoggedInUserId(req.session)
|
||||
}
|
||||
return null
|
||||
},
|
||||
_compileAsUser(req, callback) {
|
||||
callback(null, CompileController._getUserIdForCompile(req))
|
||||
},
|
||||
_downloadAsUser(req, callback) {
|
||||
callback(null, CompileController._getUserIdForCompile(req))
|
||||
},
|
||||
|
||||
downloadPdf(req, res, next) {
|
||||
Metrics.inc('pdf-downloads')
|
||||
const projectId = req.params.Project_id
|
||||
const rateLimit = function (callback) {
|
||||
pdfDownloadRateLimiter
|
||||
.consume(req.ip, 1, { method: 'ip' })
|
||||
.then(() => {
|
||||
callback(null, true)
|
||||
})
|
||||
.catch(err => {
|
||||
if (err instanceof Error) {
|
||||
callback(err)
|
||||
} else {
|
||||
callback(null, false)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
ProjectGetter.getProject(projectId, { name: 1 }, function (err, project) {
|
||||
if (err) {
|
||||
return next(err)
|
||||
}
|
||||
res.contentType('application/pdf')
|
||||
const filename = `${CompileController._getSafeProjectName(project)}.pdf`
|
||||
|
||||
if (req.query.popupDownload) {
|
||||
res.setContentDisposition('attachment', { filename })
|
||||
} else {
|
||||
res.setContentDisposition('inline', { filename })
|
||||
}
|
||||
|
||||
rateLimit(function (err, canContinue) {
|
||||
if (err) {
|
||||
logger.err({ err }, 'error checking rate limit for pdf download')
|
||||
res.sendStatus(500)
|
||||
} else if (!canContinue) {
|
||||
logger.debug(
|
||||
{ projectId, ip: req.ip },
|
||||
'rate limit hit downloading pdf'
|
||||
)
|
||||
res.sendStatus(500)
|
||||
} else {
|
||||
CompileController._downloadAsUser(req, function (error, userId) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
const url = CompileController._getFileUrl(
|
||||
projectId,
|
||||
userId,
|
||||
req.params.build_id,
|
||||
'output.pdf'
|
||||
)
|
||||
CompileController.proxyToClsi(
|
||||
projectId,
|
||||
'output-file',
|
||||
url,
|
||||
{},
|
||||
req,
|
||||
res,
|
||||
next
|
||||
)
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
},
|
||||
|
||||
_getSafeProjectName(project) {
|
||||
return project.name.replace(/[^\p{L}\p{Nd}]/gu, '_')
|
||||
},
|
||||
|
||||
deleteAuxFiles(req, res, next) {
|
||||
const projectId = req.params.Project_id
|
||||
const { clsiserverid } = req.query
|
||||
CompileController._compileAsUser(req, function (error, userId) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
CompileManager.deleteAuxFiles(
|
||||
projectId,
|
||||
userId,
|
||||
clsiserverid,
|
||||
function (error) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
res.sendStatus(200)
|
||||
}
|
||||
)
|
||||
})
|
||||
},
|
||||
|
||||
// this is only used by templates, so is not called with a userId
|
||||
compileAndDownloadPdf(req, res, next) {
|
||||
const projectId = req.params.project_id
|
||||
// pass userId as null, since templates are an "anonymous" compile
|
||||
CompileManager.compile(projectId, null, {}, (err, _status, outputFiles) => {
|
||||
if (err) {
|
||||
logger.err(
|
||||
{ err, projectId },
|
||||
'something went wrong compile and downloading pdf'
|
||||
)
|
||||
res.sendStatus(500)
|
||||
return
|
||||
}
|
||||
const pdf = outputFiles.find(f => f.path === 'output.pdf')
|
||||
if (!pdf) {
|
||||
logger.warn(
|
||||
{ projectId },
|
||||
'something went wrong compile and downloading pdf: no pdf'
|
||||
)
|
||||
res.sendStatus(500)
|
||||
return
|
||||
}
|
||||
CompileController.proxyToClsi(
|
||||
projectId,
|
||||
'output-file',
|
||||
pdf.url,
|
||||
{},
|
||||
req,
|
||||
res,
|
||||
next
|
||||
)
|
||||
})
|
||||
},
|
||||
|
||||
getFileFromClsi(req, res, next) {
|
||||
const projectId = req.params.Project_id
|
||||
CompileController._downloadAsUser(req, function (error, userId) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
|
||||
const qs = {}
|
||||
|
||||
const url = CompileController._getFileUrl(
|
||||
projectId,
|
||||
userId,
|
||||
req.params.build_id,
|
||||
req.params.file
|
||||
)
|
||||
CompileController.proxyToClsi(
|
||||
projectId,
|
||||
'output-file',
|
||||
url,
|
||||
qs,
|
||||
req,
|
||||
res,
|
||||
next
|
||||
)
|
||||
})
|
||||
},
|
||||
|
||||
getFileFromClsiWithoutUser(req, res, next) {
|
||||
const submissionId = req.params.submission_id
|
||||
const url = CompileController._getFileUrl(
|
||||
submissionId,
|
||||
null,
|
||||
req.params.build_id,
|
||||
req.params.file
|
||||
)
|
||||
const limits = {
|
||||
compileGroup:
|
||||
req.body?.compileGroup ||
|
||||
req.query?.compileGroup ||
|
||||
Settings.defaultFeatures.compileGroup,
|
||||
compileBackendClass: Settings.apis.clsi.submissionBackendClass,
|
||||
}
|
||||
CompileController.proxyToClsiWithLimits(
|
||||
submissionId,
|
||||
'output-file',
|
||||
url,
|
||||
{},
|
||||
limits,
|
||||
req,
|
||||
res,
|
||||
next
|
||||
)
|
||||
},
|
||||
|
||||
// compute a GET file url for a given project, user (optional), build (optional) and file
|
||||
_getFileUrl(projectId, userId, buildId, file) {
|
||||
let url
|
||||
if (userId != null && buildId != null) {
|
||||
url = `/project/${projectId}/user/${userId}/build/${buildId}/output/${file}`
|
||||
} else if (userId != null) {
|
||||
url = `/project/${projectId}/user/${userId}/output/${file}`
|
||||
} else if (buildId != null) {
|
||||
url = `/project/${projectId}/build/${buildId}/output/${file}`
|
||||
} else {
|
||||
url = `/project/${projectId}/output/${file}`
|
||||
}
|
||||
return url
|
||||
},
|
||||
|
||||
// compute a POST url for a project, user (optional) and action
|
||||
_getUrl(projectId, userId, action) {
|
||||
let path = `/project/${projectId}`
|
||||
if (userId != null) {
|
||||
path += `/user/${userId}`
|
||||
}
|
||||
return `${path}/${action}`
|
||||
},
|
||||
|
||||
proxySyncPdf(req, res, next) {
|
||||
const projectId = req.params.Project_id
|
||||
const { page, h, v, editorId, buildId } = req.query
|
||||
if (!page?.match(/^\d+$/)) {
|
||||
return next(new Error('invalid page parameter'))
|
||||
}
|
||||
if (!h?.match(/^-?\d+\.\d+$/)) {
|
||||
return next(new Error('invalid h parameter'))
|
||||
}
|
||||
if (!v?.match(/^-?\d+\.\d+$/)) {
|
||||
return next(new Error('invalid v parameter'))
|
||||
}
|
||||
// whether this request is going to a per-user container
|
||||
CompileController._compileAsUser(req, function (error, userId) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
getImageNameForProject(projectId, (error, imageName) => {
|
||||
if (error) return next(error)
|
||||
|
||||
getSplitTestOptionsCb(req, res, (error, splitTestOptions) => {
|
||||
if (error) return next(error)
|
||||
const { compileFromClsiCache } = splitTestOptions
|
||||
|
||||
const url = CompileController._getUrl(projectId, userId, 'sync/pdf')
|
||||
|
||||
CompileController.proxyToClsi(
|
||||
projectId,
|
||||
'sync-to-pdf',
|
||||
url,
|
||||
{ page, h, v, imageName, editorId, buildId, compileFromClsiCache },
|
||||
req,
|
||||
res,
|
||||
next
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
},
|
||||
|
||||
proxySyncCode(req, res, next) {
|
||||
const projectId = req.params.Project_id
|
||||
const { file, line, column, editorId, buildId } = req.query
|
||||
if (file == null) {
|
||||
return next(new Error('missing file parameter'))
|
||||
}
|
||||
// Check that we are dealing with a simple file path (this is not
|
||||
// strictly needed because synctex uses this parameter as a label
|
||||
// to look up in the synctex output, and does not open the file
|
||||
// itself). Since we have valid synctex paths like foo/./bar we
|
||||
// allow those by replacing /./ with /
|
||||
const testPath = file.replace('/./', '/')
|
||||
if (Path.resolve('/', testPath) !== `/${testPath}`) {
|
||||
return next(new Error('invalid file parameter'))
|
||||
}
|
||||
if (!line?.match(/^\d+$/)) {
|
||||
return next(new Error('invalid line parameter'))
|
||||
}
|
||||
if (!column?.match(/^\d+$/)) {
|
||||
return next(new Error('invalid column parameter'))
|
||||
}
|
||||
CompileController._compileAsUser(req, function (error, userId) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
getImageNameForProject(projectId, (error, imageName) => {
|
||||
if (error) return next(error)
|
||||
|
||||
getSplitTestOptionsCb(req, res, (error, splitTestOptions) => {
|
||||
if (error) return next(error)
|
||||
const { compileFromClsiCache } = splitTestOptions
|
||||
|
||||
const url = CompileController._getUrl(projectId, userId, 'sync/code')
|
||||
CompileController.proxyToClsi(
|
||||
projectId,
|
||||
'sync-to-code',
|
||||
url,
|
||||
{
|
||||
file,
|
||||
line,
|
||||
column,
|
||||
imageName,
|
||||
editorId,
|
||||
buildId,
|
||||
compileFromClsiCache,
|
||||
},
|
||||
req,
|
||||
res,
|
||||
next
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
},
|
||||
|
||||
proxyToClsi(projectId, action, url, qs, req, res, next) {
|
||||
CompileManager.getProjectCompileLimits(projectId, function (error, limits) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
CompileController.proxyToClsiWithLimits(
|
||||
projectId,
|
||||
action,
|
||||
url,
|
||||
qs,
|
||||
limits,
|
||||
req,
|
||||
res,
|
||||
next
|
||||
)
|
||||
})
|
||||
},
|
||||
|
||||
proxyToClsiWithLimits(projectId, action, url, qs, limits, req, res, next) {
|
||||
_getPersistenceOptions(
|
||||
req,
|
||||
projectId,
|
||||
limits.compileGroup,
|
||||
limits.compileBackendClass,
|
||||
(err, persistenceOptions) => {
|
||||
if (err) {
|
||||
OError.tag(err, 'error getting cookie jar for clsi request')
|
||||
return next(err)
|
||||
}
|
||||
url = new URL(`${Settings.apis.clsi.url}${url}`)
|
||||
url.search = new URLSearchParams({
|
||||
...persistenceOptions.qs,
|
||||
...qs,
|
||||
}).toString()
|
||||
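// histogram buckets in milliseconds, matching the 60s fetch timeout below
|
||||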
const timer = new Metrics.Timer(
|
||||
'proxy_to_clsi',
|
||||
1,
|
||||
{ path: action },
|
||||
[0, 100, 1000, 2000, 5000, 10000, 15000, 20000, 30000, 45000, 60000]
|
||||
)
|
||||
Metrics.inc('proxy_to_clsi', 1, { path: action, status: 'start' })
|
||||
fetchStreamWithResponse(url.href, {
|
||||
method: req.method,
|
||||
signal: AbortSignal.timeout(60 * 1000),
|
||||
headers: persistenceOptions.headers,
|
||||
})
|
||||
.then(({ stream, response }) => {
|
||||
if (req.destroyed) {
|
||||
// The client has disconnected already, avoid trying to write into the broken connection.
|
||||
Metrics.inc('proxy_to_clsi', 1, {
|
||||
path: action,
|
||||
status: 'req-aborted',
|
||||
})
|
||||
return
|
||||
}
|
||||
Metrics.inc('proxy_to_clsi', 1, {
|
||||
path: action,
|
||||
status: response.status,
|
||||
})
|
||||
|
||||
for (const key of ['Content-Length', 'Content-Type']) {
|
||||
if (response.headers.has(key)) {
|
||||
res.setHeader(key, response.headers.get(key))
|
||||
}
|
||||
}
|
||||
res.writeHead(response.status)
|
||||
return pipeline(stream, res)
|
||||
})
|
||||
.then(() => {
|
||||
timer.labels.status = 'success'
|
||||
timer.done()
|
||||
})
|
||||
.catch(err => {
|
||||
const reqAborted = Boolean(req.destroyed)
|
||||
const status = reqAborted ? 'req-aborted-late' : 'error'
|
||||
timer.labels.status = status
|
||||
const duration = timer.done()
|
||||
Metrics.inc('proxy_to_clsi', 1, { path: action, status })
|
||||
const streamingStarted = Boolean(res.headersSent)
|
||||
if (!streamingStarted) {
|
||||
if (err instanceof RequestFailedError) {
|
||||
res.sendStatus(err.response.status)
|
||||
} else {
|
||||
res.sendStatus(500)
|
||||
}
|
||||
}
|
||||
if (
|
||||
streamingStarted &&
|
||||
reqAborted &&
|
||||
err.code === 'ERR_STREAM_PREMATURE_CLOSE'
|
||||
) {
|
||||
// Ignore noisy spurious error
|
||||
return
|
||||
}
|
||||
if (
|
||||
err instanceof RequestFailedError &&
|
||||
['sync-to-code', 'sync-to-pdf', 'output-file'].includes(action)
|
||||
) {
|
||||
// Ignore noisy error
|
||||
// https://github.com/overleaf/internal/issues/15201
|
||||
return
|
||||
}
|
||||
logger.warn(
|
||||
{
|
||||
err,
|
||||
projectId,
|
||||
url,
|
||||
action,
|
||||
reqAborted,
|
||||
streamingStarted,
|
||||
duration,
|
||||
},
|
||||
'CLSI proxy error'
|
||||
)
|
||||
})
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
wordCount(req, res, next) {
|
||||
const projectId = req.params.Project_id
|
||||
const file = req.query.file || false
|
||||
const { clsiserverid } = req.query
|
||||
CompileController._compileAsUser(req, function (error, userId) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
CompileManager.wordCount(
|
||||
projectId,
|
||||
userId,
|
||||
file,
|
||||
clsiserverid,
|
||||
function (error, body) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
res.json(body)
|
||||
}
|
||||
)
|
||||
})
|
||||
},
|
||||
}
|
||||
|
||||
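// Decide how to pin the request to a CLSI instance: prefer an explicit
|
||||
// clsiserverid from the query string (passed on as a query parameter),
|
||||
// otherwise fall back to the sticky-session cookie for this project/user.
|
||||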
function _getPersistenceOptions(
|
||||
req,
|
||||
projectId,
|
||||
compileGroup,
|
||||
compileBackendClass,
|
||||
callback
|
||||
) {
|
||||
const { clsiserverid } = req.query
|
||||
const userId = SessionManager.getLoggedInUserId(req.session)
|
||||
if (clsiserverid && typeof clsiserverid === 'string') {
|
||||
callback(null, {
|
||||
qs: { clsiserverid, compileGroup, compileBackendClass },
|
||||
headers: {},
|
||||
})
|
||||
} else {
|
||||
ClsiCookieManager.getServerId(
|
||||
projectId,
|
||||
userId,
|
||||
compileGroup,
|
||||
compileBackendClass,
|
||||
(err, clsiServerId) => {
|
||||
if (err) return callback(err)
|
||||
callback(null, {
|
||||
qs: { compileGroup, compileBackendClass },
|
||||
headers: clsiServerId
|
||||
? {
|
||||
Cookie: new Cookie({
|
||||
key: Settings.clsiCookie.key,
|
||||
value: clsiServerId,
|
||||
}).cookieString(),
|
||||
}
|
||||
: {},
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
249
services/web/app/src/Features/Compile/CompileManager.js
Normal file
@@ -0,0 +1,249 @@
|
||||
let CompileManager
|
||||
const Crypto = require('crypto')
|
||||
const Settings = require('@overleaf/settings')
|
||||
const RedisWrapper = require('../../infrastructure/RedisWrapper')
|
||||
const rclient = RedisWrapper.client('clsi_recently_compiled')
|
||||
const ProjectGetter = require('../Project/ProjectGetter')
|
||||
const ProjectRootDocManager = require('../Project/ProjectRootDocManager')
|
||||
const UserGetter = require('../User/UserGetter')
|
||||
const ClsiManager = require('./ClsiManager')
|
||||
const Metrics = require('@overleaf/metrics')
|
||||
const { RateLimiter } = require('../../infrastructure/RateLimiter')
|
||||
const UserAnalyticsIdCache = require('../Analytics/UserAnalyticsIdCache')
|
||||
const {
|
||||
callbackify,
|
||||
callbackifyMultiResult,
|
||||
} = require('@overleaf/promise-utils')
|
||||
|
||||
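// Wrap an async function so that every call records its duration under the given metrics key.
|
||||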
function instrumentWithTimer(fn, key) {
|
||||
return async (...args) => {
|
||||
const timer = new Metrics.Timer(key)
|
||||
try {
|
||||
return await fn(...args)
|
||||
} finally {
|
||||
timer.done()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
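// Produces ids like '18f3c2a4b5d-9f86d081884c7d65' (hex epoch milliseconds plus
|
||||
// 8 random bytes), so build ids sort roughly by creation time.
|
||||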
function generateBuildId() {
|
||||
return `${Date.now().toString(16)}-${Crypto.randomBytes(8).toString('hex')}`
|
||||
}
|
||||
|
||||
async function compile(projectId, userId, options = {}) {
|
||||
const recentlyCompiled = await CompileManager._checkIfRecentlyCompiled(
|
||||
projectId,
|
||||
userId
|
||||
)
|
||||
if (recentlyCompiled) {
|
||||
return { status: 'too-recently-compiled', outputFiles: [] }
|
||||
}
|
||||
|
||||
try {
|
||||
const canCompile = await CompileManager._checkIfAutoCompileLimitHasBeenHit(
|
||||
options.isAutoCompile,
|
||||
'everyone'
|
||||
)
|
||||
if (!canCompile) {
|
||||
return { status: 'autocompile-backoff', outputFiles: [] }
|
||||
}
|
||||
} catch (error) {
|
||||
return { status: 'autocompile-backoff', outputFiles: [] }
|
||||
}
|
||||
|
||||
await ProjectRootDocManager.promises.ensureRootDocumentIsSet(projectId)
|
||||
|
||||
const limits =
|
||||
await CompileManager.promises.getProjectCompileLimits(projectId)
|
||||
for (const key in limits) {
|
||||
const value = limits[key]
|
||||
options[key] = value
|
||||
}
|
||||
|
||||
try {
|
||||
const canCompile = await CompileManager._checkCompileGroupAutoCompileLimit(
|
||||
options.isAutoCompile,
|
||||
limits.compileGroup
|
||||
)
|
||||
if (!canCompile) {
|
||||
return { status: 'autocompile-backoff', outputFiles: [] }
|
||||
}
|
||||
} catch (error) {
|
||||
return { status: 'autocompile-backoff', outputFiles: [] }
|
||||
}
|
||||
|
||||
// Generate the buildId ahead of fetching the project content from redis/mongo so that the buildId's timestamp is before any lastUpdated date.
|
||||
options.buildId = generateBuildId()
|
||||
|
||||
// only pass userId down to clsi if this is a per-user compile
|
||||
const compileAsUser = Settings.disablePerUserCompiles ? undefined : userId
|
||||
const {
|
||||
status,
|
||||
outputFiles,
|
||||
clsiServerId,
|
||||
validationProblems,
|
||||
stats,
|
||||
timings,
|
||||
outputUrlPrefix,
|
||||
buildId,
|
||||
} = await ClsiManager.promises.sendRequest(projectId, compileAsUser, options)
|
||||
|
||||
return {
|
||||
status,
|
||||
outputFiles,
|
||||
clsiServerId,
|
||||
limits,
|
||||
validationProblems,
|
||||
stats,
|
||||
timings,
|
||||
outputUrlPrefix,
|
||||
buildId,
|
||||
}
|
||||
}
|
||||
|
||||
const instrumentedCompile = instrumentWithTimer(compile, 'editor.compile')
|
||||
|
||||
async function getProjectCompileLimits(projectId) {
|
||||
const project = await ProjectGetter.promises.getProject(projectId, {
|
||||
owner_ref: 1,
|
||||
})
|
||||
|
||||
const owner = await UserGetter.promises.getUser(project.owner_ref, {
|
||||
_id: 1,
|
||||
alphaProgram: 1,
|
||||
analyticsId: 1,
|
||||
betaProgram: 1,
|
||||
features: 1,
|
||||
})
|
||||
|
||||
const ownerFeatures = (owner && owner.features) || {}
|
||||
// put alpha users into their own compile group
|
||||
if (owner && owner.alphaProgram) {
|
||||
ownerFeatures.compileGroup = 'alpha'
|
||||
}
|
||||
const analyticsId = await UserAnalyticsIdCache.get(owner._id)
|
||||
|
||||
const compileGroup =
|
||||
ownerFeatures.compileGroup || Settings.defaultFeatures.compileGroup
|
||||
const limits = {
|
||||
timeout:
|
||||
ownerFeatures.compileTimeout || Settings.defaultFeatures.compileTimeout,
|
||||
compileGroup,
|
||||
compileBackendClass: compileGroup === 'standard' ? 'n2d' : 'c2d',
|
||||
ownerAnalyticsId: analyticsId,
|
||||
}
|
||||
return limits
|
||||
}
|
||||
|
||||
async function wordCount(projectId, userId, file, clsiserverid) {
|
||||
const limits =
|
||||
await CompileManager.promises.getProjectCompileLimits(projectId)
|
||||
return await ClsiManager.promises.wordCount(
|
||||
projectId,
|
||||
userId,
|
||||
file,
|
||||
limits,
|
||||
clsiserverid
|
||||
)
|
||||
}
|
||||
|
||||
async function stopCompile(projectId, userId) {
|
||||
const limits =
|
||||
await CompileManager.promises.getProjectCompileLimits(projectId)
|
||||
|
||||
return await ClsiManager.promises.stopCompile(projectId, userId, limits)
|
||||
}
|
||||
|
||||
async function deleteAuxFiles(projectId, userId, clsiserverid) {
|
||||
const limits =
|
||||
await CompileManager.promises.getProjectCompileLimits(projectId)
|
||||
|
||||
return await ClsiManager.promises.deleteAuxFiles(
|
||||
projectId,
|
||||
userId,
|
||||
limits,
|
||||
clsiserverid
|
||||
)
|
||||
}
|
||||
|
||||
module.exports = CompileManager = {
|
||||
promises: {
|
||||
compile: instrumentedCompile,
|
||||
deleteAuxFiles,
|
||||
getProjectCompileLimits,
|
||||
stopCompile,
|
||||
wordCount,
|
||||
},
|
||||
compile: callbackifyMultiResult(instrumentedCompile, [
|
||||
'status',
|
||||
'outputFiles',
|
||||
'clsiServerId',
|
||||
'limits',
|
||||
'validationProblems',
|
||||
'stats',
|
||||
'timings',
|
||||
'outputUrlPrefix',
|
||||
'buildId',
|
||||
]),
|
||||
|
||||
stopCompile: callbackify(stopCompile),
|
||||
|
||||
deleteAuxFiles: callbackify(deleteAuxFiles),
|
||||
|
||||
getProjectCompileLimits: callbackify(getProjectCompileLimits),
|
||||
|
||||
COMPILE_DELAY: 1, // seconds
|
||||
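// Redis `SET key true EX <delay> NX` is an atomic test-and-set: the first caller
|
||||
// in the window gets 'OK' and may compile; later callers see the key and back off.
|
||||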
async _checkIfRecentlyCompiled(projectId, userId) {
|
||||
const key = `compile:${projectId}:${userId}`
|
||||
const ok = await rclient.set(key, true, 'EX', this.COMPILE_DELAY, 'NX')
|
||||
return ok !== 'OK'
|
||||
},
|
||||
|
||||
async _checkCompileGroupAutoCompileLimit(isAutoCompile, compileGroup) {
|
||||
if (!isAutoCompile) {
|
||||
return true
|
||||
}
|
||||
if (compileGroup === 'standard') {
|
||||
// apply extra limits to the standard compile group
|
||||
return await CompileManager._checkIfAutoCompileLimitHasBeenHit(
|
||||
isAutoCompile,
|
||||
compileGroup
|
||||
)
|
||||
} else {
|
||||
// always allow priority group users to compile
|
||||
Metrics.inc(`auto-compile-${compileGroup}`)
|
||||
return true
|
||||
}
|
||||
},
|
||||
|
||||
async _checkIfAutoCompileLimitHasBeenHit(isAutoCompile, compileGroup) {
|
||||
if (!isAutoCompile) {
|
||||
return true
|
||||
}
|
||||
Metrics.inc(`auto-compile-${compileGroup}`)
|
||||
const rateLimiter = getAutoCompileRateLimiter(compileGroup)
|
||||
try {
|
||||
await rateLimiter.consume('global', 1, { method: 'global' })
|
||||
return true
|
||||
} catch (e) {
|
||||
// Don't differentiate between errors and rate limits. Silently trigger
|
||||
// the rate limit if there's an error consuming the points.
|
||||
Metrics.inc(`auto-compile-${compileGroup}-limited`)
|
||||
return false
|
||||
}
|
||||
},
|
||||
|
||||
wordCount: callbackify(wordCount),
|
||||
}
|
||||
|
||||
const autoCompileRateLimiters = new Map()
|
||||
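// Lazily create one shared limiter per compile group; by default this allows
|
||||
// 25 auto-compiles per 20 seconds, consumed under the single 'global' key above.
|
||||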
function getAutoCompileRateLimiter(compileGroup) {
|
||||
let rateLimiter = autoCompileRateLimiters.get(compileGroup)
|
||||
if (rateLimiter == null) {
|
||||
rateLimiter = new RateLimiter(`auto-compile:${compileGroup}`, {
|
||||
points: Settings.rateLimit.autoCompile[compileGroup] || 25,
|
||||
duration: 20,
|
||||
})
|
||||
autoCompileRateLimiters.set(compileGroup, rateLimiter)
|
||||
}
|
||||
return rateLimiter
|
||||
}
|
||||
60
services/web/app/src/Features/Contacts/ContactController.mjs
Normal file
@@ -0,0 +1,60 @@
|
||||
import SessionManager from '../Authentication/SessionManager.js'
|
||||
import ContactManager from './ContactManager.js'
|
||||
import UserGetter from '../User/UserGetter.js'
|
||||
import Modules from '../../infrastructure/Modules.js'
|
||||
import { expressify } from '@overleaf/promise-utils'
|
||||
|
||||
function _formatContact(contact) {
|
||||
return {
|
||||
id: contact._id?.toString(),
|
||||
email: contact.email || '',
|
||||
first_name: contact.first_name || '',
|
||||
last_name: contact.last_name || '',
|
||||
type: 'user',
|
||||
}
|
||||
}
|
||||
|
||||
async function getContacts(req, res) {
|
||||
const userId = SessionManager.getLoggedInUserId(req.session)
|
||||
|
||||
const contactIds = await ContactManager.promises.getContactIds(userId, {
|
||||
limit: 50,
|
||||
})
|
||||
|
||||
let contacts = await UserGetter.promises.getUsers(contactIds, {
|
||||
email: 1,
|
||||
first_name: 1,
|
||||
last_name: 1,
|
||||
holdingAccount: 1,
|
||||
})
|
||||
|
||||
// UserGetter.getUsers may not preserve order so put them back in order
|
||||
const positions = {}
|
||||
for (let i = 0; i < contactIds.length; i++) {
|
||||
const contactId = contactIds[i]
|
||||
positions[contactId] = i
|
||||
}
|
||||
contacts.sort(
|
||||
(a, b) => positions[a._id?.toString()] - positions[b._id?.toString()]
|
||||
)
|
||||
|
||||
// Filter out holding accounts, to discourage users from repeating mistakes (mistyped or wrong emails, etc.)
|
||||
contacts = contacts.filter(c => !c.holdingAccount)
|
||||
|
||||
contacts = contacts.map(_formatContact)
|
||||
|
||||
const additionalContacts = await Modules.promises.hooks.fire(
|
||||
'getContacts',
|
||||
userId,
|
||||
contacts
|
||||
)
|
||||
|
||||
contacts = contacts.concat(...(additionalContacts || []))
|
||||
return res.json({
|
||||
contacts,
|
||||
})
|
||||
}
|
||||
|
||||
export default {
|
||||
getContacts: expressify(getContacts),
|
||||
}
|
||||
51
services/web/app/src/Features/Contacts/ContactManager.js
Normal file
@@ -0,0 +1,51 @@
|
||||
const { callbackify } = require('util')
|
||||
const OError = require('@overleaf/o-error')
|
||||
const { fetchJson } = require('@overleaf/fetch-utils')
|
||||
const settings = require('@overleaf/settings')
|
||||
|
||||
async function getContactIds(userId, options) {
|
||||
options = options ?? { limit: 50 }
|
||||
|
||||
const url = new URL(`${settings.apis.contacts.url}/user/${userId}/contacts`)
|
||||
|
||||
for (const [key, val] of Object.entries(options)) {
|
||||
url.searchParams.set(key, val)
|
||||
}
|
||||
|
||||
let body
|
||||
try {
|
||||
body = await fetchJson(url)
|
||||
} catch (err) {
|
||||
throw OError.tag(err, 'failed request to contacts API', { userId })
|
||||
}
|
||||
|
||||
return body?.contact_ids || []
|
||||
}
|
||||
|
||||
async function addContact(userId, contactId) {
|
||||
const url = new URL(`${settings.apis.contacts.url}/user/${userId}/contacts`)
|
||||
|
||||
let body
|
||||
try {
|
||||
body = await fetchJson(url, {
|
||||
method: 'POST',
|
||||
json: { contact_id: contactId },
|
||||
})
|
||||
} catch (err) {
|
||||
throw OError.tag(err, 'failed request to contacts API', {
|
||||
userId,
|
||||
contactId,
|
||||
})
|
||||
}
|
||||
|
||||
return body?.contact_ids || []
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getContactIds: callbackify(getContactIds),
|
||||
addContact: callbackify(addContact),
|
||||
promises: {
|
||||
getContactIds,
|
||||
addContact,
|
||||
},
|
||||
}
|
||||
28
services/web/app/src/Features/Contacts/ContactRouter.mjs
Normal file
@@ -0,0 +1,28 @@
|
||||
import AuthenticationController from '../Authentication/AuthenticationController.js'
|
||||
import SessionManager from '../Authentication/SessionManager.js'
|
||||
import ContactController from './ContactController.mjs'
|
||||
import Settings from '@overleaf/settings'
|
||||
|
||||
function contactsAuthenticationMiddleware() {
|
||||
if (!Settings.allowAnonymousReadAndWriteSharing) {
|
||||
return AuthenticationController.requireLogin()
|
||||
} else {
|
||||
return (req, res, next) => {
|
||||
if (SessionManager.isUserLoggedIn(req.session)) {
|
||||
next()
|
||||
} else {
|
||||
res.json({ contacts: [] })
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default {
|
||||
apply(webRouter) {
|
||||
webRouter.get(
|
||||
'/user/contacts',
|
||||
contactsAuthenticationMiddleware(),
|
||||
ContactController.getContacts
|
||||
)
|
||||
},
|
||||
}
|
||||
61
services/web/app/src/Features/Cooldown/CooldownManager.js
Normal file
@@ -0,0 +1,61 @@
|
||||
/* eslint-disable
|
||||
n/handle-callback-err,
|
||||
max-len,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let CooldownManager
|
||||
const RedisWrapper = require('../../infrastructure/RedisWrapper')
|
||||
const rclient = RedisWrapper.client('cooldown')
|
||||
const logger = require('@overleaf/logger')
|
||||
const { promisifyAll } = require('@overleaf/promise-utils')
|
||||
|
||||
const COOLDOWN_IN_SECONDS = 60 * 10
|
||||
|
||||
module.exports = CooldownManager = {
|
||||
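// Note: the braces in `Cooldown:{projectId}` form a Redis Cluster hash tag,
|
||||
// keeping all cooldown keys for a project in the same hash slot.
|
||||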
_buildKey(projectId) {
|
||||
return `Cooldown:{${projectId}}`
|
||||
},
|
||||
|
||||
putProjectOnCooldown(projectId, callback) {
|
||||
if (callback == null) {
|
||||
callback = function () {}
|
||||
}
|
||||
logger.debug(
|
||||
{ projectId },
|
||||
`[Cooldown] putting project on cooldown for ${COOLDOWN_IN_SECONDS} seconds`
|
||||
)
|
||||
return rclient.set(
|
||||
CooldownManager._buildKey(projectId),
|
||||
'1',
|
||||
'EX',
|
||||
COOLDOWN_IN_SECONDS,
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
isProjectOnCooldown(projectId, callback) {
|
||||
if (callback == null) {
|
||||
callback = function () {}
|
||||
}
|
||||
return rclient.get(
|
||||
CooldownManager._buildKey(projectId),
|
||||
function (err, result) {
|
||||
if (err != null) {
|
||||
return callback(err)
|
||||
}
|
||||
return callback(null, result === '1')
|
||||
}
|
||||
)
|
||||
},
|
||||
}
|
||||
|
||||
module.exports.promises = promisifyAll(module.exports, {
|
||||
without: ['_buildKey'],
|
||||
})
|
||||
@@ -0,0 +1,41 @@
|
||||
/* eslint-disable
|
||||
max-len,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
import CooldownManager from './CooldownManager.js'
|
||||
import logger from '@overleaf/logger'
|
||||
|
||||
let CooldownMiddleware
|
||||
|
||||
export default CooldownMiddleware = {
|
||||
freezeProject(req, res, next) {
|
||||
const projectId = req.params.Project_id
|
||||
if (projectId == null) {
|
||||
return next(new Error('[Cooldown] No projectId parameter on route'))
|
||||
}
|
||||
return CooldownManager.isProjectOnCooldown(
|
||||
projectId,
|
||||
function (err, projectIsOnCooldown) {
|
||||
if (err != null) {
|
||||
return next(err)
|
||||
}
|
||||
if (projectIsOnCooldown) {
|
||||
logger.debug(
|
||||
{ projectId },
|
||||
'[Cooldown] project is on cooldown, denying request'
|
||||
)
|
||||
return res.sendStatus(429)
|
||||
}
|
||||
return next()
|
||||
}
|
||||
)
|
||||
},
|
||||
}
|
||||
314
services/web/app/src/Features/Docstore/DocstoreManager.js
Normal file
@@ -0,0 +1,314 @@
|
||||
const { promisify } = require('util')
|
||||
const { promisifyMultiResult } = require('@overleaf/promise-utils')
|
||||
const request = require('request').defaults({ jar: false })
|
||||
const OError = require('@overleaf/o-error')
|
||||
const logger = require('@overleaf/logger')
|
||||
const settings = require('@overleaf/settings')
|
||||
const Errors = require('../Errors/Errors')
|
||||
|
||||
const TIMEOUT = 30 * 1000 // request timeout
|
||||
|
||||
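// Docstore deletes are soft: the doc's metadata is PATCHed with a deleted flag
|
||||
// and timestamp rather than the record being removed outright.
|
||||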
function deleteDoc(projectId, docId, name, deletedAt, callback) {
|
||||
const url = `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}`
|
||||
const docMetaData = { deleted: true, deletedAt, name }
|
||||
const options = { url, json: docMetaData, timeout: TIMEOUT }
|
||||
request.patch(options, (error, res) => {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||
callback(null)
|
||||
} else if (res.statusCode === 404) {
|
||||
error = new Errors.NotFoundError({
|
||||
message: 'tried to delete doc not in docstore',
|
||||
info: {
|
||||
projectId,
|
||||
docId,
|
||||
},
|
||||
})
|
||||
callback(error) // maybe suppress the error when deleting a doc which is not present?
|
||||
} else {
|
||||
error = new OError(
|
||||
`docstore api responded with non-success code: ${res.statusCode}`,
|
||||
{
|
||||
projectId,
|
||||
docId,
|
||||
}
|
||||
)
|
||||
callback(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} projectId
|
||||
*/
|
||||
function getAllDocs(projectId, callback) {
|
||||
const url = `${settings.apis.docstore.url}/project/${projectId}/doc`
|
||||
request.get(
|
||||
{
|
||||
url,
|
||||
timeout: TIMEOUT,
|
||||
json: true,
|
||||
},
|
||||
(error, res, docs) => {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||
callback(null, docs)
|
||||
} else {
|
||||
error = new OError(
|
||||
`docstore api responded with non-success code: ${res.statusCode}`,
|
||||
{ projectId }
|
||||
)
|
||||
callback(error)
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
function getAllDeletedDocs(projectId, callback) {
|
||||
const url = `${settings.apis.docstore.url}/project/${projectId}/doc-deleted`
|
||||
request.get({ url, timeout: TIMEOUT, json: true }, (error, res, docs) => {
|
||||
if (error) {
|
||||
callback(OError.tag(error, 'could not get deleted docs from docstore'))
|
||||
} else if (res.statusCode === 200) {
|
||||
callback(null, docs)
|
||||
} else {
|
||||
callback(
|
||||
new OError(
|
||||
`docstore api responded with non-success code: ${res.statusCode}`,
|
||||
{ projectId }
|
||||
)
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} projectId
|
||||
* @param {Callback} callback
|
||||
*/
|
||||
function getAllRanges(projectId, callback) {
|
||||
const url = `${settings.apis.docstore.url}/project/${projectId}/ranges`
|
||||
request.get(
|
||||
{
|
||||
url,
|
||||
timeout: TIMEOUT,
|
||||
json: true,
|
||||
},
|
||||
(error, res, docs) => {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||
callback(null, docs)
|
||||
} else {
|
||||
error = new OError(
|
||||
`docstore api responded with non-success code: ${res.statusCode}`,
|
||||
{ projectId }
|
||||
)
|
||||
callback(error)
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
function getDoc(projectId, docId, options, callback) {
|
||||
if (options == null) {
|
||||
options = {}
|
||||
}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
const requestParams = { timeout: TIMEOUT, json: true }
|
||||
if (options.peek) {
|
||||
requestParams.url = `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}/peek`
|
||||
} else {
|
||||
requestParams.url = `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}`
|
||||
}
|
||||
if (options.include_deleted) {
|
||||
requestParams.qs = { include_deleted: 'true' }
|
||||
}
|
||||
request.get(requestParams, (error, res, doc) => {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||
logger.debug(
|
||||
{ docId, projectId, version: doc.version, rev: doc.rev },
|
||||
'got doc from docstore api'
|
||||
)
|
||||
callback(null, doc.lines, doc.rev, doc.version, doc.ranges)
|
||||
} else if (res.statusCode === 404) {
|
||||
error = new Errors.NotFoundError({
|
||||
message: 'doc not found in docstore',
|
||||
info: {
|
||||
projectId,
|
||||
docId,
|
||||
},
|
||||
})
|
||||
callback(error)
|
||||
} else {
|
||||
error = new OError(
|
||||
`docstore api responded with non-success code: ${res.statusCode}`,
|
||||
{
|
||||
projectId,
|
||||
docId,
|
||||
}
|
||||
)
|
||||
callback(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function isDocDeleted(projectId, docId, callback) {
|
||||
const url = `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}/deleted`
|
||||
request.get({ url, timeout: TIMEOUT, json: true }, (err, res, body) => {
|
||||
if (err) {
|
||||
callback(err)
|
||||
} else if (res.statusCode === 200) {
|
||||
callback(null, body.deleted)
|
||||
} else if (res.statusCode === 404) {
|
||||
callback(
|
||||
new Errors.NotFoundError({
|
||||
message: 'doc does not exist in project',
|
||||
info: { projectId, docId },
|
||||
})
|
||||
)
|
||||
} else {
|
||||
callback(
|
||||
new OError(
|
||||
`docstore api responded with non-success code: ${res.statusCode}`,
|
||||
{ projectId, docId }
|
||||
)
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function updateDoc(projectId, docId, lines, version, ranges, callback) {
|
||||
const url = `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}`
|
||||
request.post(
|
||||
{
|
||||
url,
|
||||
timeout: TIMEOUT,
|
||||
json: {
|
||||
lines,
|
||||
version,
|
||||
ranges,
|
||||
},
|
||||
},
|
||||
(error, res, result) => {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||
logger.debug(
|
||||
{ projectId, docId },
|
||||
'finished updating doc in docstore'
|
||||
)
|
||||
callback(null, result.modified, result.rev)
|
||||
} else {
|
||||
error = new OError(
|
||||
`docstore api responded with non-success code: ${res.statusCode}`,
|
||||
{ projectId, docId }
|
||||
)
|
||||
callback(error)
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Asks docstore whether any doc in the project has ranges
|
||||
*
|
||||
* @param {string} projectId
|
||||
* @param {Callback} callback
|
||||
*/
|
||||
function projectHasRanges(projectId, callback) {
|
||||
const url = `${settings.apis.docstore.url}/project/${projectId}/has-ranges`
|
||||
request.get({ url, timeout: TIMEOUT, json: true }, (err, res, body) => {
|
||||
if (err) {
|
||||
return callback(err)
|
||||
}
|
||||
if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||
callback(null, body.projectHasRanges)
|
||||
} else {
|
||||
callback(
|
||||
new OError(
|
||||
`docstore api responded with non-success code: ${res.statusCode}`,
|
||||
{ projectId }
|
||||
)
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function archiveProject(projectId, callback) {
|
||||
_operateOnProject(projectId, 'archive', callback)
|
||||
}
|
||||
|
||||
function unarchiveProject(projectId, callback) {
|
||||
_operateOnProject(projectId, 'unarchive', callback)
|
||||
}
|
||||
|
||||
function destroyProject(projectId, callback) {
|
||||
_operateOnProject(projectId, 'destroy', callback)
|
||||
}
|
||||
|
||||
function _operateOnProject(projectId, method, callback) {
|
||||
const url = `${settings.apis.docstore.url}/project/${projectId}/${method}`
|
||||
logger.debug({ projectId }, `calling ${method} for project in docstore`)
|
||||
// use default timeout for archiving/unarchiving/destroying
|
||||
request.post(url, (err, res, docs) => {
|
||||
if (err) {
|
||||
OError.tag(err, `error calling ${method} project in docstore`, {
|
||||
projectId,
|
||||
})
|
||||
return callback(err)
|
||||
}
|
||||
|
||||
if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||
callback()
|
||||
} else {
|
||||
const error = new Error(
|
||||
`docstore api responded with non-success code: ${res.statusCode}`
|
||||
)
|
||||
logger.warn(
|
||||
{ err: error, projectId },
|
||||
`error calling ${method} project in docstore`
|
||||
)
|
||||
callback(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
deleteDoc,
|
||||
getAllDocs,
|
||||
getAllDeletedDocs,
|
||||
getAllRanges,
|
||||
getDoc,
|
||||
isDocDeleted,
|
||||
updateDoc,
|
||||
projectHasRanges,
|
||||
archiveProject,
|
||||
unarchiveProject,
|
||||
destroyProject,
|
||||
promises: {
|
||||
deleteDoc: promisify(deleteDoc),
|
||||
getAllDocs: promisify(getAllDocs),
|
||||
getAllDeletedDocs: promisify(getAllDeletedDocs),
|
||||
getAllRanges: promisify(getAllRanges),
|
||||
getDoc: promisifyMultiResult(getDoc, ['lines', 'rev', 'version', 'ranges']),
|
||||
isDocDeleted: promisify(isDocDeleted),
|
||||
updateDoc: promisifyMultiResult(updateDoc, ['modified', 'rev']),
|
||||
projectHasRanges: promisify(projectHasRanges),
|
||||
archiveProject: promisify(archiveProject),
|
||||
unarchiveProject: promisify(unarchiveProject),
|
||||
destroyProject: promisify(destroyProject),
|
||||
},
|
||||
}
|
||||
@@ -0,0 +1,47 @@
|
||||
import logger from '@overleaf/logger'
|
||||
import DocumentUpdaterHandler from './DocumentUpdaterHandler.js'
|
||||
import ProjectLocator from '../Project/ProjectLocator.js'
|
||||
import { plainTextResponse } from '../../infrastructure/Response.js'
|
||||
import { expressify } from '@overleaf/promise-utils'
|
||||
|
||||
async function getDoc(req, res) {
|
||||
const projectId = req.params.Project_id
|
||||
const docId = req.params.Doc_id
|
||||
|
||||
try {
|
||||
const { element: doc } = await ProjectLocator.promises.findElement({
|
||||
project_id: projectId,
|
||||
element_id: docId,
|
||||
type: 'doc',
|
||||
})
|
||||
|
||||
const { lines } = await DocumentUpdaterHandler.promises.getDocument(
|
||||
projectId,
|
||||
docId,
|
||||
-1 // latest version only
|
||||
)
|
||||
|
||||
res.setContentDisposition('attachment', { filename: doc.name })
|
||||
plainTextResponse(res, lines.join('\n'))
|
||||
} catch (err) {
|
||||
if (err.name === 'NotFoundError') {
|
||||
logger.warn(
|
||||
{ err, projectId, docId },
|
||||
'entity not found when downloading doc'
|
||||
)
|
||||
|
||||
return res.sendStatus(404)
|
||||
}
|
||||
|
||||
logger.err(
|
||||
{ err, projectId, docId },
|
||||
'error getting document for downloading'
|
||||
)
|
||||
|
||||
return res.sendStatus(500)
|
||||
}
|
||||
}
|
||||
|
||||
export default {
|
||||
getDoc: expressify(getDoc),
|
||||
}
|
||||
@@ -0,0 +1,656 @@
|
||||
const request = require('request').defaults({ timeout: 30 * 1000 })
|
||||
const OError = require('@overleaf/o-error')
|
||||
const settings = require('@overleaf/settings')
|
||||
const _ = require('lodash')
|
||||
const async = require('async')
|
||||
const logger = require('@overleaf/logger')
|
||||
const metrics = require('@overleaf/metrics')
|
||||
const { promisify } = require('util')
|
||||
const { promisifyMultiResult } = require('@overleaf/promise-utils')
|
||||
const ProjectGetter = require('../Project/ProjectGetter')
|
||||
const FileStoreHandler = require('../FileStore/FileStoreHandler')
|
||||
const Features = require('../../infrastructure/Features')
|
||||
|
||||
function getProjectLastUpdatedAt(projectId, callback) {
|
||||
_makeRequest(
|
||||
{
|
||||
path: `/project/${projectId}/last_updated_at`,
|
||||
method: 'GET',
|
||||
json: true,
|
||||
},
|
||||
projectId,
|
||||
'project.redis.last_updated_at',
|
||||
(err, body) => {
|
||||
if (err || !body?.lastUpdatedAt) return callback(err, null)
|
||||
callback(null, new Date(body.lastUpdatedAt))
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} projectId
|
||||
*/
|
||||
function flushProjectToMongo(projectId, callback) {
|
||||
_makeRequest(
|
||||
{
|
||||
path: `/project/${projectId}/flush`,
|
||||
method: 'POST',
|
||||
},
|
||||
projectId,
|
||||
'flushing.mongo.project',
|
||||
callback
|
||||
)
|
||||
}
|
||||
|
||||
function flushMultipleProjectsToMongo(projectIds, callback) {
|
||||
const jobs = projectIds.map(projectId => callback => {
|
||||
flushProjectToMongo(projectId, callback)
|
||||
})
|
||||
async.series(jobs, callback)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} projectId
|
||||
*/
|
||||
function flushProjectToMongoAndDelete(projectId, callback) {
|
||||
_makeRequest(
|
||||
{
|
||||
path: `/project/${projectId}`,
|
||||
method: 'DELETE',
|
||||
},
|
||||
projectId,
|
||||
'flushing.mongo.project',
|
||||
callback
|
||||
)
|
||||
}
|
||||
|
||||
function flushDocToMongo(projectId, docId, callback) {
|
||||
_makeRequest(
|
||||
{
|
||||
path: `/project/${projectId}/doc/${docId}/flush`,
|
||||
method: 'POST',
|
||||
},
|
||||
projectId,
|
||||
'flushing.mongo.doc',
|
||||
callback
|
||||
)
|
||||
}
|
||||
|
||||
function deleteDoc(projectId, docId, ignoreFlushErrors, callback) {
|
||||
if (typeof ignoreFlushErrors === 'function') {
|
||||
callback = ignoreFlushErrors
|
||||
ignoreFlushErrors = false
|
||||
}
|
||||
let path = `/project/${projectId}/doc/${docId}`
|
||||
if (ignoreFlushErrors) {
|
||||
path += '?ignore_flush_errors=true'
|
||||
}
|
||||
const method = 'DELETE'
|
||||
_makeRequest(
|
||||
{
|
||||
path,
|
||||
method,
|
||||
},
|
||||
projectId,
|
||||
'delete.mongo.doc',
|
||||
callback
|
||||
)
|
||||
}
|
||||
|
||||
function getComment(projectId, docId, commentId, callback) {
|
||||
_makeRequest(
|
||||
{
|
||||
path: `/project/${projectId}/doc/${docId}/comment/${commentId}`,
|
||||
json: true,
|
||||
},
|
||||
projectId,
|
||||
'get-comment',
|
||||
function (error, comment) {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
callback(null, comment)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
function getDocument(projectId, docId, fromVersion, callback) {
|
||||
_makeRequest(
|
||||
{
|
||||
path: `/project/${projectId}/doc/${docId}?fromVersion=${fromVersion}`,
|
||||
json: true,
|
||||
},
|
||||
projectId,
|
||||
'get-document',
|
||||
function (error, doc) {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
callback(null, doc.lines, doc.version, doc.ranges, doc.ops)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
function setDocument(projectId, docId, userId, docLines, source, callback) {
|
||||
_makeRequest(
|
||||
{
|
||||
path: `/project/${projectId}/doc/${docId}`,
|
||||
method: 'POST',
|
||||
json: {
|
||||
lines: docLines,
|
||||
source,
|
||||
user_id: userId,
|
||||
},
|
||||
},
|
||||
projectId,
|
||||
'set-document',
|
||||
callback
|
||||
)
|
||||
}
|
||||
|
||||
function appendToDocument(projectId, docId, userId, lines, source, callback) {
|
||||
_makeRequest(
|
||||
{
|
||||
path: `/project/${projectId}/doc/${docId}/append`,
|
||||
method: 'POST',
|
||||
json: {
|
||||
lines,
|
||||
source,
|
||||
user_id: userId,
|
||||
},
|
||||
},
|
||||
projectId,
|
||||
'append-to-document',
|
||||
callback
|
||||
)
|
||||
}
|
||||
|
||||
function getProjectDocsIfMatch(projectId, projectStateHash, callback) {
|
||||
// If the project state hasn't changed, we can get all the latest
|
||||
// docs from redis via the docupdater. Otherwise we will need to
|
||||
// fall back to getting them from mongo.
|
||||
const timer = new metrics.Timer('get-project-docs')
|
||||
const url = `${settings.apis.documentupdater.url}/project/${projectId}/get_and_flush_if_old?state=${projectStateHash}`
|
||||
request.post(url, function (error, res, body) {
|
||||
timer.done()
|
||||
if (error) {
|
||||
OError.tag(error, 'error getting project docs from doc updater', {
|
||||
url,
|
||||
projectId,
|
||||
})
|
||||
return callback(error)
|
||||
}
|
||||
if (res.statusCode === 409) {
|
||||
// HTTP response code "409 Conflict"
|
||||
// Docupdater has checked the projectStateHash and found that
|
||||
// it has changed. This means that the docs currently in redis
|
||||
// aren't the only change to the project and the full set of
|
||||
// docs/files should be retrieved from docstore/filestore
|
||||
// instead.
|
||||
callback()
|
||||
} else if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||
let docs
|
||||
try {
|
||||
docs = JSON.parse(body)
|
||||
} catch (error1) {
|
||||
return callback(OError.tag(error1))
|
||||
}
|
||||
callback(null, docs)
|
||||
} else {
|
||||
callback(
|
||||
new OError(
|
||||
`doc updater returned a non-success status code: ${res.statusCode}`,
|
||||
{
|
||||
projectId,
|
||||
url,
|
||||
}
|
||||
)
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function clearProjectState(projectId, callback) {
|
||||
_makeRequest(
|
||||
{
|
||||
path: `/project/${projectId}/clearState`,
|
||||
method: 'POST',
|
||||
},
|
||||
projectId,
|
||||
'clear-project-state',
|
||||
callback
|
||||
)
|
||||
}
|
||||
|
||||
function acceptChanges(projectId, docId, changeIds, callback) {
|
||||
_makeRequest(
|
||||
{
|
||||
path: `/project/${projectId}/doc/${docId}/change/accept`,
|
||||
json: { change_ids: changeIds },
|
||||
method: 'POST',
|
||||
},
|
||||
projectId,
|
||||
'accept-changes',
|
||||
callback
|
||||
)
|
||||
}
|
||||
|
||||
function resolveThread(projectId, docId, threadId, userId, callback) {
|
||||
_makeRequest(
|
||||
{
|
||||
path: `/project/${projectId}/doc/${docId}/comment/${threadId}/resolve`,
|
||||
method: 'POST',
|
||||
json: {
|
||||
user_id: userId,
|
||||
},
|
||||
},
|
||||
projectId,
|
||||
'resolve-thread',
|
||||
callback
|
||||
)
|
||||
}
|
||||
|
||||
function reopenThread(projectId, docId, threadId, userId, callback) {
|
||||
_makeRequest(
|
||||
{
|
||||
path: `/project/${projectId}/doc/${docId}/comment/${threadId}/reopen`,
|
||||
method: 'POST',
|
||||
json: {
|
||||
user_id: userId,
|
||||
},
|
||||
},
|
||||
projectId,
|
||||
'reopen-thread',
|
||||
callback
|
||||
)
|
||||
}
|
||||
|
||||
function deleteThread(projectId, docId, threadId, userId, callback) {
|
||||
_makeRequest(
|
||||
{
|
||||
path: `/project/${projectId}/doc/${docId}/comment/${threadId}`,
|
||||
method: 'DELETE',
|
||||
json: {
|
||||
user_id: userId,
|
||||
},
|
||||
},
|
||||
projectId,
|
||||
'delete-thread',
|
||||
callback
|
||||
)
|
||||
}
|
||||
|
||||
function resyncProjectHistory(
|
||||
projectId,
|
||||
projectHistoryId,
|
||||
docs,
|
||||
files,
|
||||
opts,
|
||||
callback
|
||||
) {
|
||||
docs = docs.map(doc => ({
|
||||
doc: doc.doc._id,
|
||||
path: doc.path,
|
||||
}))
|
||||
const hasFilestore = Features.hasFeature('filestore')
|
||||
if (!hasFilestore) {
|
||||
// Files without a hash likely do not have a blob. Abort.
|
||||
for (const { file } of files) {
|
||||
if (!file.hash) {
|
||||
return callback(
|
||||
new OError('found file with missing hash', { projectId, file })
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
files = files.map(file => ({
|
||||
file: file.file._id,
|
||||
path: file.path,
|
||||
url: hasFilestore
|
||||
? FileStoreHandler._buildUrl(projectId, file.file._id)
|
||||
: undefined,
|
||||
_hash: file.file.hash,
|
||||
createdBlob: !hasFilestore,
|
||||
metadata: buildFileMetadataForHistory(file.file),
|
||||
}))
|
||||
|
||||
const body = { docs, files, projectHistoryId }
|
||||
if (opts.historyRangesMigration) {
|
||||
body.historyRangesMigration = opts.historyRangesMigration
|
||||
}
|
||||
if (opts.resyncProjectStructureOnly) {
|
||||
body.resyncProjectStructureOnly = opts.resyncProjectStructureOnly
|
||||
}
|
||||
_makeRequest(
|
||||
{
|
||||
path: `/project/${projectId}/history/resync`,
|
||||
json: body,
|
||||
method: 'POST',
|
||||
timeout: 6 * 60 * 1000, // allow 6 minutes for resync
|
||||
},
|
||||
projectId,
|
||||
'resync-project-history',
|
||||
callback
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Block a project from being loaded in docupdater
|
||||
*
|
||||
* @param {string} projectId
|
||||
* @param {Callback} callback
|
||||
*/
|
||||
function blockProject(projectId, callback) {
|
||||
_makeRequest(
|
||||
{ path: `/project/${projectId}/block`, method: 'POST', json: true },
|
||||
projectId,
|
||||
'block-project',
|
||||
(err, body) => {
|
||||
if (err) {
|
||||
return callback(err)
|
||||
}
|
||||
callback(null, body.blocked)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Unblock a previously blocked project
|
||||
*
|
||||
* @param {string} projectId
|
||||
* @param {Callback} callback
|
||||
*/
|
||||
function unblockProject(projectId, callback) {
|
||||
_makeRequest(
|
||||
{ path: `/project/${projectId}/unblock`, method: 'POST', json: true },
|
||||
projectId,
|
||||
'unblock-project',
|
||||
(err, body) => {
|
||||
if (err) {
|
||||
return callback(err)
|
||||
}
|
||||
callback(null, body.wasBlocked)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
function updateProjectStructure(
|
||||
projectId,
|
||||
projectHistoryId,
|
||||
userId,
|
||||
changes,
|
||||
source,
|
||||
callback
|
||||
) {
|
||||
if (
|
||||
settings.apis.project_history == null ||
|
||||
!settings.apis.project_history.sendProjectStructureOps
|
||||
) {
|
||||
return callback()
|
||||
}
|
||||
|
||||
ProjectGetter.getProjectWithoutLock(
|
||||
projectId,
|
||||
{ overleaf: true },
|
||||
(err, project) => {
|
||||
if (err) {
|
||||
return callback(err)
|
||||
}
|
||||
const historyRangesSupport = _.get(
|
||||
project,
|
||||
'overleaf.history.rangesSupportEnabled',
|
||||
false
|
||||
)
|
||||
const {
|
||||
deletes: docDeletes,
|
||||
adds: docAdds,
|
||||
renames: docRenames,
|
||||
} = _getUpdates(
|
||||
'doc',
|
||||
changes.oldDocs,
|
||||
changes.newDocs,
|
||||
historyRangesSupport
|
||||
)
|
||||
const hasFilestore = Features.hasFeature('filestore')
|
||||
if (!hasFilestore) {
|
||||
for (const newEntity of changes.newFiles || []) {
|
||||
if (!newEntity.file.hash) {
|
||||
// Files without a hash likely do not have a blob. Abort.
|
||||
return callback(
|
||||
new OError('found file with missing hash', { newEntity })
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
const {
|
||||
deletes: fileDeletes,
|
||||
adds: fileAdds,
|
||||
renames: fileRenames,
|
||||
} = _getUpdates(
|
||||
'file',
|
||||
changes.oldFiles,
|
||||
changes.newFiles,
|
||||
historyRangesSupport
|
||||
)
|
||||
const updates = [].concat(
|
||||
docDeletes,
|
||||
fileDeletes,
|
||||
docAdds,
|
||||
fileAdds,
|
||||
docRenames,
|
||||
fileRenames
|
||||
)
|
||||
const projectVersion =
|
||||
changes && changes.newProject && changes.newProject.version
|
||||
|
||||
if (updates.length < 1) {
|
||||
return callback()
|
||||
}
|
||||
|
||||
if (projectVersion == null) {
|
||||
logger.warn(
|
||||
{ projectId, changes, projectVersion },
|
||||
'did not receive project version in changes'
|
||||
)
|
||||
return callback(new Error('did not receive project version in changes'))
|
||||
}
|
||||
|
||||
_makeRequest(
|
||||
{
|
||||
path: `/project/${projectId}`,
|
||||
json: {
|
||||
updates,
|
||||
userId,
|
||||
version: projectVersion,
|
||||
projectHistoryId,
|
||||
source,
|
||||
},
|
||||
method: 'POST',
|
||||
},
|
||||
projectId,
|
||||
'update-project-structure',
|
||||
callback
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
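// Shared helper: send a request to document-updater, time it under metricsKey,
|
||||
// and treat any non-2xx response as an error.
|
||||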
function _makeRequest(options, projectId, metricsKey, callback) {
|
||||
const timer = new metrics.Timer(metricsKey)
|
||||
request(
|
||||
{
|
||||
url: `${settings.apis.documentupdater.url}${options.path}`,
|
||||
json: options.json,
|
||||
method: options.method || 'GET',
|
||||
timeout: options.timeout || 30 * 1000,
|
||||
},
|
||||
function (error, res, body) {
|
||||
timer.done()
|
||||
if (error) {
|
||||
logger.warn(
|
||||
{ error, projectId },
|
||||
'error making request to document updater'
|
||||
)
|
||||
callback(error)
|
||||
} else if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||
callback(null, body)
|
||||
} else {
|
||||
error = new Error(
|
||||
`document updater returned a failure status code: ${res.statusCode}`
|
||||
)
|
||||
logger.warn(
|
||||
{ error, projectId },
|
||||
`document updater returned failure status code: ${res.statusCode}`
|
||||
)
|
||||
callback(error)
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
function _getUpdates(
|
||||
entityType,
|
||||
oldEntities,
|
||||
newEntities,
|
||||
historyRangesSupport
|
||||
) {
|
||||
if (!oldEntities) {
|
||||
oldEntities = []
|
||||
}
|
||||
if (!newEntities) {
|
||||
newEntities = []
|
||||
}
|
||||
const deletes = []
|
||||
const adds = []
|
||||
const renames = []
|
||||
|
||||
const oldEntitiesHash = _.keyBy(oldEntities, entity =>
|
||||
entity[entityType]._id.toString()
|
||||
)
|
||||
const newEntitiesHash = _.keyBy(newEntities, entity =>
|
||||
entity[entityType]._id.toString()
|
||||
)
|
||||
|
||||
// Send deletes before adds (and renames) to keep a 1:1 mapping between
|
||||
// paths and ids
|
||||
//
|
||||
// When a file is replaced, we first delete the old file and then add the
|
||||
// new file. If the 'add' operation is sent to project history before the
|
||||
// 'delete' then we would have two files with the same path at that point
|
||||
// in time.
|
||||
for (const id in oldEntitiesHash) {
|
||||
const oldEntity = oldEntitiesHash[id]
|
||||
const newEntity = newEntitiesHash[id]
|
||||
|
||||
if (newEntity == null) {
|
||||
// entity deleted
|
||||
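// (a rename to the empty pathname is how a delete is encoded for project history)
|
||||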
deletes.push({
|
||||
type: `rename-${entityType}`,
|
||||
id,
|
||||
pathname: oldEntity.path,
|
||||
newPathname: '',
|
||||
})
|
||||
}
|
||||
}
|
||||
const hasFilestore = Features.hasFeature('filestore')
|
||||
|
||||
for (const id in newEntitiesHash) {
|
||||
const newEntity = newEntitiesHash[id]
|
||||
const oldEntity = oldEntitiesHash[id]
|
||||
|
||||
if (oldEntity == null) {
|
||||
// entity added
|
||||
adds.push({
|
||||
type: `add-${entityType}`,
|
||||
id,
|
||||
pathname: newEntity.path,
|
||||
docLines: newEntity.docLines,
|
||||
ranges: newEntity.ranges,
|
||||
historyRangesSupport,
|
||||
url: newEntity.file != null && hasFilestore ? newEntity.url : undefined,
|
||||
hash: newEntity.file != null ? newEntity.file.hash : undefined,
|
||||
metadata: buildFileMetadataForHistory(newEntity.file),
|
||||
createdBlob: (newEntity.createdBlob || !hasFilestore) ?? false,
|
||||
})
|
||||
} else if (newEntity.path !== oldEntity.path) {
|
||||
// entity renamed
|
||||
renames.push({
|
||||
type: `rename-${entityType}`,
|
||||
id,
|
||||
pathname: oldEntity.path,
|
||||
newPathname: newEntity.path,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return { deletes, adds, renames }
|
||||
}
|
||||
|
||||
function buildFileMetadataForHistory(file) {
|
||||
if (!file?.linkedFileData) return undefined
|
||||
|
||||
const metadata = {
|
||||
// Files do not have a created at timestamp in the history.
|
||||
// For cloned projects, the importedAt timestamp needs to remain untouched.
|
||||
// Record the timestamp in the metadata blob to keep everything self-contained.
|
||||
importedAt: file.created,
|
||||
...file.linkedFileData,
|
||||
}
|
||||
if (metadata.provider === 'project_output_file') {
|
||||
// The build-id and clsi-server-id are only used for downloading the file.
|
||||
// Omit them from history as they are not useful in the future.
|
||||
delete metadata.build_id
|
||||
delete metadata.clsiServerId
|
||||
}
|
||||
return metadata
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
flushProjectToMongo,
|
||||
flushMultipleProjectsToMongo,
|
||||
flushProjectToMongoAndDelete,
|
||||
flushDocToMongo,
|
||||
deleteDoc,
|
||||
getComment,
|
||||
getDocument,
|
||||
getProjectLastUpdatedAt,
|
||||
setDocument,
|
||||
appendToDocument,
|
||||
getProjectDocsIfMatch,
|
||||
clearProjectState,
|
||||
acceptChanges,
|
||||
resolveThread,
|
||||
reopenThread,
|
||||
deleteThread,
|
||||
resyncProjectHistory,
|
||||
blockProject,
|
||||
unblockProject,
|
||||
updateProjectStructure,
|
||||
promises: {
|
||||
flushProjectToMongo: promisify(flushProjectToMongo),
|
||||
flushMultipleProjectsToMongo: promisify(flushMultipleProjectsToMongo),
|
||||
flushProjectToMongoAndDelete: promisify(flushProjectToMongoAndDelete),
|
||||
flushDocToMongo: promisify(flushDocToMongo),
|
||||
deleteDoc: promisify(deleteDoc),
|
||||
getComment: promisify(getComment),
|
||||
getDocument: promisifyMultiResult(getDocument, [
|
||||
'lines',
|
||||
'version',
|
||||
'ranges',
|
||||
'ops',
|
||||
]),
|
||||
setDocument: promisify(setDocument),
|
||||
getProjectDocsIfMatch: promisify(getProjectDocsIfMatch),
|
||||
getProjectLastUpdatedAt: promisify(getProjectLastUpdatedAt),
|
||||
clearProjectState: promisify(clearProjectState),
|
||||
acceptChanges: promisify(acceptChanges),
|
||||
resolveThread: promisify(resolveThread),
|
||||
reopenThread: promisify(reopenThread),
|
||||
deleteThread: promisify(deleteThread),
|
||||
resyncProjectHistory: promisify(resyncProjectHistory),
|
||||
blockProject: promisify(blockProject),
|
||||
unblockProject: promisify(unblockProject),
|
||||
updateProjectStructure: promisify(updateProjectStructure),
|
||||
appendToDocument: promisify(appendToDocument),
|
||||
},
|
||||
}
|
||||
@@ -0,0 +1,95 @@
|
||||
import ChatApiHandler from '../Chat/ChatApiHandler.js'
|
||||
import ProjectGetter from '../Project/ProjectGetter.js'
|
||||
import ProjectLocator from '../Project/ProjectLocator.js'
|
||||
import ProjectEntityHandler from '../Project/ProjectEntityHandler.js'
|
||||
import ProjectEntityUpdateHandler from '../Project/ProjectEntityUpdateHandler.js'
|
||||
import logger from '@overleaf/logger'
|
||||
import _ from 'lodash'
|
||||
import { plainTextResponse } from '../../infrastructure/Response.js'
|
||||
import { expressify } from '@overleaf/promise-utils'
|
||||
|
||||
async function getDocument(req, res) {
|
||||
const { Project_id: projectId, doc_id: docId } = req.params
|
||||
const plain = req.query.plain === 'true'
|
||||
const peek = req.query.peek === 'true'
|
||||
const project = await ProjectGetter.promises.getProject(projectId, {
|
||||
rootFolder: true,
|
||||
overleaf: true,
|
||||
})
|
||||
if (!project) {
|
||||
return res.sendStatus(404)
|
||||
}
|
||||
|
||||
const { path } = await ProjectLocator.promises.findElement({
|
||||
project,
|
||||
element_id: docId,
|
||||
type: 'doc',
|
||||
})
|
||||
|
||||
const { lines, version, ranges } = await ProjectEntityHandler.promises.getDoc(
|
||||
projectId,
|
||||
docId,
|
||||
{ peek }
|
||||
)
|
||||
|
||||
const resolvedCommentIdsInProject =
|
||||
await ChatApiHandler.promises.getResolvedThreadIds(projectId)
|
||||
|
||||
const commentIdsInDoc = new Set(
|
||||
ranges?.comments?.map(comment => comment.id) ?? []
|
||||
)
|
||||
|
||||
const resolvedCommentIds = resolvedCommentIdsInProject.filter(commentId =>
|
||||
commentIdsInDoc.has(commentId)
|
||||
)
|
||||
|
||||
if (plain) {
|
||||
plainTextResponse(res, lines.join('\n'))
|
||||
} else {
|
||||
const projectHistoryId = _.get(project, 'overleaf.history.id')
|
||||
const historyRangesSupport = _.get(
|
||||
project,
|
||||
'overleaf.history.rangesSupportEnabled',
|
||||
false
|
||||
)
|
||||
|
||||
// all projects are now migrated to Full Project History, keeping the field
|
||||
// for API compatibility
|
||||
const projectHistoryType = 'project-history'
|
||||
|
||||
res.json({
|
||||
lines,
|
||||
version,
|
||||
ranges,
|
||||
pathname: path.fileSystem,
|
||||
projectHistoryId,
|
||||
projectHistoryType,
|
||||
historyRangesSupport,
|
||||
resolvedCommentIds,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async function setDocument(req, res) {
|
||||
const { Project_id: projectId, doc_id: docId } = req.params
|
||||
const { lines, version, ranges, lastUpdatedAt, lastUpdatedBy } = req.body
|
||||
const result = await ProjectEntityUpdateHandler.promises.updateDocLines(
|
||||
projectId,
|
||||
docId,
|
||||
lines,
|
||||
version,
|
||||
ranges,
|
||||
lastUpdatedAt,
|
||||
lastUpdatedBy
|
||||
)
|
||||
logger.debug(
|
||||
{ docId, projectId },
|
||||
'finished receiving set document request from api (docupdater)'
|
||||
)
|
||||
res.json(result)
|
||||
}
|
||||
|
||||
export default {
|
||||
getDocument: expressify(getDocument),
|
||||
setDocument: expressify(setDocument),
|
||||
}
|
||||
78
services/web/app/src/Features/Documents/DocumentHelper.js
Normal file
@@ -0,0 +1,78 @@
|
||||
/* eslint-disable
|
||||
max-len,
|
||||
no-cond-assign,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let DocumentHelper
|
||||
module.exports = DocumentHelper = {
|
||||
getTitleFromTexContent(content, maxContentToScan) {
|
||||
if (maxContentToScan == null) {
|
||||
maxContentToScan = 30000
|
||||
}
|
||||
const TITLE_WITH_CURLY_BRACES = /\\[tT]itle\*?\s*{([^}]+)}/
|
||||
const TITLE_WITH_SQUARE_BRACES = /\\[tT]itle\s*\[([^\]]+)\]/
|
||||
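// e.g. '\title{A Study of Foo}' matches the curly variant, '\title[A Study of Foo]'
|
||||
// the square one; the captured title is passed through detex() below
|
||||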
for (const line of Array.from(
|
||||
DocumentHelper._getLinesFromContent(content, maxContentToScan)
|
||||
)) {
|
||||
let match
|
||||
if (
|
||||
(match =
|
||||
line.match(TITLE_WITH_CURLY_BRACES) ||
|
||||
line.match(TITLE_WITH_SQUARE_BRACES))
|
||||
) {
|
||||
return DocumentHelper.detex(match[1])
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
},
|
||||
|
||||
contentHasDocumentclass(content, maxContentToScan) {
|
||||
if (maxContentToScan == null) {
|
||||
maxContentToScan = 30000
|
||||
}
|
||||
for (const line of Array.from(
|
||||
DocumentHelper._getLinesFromContent(content, maxContentToScan)
|
||||
)) {
|
||||
// We've had problems with this regex locking up CPU.
|
||||
// Previously /.*\\documentclass/ would totally lock up on lines of 500kb (data text files :()
|
||||
// This regex will only look from the start of the line, including whitespace so will return quickly
|
||||
// regardless of line length.
|
||||
if (line.match(/^\s*\\documentclass/)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
},
|
||||
|
||||
detex(string) {
|
||||
return string
|
||||
.replace(/\\LaTeX/g, 'LaTeX')
|
||||
.replace(/\\TeX/g, 'TeX')
|
||||
.replace(/\\TikZ/g, 'TikZ')
|
||||
.replace(/\\BibTeX/g, 'BibTeX')
|
||||
.replace(/\\\[[A-Za-z0-9. ]*\]/g, ' ') // line spacing
|
||||
.replace(/\\(?:[a-zA-Z]+|.|)/g, '')
|
||||
.replace(/{}|~/g, ' ')
|
||||
.replace(/[${}]/g, '')
|
||||
.replace(/ +/g, ' ')
|
||||
.trim()
|
||||
},
|
||||
|
||||
_getLinesFromContent(content, maxContentToScan) {
|
||||
if (typeof content === 'string') {
|
||||
return content.substring(0, maxContentToScan).split('\n')
|
||||
} else {
|
||||
return content
|
||||
}
|
||||
},
|
||||
}
|
||||
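// Usage sketch for DocumentHelper (illustrative; the TeX snippet is hypothetical):
const DocumentHelper = require('./DocumentHelper')
const tex = '\\documentclass{article}\n\\title{A Simple Title}\n\\begin{document}'
console.log(DocumentHelper.contentHasDocumentclass(tex)) // true
console.log(DocumentHelper.getTitleFromTexContent(tex)) // 'A Simple Title'
console.log(DocumentHelper.detex('\\LaTeX is great')) // 'LaTeX is great'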
@@ -0,0 +1,79 @@
/* eslint-disable
    max-len,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
import Metrics from '@overleaf/metrics'
import ProjectGetter from '../Project/ProjectGetter.js'
import ProjectZipStreamManager from './ProjectZipStreamManager.mjs'
import DocumentUpdaterHandler from '../DocumentUpdater/DocumentUpdaterHandler.js'
import { prepareZipAttachment } from '../../infrastructure/Response.js'

let ProjectDownloadsController

export default ProjectDownloadsController = {
  downloadProject(req, res, next) {
    const projectId = req.params.Project_id
    Metrics.inc('zip-downloads')
    return DocumentUpdaterHandler.flushProjectToMongo(
      projectId,
      function (error) {
        if (error != null) {
          return next(error)
        }
        return ProjectGetter.getProject(
          projectId,
          { name: true },
          function (error, project) {
            if (error != null) {
              return next(error)
            }
            return ProjectZipStreamManager.createZipStreamForProject(
              projectId,
              function (error, stream) {
                if (error != null) {
                  return next(error)
                }
                prepareZipAttachment(res, `${project.name}.zip`)
                return stream.pipe(res)
              }
            )
          }
        )
      }
    )
  },

  downloadMultipleProjects(req, res, next) {
    const projectIds = req.query.project_ids.split(',')
    Metrics.inc('zip-downloads-multiple')
    return DocumentUpdaterHandler.flushMultipleProjectsToMongo(
      projectIds,
      function (error) {
        if (error != null) {
          return next(error)
        }
        return ProjectZipStreamManager.createZipStreamForMultipleProjects(
          projectIds,
          function (error, stream) {
            if (error != null) {
              return next(error)
            }
            prepareZipAttachment(
              res,
              `Overleaf Projects (${projectIds.length} items).zip`
            )
            return stream.pipe(res)
          }
        )
      }
    )
  },
}
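// Wiring sketch (illustrative): the route paths below are assumptions; the
// actual router registration lives elsewhere in this commit.
import express from 'express'
import ProjectDownloadsController from './ProjectDownloadsController.mjs'

const router = express.Router()
// streams "<project name>.zip" for a single project
router.get('/project/:Project_id/download/zip', ProjectDownloadsController.downloadProject)
// streams a zip of zips for ?project_ids=id1,id2
router.get('/projects/download/zip', ProjectDownloadsController.downloadMultipleProjects)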
@@ -0,0 +1,160 @@
import archiver from 'archiver'
import async from 'async'
import logger from '@overleaf/logger'
import ProjectEntityHandler from '../Project/ProjectEntityHandler.js'
import ProjectGetter from '../Project/ProjectGetter.js'
import HistoryManager from '../History/HistoryManager.js'
import FileStoreHandler from '../FileStore/FileStoreHandler.js'
import Features from '../../infrastructure/Features.js'
let ProjectZipStreamManager

export default ProjectZipStreamManager = {
  createZipStreamForMultipleProjects(projectIds, callback) {
    // We'll build up a zip file that contains multiple zip files
    const archive = archiver('zip')
    archive.on('error', err =>
      logger.err(
        { err, projectIds },
        'something went wrong building archive of project'
      )
    )
    callback(null, archive)

    const jobs = projectIds.map(projectId => cb => {
      ProjectGetter.getProject(projectId, { name: true }, (error, project) => {
        if (error) {
          return cb(error)
        }
        if (!project) {
          logger.debug(
            { projectId },
            'cannot append project to zip stream: project not found'
          )
          return cb()
        }
        logger.debug(
          { projectId, name: project.name },
          'appending project to zip stream'
        )
        ProjectZipStreamManager.createZipStreamForProject(
          projectId,
          (error, stream) => {
            if (error) {
              return cb(error)
            }
            archive.append(stream, { name: `${project.name}.zip` })
            stream.on('end', () => {
              logger.debug(
                { projectId, name: project.name },
                'zip stream ended'
              )
              cb()
            })
          }
        )
      })
    })

    async.series(jobs, () => {
      logger.debug(
        { projectIds },
        'finished creating zip stream of multiple projects'
      )
      archive.finalize()
    })
  },

  createZipStreamForProject(projectId, callback) {
    const archive = archiver('zip')
    // return stream immediately before we start adding things to it
    archive.on('error', err =>
      logger.err(
        { err, projectId },
        'something went wrong building archive of project'
      )
    )
    callback(null, archive)
    this.addAllDocsToArchive(projectId, archive, error => {
      if (error) {
        logger.error(
          { err: error, projectId },
          'error adding docs to zip stream'
        )
      }
      this.addAllFilesToArchive(projectId, archive, error => {
        if (error) {
          logger.error(
            { err: error, projectId },
            'error adding files to zip stream'
          )
        }
        archive.finalize()
      })
    })
  },

  addAllDocsToArchive(projectId, archive, callback) {
    ProjectEntityHandler.getAllDocs(projectId, (error, docs) => {
      if (error) {
        return callback(error)
      }
      const jobs = Object.entries(docs).map(([path, doc]) => cb => {
        if (path[0] === '/') {
          path = path.slice(1)
        }
        logger.debug({ projectId }, 'Adding doc')
        archive.append(doc.lines.join('\n'), { name: path })
        setImmediate(cb)
      })
      async.series(jobs, callback)
    })
  },

  getFileStream: (projectId, file, callback) => {
    if (Features.hasFeature('project-history-blobs')) {
      HistoryManager.requestBlobWithFallback(
        projectId,
        file.hash,
        file._id,
        (error, result) => {
          if (error) {
            return callback(error)
          }
          const { stream } = result
          callback(null, stream)
        }
      )
    } else {
      FileStoreHandler.getFileStream(projectId, file._id, {}, callback)
    }
  },

  addAllFilesToArchive(projectId, archive, callback) {
    ProjectEntityHandler.getAllFiles(projectId, (error, files) => {
      if (error) {
        return callback(error)
      }
      const jobs = Object.entries(files).map(([path, file]) => cb => {
        ProjectZipStreamManager.getFileStream(
          projectId,
          file,
          (error, stream) => {
            if (error) {
              logger.warn(
                { err: error, projectId, fileId: file._id },
                'something went wrong adding file to zip archive'
              )
              return cb(error)
            }
            if (path[0] === '/') {
              path = path.slice(1)
            }
            archive.append(stream, { name: path })
            stream.on('end', () => cb())
          }
        )
      })
      async.parallelLimit(jobs, 5, callback)
    })
  },
}
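// Usage sketch: createZipStreamForProject hands back the archive stream before
// any entries are appended, so the consumer can start piping immediately; the
// id and output path below are hypothetical.
import fs from 'node:fs'
import ProjectZipStreamManager from './ProjectZipStreamManager.mjs'

ProjectZipStreamManager.createZipStreamForProject('some-project-id', (error, stream) => {
  if (error) throw error
  stream.pipe(fs.createWriteStream('/tmp/project.zip'))
})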
695
services/web/app/src/Features/Editor/EditorController.js
Normal file
@@ -0,0 +1,695 @@
const logger = require('@overleaf/logger')
const OError = require('@overleaf/o-error')
const Metrics = require('@overleaf/metrics')
const ProjectEntityUpdateHandler = require('../Project/ProjectEntityUpdateHandler')
const ProjectOptionsHandler = require('../Project/ProjectOptionsHandler')
const ProjectDetailsHandler = require('../Project/ProjectDetailsHandler')
const ProjectDeleter = require('../Project/ProjectDeleter')
const EditorRealTimeController = require('./EditorRealTimeController')
const async = require('async')
const PublicAccessLevels = require('../Authorization/PublicAccessLevels')
const { promisifyAll } = require('@overleaf/promise-utils')

const EditorController = {
  addDoc(projectId, folderId, docName, docLines, source, userId, callback) {
    EditorController.addDocWithRanges(
      projectId,
      folderId,
      docName,
      docLines,
      {},
      source,
      userId,
      callback
    )
  },

  addDocWithRanges(
    projectId,
    folderId,
    docName,
    docLines,
    docRanges,
    source,
    userId,
    callback
  ) {
    docName = docName.trim()
    Metrics.inc('editor.add-doc')
    ProjectEntityUpdateHandler.addDocWithRanges(
      projectId,
      folderId,
      docName,
      docLines,
      docRanges,
      userId,
      source,
      (err, doc, folderId) => {
        if (err) {
          OError.tag(err, 'error adding doc without lock', {
            projectId,
            docName,
          })
          return callback(err)
        }
        EditorRealTimeController.emitToRoom(
          projectId,
          'reciveNewDoc',
          folderId,
          doc,
          source,
          userId
        )
        callback(err, doc)
      }
    )
  },

  addFile(
    projectId,
    folderId,
    fileName,
    fsPath,
    linkedFileData,
    source,
    userId,
    callback
  ) {
    fileName = fileName.trim()
    Metrics.inc('editor.add-file')
    ProjectEntityUpdateHandler.addFile(
      projectId,
      folderId,
      fileName,
      fsPath,
      linkedFileData,
      userId,
      source,
      (err, fileRef, folderId) => {
        if (err) {
          OError.tag(err, 'error adding file without lock', {
            projectId,
            folderId,
            fileName,
          })
          return callback(err)
        }
        EditorRealTimeController.emitToRoom(
          projectId,
          'reciveNewFile',
          folderId,
          fileRef,
          source,
          linkedFileData,
          userId
        )
        callback(err, fileRef)
      }
    )
  },

  appendToDoc(projectId, docId, docLines, source, userId, callback) {
    ProjectEntityUpdateHandler.appendToDoc(
      projectId,
      docId,
      docLines,
      source,
      userId,
      function (err, doc) {
        if (err) {
          OError.tag(err, 'error appending to doc', {
            projectId,
            docId,
          })
          return callback(err)
        }
        callback(err, doc)
      }
    )
  },

  upsertDoc(projectId, folderId, docName, docLines, source, userId, callback) {
    ProjectEntityUpdateHandler.upsertDoc(
      projectId,
      folderId,
      docName,
      docLines,
      source,
      userId,
      function (err, doc, didAddNewDoc) {
        if (didAddNewDoc) {
          EditorRealTimeController.emitToRoom(
            projectId,
            'reciveNewDoc',
            folderId,
            doc,
            source,
            userId
          )
        }
        callback(err, doc)
      }
    )
  },

  upsertFile(
    projectId,
    folderId,
    fileName,
    fsPath,
    linkedFileData,
    source,
    userId,
    callback
  ) {
    ProjectEntityUpdateHandler.upsertFile(
      projectId,
      folderId,
      fileName,
      fsPath,
      linkedFileData,
      userId,
      source,
      function (err, newFile, didAddFile, existingFile) {
        if (err) {
          return callback(err)
        }
        if (!didAddFile) {
          // replacement, so remove the existing file from the client
          EditorRealTimeController.emitToRoom(
            projectId,
            'removeEntity',
            existingFile._id,
            source
          )
        }
        // now add the new file on the client
        EditorRealTimeController.emitToRoom(
          projectId,
          'reciveNewFile',
          folderId,
          newFile,
          source,
          linkedFileData,
          userId
        )
        callback(null, newFile)
      }
    )
  },

  upsertDocWithPath(
    projectId,
    elementPath,
    docLines,
    source,
    userId,
    callback
  ) {
    ProjectEntityUpdateHandler.upsertDocWithPath(
      projectId,
      elementPath,
      docLines,
      source,
      userId,
      function (err, doc, didAddNewDoc, newFolders, lastFolder) {
        if (err) {
          return callback(err)
        }
        EditorController._notifyProjectUsersOfNewFolders(
          projectId,
          newFolders,
          function (err) {
            if (err) {
              return callback(err)
            }
            if (didAddNewDoc) {
              EditorRealTimeController.emitToRoom(
                projectId,
                'reciveNewDoc',
                lastFolder._id,
                doc,
                source,
                userId
              )
            }
            callback(null, { doc, folder: lastFolder })
          }
        )
      }
    )
  },

  upsertFileWithPath(
    projectId,
    elementPath,
    fsPath,
    linkedFileData,
    source,
    userId,
    callback
  ) {
    ProjectEntityUpdateHandler.upsertFileWithPath(
      projectId,
      elementPath,
      fsPath,
      linkedFileData,
      userId,
      source,
      function (
        err,
        newFile,
        didAddFile,
        existingFile,
        newFolders,
        lastFolder
      ) {
        if (err) {
          return callback(err)
        }
        EditorController._notifyProjectUsersOfNewFolders(
          projectId,
          newFolders,
          function (err) {
            if (err) {
              return callback(err)
            }
            if (!didAddFile) {
              // replacement, so remove the existing file from the client
              EditorRealTimeController.emitToRoom(
                projectId,
                'removeEntity',
                existingFile._id,
                source
              )
            }
            // now add the new file on the client
            EditorRealTimeController.emitToRoom(
              projectId,
              'reciveNewFile',
              lastFolder._id,
              newFile,
              source,
              linkedFileData,
              userId
            )
            callback(null, { file: newFile, folder: lastFolder })
          }
        )
      }
    )
  },

  addFolder(projectId, folderId, folderName, source, userId, callback) {
    folderName = folderName.trim()
    Metrics.inc('editor.add-folder')
    ProjectEntityUpdateHandler.addFolder(
      projectId,
      folderId,
      folderName,
      userId,
      (err, folder, folderId) => {
        if (err) {
          OError.tag(err, 'could not add folder', {
            projectId,
            folderId,
            folderName,
            source,
          })
          return callback(err)
        }
        EditorController._notifyProjectUsersOfNewFolder(
          projectId,
          folderId,
          folder,
          userId,
          function (err) {
            if (err) {
              return callback(err)
            }
            callback(null, folder)
          }
        )
      }
    )
  },

  mkdirp(projectId, path, userId, callback) {
    logger.debug({ projectId, path }, "making directories if they don't exist")
    ProjectEntityUpdateHandler.mkdirp(
      projectId,
      path,
      userId,
      (err, newFolders, lastFolder) => {
        if (err) {
          OError.tag(err, 'could not mkdirp', {
            projectId,
            path,
          })
          return callback(err)
        }

        EditorController._notifyProjectUsersOfNewFolders(
          projectId,
          newFolders,
          function (err) {
            if (err) {
              return callback(err)
            }
            callback(null, newFolders, lastFolder)
          }
        )
      }
    )
  },

  deleteEntity(projectId, entityId, entityType, source, userId, callback) {
    Metrics.inc('editor.delete-entity')
    ProjectEntityUpdateHandler.deleteEntity(
      projectId,
      entityId,
      entityType,
      userId,
      source,
      function (err) {
        if (err) {
          OError.tag(err, 'could not delete entity', {
            projectId,
            entityId,
            entityType,
          })
          return callback(err)
        }
        logger.debug(
          { projectId, entityId, entityType },
          'telling users entity has been deleted'
        )
        EditorRealTimeController.emitToRoom(
          projectId,
          'removeEntity',
          entityId,
          source
        )
        callback()
      }
    )
  },

  deleteEntityWithPath(projectId, path, source, userId, callback) {
    ProjectEntityUpdateHandler.deleteEntityWithPath(
      projectId,
      path,
      userId,
      source,
      function (err, entityId) {
        if (err) {
          return callback(err)
        }
        EditorRealTimeController.emitToRoom(
          projectId,
          'removeEntity',
          entityId,
          source
        )
        callback(null, entityId)
      }
    )
  },

  updateProjectDescription(projectId, description, callback) {
    logger.debug({ projectId, description }, 'updating project description')
    ProjectDetailsHandler.setProjectDescription(
      projectId,
      description,
      function (err) {
        if (err) {
          OError.tag(
            err,
            'something went wrong setting the project description',
            {
              projectId,
              description,
            }
          )
          return callback(err)
        }
        EditorRealTimeController.emitToRoom(
          projectId,
          'projectDescriptionUpdated',
          description
        )
        callback()
      }
    )
  },

  deleteProject(projectId, callback) {
    Metrics.inc('editor.delete-project')
    ProjectDeleter.deleteProject(projectId, callback)
  },

  renameEntity(
    projectId,
    entityId,
    entityType,
    newName,
    userId,
    source,
    callback
  ) {
    Metrics.inc('editor.rename-entity')
    ProjectEntityUpdateHandler.renameEntity(
      projectId,
      entityId,
      entityType,
      newName,
      userId,
      source,
      function (err) {
        if (err) {
          OError.tag(err, 'error renaming entity', {
            projectId,
            entityId,
            entityType,
            newName,
          })
          return callback(err)
        }
        if (newName.length > 0) {
          EditorRealTimeController.emitToRoom(
            projectId,
            'reciveEntityRename',
            entityId,
            newName
          )
        }
        callback()
      }
    )
  },

  moveEntity(
    projectId,
    entityId,
    folderId,
    entityType,
    userId,
    source,
    callback
  ) {
    Metrics.inc('editor.move-entity')
    ProjectEntityUpdateHandler.moveEntity(
      projectId,
      entityId,
      folderId,
      entityType,
      userId,
      source,
      function (err) {
        if (err) {
          OError.tag(err, 'error moving entity', {
            projectId,
            entityId,
            folderId,
          })
          return callback(err)
        }
        EditorRealTimeController.emitToRoom(
          projectId,
          'reciveEntityMove',
          entityId,
          folderId
        )
        callback()
      }
    )
  },

  renameProject(projectId, newName, callback) {
    ProjectDetailsHandler.renameProject(projectId, newName, function (err) {
      if (err) {
        OError.tag(err, 'error renaming project', {
          projectId,
          newName,
        })
        return callback(err)
      }
      EditorRealTimeController.emitToRoom(
        projectId,
        'projectNameUpdated',
        newName
      )
      callback()
    })
  },

  setCompiler(projectId, compiler, callback) {
    ProjectOptionsHandler.setCompiler(projectId, compiler, function (err) {
      if (err) {
        return callback(err)
      }
      EditorRealTimeController.emitToRoom(
        projectId,
        'compilerUpdated',
        compiler
      )
      callback()
    })
  },

  setImageName(projectId, imageName, callback) {
    ProjectOptionsHandler.setImageName(projectId, imageName, function (err) {
      if (err) {
        return callback(err)
      }
      EditorRealTimeController.emitToRoom(
        projectId,
        'imageNameUpdated',
        imageName
      )
      callback()
    })
  },

  setSpellCheckLanguage(projectId, languageCode, callback) {
    ProjectOptionsHandler.setSpellCheckLanguage(
      projectId,
      languageCode,
      function (err) {
        if (err) {
          return callback(err)
        }
        EditorRealTimeController.emitToRoom(
          projectId,
          'spellCheckLanguageUpdated',
          languageCode
        )
        callback()
      }
    )
  },

  setPublicAccessLevel(projectId, newAccessLevel, callback) {
    async.series(
      [
        cb => {
          if (newAccessLevel === PublicAccessLevels.TOKEN_BASED) {
            ProjectDetailsHandler.ensureTokensArePresent(projectId, cb)
          } else {
            cb()
          }
        },
        cb =>
          ProjectDetailsHandler.setPublicAccessLevel(
            projectId,
            newAccessLevel,
            cb
          ),
        cb => {
          EditorRealTimeController.emitToRoom(
            projectId,
            'project:publicAccessLevel:changed',
            { newAccessLevel }
          )
          cb()
        },
      ],
      callback
    )
  },

  setRootDoc(projectId, newRootDocID, callback) {
    ProjectEntityUpdateHandler.setRootDoc(
      projectId,
      newRootDocID,
      function (err) {
        if (err) {
          return callback(err)
        }
        EditorRealTimeController.emitToRoom(
          projectId,
          'rootDocUpdated',
          newRootDocID
        )
        callback()
      }
    )
  },

  setMainBibliographyDoc(projectId, newBibliographyDocId, callback) {
    ProjectEntityUpdateHandler.setMainBibliographyDoc(
      projectId,
      newBibliographyDocId,
      function (err) {
        if (err) {
          return callback(err)
        }
        EditorRealTimeController.emitToRoom(
          projectId,
          'mainBibliographyDocUpdated',
          newBibliographyDocId
        )
        callback()
      }
    )
  },

  _notifyProjectUsersOfNewFolders(projectId, folders, callback) {
    async.eachSeries(
      folders,
      (folder, cb) =>
        EditorController._notifyProjectUsersOfNewFolder(
          projectId,
          folder.parentFolder_id,
          folder,
          null,
          cb
        ),
      callback
    )
  },

  _notifyProjectUsersOfNewFolder(
    projectId,
    folderId,
    folder,
    userId,
    callback
  ) {
    EditorRealTimeController.emitToRoom(
      projectId,
      'reciveNewFolder',
      folderId,
      folder,
      userId
    )
    callback()
  },
}

EditorController.promises = promisifyAll(EditorController, {
  multiResult: {
    mkdirp: ['newFolders', 'lastFolder'],
  },
})
module.exports = EditorController
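// Usage sketch for the promisified API: promisifyAll exposes the callback
// methods under EditorController.promises, and the multiResult option above
// maps mkdirp's two callback values onto named properties. Ids are hypothetical.
async function ensureFoldersExample() {
  const { newFolders, lastFolder } = await EditorController.promises.mkdirp(
    'project-id',
    'chapters/part1',
    'user-id'
  )
  return { created: newFolders.length, folderId: lastFolder._id }
}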
294
services/web/app/src/Features/Editor/EditorHttpController.js
Normal file
@@ -0,0 +1,294 @@
const ProjectDeleter = require('../Project/ProjectDeleter')
const EditorController = require('./EditorController')
const ProjectGetter = require('../Project/ProjectGetter')
const AuthorizationManager = require('../Authorization/AuthorizationManager')
const ProjectEditorHandler = require('../Project/ProjectEditorHandler')
const Metrics = require('@overleaf/metrics')
const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter')
const CollaboratorsInviteGetter = require('../Collaborators/CollaboratorsInviteGetter')
const CollaboratorsHandler = require('../Collaborators/CollaboratorsHandler')
const PrivilegeLevels = require('../Authorization/PrivilegeLevels')
const SessionManager = require('../Authentication/SessionManager')
const Errors = require('../Errors/Errors')
const DocstoreManager = require('../Docstore/DocstoreManager')
const logger = require('@overleaf/logger')
const { expressify } = require('@overleaf/promise-utils')
const Settings = require('@overleaf/settings')

module.exports = {
  joinProject: expressify(joinProject),
  addDoc: expressify(addDoc),
  addFolder: expressify(addFolder),
  renameEntity: expressify(renameEntity),
  moveEntity: expressify(moveEntity),
  deleteDoc: expressify(deleteDoc),
  deleteFile: expressify(deleteFile),
  deleteFolder: expressify(deleteFolder),
  deleteEntity: expressify(deleteEntity),
  _nameIsAcceptableLength,
}

async function joinProject(req, res, next) {
  const projectId = req.params.Project_id
  let userId = req.body.userId // keep schema in sync with router
  if (userId === 'anonymous-user') {
    userId = null
  }
  Metrics.inc('editor.join-project')
  const {
    project,
    privilegeLevel,
    isRestrictedUser,
    isTokenMember,
    isInvitedMember,
  } = await _buildJoinProjectView(req, projectId, userId)
  if (!project) {
    return res.sendStatus(403)
  }
  // Hide sensitive data if the user is restricted
  if (isRestrictedUser) {
    project.owner = { _id: project.owner._id }
    project.members = []
    project.invites = []
  }
  // Only show the 'renamed or deleted' message once
  if (project.deletedByExternalDataSource) {
    await ProjectDeleter.promises.unmarkAsDeletedByExternalSource(projectId)
  }

  if (project.spellCheckLanguage) {
    project.spellCheckLanguage = await chooseSpellCheckLanguage(
      project.spellCheckLanguage
    )
  }

  res.json({
    project,
    privilegeLevel,
    isRestrictedUser,
    isTokenMember,
    isInvitedMember,
  })
}

async function _buildJoinProjectView(req, projectId, userId) {
  const project =
    await ProjectGetter.promises.getProjectWithoutDocLines(projectId)
  if (project == null) {
    throw new Errors.NotFoundError('project not found')
  }
  let deletedDocsFromDocstore = []
  try {
    deletedDocsFromDocstore =
      await DocstoreManager.promises.getAllDeletedDocs(projectId)
  } catch (err) {
    // The query in docstore is not optimized at this time and fails for
    // projects with many very large, deleted documents.
    // Not serving the user with deletedDocs from docstore may cause a minor
    // UI issue with deleted files that are no longer available for restore.
    logger.warn(
      { err, projectId },
      'soft-failure when fetching deletedDocs from docstore'
    )
  }
  const members =
    await CollaboratorsGetter.promises.getInvitedMembersWithPrivilegeLevels(
      projectId
    )
  const token = req.body.anonymousAccessToken
  const privilegeLevel =
    await AuthorizationManager.promises.getPrivilegeLevelForProject(
      userId,
      projectId,
      token
    )
  if (privilegeLevel == null || privilegeLevel === PrivilegeLevels.NONE) {
    return { project: null, privilegeLevel: null, isRestrictedUser: false }
  }
  const invites =
    await CollaboratorsInviteGetter.promises.getAllInvites(projectId)
  const isTokenMember = await CollaboratorsHandler.promises.userIsTokenMember(
    userId,
    projectId
  )
  const isInvitedMember =
    await CollaboratorsGetter.promises.isUserInvitedMemberOfProject(
      userId,
      projectId
    )
  const isRestrictedUser = AuthorizationManager.isRestrictedUser(
    userId,
    privilegeLevel,
    isTokenMember,
    isInvitedMember
  )
  return {
    project: ProjectEditorHandler.buildProjectModelView(
      project,
      members,
      invites,
      deletedDocsFromDocstore
    ),
    privilegeLevel,
    isTokenMember,
    isInvitedMember,
    isRestrictedUser,
  }
}

function _nameIsAcceptableLength(name) {
  return name != null && name.length < 150 && name.length !== 0
}

async function addDoc(req, res, next) {
  const projectId = req.params.Project_id
  const { name } = req.body
  const parentFolderId = req.body.parent_folder_id
  const userId = SessionManager.getLoggedInUserId(req.session)

  if (!_nameIsAcceptableLength(name)) {
    return res.sendStatus(400)
  }
  try {
    const doc = await EditorController.promises.addDoc(
      projectId,
      parentFolderId,
      name,
      [],
      'editor',
      userId
    )
    res.json(doc)
  } catch (err) {
    if (err.message === 'project_has_too_many_files') {
      res.status(400).json(req.i18n.translate('project_has_too_many_files'))
    } else {
      next(err)
    }
  }
}

async function addFolder(req, res, next) {
  const projectId = req.params.Project_id
  const { name } = req.body
  const parentFolderId = req.body.parent_folder_id
  const userId = SessionManager.getLoggedInUserId(req.session)
  if (!_nameIsAcceptableLength(name)) {
    return res.sendStatus(400)
  }
  try {
    const doc = await EditorController.promises.addFolder(
      projectId,
      parentFolderId,
      name,
      'editor',
      userId
    )
    res.json(doc)
  } catch (err) {
    if (err.message === 'project_has_too_many_files') {
      res.status(400).json(req.i18n.translate('project_has_too_many_files'))
    } else if (err.message === 'invalid element name') {
      res.status(400).json(req.i18n.translate('invalid_file_name'))
    } else {
      next(err)
    }
  }
}

async function renameEntity(req, res, next) {
  const projectId = req.params.Project_id
  const entityId = req.params.entity_id
  const entityType = req.params.entity_type
  const { name, source = 'editor' } = req.body
  if (!_nameIsAcceptableLength(name)) {
    return res.sendStatus(400)
  }
  const userId = SessionManager.getLoggedInUserId(req.session)
  await EditorController.promises.renameEntity(
    projectId,
    entityId,
    entityType,
    name,
    userId,
    source
  )
  res.sendStatus(204)
}

async function moveEntity(req, res, next) {
  const projectId = req.params.Project_id
  const entityId = req.params.entity_id
  const entityType = req.params.entity_type
  const folderId = req.body.folder_id
  const source = req.body.source ?? 'editor'
  const userId = SessionManager.getLoggedInUserId(req.session)
  await EditorController.promises.moveEntity(
    projectId,
    entityId,
    folderId,
    entityType,
    userId,
    source
  )
  res.sendStatus(204)
}

async function deleteDoc(req, res, next) {
  req.params.entity_type = 'doc'
  await deleteEntity(req, res, next)
}

async function deleteFile(req, res, next) {
  req.params.entity_type = 'file'
  await deleteEntity(req, res, next)
}

async function deleteFolder(req, res, next) {
  req.params.entity_type = 'folder'
  await deleteEntity(req, res, next)
}

async function deleteEntity(req, res, next) {
  const projectId = req.params.Project_id
  const entityId = req.params.entity_id
  const entityType = req.params.entity_type
  const userId = SessionManager.getLoggedInUserId(req.session)
  await EditorController.promises.deleteEntity(
    projectId,
    entityId,
    entityType,
    'editor',
    userId
  )
  res.sendStatus(204)
}

const supportedSpellCheckLanguages = new Set(
  Settings.languages
    // only include spell-check languages that are available in the client
    .filter(language => language.dic !== undefined)
    .map(language => language.code)
)

async function chooseSpellCheckLanguage(spellCheckLanguage) {
  if (supportedSpellCheckLanguages.has(spellCheckLanguage)) {
    return spellCheckLanguage
  }

  // Preserve the value in the database so they can use it again once we add back support.
  // Map some server-only languages to a specific variant, or disable spell checking for currently unsupported spell check languages.
  switch (spellCheckLanguage) {
    case 'en':
      // map "English" to "English (American)"
      return 'en_US'

    case 'no':
      // map "Norwegian" to "Norwegian (Bokmål)"
      return 'nb_NO'

    default:
      // map anything else to "off"
      return ''
  }
}
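// Behaviour sketch for chooseSpellCheckLanguage (module-internal), assuming
// 'en_US' and 'nb_NO' ship a dic entry in Settings.languages while 'xx' does not:
async function demoSpellCheckMapping() {
  for (const code of ['en_US', 'en', 'no', 'xx']) {
    // expected: 'en_US' -> 'en_US', 'en' -> 'en_US', 'no' -> 'nb_NO', 'xx' -> ''
    console.log(code, '->', await chooseSpellCheckLanguage(code))
  }
}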
@@ -0,0 +1,50 @@
/* eslint-disable
    max-len,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let EditorRealTimeController
const Settings = require('@overleaf/settings')
const Metrics = require('@overleaf/metrics')
const RedisWrapper = require('../../infrastructure/RedisWrapper')
const rclient = RedisWrapper.client('pubsub')
const os = require('os')
const crypto = require('crypto')

const HOST = os.hostname()
const RND = crypto.randomBytes(4).toString('hex') // generate a random key for this process
let COUNT = 0

module.exports = EditorRealTimeController = {
  emitToRoom(roomId, message, ...payload) {
    // create a unique message id using a counter
    const messageId = `web:${HOST}:${RND}-${COUNT++}`
    let channel
    if (roomId === 'all' || !Settings.publishOnIndividualChannels) {
      channel = 'editor-events'
    } else {
      channel = `editor-events:${roomId}`
    }
    const blob = JSON.stringify({
      room_id: roomId,
      message,
      payload,
      _id: messageId,
    })
    Metrics.summary('redis.publish.editor-events', blob.length, {
      status: message,
    })
    return rclient.publish(channel, blob)
  },

  emitToAll(message, ...payload) {
    return this.emitToRoom('all', message, ...Array.from(payload))
  },
}
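// Usage sketch (ids are hypothetical). With Settings.publishOnIndividualChannels
// enabled this publishes to 'editor-events:project-id-123'; otherwise to the
// shared 'editor-events' channel.
EditorRealTimeController.emitToRoom(
  'project-id-123',
  'projectNameUpdated',
  'New Project Name'
)
// published blob:
// {"room_id":"project-id-123","message":"projectNameUpdated",
//  "payload":["New Project Name"],"_id":"web:<hostname>:<rnd>-0"}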
86
services/web/app/src/Features/Editor/EditorRouter.mjs
Normal file
@@ -0,0 +1,86 @@
import EditorHttpController from './EditorHttpController.js'
import AuthenticationController from '../Authentication/AuthenticationController.js'
import AuthorizationMiddleware from '../Authorization/AuthorizationMiddleware.js'
import { RateLimiter } from '../../infrastructure/RateLimiter.js'
import RateLimiterMiddleware from '../Security/RateLimiterMiddleware.js'
import { validate, Joi } from '../../infrastructure/Validation.js'

const rateLimiters = {
  addDocToProject: new RateLimiter('add-doc-to-project', {
    points: 30,
    duration: 60,
  }),
  addFolderToProject: new RateLimiter('add-folder-to-project', {
    points: 60,
    duration: 60,
  }),
  joinProject: new RateLimiter('join-project', { points: 45, duration: 60 }),
}

export default {
  apply(webRouter, privateApiRouter) {
    webRouter.post(
      '/project/:Project_id/doc',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      RateLimiterMiddleware.rateLimit(rateLimiters.addDocToProject, {
        params: ['Project_id'],
      }),
      EditorHttpController.addDoc
    )
    webRouter.post(
      '/project/:Project_id/folder',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      RateLimiterMiddleware.rateLimit(rateLimiters.addFolderToProject, {
        params: ['Project_id'],
      }),
      EditorHttpController.addFolder
    )

    webRouter.post(
      '/project/:Project_id/:entity_type/:entity_id/rename',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      EditorHttpController.renameEntity
    )
    webRouter.post(
      '/project/:Project_id/:entity_type/:entity_id/move',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      EditorHttpController.moveEntity
    )

    webRouter.delete(
      '/project/:Project_id/file/:entity_id',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      EditorHttpController.deleteFile
    )
    webRouter.delete(
      '/project/:Project_id/doc/:entity_id',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      EditorHttpController.deleteDoc
    )
    webRouter.delete(
      '/project/:Project_id/folder/:entity_id',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      EditorHttpController.deleteFolder
    )

    // Called by the real-time API to load up the current project state.
    // This is a post request because it's more than just a getting of data. We take actions
    // whenever a user joins a project, like updating the deleted status.
    privateApiRouter.post(
      '/project/:Project_id/join',
      AuthenticationController.requirePrivateApiAuth(),
      RateLimiterMiddleware.rateLimit(rateLimiters.joinProject, {
        params: ['Project_id'],
        // keep schema in sync with controller
        getUserId: req => req.body.userId,
      }),
      validate({
        body: Joi.object({
          userId: Joi.string().required(),
          anonymousAccessToken: Joi.string().optional(),
        }),
      }),
      EditorHttpController.joinProject
    )
  },
}
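// Wiring sketch (illustrative): the surrounding app constructs webRouter and
// privateApiRouter elsewhere in this commit; this only shows how the module
// above is applied.
import express from 'express'
import EditorRouter from './EditorRouter.mjs'

const webRouter = express.Router()
const privateApiRouter = express.Router()
EditorRouter.apply(webRouter, privateApiRouter)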
46
services/web/app/src/Features/Email/Bodies/NoCTAEmailBody.js
Normal file
@@ -0,0 +1,46 @@
const _ = require('lodash')

module.exports = _.template(`\
<table class="row" style="border-collapse: collapse; border-spacing: 0; display: table; padding: 0; position: relative; text-align: left; vertical-align: top; width: 100%;">
  <tbody>
    <tr style="padding: 0; vertical-align: top;">
      <th class="small-12 columns" style="line-height: 1.3; margin: 0 auto; padding: 0; padding-bottom: 16px; padding-left: 16px; padding-right: 16px; text-align: left; width: 564px;">
        <table style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3;">
          <tr style="padding: 0; text-align: left; vertical-align: top;">
            <th style="margin: 0; padding: 0; text-align: left;">
              <% if (title) { %>
                <h3 class="force-overleaf-style" style="margin: 0; color: #5D6879; font-family: Georgia, serif; font-size: 24px; font-weight: normal; line-height: 1.3; padding: 0; text-align: left; word-wrap: normal;">
                  <%= title %>
                </h3>
              <% } %>
            </th>
            <tr>
              <td>
                <p style="height: 20px; margin: 0; padding: 0;"> </p>

                <% if (greeting) { %>
                  <p style="margin: 0 0 10px 0; padding: 0;">
                    <%= greeting %>
                  </p>
                <% } %>

                <% (message).forEach(function(paragraph) { %>
                  <p class="force-overleaf-style" style="margin: 0 0 10px 0; padding: 0;">
                    <%= paragraph %>
                  </p>
                <% }) %>
                <% if (highlightedText) { %>
                  <div style="text-align: center; color: #1B222C; font-size: 20px; margin: 16px 0; padding: 16px 8px; border-radius: 8px; background: #F4F5F6;">
                    <b><%= highlightedText %></b>
                  </div>
                <% } %>
              </td>
            </tr>
          </tr>
        </table>
      </th>
    </tr>
  </tbody>
</table>
\
`)
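// Rendering sketch (values are hypothetical). The template reads title,
// greeting, message and highlightedText, so each key must be present even
// when falsy.
const NoCTAEmailBody = require('./NoCTAEmailBody')
const html = NoCTAEmailBody({
  title: 'Confirm your email address',
  greeting: '',
  message: ['Use this 6-digit code to confirm your email address.'],
  highlightedText: '123456',
})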
96
services/web/app/src/Features/Email/Bodies/cta-email.js
Normal file
@@ -0,0 +1,96 @@
const _ = require('lodash')

module.exports = _.template(`\
<table class="row" style="border-collapse: collapse; border-spacing: 0; display: table; padding: 0; position: relative; text-align: left; vertical-align: top; width: 100%;">
  <tbody>
    <tr style="padding: 0; vertical-align: top;">
      <th class="small-12 columns" style="line-height: 1.3; margin: 0 auto; padding: 0; padding-bottom: 16px; padding-left: 16px; padding-right: 16px; text-align: left;">
        <table class="cta-table" style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3;">
          <tr style="padding: 0; text-align: left; vertical-align: top;">
            <th style="margin: 0; padding: 0; text-align: left;">
              <% if (title) { %>
                <h3 class="force-overleaf-style" style="margin: 0; color: #5D6879; font-family: Georgia, serif; font-size: 24px; font-weight: normal; line-height: 1.3; padding: 0; text-align: left; word-wrap: normal;">
                  <%= title %>
                </h3>
              <% } %>
            </th>
            <tr>
              <td>
                <p style="height: 20px; margin: 0; padding: 0;"> </p>

                <% if (greeting) { %>
                  <p style="margin: 0 0 10px 0; padding: 0;">
                    <%= greeting %>
                  </p>
                <% } %>

                <% (message).forEach(function(paragraph) { %>
                  <p class="force-overleaf-style" style="margin: 0 0 10px 0; padding: 0;">
                    <%= paragraph %>
                  </p>
                <% }) %>

                <p style="margin: 0; padding: 0;"> </p>

                <table style="border-collapse: collapse; border-spacing: 0; float: none; margin: 0 auto; padding: 0; text-align: center; vertical-align: top; width: auto;">
                  <tr style="padding: 0; text-align: left; vertical-align: top;">
                    <td style="-moz-hyphens: auto; -webkit-hyphens: auto; border-collapse: collapse !important; border-radius: 9999px; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">
                      <table style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;">
                        <tr style="padding: 0; text-align: left; vertical-align: top;">
                          <td style="-moz-hyphens: auto; -webkit-hyphens: auto; background: #4F9C45; border: none; border-collapse: collapse !important; border-radius: 9999px; color: #fefefe; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">
                            <a href="<%= ctaURL %>" style="border: 0 solid #4F9C45; border-radius: 9999px; color: #fefefe; display: inline-block; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: bold; line-height: 1.3; margin: 0; padding: 8px 16px 8px 16px; text-align: left; text-decoration: none;">
                              <%= ctaText %>
                            </a>
                          </td>
                        </tr>
                      </table>
                    </td>
                  </tr>
                </table>

                <% if (secondaryMessage && secondaryMessage.length > 0) { %>
                  <p style="margin: 0; padding: 0;"> </p>

                  <% (secondaryMessage).forEach(function(paragraph) { %>
                    <p class="force-overleaf-style">
                      <%= paragraph %>
                    </p>
                  <% }) %>
                <% } %>

                <p style="margin: 0; padding: 0;"> </p>

                <p class="force-overleaf-style" style="font-size: 12px;">
                  If the button above does not appear, please copy and paste this link into your browser's address bar:
                </p>

                <p class="force-overleaf-style" style="font-size: 12px;">
                  <%= ctaURL %>
                </p>
              </td>
            </tr>
          </tr>
        </table>
      </th>
    </tr>
  </tbody>
</table>
<% if (gmailGoToAction) { %>
  <script type="application/ld+json">
    <%=
      StringHelper.stringifyJsonForScript({
        "@context": "http://schema.org",
        "@type": "EmailMessage",
        "potentialAction": {
          "@type": "ViewAction",
          "target": gmailGoToAction.target,
          "url": gmailGoToAction.target,
          "name": gmailGoToAction.name
        },
        "description": gmailGoToAction.description
      })
    %>
  </script>
<% } %>
\
`)
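// Rendering sketch (values are hypothetical; the StringHelper require path is
// an assumption). The template also expects secondaryMessage and
// gmailGoToAction keys, even when empty or undefined.
const ctaEmailBody = require('./cta-email')
const StringHelper = require('../../Helpers/StringHelper')
const html = ctaEmailBody({
  title: 'Password Reset',
  greeting: 'Hi,',
  message: ['We got a request to reset your password.'],
  secondaryMessage: [],
  ctaText: 'Reset password',
  ctaURL: 'https://example.com/user/password/set?token=abc',
  gmailGoToAction: undefined,
  StringHelper,
})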
971
services/web/app/src/Features/Email/EmailBuilder.js
Normal file
@@ -0,0 +1,971 @@
const _ = require('lodash')
const settings = require('@overleaf/settings')
const moment = require('moment')
const EmailMessageHelper = require('./EmailMessageHelper')
const StringHelper = require('../Helpers/StringHelper')
const BaseWithHeaderEmailLayout = require('./Layouts/BaseWithHeaderEmailLayout')
const SpamSafe = require('./SpamSafe')
const ctaEmailBody = require('./Bodies/cta-email')
const NoCTAEmailBody = require('./Bodies/NoCTAEmailBody')

function _emailBodyPlainText(content, opts, ctaEmail) {
  let emailBody = `${content.greeting(opts, true)}`
  emailBody += `\r\n\r\n`
  emailBody += `${content.message(opts, true).join('\r\n\r\n')}`

  if (ctaEmail) {
    emailBody += `\r\n\r\n`
    emailBody += `${content.ctaText(opts, true)}: ${content.ctaURL(opts, true)}`
  }

  if (
    content.secondaryMessage(opts, true) &&
    content.secondaryMessage(opts, true).length > 0
  ) {
    emailBody += `\r\n\r\n`
    emailBody += `${content.secondaryMessage(opts, true).join('\r\n\r\n')}`
  }

  emailBody += `\r\n\r\n`
  emailBody += `Regards,\r\nThe ${settings.appName} Team - ${settings.siteUrl}`

  if (
    settings.email &&
    settings.email.template &&
    settings.email.template.customFooter
  ) {
    emailBody += `\r\n\r\n`
    emailBody += settings.email.template.customFooter
  }

  return emailBody
}

function ctaTemplate(content) {
  if (
    !content.ctaURL ||
    !content.ctaText ||
    !content.message ||
    !content.subject
  ) {
    throw new Error('missing required CTA email content')
  }
  if (!content.title) {
    content.title = () => {}
  }
  if (!content.greeting) {
    content.greeting = () => 'Hi,'
  }
  if (!content.secondaryMessage) {
    content.secondaryMessage = () => []
  }
  if (!content.gmailGoToAction) {
    content.gmailGoToAction = () => {}
  }
  return {
    subject(opts) {
      return content.subject(opts)
    },
    layout: BaseWithHeaderEmailLayout,
    plainTextTemplate(opts) {
      return _emailBodyPlainText(content, opts, true)
    },
    compiledTemplate(opts) {
      return ctaEmailBody({
        title: content.title(opts),
        greeting: content.greeting(opts),
        message: content.message(opts),
        secondaryMessage: content.secondaryMessage(opts),
        ctaText: content.ctaText(opts),
        ctaURL: content.ctaURL(opts),
        gmailGoToAction: content.gmailGoToAction(opts),
        StringHelper,
      })
    },
  }
}

function NoCTAEmailTemplate(content) {
  if (content.greeting == null) {
    content.greeting = () => 'Hi,'
  }
  if (!content.message) {
    throw new Error('missing message')
  }
  return {
    subject(opts) {
      return content.subject(opts)
    },
    layout: BaseWithHeaderEmailLayout,
    plainTextTemplate(opts) {
      return `\
${content.greeting(opts)}

${content.message(opts, true).join('\r\n\r\n')}

Regards,
The ${settings.appName} Team - ${settings.siteUrl}\
`
    },
    compiledTemplate(opts) {
      return NoCTAEmailBody({
        title:
          typeof content.title === 'function' ? content.title(opts) : undefined,
        greeting: content.greeting(opts),
        highlightedText:
          typeof content.highlightedText === 'function'
            ? content.highlightedText(opts)
            : undefined,
        message: content.message(opts),
        StringHelper,
      })
    },
  }
}

function buildEmail(templateName, opts) {
  const template = templates[templateName]
  opts.siteUrl = settings.siteUrl
  opts.body = template.compiledTemplate(opts)
  return {
    subject: template.subject(opts),
    html: template.layout(opts),
    text: template.plainTextTemplate && template.plainTextTemplate(opts),
  }
}

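// Usage sketch for buildEmail with one of the templates registered below;
// the recipient and code are hypothetical.
function buildConfirmCodeEmailExample() {
  return buildEmail('confirmCode', {
    to: 'user@example.com',
    confirmCode: '123456',
    welcomeUser: false,
  })
  // => { subject: 'Confirm your email address on Overleaf (123456)', html, text }
}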
const templates = {}
|
||||
|
||||
templates.registered = ctaTemplate({
|
||||
subject() {
|
||||
return `Activate your ${settings.appName} Account`
|
||||
},
|
||||
message(opts) {
|
||||
return [
|
||||
`Congratulations, you've just had an account created for you on ${
|
||||
settings.appName
|
||||
} with the email address '${_.escape(opts.to)}'.`,
|
||||
'Click here to set your password and log in:',
|
||||
]
|
||||
},
|
||||
secondaryMessage() {
|
||||
return [
|
||||
`If you have any questions or problems, please contact ${settings.adminEmail}`,
|
||||
]
|
||||
},
|
||||
ctaText() {
|
||||
return 'Set password'
|
||||
},
|
||||
ctaURL(opts) {
|
||||
return opts.setNewPasswordUrl
|
||||
},
|
||||
})
|
||||
|
||||
templates.canceledSubscription = ctaTemplate({
|
||||
subject() {
|
||||
return `${settings.appName} thoughts`
|
||||
},
|
||||
message() {
|
||||
return [
|
||||
`We are sorry to see you cancelled your ${settings.appName} premium subscription. Would you mind giving us some feedback on what the site is lacking at the moment via this quick survey?`,
|
||||
]
|
||||
},
|
||||
secondaryMessage() {
|
||||
return ['Thank you in advance!']
|
||||
},
|
||||
ctaText() {
|
||||
return 'Leave Feedback'
|
||||
},
|
||||
ctaURL(opts) {
|
||||
return 'https://docs.google.com/forms/d/e/1FAIpQLSfa7z_s-cucRRXm70N4jEcSbFsZeb0yuKThHGQL8ySEaQzF0Q/viewform?usp=sf_link'
|
||||
},
|
||||
})
|
||||
|
||||
templates.reactivatedSubscription = ctaTemplate({
|
||||
subject() {
|
||||
return `Subscription Reactivated - ${settings.appName}`
|
||||
},
|
||||
message(opts) {
|
||||
return ['Your subscription was reactivated successfully.']
|
||||
},
|
||||
ctaText() {
|
||||
return 'View Subscription Dashboard'
|
||||
},
|
||||
ctaURL(opts) {
|
||||
return `${settings.siteUrl}/user/subscription`
|
||||
},
|
||||
})
|
||||
|
||||
templates.passwordResetRequested = ctaTemplate({
|
||||
subject() {
|
||||
return `Password Reset - ${settings.appName}`
|
||||
},
|
||||
title() {
|
||||
return 'Password Reset'
|
||||
},
|
||||
message() {
|
||||
return [`We got a request to reset your ${settings.appName} password.`]
|
||||
},
|
||||
secondaryMessage() {
|
||||
return [
|
||||
"If you ignore this message, your password won't be changed.",
|
||||
"If you didn't request a password reset, let us know.",
|
||||
]
|
||||
},
|
||||
ctaText() {
|
||||
return 'Reset password'
|
||||
},
|
||||
ctaURL(opts) {
|
||||
return opts.setNewPasswordUrl
|
||||
},
|
||||
})
|
||||
|
||||
templates.confirmEmail = ctaTemplate({
|
||||
subject() {
|
||||
return `Confirm Email - ${settings.appName}`
|
||||
},
|
||||
title() {
|
||||
return 'Confirm Email'
|
||||
},
|
||||
message(opts) {
|
||||
return [
|
||||
`Please confirm that you have added a new email, ${opts.to}, to your ${settings.appName} account.`,
|
||||
]
|
||||
},
|
||||
secondaryMessage() {
|
||||
return [
|
||||
`If you did not request this, please let us know at <a href="mailto:${settings.adminEmail}">${settings.adminEmail}</a>.`,
|
||||
`If you have any questions or trouble confirming your email address, please get in touch with our support team at ${settings.adminEmail}.`,
|
||||
]
|
||||
},
|
||||
ctaText() {
|
||||
return 'Confirm Email'
|
||||
},
|
||||
ctaURL(opts) {
|
||||
return opts.confirmEmailUrl
|
||||
},
|
||||
})
|
||||
|
||||
templates.confirmCode = NoCTAEmailTemplate({
|
||||
greeting(opts) {
|
||||
return ''
|
||||
},
|
||||
subject(opts) {
|
||||
return `Confirm your email address on Overleaf (${opts.confirmCode})`
|
||||
},
|
||||
title(opts) {
|
||||
return 'Confirm your email address'
|
||||
},
|
||||
message(opts, isPlainText) {
|
||||
const msg = opts.welcomeUser
|
||||
? [
|
||||
`Welcome to Overleaf! We're so glad you joined us.`,
|
||||
'Use this 6-digit confirmation code to finish your setup.',
|
||||
]
|
||||
: ['Use this 6-digit code to confirm your email address.']
|
||||
|
||||
if (isPlainText && opts.confirmCode) {
|
||||
msg.push(opts.confirmCode)
|
||||
}
|
||||
return msg
|
||||
},
|
||||
highlightedText(opts) {
|
||||
return opts.confirmCode
|
||||
},
|
||||
})
|
||||
|
||||
templates.projectInvite = ctaTemplate({
  subject(opts) {
    const safeName = SpamSafe.isSafeProjectName(opts.project.name)
    const safeEmail = SpamSafe.isSafeEmail(opts.owner.email)

    if (safeName && safeEmail) {
      return `"${_.escape(opts.project.name)}" — shared by ${_.escape(
        opts.owner.email
      )}`
    }
    if (safeName) {
      return `${settings.appName} project shared with you — "${_.escape(
        opts.project.name
      )}"`
    }
    if (safeEmail) {
      return `${_.escape(opts.owner.email)} shared an ${
        settings.appName
      } project with you`
    }

    return `An ${settings.appName} project has been shared with you`
  },
  title(opts) {
    return 'Project Invite'
  },
  greeting(opts) {
    return ''
  },
  message(opts, isPlainText) {
    // build message depending on spam-safe variables
    const message = [`You have been invited to an ${settings.appName} project.`]

    if (SpamSafe.isSafeProjectName(opts.project.name)) {
      message.push('<br/> Project:')
      message.push(`<b>${_.escape(opts.project.name)}</b>`)
    }

    if (SpamSafe.isSafeEmail(opts.owner.email)) {
      message.push(`<br/> Shared by:`)
      message.push(`<b>${_.escape(opts.owner.email)}</b>`)
    }

    if (message.length === 1) {
      message.push('<br/> Please view the project to find out more.')
    }

    return message.map(m => {
      return EmailMessageHelper.cleanHTML(m, isPlainText)
    })
  },
  ctaText() {
    return 'View project'
  },
  ctaURL(opts) {
    return opts.inviteUrl
  },
  gmailGoToAction(opts) {
    return {
      target: opts.inviteUrl,
      name: 'View project',
      description: `Join ${_.escape(
        SpamSafe.safeProjectName(opts.project.name, 'project')
      )} at ${settings.appName}`,
    }
  },
})

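// Example (sketch, not part of the original file): how the SpamSafe gates in
// subject() above pick a subject line, assuming settings.appName is
// 'Overleaf'. SpamSafe is defined later in this commit and rejects long
// values and unusual punctuation.
//
//   { project: { name: 'Thesis draft' }, owner: { email: 'ann@uni.edu' } }
//   // subject => '"Thesis draft" — shared by ann@uni.edu'
//
//   { project: { name: 'WIN $$$ NOW!!! http://spam.example' },
//     owner: { email: 'ann@uni.edu' } }
//   // unsafe name, safe email
//   // subject => 'ann@uni.edu shared an Overleaf project with you'
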
templates.reconfirmEmail = ctaTemplate({
  subject() {
    return `Reconfirm Email - ${settings.appName}`
  },
  title() {
    return 'Reconfirm Email'
  },
  message(opts) {
    return [
      `Please reconfirm your email address, ${opts.to}, on your ${settings.appName} account.`,
    ]
  },
  secondaryMessage() {
    return [
      'If you did not request this, you can simply ignore this message.',
      `If you have any questions or trouble confirming your email address, please get in touch with our support team at ${settings.adminEmail}.`,
    ]
  },
  ctaText() {
    return 'Reconfirm Email'
  },
  ctaURL(opts) {
    return opts.confirmEmailUrl
  },
})

templates.verifyEmailToJoinTeam = ctaTemplate({
  subject(opts) {
    return `${opts.reminder ? 'Reminder: ' : ''}${_.escape(
      _formatUserNameAndEmail(opts.inviter, 'A collaborator')
    )} has invited you to join a group subscription on ${settings.appName}`
  },
  title(opts) {
    return `${opts.reminder ? 'Reminder: ' : ''}${_.escape(
      _formatUserNameAndEmail(opts.inviter, 'A collaborator')
    )} has invited you to join a group subscription on ${settings.appName}`
  },
  message(opts) {
    return [
      `Please click the button below to join the group subscription and enjoy the benefits of an upgraded ${settings.appName} account.`,
    ]
  },
  ctaText(opts) {
    return 'Join now'
  },
  ctaURL(opts) {
    return opts.acceptInviteUrl
  },
})

templates.verifyEmailToJoinManagedUsers = ctaTemplate({
  subject(opts) {
    return `${
      opts.reminder ? 'Reminder: ' : ''
    }You’ve been invited by ${_.escape(
      _formatUserNameAndEmail(opts.inviter, 'a collaborator')
    )} to join an ${settings.appName} group subscription.`
  },
  title(opts) {
    return `${
      opts.reminder ? 'Reminder: ' : ''
    }You’ve been invited by ${_.escape(
      _formatUserNameAndEmail(opts.inviter, 'a collaborator')
    )} to join an ${settings.appName} group subscription.`
  },
  message(opts) {
    return [
      `By joining this group, you'll have access to ${settings.appName} premium features such as additional collaborators, greater maximum compile time, and real-time track changes.`,
    ]
  },
  secondaryMessage(opts, isPlainText) {
    const changeProjectOwnerLink = EmailMessageHelper.displayLink(
      'change project owner',
      `${settings.siteUrl}/learn/how-to/How_to_Transfer_Project_Ownership`,
      isPlainText
    )

    return [
      `<b>User accounts in this group are managed by ${_.escape(
        _formatUserNameAndEmail(opts.admin, 'an admin')
      )}</b>`,
      `If you accept, you’ll transfer the management of your ${settings.appName} account to the owner of the group subscription, who will then have admin rights over your account and control over your stuff.`,
      `If you have personal projects in your ${settings.appName} account that you want to keep separate, that’s not a problem. You can set up another account under a personal email address and change the ownership of your personal projects to the new account. Find out how to ${changeProjectOwnerLink}.`,
    ]
  },
  ctaURL(opts) {
    return opts.acceptInviteUrl
  },
  ctaText(opts) {
    return 'Accept invitation'
  },
  greeting() {
    return ''
  },
})

templates.inviteNewUserToJoinManagedUsers = ctaTemplate({
  subject(opts) {
    return `${
      opts.reminder ? 'Reminder: ' : ''
    }You’ve been invited by ${_.escape(
      _formatUserNameAndEmail(opts.inviter, 'a collaborator')
    )} to join an ${settings.appName} group subscription.`
  },
  title(opts) {
    return `${
      opts.reminder ? 'Reminder: ' : ''
    }You’ve been invited by ${_.escape(
      _formatUserNameAndEmail(opts.inviter, 'a collaborator')
    )} to join an ${settings.appName} group subscription.`
  },
  message(opts) {
    return ['']
  },
  secondaryMessage(opts) {
    return [
      `<b>User accounts in this group are managed by ${_.escape(
        _formatUserNameAndEmail(opts.admin, 'an admin')
      )}.</b>`,
      `If you accept, the owner of the group subscription will have admin rights over your account and control over your stuff.`,
      `<b>What is ${settings.appName}?</b>`,
      `${settings.appName} is the collaborative online LaTeX editor loved by researchers and technical writers. With thousands of ready-to-use templates and an array of LaTeX learning resources you’ll be up and running in no time.`,
    ]
  },
  ctaURL(opts) {
    return opts.acceptInviteUrl
  },
  ctaText(opts) {
    return 'Accept invitation'
  },
  greeting() {
    return ''
  },
})

templates.groupSSOLinkingInvite = ctaTemplate({
  subject(opts) {
    const subjectPrefix = opts.reminder ? 'Reminder: ' : 'Action required: '
    return `${subjectPrefix}Authenticate your Overleaf account`
  },
  title(opts) {
    const titlePrefix = opts.reminder ? 'Reminder: ' : ''
    return `${titlePrefix}Single sign-on enabled`
  },
  message(opts) {
    return [
      `Hi,
      <div>
        Your group administrator has enabled single sign-on for your group.
      </div>
      <br/>
      <div>
        <strong>What does this mean for you?</strong>
      </div>
      <br/>
      <div>
        You won't need to remember a separate email address and password to sign in to Overleaf.
        All you need to do is authenticate your existing Overleaf account with your SSO provider.
      </div>
      `,
    ]
  },
  secondaryMessage(opts) {
    return [``]
  },
  ctaURL(opts) {
    return opts.authenticateWithSSO
  },
  ctaText(opts) {
    return 'Authenticate with SSO'
  },
  greeting() {
    return ''
  },
})

templates.groupSSOReauthenticate = ctaTemplate({
  subject(opts) {
    return 'Action required: Reauthenticate your Overleaf account'
  },
  title(opts) {
    return 'Action required: Reauthenticate SSO'
  },
  message(opts) {
    return [
      `Hi,
      <div>
        Single sign-on for your Overleaf group has been updated.
        This means you need to reauthenticate your Overleaf account with your group’s SSO provider.
      </div>
      `,
    ]
  },
  secondaryMessage(opts) {
    if (!opts.isManagedUser) {
      return ['']
    } else {
      const passwordResetUrl = `${settings.siteUrl}/user/password/reset`
      return [
        `If you’re not currently logged in to Overleaf, you'll need to <a href="${passwordResetUrl}">set a new password</a> to reauthenticate.`,
      ]
    }
  },
  ctaURL(opts) {
    return opts.authenticateWithSSO
  },
  ctaText(opts) {
    return 'Reauthenticate now'
  },
  greeting() {
    return ''
  },
})

templates.groupSSODisabled = ctaTemplate({
  subject(opts) {
    if (opts.userIsManaged) {
      return `Action required: Set your Overleaf password`
    } else {
      return 'A change to your Overleaf login options'
    }
  },
  title(opts) {
    return `Single sign-on disabled`
  },
  message(opts, isPlainText) {
    const loginUrl = `${settings.siteUrl}/login`
    let whatDoesThisMeanExplanation = [
      `You can still log in to Overleaf using one of our other <a href="${loginUrl}" style="color: #0F7A06; text-decoration: none;">login options</a> or with your email address and password.`,
      `If you don't have a password, you can set one now.`,
    ]
    if (opts.userIsManaged) {
      whatDoesThisMeanExplanation = [
        'You now need an email address and password to sign in to your Overleaf account.',
      ]
    }

    const message = [
      'Your group administrator has disabled single sign-on for your group.',
      '<br/>',
      '<b>What does this mean for you?</b>',
      ...whatDoesThisMeanExplanation,
    ]

    return message.map(m => {
      return EmailMessageHelper.cleanHTML(m, isPlainText)
    })
  },
  secondaryMessage(opts) {
    return [``]
  },
  ctaURL(opts) {
    return opts.setNewPasswordUrl
  },
  ctaText(opts) {
    return 'Set your new password'
  },
})

templates.surrenderAccountForManagedUsers = ctaTemplate({
  subject(opts) {
    const admin = _.escape(_formatUserNameAndEmail(opts.admin, 'an admin'))

    const toGroupName = opts.groupName ? ` to ${opts.groupName}` : ''

    return `${
      opts.reminder ? 'Reminder: ' : ''
    }You’ve been invited by ${admin} to transfer management of your ${
      settings.appName
    } account${toGroupName}`
  },
  title(opts) {
    const admin = _.escape(_formatUserNameAndEmail(opts.admin, 'an admin'))

    const toGroupName = opts.groupName ? ` to ${opts.groupName}` : ''

    return `${
      opts.reminder ? 'Reminder: ' : ''
    }You’ve been invited by ${admin} to transfer management of your ${
      settings.appName
    } account${toGroupName}`
  },
  message(opts, isPlainText) {
    const admin = _.escape(_formatUserNameAndEmail(opts.admin, 'an admin'))

    const managedUsersLink = EmailMessageHelper.displayLink(
      'user account management',
      `${settings.siteUrl}/learn/how-to/Understanding_Managed_Overleaf_Accounts`,
      isPlainText
    )

    return [
      `Your ${settings.appName} account ${_.escape(
        opts.to
      )} is part of ${admin}'s group. They’ve now enabled ${managedUsersLink} for the group. This will ensure that projects aren’t lost when someone leaves the group.`,
    ]
  },
  secondaryMessage(opts, isPlainText) {
    const transferProjectOwnershipLink = EmailMessageHelper.displayLink(
      'change project owner',
      `${settings.siteUrl}/learn/how-to/How_to_Transfer_Project_Ownership`,
      isPlainText
    )

    return [
      `<b>What does this mean for you?</b>`,
      `If you accept, you’ll transfer the management of your ${settings.appName} account to the owner of the group subscription, who will then have admin rights over your account and control over your stuff.`,
      `If you have personal projects in your ${settings.appName} account that you want to keep separate, that’s not a problem. You can set up another account under a personal email address and change the ownership of your personal projects to the new account. Find out how to ${transferProjectOwnershipLink}.`,
      `If you think this invitation has been sent in error, please contact your group administrator.`,
    ]
  },
  ctaURL(opts) {
    return opts.acceptInviteUrl
  },
  ctaText(opts) {
    return 'Accept invitation'
  },
  greeting() {
    return ''
  },
})

templates.testEmail = ctaTemplate({
  subject() {
    return `A Test Email from ${settings.appName}`
  },
  title() {
    return `A Test Email from ${settings.appName}`
  },
  greeting() {
    return 'Hi,'
  },
  message() {
    return [`This is a test email from ${settings.appName}`]
  },
  ctaText() {
    return `Open ${settings.appName}`
  },
  ctaURL() {
    return settings.siteUrl
  },
})

templates.ownershipTransferConfirmationPreviousOwner = NoCTAEmailTemplate({
  subject(opts) {
    return `Project ownership transfer - ${settings.appName}`
  },
  title(opts) {
    const projectName = _.escape(
      SpamSafe.safeProjectName(opts.project.name, 'Your project')
    )
    return `${projectName} - Owner change`
  },
  message(opts, isPlainText) {
    const nameAndEmail = _.escape(
      _formatUserNameAndEmail(opts.newOwner, 'a collaborator')
    )
    const projectName = _.escape(
      SpamSafe.safeProjectName(opts.project.name, 'your project')
    )
    const projectNameDisplay = isPlainText
      ? projectName
      : `<b>${projectName}</b>`
    return [
      `As per your request, we have made ${nameAndEmail} the owner of ${projectNameDisplay}.`,
      `If you haven't asked to change the owner of ${projectNameDisplay}, please get in touch with us via ${settings.adminEmail}.`,
    ]
  },
})

templates.ownershipTransferConfirmationNewOwner = ctaTemplate({
  subject(opts) {
    return `Project ownership transfer - ${settings.appName}`
  },
  title(opts) {
    const projectName = _.escape(
      SpamSafe.safeProjectName(opts.project.name, 'Your project')
    )
    return `${projectName} - Owner change`
  },
  message(opts, isPlainText) {
    const nameAndEmail = _.escape(
      _formatUserNameAndEmail(opts.previousOwner, 'A collaborator')
    )
    const projectName = _.escape(
      SpamSafe.safeProjectName(opts.project.name, 'a project')
    )
    const projectNameEmphasized = isPlainText
      ? projectName
      : `<b>${projectName}</b>`
    return [
      `${nameAndEmail} has made you the owner of ${projectNameEmphasized}. You can now manage ${projectName} sharing settings.`,
    ]
  },
  ctaText(opts) {
    return 'View project'
  },
  ctaURL(opts) {
    const projectUrl = `${
      settings.siteUrl
    }/project/${opts.project._id.toString()}`
    return projectUrl
  },
})

templates.userOnboardingEmail = NoCTAEmailTemplate({
  subject(opts) {
    return `Getting more out of ${settings.appName}`
  },
  greeting(opts) {
    return ''
  },
  title(opts) {
    return `Getting more out of ${settings.appName}`
  },
  message(opts, isPlainText) {
    const learnLatexLink = EmailMessageHelper.displayLink(
      'Learn LaTeX in 30 minutes',
      `${settings.siteUrl}/learn/latex/Learn_LaTeX_in_30_minutes?utm_source=overleaf&utm_medium=email&utm_campaign=onboarding`,
      isPlainText
    )
    const templatesLinks = EmailMessageHelper.displayLink(
      'Find a beautiful template',
      `${settings.siteUrl}/latex/templates?utm_source=overleaf&utm_medium=email&utm_campaign=onboarding`,
      isPlainText
    )
    const collaboratorsLink = EmailMessageHelper.displayLink(
      'Work with your collaborators',
      `${settings.siteUrl}/learn/how-to/Sharing_a_project?utm_source=overleaf&utm_medium=email&utm_campaign=onboarding`,
      isPlainText
    )
    const siteLink = EmailMessageHelper.displayLink(
      'www.overleaf.com',
      settings.siteUrl,
      isPlainText
    )
    const userSettingsLink = EmailMessageHelper.displayLink(
      'here',
      `${settings.siteUrl}/user/email-preferences`,
      isPlainText
    )
    const onboardingSurveyLink = EmailMessageHelper.displayLink(
      'Join our user feedback program',
      'https://forms.gle/DB7pdk2B1VFQqVVB9',
      isPlainText
    )
    return [
      `Thanks for signing up for ${settings.appName} recently. We hope you've been finding it useful! Here are some key features to help you get the most out of the service:`,
      `${learnLatexLink}: In this tutorial we provide a quick and easy first introduction to LaTeX with no prior knowledge required. By the time you are finished, you will have written your first LaTeX document!`,
      `${templatesLinks}: If you're looking for a template or example to get started, we have a large selection available in our template gallery, including CVs, project reports, journal articles and more.`,
      `${collaboratorsLink}: One of the key features of Overleaf is the ability to share projects and collaborate on them with other users. Find out how to share your projects with your colleagues in this quick how-to guide.`,
      `${onboardingSurveyLink} to help us make Overleaf even better!`,
      'Thanks again for using Overleaf :)',
      `Lee`,
      `Lee Shalit<br />CEO<br />${siteLink}<hr>`,
      `You're receiving this email because you've recently signed up for an Overleaf account. If you've previously subscribed to emails about product offers and company news and events, you can unsubscribe ${userSettingsLink}.`,
    ]
  },
})

templates.securityAlert = NoCTAEmailTemplate({
  subject(opts) {
    return `Overleaf security note: ${opts.action}`
  },
  title(opts) {
    return opts.action.charAt(0).toUpperCase() + opts.action.slice(1)
  },
  message(opts, isPlainText) {
    const dateFormatted = moment().format('dddd D MMMM YYYY')
    const timeFormatted = moment().format('HH:mm')
    const helpLink = EmailMessageHelper.displayLink(
      'quick guide',
      `${settings.siteUrl}/learn/how-to/Keeping_your_account_secure`,
      isPlainText
    )

    const actionDescribed = EmailMessageHelper.cleanHTML(
      opts.actionDescribed,
      isPlainText
    )

    if (!opts.message) {
      opts.message = []
    }
    const message = opts.message.map(m => {
      return EmailMessageHelper.cleanHTML(m, isPlainText)
    })

    return [
      `We are writing to let you know that ${actionDescribed} on ${dateFormatted} at ${timeFormatted} GMT.`,
      ...message,
      `If this was you, you can ignore this email.`,
      `If this was not you, we recommend getting in touch with our support team at ${settings.adminEmail} to report this as potentially suspicious activity on your account.`,
      `We also encourage you to read our ${helpLink} to keeping your ${settings.appName} account safe.`,
    ]
  },
})

templates.SAMLDataCleared = ctaTemplate({
  subject(opts) {
    return `Institutional Login No Longer Linked - ${settings.appName}`
  },
  title(opts) {
    return 'Institutional Login No Longer Linked'
  },
  message(opts, isPlainText) {
    return [
      `We're writing to let you know that due to a bug on our end, we've had to temporarily disable logging in to your ${settings.appName} account through your institution.`,
      `To get it going again, you'll need to relink your institutional email address to your ${settings.appName} account via your settings.`,
    ]
  },
  secondaryMessage() {
    return [
      `If you ordinarily log in to your ${settings.appName} account through your institution, you may need to set or reset your password to regain access to your account first.`,
      'This bug did not affect the security of any accounts, but it may have affected license entitlements for a small number of users. We are sorry for any inconvenience that this may cause for you.',
      `If you have any questions, please get in touch with our support team at ${settings.adminEmail} or by replying to this email.`,
    ]
  },
  ctaText(opts) {
    return 'Update my Emails and Affiliations'
  },
  ctaURL(opts) {
    return `${settings.siteUrl}/user/settings`
  },
})

templates.welcome = ctaTemplate({
  subject() {
    return `Welcome to ${settings.appName}`
  },
  title() {
    return `Welcome to ${settings.appName}`
  },
  greeting() {
    return 'Hi,'
  },
  message(opts, isPlainText) {
    const logInAgainDisplay = EmailMessageHelper.displayLink(
      'log in again',
      `${settings.siteUrl}/login`,
      isPlainText
    )
    const helpGuidesDisplay = EmailMessageHelper.displayLink(
      'Help Guides',
      `${settings.siteUrl}/learn`,
      isPlainText
    )
    const templatesDisplay = EmailMessageHelper.displayLink(
      'Templates',
      `${settings.siteUrl}/templates`,
      isPlainText
    )

    return [
      `Thanks for signing up to ${settings.appName}! If you ever get lost, you can ${logInAgainDisplay} with the email address '${opts.to}'.`,
      `If you're new to LaTeX, take a look at our ${helpGuidesDisplay} and ${templatesDisplay}.`,
      `Please also take a moment to confirm your email address for ${settings.appName}:`,
    ]
  },
  secondaryMessage() {
    return [
      `PS. We love talking to our users about ${settings.appName}. Reply to this email to get in touch with us directly, whatever the reason. Questions, comments, problems, suggestions, all welcome!`,
    ]
  },
  ctaText() {
    return 'Confirm Email'
  },
  ctaURL(opts) {
    return opts.confirmEmailUrl
  },
})

templates.welcomeWithoutCTA = NoCTAEmailTemplate({
  subject() {
    return `Welcome to ${settings.appName}`
  },
  title() {
    return `Welcome to ${settings.appName}`
  },
  greeting() {
    return 'Hi,'
  },
  message(opts, isPlainText) {
    const logInAgainDisplay = EmailMessageHelper.displayLink(
      'log in again',
      `${settings.siteUrl}/login`,
      isPlainText
    )
    const helpGuidesDisplay = EmailMessageHelper.displayLink(
      'Help Guides',
      `${settings.siteUrl}/learn`,
      isPlainText
    )
    const templatesDisplay = EmailMessageHelper.displayLink(
      'Templates',
      `${settings.siteUrl}/templates`,
      isPlainText
    )

    return [
      `Thanks for signing up to ${settings.appName}! If you ever get lost, you can ${logInAgainDisplay} with the email address '${opts.to}'.`,
      `If you're new to LaTeX, take a look at our ${helpGuidesDisplay} and ${templatesDisplay}.`,
      `PS. We love talking to our users about ${settings.appName}. Reply to this email to get in touch with us directly, whatever the reason. Questions, comments, problems, suggestions, all welcome!`,
    ]
  },
})

function _formatUserNameAndEmail(user, placeholder) {
  if (user.first_name && user.last_name) {
    const fullName = `${user.first_name} ${user.last_name}`
    if (SpamSafe.isSafeUserName(fullName)) {
      if (SpamSafe.isSafeEmail(user.email)) {
        return `${fullName} (${user.email})`
      } else {
        return fullName
      }
    }
  }
  return SpamSafe.safeEmail(user.email, placeholder)
}

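// Example (sketch, not part of the original file): expected behaviour of
// _formatUserNameAndEmail, given the SpamSafe rules defined later in this
// commit (names up to 30 chars, emails up to 40 chars, restricted characters):
//
//   _formatUserNameAndEmail(
//     { first_name: 'Ada', last_name: 'Lovelace', email: 'ada@example.com' },
//     'a collaborator'
//   ) // => 'Ada Lovelace (ada@example.com)'
//
//   _formatUserNameAndEmail({ email: '!!!@spam' }, 'a collaborator')
//   // => 'a collaborator' (unsafe email and no usable name)
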
module.exports = {
  templates,
  ctaTemplate,
  NoCTAEmailTemplate,
  buildEmail,
}

41
services/web/app/src/Features/Email/EmailHandler.js
Normal file
41
services/web/app/src/Features/Email/EmailHandler.js
Normal file
@@ -0,0 +1,41 @@
const { callbackify } = require('util')
const Settings = require('@overleaf/settings')
const logger = require('@overleaf/logger')
const EmailBuilder = require('./EmailBuilder')
const EmailSender = require('./EmailSender')
const Queues = require('../../infrastructure/Queues')

const EMAIL_SETTINGS = Settings.email || {}

/**
 * @param {string} emailType
 * @param {any} opts
 */
async function sendEmail(emailType, opts) {
  const email = EmailBuilder.buildEmail(emailType, opts)
  if (email.type === 'lifecycle' && !EMAIL_SETTINGS.lifecycle) {
    return
  }
  opts.html = email.html
  opts.text = email.text
  opts.subject = email.subject
  await EmailSender.promises.sendEmail(opts, emailType)
}

function sendDeferredEmail(emailType, opts, delay) {
  Queues.createScheduledJob(
    'deferred-emails',
    { data: { emailType, opts } },
    delay
  ).catch(err => {
    logger.warn({ err, emailType, opts }, 'failed to queue deferred email')
  })
}

module.exports = {
  sendEmail: callbackify(sendEmail),
  sendDeferredEmail,
  promises: {
    sendEmail,
  },
}

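// Example (sketch, not part of the original file): how callers use this
// module. The callback form wraps the async form via callbackify; the
// template name and opts keys must match a template in EmailBuilder.js above.
// The delay unit for sendDeferredEmail is assumed to be milliseconds here.
//
//   const EmailHandler = require('./EmailHandler')
//   await EmailHandler.promises.sendEmail('testEmail', { to: 'user@example.com' })
//   // defer a reminder (assuming a millisecond delay)
//   EmailHandler.sendDeferredEmail('projectInvite', opts, 24 * 60 * 60 * 1000)
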
28
services/web/app/src/Features/Email/EmailMessageHelper.js
Normal file
28
services/web/app/src/Features/Email/EmailMessageHelper.js
Normal file
@@ -0,0 +1,28 @@
const sanitizeHtml = require('sanitize-html')
const sanitizeOptions = {
  html: {
    allowedTags: ['a', 'span', 'b', 'br', 'i'],
    allowedAttributes: {
      a: ['href', 'style'],
      span: ['style', 'class'],
    },
  },
  plainText: {
    allowedTags: [],
    allowedAttributes: {},
  },
}

function cleanHTML(text, isPlainText) {
  if (!isPlainText) return sanitizeHtml(text, sanitizeOptions.html)
  return sanitizeHtml(text, sanitizeOptions.plainText)
}

function displayLink(text, url, isPlainText) {
  return isPlainText ? `${text} (${url})` : `<a href="${url}">${text}</a>`
}

module.exports = {
  cleanHTML,
  displayLink,
}

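// Example (sketch, not part of the original file): what these helpers produce.
//
//   cleanHTML('<b>Project:</b> <script>x()</script>', false)
//   // => '<b>Project:</b> ' (sanitize-html drops the script tag and its body)
//   cleanHTML('<b>Project:</b>', true)
//   // => 'Project:' (all tags stripped for the plain-text part)
//   displayLink('View project', 'https://example.com/p/1', true)
//   // => 'View project (https://example.com/p/1)'
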
29
services/web/app/src/Features/Email/EmailOptionsHelper.js
Normal file
29
services/web/app/src/Features/Email/EmailOptionsHelper.js
Normal file
@@ -0,0 +1,29 @@
function _getIndefiniteArticle(providerName) {
  const vowels = ['a', 'e', 'i', 'o', 'u']

  return vowels.includes(providerName.charAt(0).toLowerCase()) ? 'an' : 'a'
}

function _actionBuilder(providerName, action, accountLinked) {
  if (providerName.toLowerCase() !== 'google') {
    return `${providerName} account ${action}`
  }

  return accountLinked ? `New account ${action}` : `Account ${action}`
}

function linkOrUnlink(accountLinked, providerName, email) {
  const action = accountLinked ? 'linked' : 'no longer linked'
  const actionDescribed = accountLinked ? 'was linked to' : 'was unlinked from'
  const indefiniteArticle = _getIndefiniteArticle(providerName)

  return {
    to: email,
    action: _actionBuilder(providerName, action, accountLinked),
    actionDescribed: `${indefiniteArticle} ${providerName} account ${actionDescribed} your account ${email}`,
  }
}

module.exports = {
  linkOrUnlink,
}

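// Example (sketch, not part of the original file): the return value is shaped
// to feed templates.securityAlert above, which interpolates `action` into the
// subject and `actionDescribed` into the body.
//
//   linkOrUnlink(true, 'ORCID', 'user@example.com')
//   // => {
//   //   to: 'user@example.com',
//   //   action: 'ORCID account linked',
//   //   actionDescribed: 'an ORCID account was linked to your account user@example.com',
//   // }
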
128
services/web/app/src/Features/Email/EmailSender.js
Normal file
128
services/web/app/src/Features/Email/EmailSender.js
Normal file
@@ -0,0 +1,128 @@
const { callbackify } = require('util')
const logger = require('@overleaf/logger')
const metrics = require('@overleaf/metrics')
const Settings = require('@overleaf/settings')
const nodemailer = require('nodemailer')
const sesTransport = require('nodemailer-ses-transport')
const OError = require('@overleaf/o-error')
const { RateLimiter } = require('../../infrastructure/RateLimiter')
const _ = require('lodash')

const EMAIL_SETTINGS = Settings.email || {}

module.exports = {
  sendEmail: callbackify(sendEmail),
  promises: {
    sendEmail,
  },
}

const client = getClient()

const rateLimiter = new RateLimiter('send_email', {
  points: 100,
  duration: 3 * 60 * 60,
})

function getClient() {
  let client
  if (EMAIL_SETTINGS.parameters) {
    const emailParameters = EMAIL_SETTINGS.parameters
    if (emailParameters.AWSAccessKeyID || EMAIL_SETTINGS.driver === 'ses') {
      logger.debug('using aws ses for email')
      client = nodemailer.createTransport(sesTransport(emailParameters))
    } else if (emailParameters.sendgridApiKey) {
      throw new OError(
        'sendgridApiKey configuration option is deprecated, use SMTP instead'
      )
    } else if (emailParameters.MandrillApiKey) {
      throw new OError(
        'MandrillApiKey configuration option is deprecated, use SMTP instead'
      )
    } else {
      logger.debug('using smtp for email')
      const smtp = _.pick(
        emailParameters,
        'host',
        'port',
        'secure',
        'auth',
        'ignoreTLS',
        'logger',
        'name'
      )
      client = nodemailer.createTransport(smtp)
    }
  } else {
    logger.warn(
      'Email transport and/or parameters not defined. No emails will be sent.'
    )
    client = {
      async sendMail(options) {
        logger.info({ options }, 'Would send email if enabled.')
      },
    }
  }
  return client
}

async function sendEmail(options, emailType) {
  try {
    const canContinue = await checkCanSendEmail(options)
    metrics.inc('email_status', {
      status: canContinue ? 'sent' : 'rate_limited',
      path: emailType,
    })
    if (!canContinue) {
      logger.debug(
        {
          sendingUserId: options.sendingUser_id,
          to: options.to,
          subject: options.subject,
          canContinue,
        },
        'rate limit hit for sending email, not sending'
      )
      throw new OError('rate limit hit sending email')
    }
    metrics.inc('email')
    const sendMailOptions = {
      to: options.to,
      from: EMAIL_SETTINGS.fromAddress || '',
      subject: options.subject,
      html: options.html,
      text: options.text,
      replyTo: options.replyTo || EMAIL_SETTINGS.replyToAddress,
      socketTimeout: 30 * 1000,
    }
    if (EMAIL_SETTINGS.textEncoding != null) {
      sendMailOptions.textEncoding = EMAIL_SETTINGS.textEncoding
    }
    if (options.category) {
      // category support for sendgrid
      sendMailOptions.headers = {
        'X-SMTPAPI': JSON.stringify({ category: options.category }),
      }
    }
    await client.sendMail(sendMailOptions)
  } catch (err) {
    throw new OError('error sending message').withCause(err)
  }
}

async function checkCanSendEmail(options) {
  if (options.sendingUser_id == null) {
    // email not sent from user, not rate limited
    return true
  }
  try {
    await rateLimiter.consume(options.sendingUser_id, 1, { method: 'userId' })
  } catch (err) {
    if (err instanceof Error) {
      throw err
    } else {
      return false
    }
  }
  return true
}

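// Example (sketch, not part of the original file): a minimal SMTP
// configuration consumed by getClient() above, using exactly the keys picked
// by _.pick. The placement under `settings.email.parameters` is what this
// module reads; the host and credential values are placeholders.
//
//   // settings file (excerpt)
//   email: {
//     fromAddress: 'no-reply@example.com',
//     parameters: {
//       host: 'smtp.example.com',
//       port: 587,
//       secure: false,
//       auth: { user: 'smtp-user', pass: 'smtp-pass' },
//     },
//   },
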
@@ -0,0 +1,394 @@
const _ = require('lodash')
const settings = require('@overleaf/settings')

module.exports = _.template(`\\
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">

<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en" style="Margin: 0; background: #E4E8EE !important; margin: 0; min-height: 100%; padding: 0;">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta name="viewport" content="width=device-width">
<style>.button td {
  border-radius: 9999px; }

.force-overleaf-style a,
.force-overleaf-style a[href] {
  color: #138A07 !important;
  text-decoration: none !important;
  -moz-hyphens: none;
  -ms-hyphens: none;
  -webkit-hyphens: none;
  hyphens: none; }
  .force-overleaf-style a:visited,
  .force-overleaf-style a[href]:visited {
    color: #138A07; }
  .force-overleaf-style a:hover,
  .force-overleaf-style a[href]:hover {
    color: #3d7935; }
  .force-overleaf-style a:active,
  .force-overleaf-style a[href]:active {
    color: #3d7935; }
</style>
<style>@media only screen {
  html {
    min-height: 100%;
    background: #f6f6f6;
  }
}

@media only screen and (max-width: 596px) {
  .small-float-center {
    margin: 0 auto !important;
    float: none !important;
    text-align: center !important;
  }

  .small-text-center {
    text-align: center !important;
  }

  .small-text-left {
    text-align: left !important;
  }

  .small-text-right {
    text-align: right !important;
  }

  .cta-table {
    table-layout: fixed;
  }
}

@media only screen and (max-width: 596px) {
  .hide-for-large {
    display: block !important;
    width: auto !important;
    overflow: visible !important;
    max-height: none !important;
    font-size: inherit !important;
    line-height: inherit !important;
  }
}

@media only screen and (max-width: 596px) {
  table.body table.container .hide-for-large,
  table.body table.container .row.hide-for-large {
    display: table !important;
    width: 100% !important;
  }
}

@media only screen and (max-width: 596px) {
  table.body table.container .callout-inner.hide-for-large {
    display: table-cell !important;
    width: 100% !important;
  }
}

@media only screen and (max-width: 596px) {
  table.body table.container .show-for-large {
    display: none !important;
    width: 0;
    mso-hide: all;
    overflow: hidden;
  }
}

@media only screen and (max-width: 596px) {
  table.body img {
    width: auto;
    height: auto;
  }

  table.body center {
    min-width: 0 !important;
  }

  table.body .container {
    width: 95% !important;
  }

  table.body .columns,
  table.body .column {
    height: auto !important;
    -moz-box-sizing: border-box;
    -webkit-box-sizing: border-box;
    box-sizing: border-box;
    padding-left: 16px !important;
    padding-right: 16px !important;
  }

  table.body .columns .column,
  table.body .columns .columns,
  table.body .column .column,
  table.body .column .columns {
    padding-left: 0 !important;
    padding-right: 0 !important;
  }

  table.body .collapse .columns,
  table.body .collapse .column {
    padding-left: 0 !important;
    padding-right: 0 !important;
  }

  td.small-1,
  th.small-1 {
    display: inline-block !important;
    width: 8.33333% !important;
  }

  td.small-2,
  th.small-2 {
    display: inline-block !important;
    width: 16.66667% !important;
  }

  td.small-3,
  th.small-3 {
    display: inline-block !important;
    width: 25% !important;
  }

  td.small-4,
  th.small-4 {
    display: inline-block !important;
    width: 33.33333% !important;
  }

  td.small-5,
  th.small-5 {
    display: inline-block !important;
    width: 41.66667% !important;
  }

  td.small-6,
  th.small-6 {
    display: inline-block !important;
    width: 50% !important;
  }

  td.small-7,
  th.small-7 {
    display: inline-block !important;
    width: 58.33333% !important;
  }

  td.small-8,
  th.small-8 {
    display: inline-block !important;
    width: 66.66667% !important;
  }

  td.small-9,
  th.small-9 {
    display: inline-block !important;
    width: 75% !important;
  }

  td.small-10,
  th.small-10 {
    display: inline-block !important;
    width: 83.33333% !important;
  }

  td.small-11,
  th.small-11 {
    display: inline-block !important;
    width: 91.66667% !important;
  }

  td.small-12,
  th.small-12 {
    display: inline-block !important;
    width: 100% !important;
  }

  .columns td.small-12,
  .column td.small-12,
  .columns th.small-12,
  .column th.small-12 {
    display: block !important;
    width: 100% !important;
  }

  table.body td.small-offset-1,
  table.body th.small-offset-1 {
    margin-left: 8.33333% !important;
    Margin-left: 8.33333% !important;
  }

  table.body td.small-offset-2,
  table.body th.small-offset-2 {
    margin-left: 16.66667% !important;
    Margin-left: 16.66667% !important;
  }

  table.body td.small-offset-3,
  table.body th.small-offset-3 {
    margin-left: 25% !important;
    Margin-left: 25% !important;
  }

  table.body td.small-offset-4,
  table.body th.small-offset-4 {
    margin-left: 33.33333% !important;
    Margin-left: 33.33333% !important;
  }

  table.body td.small-offset-5,
  table.body th.small-offset-5 {
    margin-left: 41.66667% !important;
    Margin-left: 41.66667% !important;
  }

  table.body td.small-offset-6,
  table.body th.small-offset-6 {
    margin-left: 50% !important;
    Margin-left: 50% !important;
  }

  table.body td.small-offset-7,
  table.body th.small-offset-7 {
    margin-left: 58.33333% !important;
    Margin-left: 58.33333% !important;
  }

  table.body td.small-offset-8,
  table.body th.small-offset-8 {
    margin-left: 66.66667% !important;
    Margin-left: 66.66667% !important;
  }

  table.body td.small-offset-9,
  table.body th.small-offset-9 {
    margin-left: 75% !important;
    Margin-left: 75% !important;
  }

  table.body td.small-offset-10,
  table.body th.small-offset-10 {
    margin-left: 83.33333% !important;
    Margin-left: 83.33333% !important;
  }

  table.body td.small-offset-11,
  table.body th.small-offset-11 {
    margin-left: 91.66667% !important;
    Margin-left: 91.66667% !important;
  }

  table.body table.columns td.expander,
  table.body table.columns th.expander {
    display: none !important;
  }

  table.body .right-text-pad,
  table.body .text-pad-right {
    padding-left: 10px !important;
  }

  table.body .left-text-pad,
  table.body .text-pad-left {
    padding-right: 10px !important;
  }

  table.menu {
    width: 100% !important;
  }

  table.menu td,
  table.menu th {
    width: auto !important;
    display: inline-block !important;
  }

  table.menu.vertical td,
  table.menu.vertical th,
  table.menu.small-vertical td,
  table.menu.small-vertical th {
    display: block !important;
  }

  table.menu[align="center"] {
    width: auto !important;
  }

  table.button.small-expand,
  table.button.small-expanded {
    width: 100% !important;
  }

  table.button.small-expand table,
  table.button.small-expanded table {
    width: 100%;
  }

  table.button.small-expand table a,
  table.button.small-expanded table a {
    text-align: center !important;
    width: 100% !important;
    padding-left: 0 !important;
    padding-right: 0 !important;
  }

  table.button.small-expand center,
  table.button.small-expanded center {
    min-width: 0;
  }
}</style>
</head>
<body leftmargin="0" topmargin="0" marginwidth="0" marginheight="0" bgcolor="#F6F6F6" style="-moz-box-sizing: border-box; -ms-text-size-adjust: 100%; -webkit-box-sizing: border-box; -webkit-text-size-adjust: 100%; Margin: 0; box-sizing: border-box; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0; min-width: 100%; padding: 0; text-align: left; width: 100% !important;">
<!-- <span class="preheader"></span> -->
<table class="body" border="0" cellspacing="0" cellpadding="0" width="100%" height="100%" style="Margin: 0; background: #E4E8EE; border-collapse: collapse; border-spacing: 0; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; height: 100%; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;">
<tr style="padding: 0; text-align: left; vertical-align: top;">
<td class="body-cell" align="center" valign="top" bgcolor="#F6F6F6" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; background: #E4E8EE !important; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; padding-bottom: 20px; text-align: left; vertical-align: top; word-wrap: break-word;">
<center data-parsed="" style="min-width: 580px; width: 100%;">

<table align="center" class="wrapper header float-center" style="Margin: 0 auto; background: #1E2530; border-bottom: none; border-collapse: collapse; border-spacing: 0; float: none; margin: 0 auto; padding: 0; text-align: center; vertical-align: top; width: 100%;"><tr style="padding: 0; text-align: left; vertical-align: top;"><td class="wrapper-inner" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 20px; text-align: left; vertical-align: top; word-wrap: break-word;">
<table align="center" class="container" style="Margin: 0 auto; background: transparent; border-collapse: collapse; border-spacing: 0; margin: 0 auto; padding: 0; text-align: inherit; vertical-align: top; width: 580px;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">
<table class="row collapse" style="border-collapse: collapse; border-spacing: 0; display: table; padding: 0; position: relative; text-align: left; vertical-align: top; width: 100%;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;">
<th class="small-12 large-12 columns first last" style="Margin: 0 auto; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0 auto; padding: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; text-align: left; width: 588px;"><table style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"><tr style="padding: 0; text-align: left; vertical-align: top;"><th style="Margin: 0; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0; padding: 0; text-align: left;">
<h1 style="Margin: 0; Margin-bottom: 0; color: #FFFFFF; font-family: Georgia, serif; font-size: 30px; font-weight: normal; line-height: 1.3; margin: 0; margin-bottom: 0; padding: 0; text-align: left; word-wrap: normal;">
${settings.appName}
</h1>
</th>
<th class="expander" style="Margin: 0; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0; padding: 0 !important; text-align: left; visibility: hidden; width: 0;"></th></tr></table></th>
</tr></tbody></table>
</td></tr></tbody></table>
</td></tr></table>
<table class="spacer float-center" style="Margin: 0 auto; border-collapse: collapse; border-spacing: 0; float: none; margin: 0 auto; padding: 0; text-align: center; vertical-align: top; width: 100%;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td height="20px" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 20px; font-weight: normal; hyphens: auto; line-height: 20px; margin: 0; mso-line-height-rule: exactly; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">&#xA0;</td></tr></tbody></table>
<table align="center" class="container main float-center" style="Margin: 0 auto; Margin-top: 10px; background: #FFFFFF; border-collapse: collapse; border-spacing: 0; float: none; margin: 0 auto; margin-top: 10px; padding: 0; text-align: center; vertical-align: top; width: 580px;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">
<table class="spacer" style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td height="20px" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 20px; font-weight: normal; hyphens: auto; line-height: 20px; margin: 0; mso-line-height-rule: exactly; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">&#xA0;</td></tr></tbody></table>

<%= body %>

<table class="wrapper secondary" align="center" style="background: #E4E8EE; border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"><tr style="padding: 0; text-align: left; vertical-align: top;"><td class="wrapper-inner" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">
<table class="spacer" style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td height="10px" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 10px; font-weight: normal; hyphens: auto; line-height: 10px; margin: 0; mso-line-height-rule: exactly; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">&#xA0;</td></tr></tbody></table>
<p style="Margin: 0; Margin-bottom: 10px; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0; margin-bottom: 10px; padding: 0; text-align: left;"><small style="color: #5D6879; font-size: 80%;">
${
  settings.email &&
  settings.email.template &&
  settings.email.template.customFooter
    ? `${settings.email.template.customFooter}<br>`
    : ''
}${settings.appName} &bull; <a href="${
  settings.siteUrl
}" style="Margin: 0; color: #0F7A06; font-family: Helvetica, Arial, sans-serif; font-weight: normal; line-height: 1.3; margin: 0; padding: 0; text-align: left; text-decoration: none;">${
  settings.siteUrl
}</a>
</small></p>
</td></tr></table>
</td></tr></tbody></table>

</center>
</td>
</tr>
</table>
<!-- prevent Gmail on iOS font size manipulation -->
<div style="display:none; white-space:nowrap; font:15px courier; line-height:0;"> </div>
</body>
</html>\\
`)

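// Example (sketch, not part of the original file): how this layout is used.
// The module exports a lodash template whose only interpolated value is
// `body`; the ${...} expressions above are plain JS template-literal
// interpolations evaluated once at require time. The require path below is an
// assumption, since the file header for this listing is missing.
//
//   const layout = require('./Layouts/BaseWithHeaderEmailLayout') // path assumed
//   const html = layout({ body: '<p>Hello from Overleaf</p>' })
//   // `html` is the full responsive email document with header and footer
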
56
services/web/app/src/Features/Email/SpamSafe.js
Normal file
56
services/web/app/src/Features/Email/SpamSafe.js
Normal file
@@ -0,0 +1,56 @@
const XRegExp = require('xregexp')

// A note about SAFE_REGEX:
// We have to escape the escape characters because XRegExp compiles it first.
// So it's equivalent to `^[\p{L}\p{N}\s\-_!'&\(\)]+$`
// \p{L} = any letter in any language
// \p{N} = any kind of numeric character
// https://www.regular-expressions.info/unicode.html#prop is a good resource for
// more obscure regex features. Standard RegExp does not support these.

const HAN_REGEX = XRegExp('\\p{Han}')
const SAFE_REGEX = XRegExp("^[\\p{L}\\p{N}\\s\\-_!'&\\(\\)]+$")
const EMAIL_REGEX = XRegExp('^[\\p{L}\\p{N}.+_-]+@[\\w.-]+$')

const SpamSafe = {
  isSafeUserName(name) {
    return SAFE_REGEX.test(name) && name.length <= 30
  },

  isSafeProjectName(name) {
    if (HAN_REGEX.test(name)) {
      return SAFE_REGEX.test(name) && name.length <= 10
    }
    return SAFE_REGEX.test(name) && name.length <= 100
  },

  isSafeEmail(email) {
    return EMAIL_REGEX.test(email) && email.length <= 40
  },

  safeUserName(name, alternative, project) {
    if (project == null) {
      project = false
    }
    if (SpamSafe.isSafeUserName(name)) {
      return name
    }
    return alternative
  },

  safeProjectName(name, alternative) {
    if (SpamSafe.isSafeProjectName(name)) {
      return name
    }
    return alternative
  },

  safeEmail(email, alternative) {
    if (SpamSafe.isSafeEmail(email)) {
      return email
    }
    return alternative
  },
}

module.exports = SpamSafe

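// Example (sketch, not part of the original file): the checks in action.
//
//   SpamSafe.isSafeProjectName('Quantum Mechanics (Draft 2)') // => true
//   SpamSafe.isSafeProjectName('CLICK HERE: http://spam.example')
//   // => false (':' and '/' are outside the allowed character class)
//   SpamSafe.safeEmail('ada@example.com', 'a collaborator') // => 'ada@example.com'
//   SpamSafe.safeEmail('<script>@evil', 'a collaborator') // => 'a collaborator'
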
133
services/web/app/src/Features/Errors/ErrorController.js
Normal file
133
services/web/app/src/Features/Errors/ErrorController.js
Normal file
@@ -0,0 +1,133 @@
const Errors = require('./Errors')
const SessionManager = require('../Authentication/SessionManager')
const SamlLogHandler = require('../SamlLog/SamlLogHandler')
const HttpErrorHandler = require('./HttpErrorHandler')
const { plainTextResponse } = require('../../infrastructure/Response')
const { expressifyErrorHandler } = require('@overleaf/promise-utils')

function notFound(req, res) {
  res.status(404)
  res.render('general/404', { title: 'page_not_found' })
}

function forbidden(req, res) {
  res.status(403)
  res.render('user/restricted')
}

function serverError(req, res) {
  res.status(500)
  res.render('general/500', { title: 'Server Error' })
}

async function handleError(error, req, res, next) {
  const shouldSendErrorResponse = !res.headersSent
  const user = SessionManager.getSessionUser(req.session)
  req.logger.addFields({ err: error })
  // log errors related to SAML flow
  if (req.session && req.session.saml) {
    req.logger.setLevel('error')
    await SamlLogHandler.promises.log(req, { error })
  }
  if (error.code === 'EBADCSRFTOKEN') {
    req.logger.addFields({ user })
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      res.sendStatus(403)
    }
  } else if (error instanceof Errors.NotFoundError) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      notFound(req, res)
    }
  } else if (
    error instanceof URIError &&
    error.message.match(/^Failed to decode param/)
  ) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      res.status(400)
      res.render('general/500', { title: 'Invalid Error' })
    }
  } else if (error instanceof Errors.ForbiddenError) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      forbidden(req, res)
    }
  } else if (error instanceof Errors.TooManyRequestsError) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      res.sendStatus(429)
    }
  } else if (error instanceof Errors.InvalidError) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      res.status(400)
      plainTextResponse(res, error.message)
    }
  } else if (error instanceof Errors.DuplicateNameError) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      res.status(400)
      plainTextResponse(res, error.message)
    }
  } else if (error instanceof Errors.InvalidNameError) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      res.status(400)
      plainTextResponse(res, error.message)
    }
  } else if (error instanceof Errors.NonDeletableEntityError) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      res.status(422)
      plainTextResponse(res, error.message)
    }
  } else if (error instanceof Errors.SAMLSessionDataMissing) {
    req.logger.setLevel('warn')
    if (shouldSendErrorResponse) {
      HttpErrorHandler.badRequest(req, res, error.message)
    }
  } else {
    req.logger.setLevel('error')
    if (shouldSendErrorResponse) {
      serverError(req, res)
    }
  }
  if (!shouldSendErrorResponse) {
    // Pass the error to the default Express error handler, which will close
    // the connection.
    next(error)
  }
}

function handleApiError(err, req, res, next) {
  req.logger.addFields({ err })
  if (err instanceof Errors.NotFoundError) {
    req.logger.setLevel('warn')
    res.sendStatus(404)
  } else if (
    err instanceof URIError &&
    err.message.match(/^Failed to decode param/)
  ) {
    req.logger.setLevel('warn')
    res.sendStatus(400)
  } else if (err instanceof Errors.TooManyRequestsError) {
    req.logger.setLevel('warn')
    res.sendStatus(429)
  } else if (err instanceof Errors.ForbiddenError) {
    req.logger.setLevel('warn')
    res.sendStatus(403)
  } else {
    req.logger.setLevel('error')
    res.sendStatus(500)
  }
}

module.exports = {
  notFound,
  forbidden,
  serverError,
  handleError: expressifyErrorHandler(handleError),
  handleApiError,
}

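// Example (sketch, not part of the original file): how this controller is
// typically mounted. Express treats 4-arity middleware as an error handler,
// so these go after all routes; the router names below are illustrative.
//
//   const ErrorController = require('./Features/Errors/ErrorController')
//   webRouter.use(ErrorController.handleError) // renders HTML error pages
//   apiRouter.use(ErrorController.handleApiError) // plain status codes only
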
359
services/web/app/src/Features/Errors/Errors.js
Normal file
359
services/web/app/src/Features/Errors/Errors.js
Normal file
@@ -0,0 +1,359 @@
const OError = require('@overleaf/o-error')
const settings = require('@overleaf/settings')

// Error class for legacy errors so they inherit OError while staying
// backward-compatible (can be instantiated with a string as argument instead
// of an object)
class BackwardCompatibleError extends OError {
  /**
   * @param {string | { message: string, info?: Object }} messageOrOptions
   */
  constructor(messageOrOptions) {
    if (typeof messageOrOptions === 'string') {
      super(messageOrOptions)
    } else if (messageOrOptions) {
      const { message, info } = messageOrOptions
      super(message, info)
    } else {
      super()
    }
  }
}

// Error class that facilitates the migration to OError v3 by providing
// a signature in which the 2nd argument can be an object containing
// the `info` object.
class OErrorV2CompatibleError extends OError {
  constructor(message, options) {
    if (options) {
      super(message, options.info)
    } else {
      super(message)
    }
  }
}

class NotFoundError extends BackwardCompatibleError {}

class ForbiddenError extends BackwardCompatibleError {}

class ServiceNotConfiguredError extends BackwardCompatibleError {}

class TooManyRequestsError extends BackwardCompatibleError {}

class DuplicateNameError extends OError {}

class InvalidNameError extends BackwardCompatibleError {}

class UnsupportedFileTypeError extends BackwardCompatibleError {}

class FileTooLargeError extends BackwardCompatibleError {}

class UnsupportedExportRecordsError extends BackwardCompatibleError {}

class V1HistoryNotSyncedError extends BackwardCompatibleError {}

class ProjectHistoryDisabledError extends BackwardCompatibleError {}

class V1ConnectionError extends BackwardCompatibleError {}

class UnconfirmedEmailError extends BackwardCompatibleError {}

class EmailExistsError extends OErrorV2CompatibleError {
  constructor(options) {
    super('Email already exists', options)
  }
}

class InvalidError extends BackwardCompatibleError {}
|
||||
|
||||
class NotInV2Error extends BackwardCompatibleError {}
|
||||
|
||||
class SLInV2Error extends BackwardCompatibleError {}
|
||||
|
||||
class SAMLCommonsUnavailable extends OError {
|
||||
get i18nKey() {
|
||||
return 'saml_commons_unavailable'
|
||||
}
|
||||
}
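
// The i18nKey getter above (and on several classes that follow) lets the
// presentation layer translate the error instead of showing the raw message.
// Illustrative consumption, assuming an i18n translate helper exists:
//   if (err.i18nKey) message = translate(err.i18nKey)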

class SAMLIdentityExistsError extends OError {
  get i18nKey() {
    return 'institution_account_tried_to_add_already_registered'
  }
}

class SAMLAlreadyLinkedError extends OError {
  get i18nKey() {
    return 'institution_account_tried_to_add_already_linked'
  }
}

class SAMLEmailNotAffiliatedError extends OError {
  get i18nKey() {
    return 'institution_account_tried_to_add_not_affiliated'
  }
}

class SAMLEmailAffiliatedWithAnotherInstitutionError extends OError {
  get i18nKey() {
    return 'institution_account_tried_to_add_affiliated_with_another_institution'
  }
}

class SAMLAuthenticationError extends OError {
  get i18nKey() {
    return 'saml_auth_error'
  }
}
class SAMLAssertionAudienceMismatch extends SAMLAuthenticationError {}

class SAMLAuthenticationRequiredError extends SAMLAuthenticationError {
  get i18nKey() {
    return 'saml_authentication_required_error'
  }
}

class SAMLGroupSSOLoginIdentityMismatchError extends SAMLAuthenticationError {
  get i18nKey() {
    return 'saml_login_identity_mismatch_error'
  }
}

class SAMLGroupSSOLoginIdentityNotFoundError extends SAMLAuthenticationError {
  get i18nKey() {
    return 'saml_login_identity_not_found_error'
  }
}

class SAMLGroupSSODisabledError extends SAMLAuthenticationError {
  get i18nKey() {
    return 'saml_login_disabled_error'
  }
}

class SAMLInvalidSignatureError extends SAMLAuthenticationError {
  get i18nKey() {
    return 'saml_invalid_signature_error'
  }
}

class SAMLMissingSignatureError extends SAMLAuthenticationError {
  get i18nKey() {
    return 'saml_missing_signature_error'
  }
}

class SAMLInvalidUserIdentifierError extends SAMLAuthenticationError {
  get i18nKey() {
    return 'saml_authentication_required_error'
  }
}

class SAMLInvalidUserAttributeError extends SAMLAuthenticationError {
  get i18nKey() {
    return 'saml_authentication_required_error'
  }
}

class SAMLMissingUserIdentifierError extends SAMLAuthenticationError {
  get i18nKey() {
    return 'saml_missing_user_attribute'
  }
}

class SAMLInvalidResponseError extends SAMLAuthenticationError {}

class SAMLResponseAlreadyProcessedError extends SAMLInvalidResponseError {
  constructor() {
    super('saml response already processed')
  }
}

class SAMLLoginFailureError extends SAMLAuthenticationError {
  get i18nKey() {
    return 'saml_login_failure'
  }
}

class SAMLEmailNotRecognizedError extends SAMLAuthenticationError {
  get i18nKey() {
    return 'saml_email_not_recognized'
  }
}

class SAMLSessionDataMissing extends BackwardCompatibleError {
  constructor(arg) {
    super(arg)

    const samlSession =
      typeof arg === 'object' && arg !== null && arg.samlSession
        ? arg.samlSession
        : {}
    this.tryAgain = true
    const { universityId, universityName, externalUserId, institutionEmail } =
      samlSession

    if (
      !universityId &&
      !universityName &&
      !externalUserId &&
      !institutionEmail
    ) {
      this.message = 'Missing session data.'
    } else if (
      !institutionEmail &&
      samlSession &&
      samlSession.userEmailAttributeUnreliable
    ) {
      this.tryAgain = false
      this.message = `Your account settings at your institution prevent us from accessing your email address. You will need to make your email address public at your institution in order to link with ${settings.appName}. Please contact your IT department if you have any questions.`
    } else if (!institutionEmail) {
      this.message =
        'Unable to confirm your institutional email address. The institutional identity provider did not provide an email address in the expected attribute. Please contact us if this keeps happening.'
    }
  }
}
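
// Consumers can check the tryAgain flag set above to decide whether to
// prompt the user to restart the linking flow. Illustrative:
//   if (error instanceof SAMLSessionDataMissing && error.tryAgain) {
//     // offer a retry link rather than a hard failure page
//   }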

class SAMLProviderRequesterError extends SAMLAuthenticationError {}

class ThirdPartyIdentityExistsError extends BackwardCompatibleError {
  constructor(arg) {
    super(arg)
    if (!this.message) {
      this.message =
        'provider and external id already linked to another account'
    }
  }
}

class ThirdPartyUserNotFoundError extends BackwardCompatibleError {
  constructor(arg) {
    super(arg)
    if (!this.message) {
      this.message = 'user not found for provider and external id'
    }
  }
}

class OutputFileFetchFailedError extends OError {}

class SubscriptionAdminDeletionError extends OErrorV2CompatibleError {
  constructor(options) {
    super('subscription admins cannot be deleted', options)
  }
}

class SubscriptionNotFoundError extends OErrorV2CompatibleError {
  constructor(options) {
    super('subscription not found', options)
  }
}

class ProjectNotFoundError extends OErrorV2CompatibleError {
  constructor(options) {
    super('project not found', options)
  }
}

class UserNotFoundError extends OErrorV2CompatibleError {
  constructor(options) {
    super('user not found', options)
  }
}

class UserNotCollaboratorError extends OErrorV2CompatibleError {
  constructor(options) {
    super('user not a collaborator', options)
  }
}

class DocHasRangesError extends OErrorV2CompatibleError {
  constructor(options) {
    super('document has ranges', options)
  }
}

class InvalidQueryError extends OErrorV2CompatibleError {
  constructor(options) {
    super('invalid search query', options)
  }
}

class AffiliationError extends OError {}

class InvalidEmailError extends OError {
  get i18nKey() {
    return 'invalid_email'
  }
}

class InvalidInstitutionalEmailError extends OError {
  get i18nKey() {
    return 'invalid_institutional_email'
  }
}

class NonDeletableEntityError extends OError {
  get i18nKey() {
    return 'non_deletable_entity'
  }
}

module.exports = {
  OError,
  BackwardCompatibleError,
  NotFoundError,
  ForbiddenError,
  ServiceNotConfiguredError,
  TooManyRequestsError,
  DuplicateNameError,
  InvalidNameError,
  UnsupportedFileTypeError,
  FileTooLargeError,
  UnsupportedExportRecordsError,
  V1HistoryNotSyncedError,
  ProjectHistoryDisabledError,
  V1ConnectionError,
  UnconfirmedEmailError,
  EmailExistsError,
  InvalidError,
  NotInV2Error,
  OutputFileFetchFailedError,
  SAMLAssertionAudienceMismatch,
  SAMLAuthenticationRequiredError,
  SAMLCommonsUnavailable,
  SAMLIdentityExistsError,
  SAMLAlreadyLinkedError,
  SAMLEmailNotAffiliatedError,
  SAMLEmailAffiliatedWithAnotherInstitutionError,
  SAMLSessionDataMissing,
  SAMLAuthenticationError,
  SAMLGroupSSOLoginIdentityMismatchError,
  SAMLGroupSSOLoginIdentityNotFoundError,
  SAMLGroupSSODisabledError,
  SAMLInvalidUserAttributeError,
  SAMLInvalidUserIdentifierError,
  SAMLInvalidSignatureError,
  SAMLMissingUserIdentifierError,
  SAMLMissingSignatureError,
  SAMLProviderRequesterError,
  SAMLInvalidResponseError,
  SAMLLoginFailureError,
  SAMLEmailNotRecognizedError,
  SAMLResponseAlreadyProcessedError,
  SLInV2Error,
  ThirdPartyIdentityExistsError,
  ThirdPartyUserNotFoundError,
  SubscriptionAdminDeletionError,
  SubscriptionNotFoundError,
  ProjectNotFoundError,
  UserNotFoundError,
  UserNotCollaboratorError,
  DocHasRangesError,
  InvalidQueryError,
  AffiliationError,
  InvalidEmailError,
  InvalidInstitutionalEmailError,
  NonDeletableEntityError,
}
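
A sketch of how a controller might consume these error classes; the fetchDoc helper and the route handler are hypothetical, and rejections are assumed to be forwarded to next() by an expressify-style wrapper:

const Errors = require('./Features/Errors/Errors')

async function getDocExample(req, res) {
  const doc = await fetchDoc(req.params.doc_id) // fetchDoc is made up
  if (!doc) {
    // handleApiError / handleError turn this into a 404 response
    throw new Errors.NotFoundError({
      message: 'doc not found',
      info: { docId: req.params.doc_id },
    })
  }
  res.json(doc)
}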
160
services/web/app/src/Features/Errors/HttpErrorHandler.js
Normal file
@@ -0,0 +1,160 @@
const logger = require('@overleaf/logger')
const Settings = require('@overleaf/settings')
const { plainTextResponse } = require('../../infrastructure/Response')

function renderJSONError(res, message, info = {}) {
  if (info.message) {
    logger.warn(
      info,
      `http error info shouldn't contain a 'message' field, will be overridden`
    )
  }
  if (message != null) {
    res.json({ ...info, message })
  } else {
    res.json(info)
  }
}

function handleGeneric500Error(req, res, statusCode, message) {
  res.status(statusCode)
  switch (req.accepts(['html', 'json'])) {
    case 'html':
      return res.render('general/500', { title: 'Server Error' })
    case 'json':
      return renderJSONError(res, message)
    default:
      return plainTextResponse(res, 'internal server error')
  }
}

function handleGeneric400Error(req, res, statusCode, message, info = {}) {
  res.status(statusCode)
  switch (req.accepts(['html', 'json'])) {
    case 'html':
      return res.render('general/400', {
        title: 'Client Error',
        message,
      })
    case 'json':
      return renderJSONError(res, message, info)
    default:
      return plainTextResponse(res, 'client error')
  }
}

let HttpErrorHandler
module.exports = HttpErrorHandler = {
  handleErrorByStatusCode(req, res, err, statusCode) {
    const is400Error = statusCode >= 400 && statusCode < 500
    const is500Error = statusCode >= 500 && statusCode < 600

    req.logger.addFields({ err })
    if (is400Error) {
      req.logger.setLevel('warn')
    } else if (is500Error) {
      req.logger.setLevel('error')
    }

    if (statusCode === 403) {
      HttpErrorHandler.forbidden(req, res)
    } else if (statusCode === 404) {
      HttpErrorHandler.notFound(req, res)
    } else if (statusCode === 409) {
      HttpErrorHandler.conflict(req, res, '')
    } else if (statusCode === 422) {
      HttpErrorHandler.unprocessableEntity(req, res)
    } else if (is400Error) {
      handleGeneric400Error(req, res, statusCode)
    } else if (is500Error) {
      handleGeneric500Error(req, res, statusCode)
    } else {
      res.sendStatus(500)
    }
  },
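
  // Usage sketch (illustrative): code holding a bare status code from an
  // upstream response can delegate the response shape, e.g.
  //   HttpErrorHandler.handleErrorByStatusCode(req, res, err, upstream.statusCode)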

  badRequest(req, res, message, info = {}) {
    handleGeneric400Error(req, res, 400, message, info)
  },

  conflict(req, res, message, info = {}) {
    res.status(409)
    switch (req.accepts(['html', 'json'])) {
      case 'html':
        return res.render('general/400', {
          title: 'Client Error',
          message,
        })
      case 'json':
        return renderJSONError(res, message, info)
      default:
        return plainTextResponse(res, 'conflict')
    }
  },

  forbidden(req, res, message = 'restricted', info = {}) {
    res.status(403)
    switch (req.accepts(['html', 'json'])) {
      case 'html':
        return res.render('user/restricted', { title: 'restricted' })
      case 'json':
        return renderJSONError(res, message, info)
      default:
        return plainTextResponse(res, 'restricted')
    }
  },

  notFound(req, res, message = 'not found', info = {}) {
    res.status(404)
    switch (req.accepts(['html', 'json'])) {
      case 'html':
        return res.render('general/404', { title: 'page_not_found' })
      case 'json':
        return renderJSONError(res, message, info)
      default:
        return plainTextResponse(res, 'not found')
    }
  },

  unprocessableEntity(req, res, message = 'unprocessable entity', info = {}) {
    res.status(422)
    switch (req.accepts(['html', 'json'])) {
      case 'html':
        return res.render('general/400', {
          title: 'Client Error',
          message,
        })
      case 'json':
        return renderJSONError(res, message, info)
      default:
        return plainTextResponse(res, 'unprocessable entity')
    }
  },

  legacyInternal(req, res, message, err) {
    req.logger.addFields({ err })
    req.logger.setLevel('error')
    handleGeneric500Error(req, res, 500, message)
  },

  maintenance(req, res) {
    // load balancer health checks require a success response for /
    if (req.url === '/') {
      res.status(200)
    } else {
      res.status(503)
    }
    let message = `${Settings.appName} is currently down for maintenance.`
    if (Settings.statusPageUrl) {
      message += ` Please check https://${Settings.statusPageUrl} for updates.`
    }
    switch (req.accepts(['html', 'json'])) {
      case 'html':
        return res.render('general/closed', { title: 'maintenance' })
      case 'json':
        return renderJSONError(res, message, {})
      default:
        return plainTextResponse(res, message)
    }
  },
}
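
Every helper above negotiates the response format through req.accepts, returning an HTML page, a JSON body, or plain text. A usage sketch with a hypothetical lookup helper:

const HttpErrorHandler = require('./Features/Errors/HttpErrorHandler')

function getSubscriptionExample(req, res) {
  const subscription = findSubscription(req.params.id) // made-up helper
  if (!subscription) {
    // renders general/404, a JSON error body, or plain 'not found' text
    return HttpErrorHandler.notFound(req, res, 'subscription not found', {
      subscriptionId: req.params.id,
    })
  }
  res.json(subscription)
}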
Some files were not shown because too many files have changed in this diff.